{
  "best_metric": 1.2524751424789429,
  "best_model_checkpoint": "miner_id_24/checkpoint-250",
  "epoch": 3.005586592178771,
  "eval_steps": 50,
  "global_step": 269,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0111731843575419,
      "grad_norm": 162.40016174316406,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 7.5612,
      "step": 1
    },
    {
      "epoch": 0.0111731843575419,
      "eval_loss": 10.110641479492188,
      "eval_runtime": 3.5559,
      "eval_samples_per_second": 42.465,
      "eval_steps_per_second": 21.373,
      "step": 1
    },
    {
      "epoch": 0.0223463687150838,
      "grad_norm": 201.2489776611328,
      "learning_rate": 6.666666666666667e-06,
      "loss": 8.9985,
      "step": 2
    },
    {
      "epoch": 0.0335195530726257,
      "grad_norm": 181.89503479003906,
      "learning_rate": 1e-05,
      "loss": 9.1837,
      "step": 3
    },
    {
      "epoch": 0.0446927374301676,
      "grad_norm": 153.2060546875,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 8.9975,
      "step": 4
    },
    {
      "epoch": 0.055865921787709494,
      "grad_norm": 104.02288818359375,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 7.9499,
      "step": 5
    },
    {
      "epoch": 0.0670391061452514,
      "grad_norm": 92.24451446533203,
      "learning_rate": 2e-05,
      "loss": 7.5897,
      "step": 6
    },
    {
      "epoch": 0.0782122905027933,
      "grad_norm": 65.57331085205078,
      "learning_rate": 2.3333333333333336e-05,
      "loss": 6.5642,
      "step": 7
    },
    {
      "epoch": 0.0893854748603352,
      "grad_norm": 28.319116592407227,
      "learning_rate": 2.6666666666666667e-05,
      "loss": 5.9872,
      "step": 8
    },
    {
      "epoch": 0.1005586592178771,
      "grad_norm": 84.72834777832031,
      "learning_rate": 3e-05,
      "loss": 5.6777,
      "step": 9
    },
    {
      "epoch": 0.11173184357541899,
      "grad_norm": 62.04957962036133,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 5.452,
      "step": 10
    },
    {
      "epoch": 0.12290502793296089,
      "grad_norm": 23.43031883239746,
      "learning_rate": 3.6666666666666666e-05,
      "loss": 5.0966,
      "step": 11
    },
    {
      "epoch": 0.1340782122905028,
      "grad_norm": 11.170083045959473,
      "learning_rate": 4e-05,
      "loss": 4.9892,
      "step": 12
    },
    {
      "epoch": 0.1452513966480447,
      "grad_norm": 9.359170913696289,
      "learning_rate": 4.3333333333333334e-05,
      "loss": 4.8074,
      "step": 13
    },
    {
      "epoch": 0.1564245810055866,
      "grad_norm": 7.227917194366455,
      "learning_rate": 4.666666666666667e-05,
      "loss": 4.5527,
      "step": 14
    },
    {
      "epoch": 0.16759776536312848,
      "grad_norm": 5.746039867401123,
      "learning_rate": 5e-05,
      "loss": 4.4023,
      "step": 15
    },
    {
      "epoch": 0.1787709497206704,
      "grad_norm": 5.6375274658203125,
      "learning_rate": 5.333333333333333e-05,
      "loss": 4.2542,
      "step": 16
    },
    {
      "epoch": 0.18994413407821228,
      "grad_norm": 5.626579761505127,
      "learning_rate": 5.666666666666667e-05,
      "loss": 4.205,
      "step": 17
    },
    {
      "epoch": 0.2011173184357542,
      "grad_norm": 6.251049041748047,
      "learning_rate": 6e-05,
      "loss": 4.1443,
      "step": 18
    },
    {
      "epoch": 0.2122905027932961,
      "grad_norm": 5.178546905517578,
      "learning_rate": 6.333333333333333e-05,
      "loss": 3.7947,
      "step": 19
    },
    {
      "epoch": 0.22346368715083798,
      "grad_norm": 5.792481899261475,
      "learning_rate": 6.666666666666667e-05,
      "loss": 3.6496,
      "step": 20
    },
    {
      "epoch": 0.2346368715083799,
      "grad_norm": 8.204561233520508,
      "learning_rate": 7e-05,
      "loss": 3.4046,
      "step": 21
    },
    {
      "epoch": 0.24581005586592178,
      "grad_norm": 6.862538814544678,
      "learning_rate": 7.333333333333333e-05,
      "loss": 3.2466,
      "step": 22
    },
    {
      "epoch": 0.2569832402234637,
      "grad_norm": 6.1007490158081055,
      "learning_rate": 7.666666666666667e-05,
      "loss": 3.1117,
      "step": 23
    },
    {
      "epoch": 0.2681564245810056,
      "grad_norm": 4.2520365715026855,
      "learning_rate": 8e-05,
      "loss": 3.1683,
      "step": 24
    },
    {
      "epoch": 0.27932960893854747,
      "grad_norm": 16.54338264465332,
      "learning_rate": 8.333333333333334e-05,
      "loss": 3.0407,
      "step": 25
    },
    {
      "epoch": 0.2905027932960894,
      "grad_norm": 5.3217034339904785,
      "learning_rate": 8.666666666666667e-05,
      "loss": 2.8435,
      "step": 26
    },
    {
      "epoch": 0.3016759776536313,
      "grad_norm": 5.066921234130859,
      "learning_rate": 9e-05,
      "loss": 2.8639,
      "step": 27
    },
    {
      "epoch": 0.3128491620111732,
      "grad_norm": 4.023467063903809,
      "learning_rate": 9.333333333333334e-05,
      "loss": 2.7216,
      "step": 28
    },
    {
      "epoch": 0.3240223463687151,
      "grad_norm": 5.351987361907959,
      "learning_rate": 9.666666666666667e-05,
      "loss": 2.503,
      "step": 29
    },
    {
      "epoch": 0.33519553072625696,
      "grad_norm": 10.897500038146973,
      "learning_rate": 0.0001,
      "loss": 2.5283,
      "step": 30
    },
    {
      "epoch": 0.3463687150837989,
      "grad_norm": 3.5467631816864014,
      "learning_rate": 9.999568045802217e-05,
      "loss": 2.4514,
      "step": 31
    },
    {
      "epoch": 0.3575418994413408,
      "grad_norm": 4.198688507080078,
      "learning_rate": 9.998272257842641e-05,
      "loss": 2.6802,
      "step": 32
    },
    {
      "epoch": 0.3687150837988827,
      "grad_norm": 2.829094409942627,
      "learning_rate": 9.996112860009688e-05,
      "loss": 2.359,
      "step": 33
    },
    {
      "epoch": 0.37988826815642457,
      "grad_norm": 2.836956262588501,
      "learning_rate": 9.993090225407743e-05,
      "loss": 2.1674,
      "step": 34
    },
    {
      "epoch": 0.39106145251396646,
      "grad_norm": 3.0385210514068604,
      "learning_rate": 9.989204876292688e-05,
      "loss": 2.1289,
      "step": 35
    },
    {
      "epoch": 0.4022346368715084,
      "grad_norm": 3.1165988445281982,
      "learning_rate": 9.984457483981669e-05,
      "loss": 2.0466,
      "step": 36
    },
    {
      "epoch": 0.4134078212290503,
      "grad_norm": 2.5134077072143555,
      "learning_rate": 9.978848868737098e-05,
      "loss": 2.0756,
      "step": 37
    },
    {
      "epoch": 0.4245810055865922,
      "grad_norm": 2.617610454559326,
      "learning_rate": 9.972379999624936e-05,
      "loss": 1.8898,
      "step": 38
    },
    {
      "epoch": 0.43575418994413406,
      "grad_norm": 2.49375581741333,
      "learning_rate": 9.96505199434725e-05,
      "loss": 1.8914,
      "step": 39
    },
    {
      "epoch": 0.44692737430167595,
      "grad_norm": 2.5499541759490967,
      "learning_rate": 9.956866119049095e-05,
      "loss": 1.74,
      "step": 40
    },
    {
      "epoch": 0.4581005586592179,
      "grad_norm": 2.702899217605591,
      "learning_rate": 9.947823788099753e-05,
      "loss": 1.6642,
      "step": 41
    },
    {
      "epoch": 0.4692737430167598,
      "grad_norm": 2.9417200088500977,
      "learning_rate": 9.937926563848346e-05,
      "loss": 1.7848,
      "step": 42
    },
    {
      "epoch": 0.48044692737430167,
      "grad_norm": 2.8817129135131836,
      "learning_rate": 9.927176156353899e-05,
      "loss": 1.7686,
      "step": 43
    },
    {
      "epoch": 0.49162011173184356,
      "grad_norm": 2.706786632537842,
      "learning_rate": 9.91557442308987e-05,
      "loss": 1.5918,
      "step": 44
    },
    {
      "epoch": 0.5027932960893855,
      "grad_norm": 4.0628342628479,
      "learning_rate": 9.903123368623216e-05,
      "loss": 2.0585,
      "step": 45
    },
    {
      "epoch": 0.5139664804469274,
      "grad_norm": 2.9532899856567383,
      "learning_rate": 9.889825144268029e-05,
      "loss": 1.9343,
      "step": 46
    },
    {
      "epoch": 0.5251396648044693,
      "grad_norm": 2.189305305480957,
      "learning_rate": 9.875682047713846e-05,
      "loss": 1.9791,
      "step": 47
    },
    {
      "epoch": 0.5363128491620112,
      "grad_norm": 2.71520733833313,
      "learning_rate": 9.860696522628639e-05,
      "loss": 2.0729,
      "step": 48
    },
    {
      "epoch": 0.547486033519553,
      "grad_norm": 2.677870988845825,
      "learning_rate": 9.844871158236591e-05,
      "loss": 1.8758,
      "step": 49
    },
    {
      "epoch": 0.5586592178770949,
      "grad_norm": 2.1260600090026855,
      "learning_rate": 9.828208688870735e-05,
      "loss": 1.8366,
      "step": 50
    },
    {
      "epoch": 0.5586592178770949,
      "eval_loss": 1.7909045219421387,
      "eval_runtime": 3.541,
      "eval_samples_per_second": 42.643,
      "eval_steps_per_second": 21.463,
      "step": 50
    },
    {
      "epoch": 0.5698324022346368,
      "grad_norm": 1.9885830879211426,
      "learning_rate": 9.810711993500507e-05,
      "loss": 1.7649,
      "step": 51
    },
    {
      "epoch": 0.5810055865921788,
      "grad_norm": 1.8383235931396484,
      "learning_rate": 9.792384095234313e-05,
      "loss": 1.6706,
      "step": 52
    },
    {
      "epoch": 0.5921787709497207,
      "grad_norm": 2.144374370574951,
      "learning_rate": 9.773228160797188e-05,
      "loss": 1.5816,
      "step": 53
    },
    {
      "epoch": 0.6033519553072626,
      "grad_norm": 1.8016228675842285,
      "learning_rate": 9.753247499983649e-05,
      "loss": 1.7056,
      "step": 54
    },
    {
      "epoch": 0.6145251396648045,
      "grad_norm": 1.934396505355835,
      "learning_rate": 9.732445565085824e-05,
      "loss": 1.8101,
      "step": 55
    },
    {
      "epoch": 0.6256983240223464,
      "grad_norm": 1.7126082181930542,
      "learning_rate": 9.71082595029695e-05,
      "loss": 1.7412,
      "step": 56
    },
    {
      "epoch": 0.6368715083798883,
      "grad_norm": 2.053506851196289,
      "learning_rate": 9.688392391090373e-05,
      "loss": 1.6484,
      "step": 57
    },
    {
      "epoch": 0.6480446927374302,
      "grad_norm": 1.8453326225280762,
      "learning_rate": 9.665148763574123e-05,
      "loss": 1.7627,
      "step": 58
    },
    {
      "epoch": 0.659217877094972,
      "grad_norm": 1.7451444864273071,
      "learning_rate": 9.64109908382119e-05,
      "loss": 1.4411,
      "step": 59
    },
    {
      "epoch": 0.6703910614525139,
      "grad_norm": 1.7417210340499878,
      "learning_rate": 9.616247507175623e-05,
      "loss": 1.546,
      "step": 60
    },
    {
      "epoch": 0.6815642458100558,
      "grad_norm": 2.221048355102539,
      "learning_rate": 9.590598327534564e-05,
      "loss": 1.7249,
      "step": 61
    },
    {
      "epoch": 0.6927374301675978,
      "grad_norm": 2.038527727127075,
      "learning_rate": 9.564155976606339e-05,
      "loss": 1.8133,
      "step": 62
    },
    {
      "epoch": 0.7039106145251397,
      "grad_norm": 2.2194125652313232,
      "learning_rate": 9.536925023144742e-05,
      "loss": 1.7806,
      "step": 63
    },
    {
      "epoch": 0.7150837988826816,
      "grad_norm": 1.9220144748687744,
      "learning_rate": 9.508910172159635e-05,
      "loss": 1.5446,
      "step": 64
    },
    {
      "epoch": 0.7262569832402235,
      "grad_norm": 1.841299057006836,
      "learning_rate": 9.480116264104011e-05,
      "loss": 1.4487,
      "step": 65
    },
    {
      "epoch": 0.7374301675977654,
      "grad_norm": 2.3878183364868164,
      "learning_rate": 9.450548274037653e-05,
      "loss": 1.4077,
      "step": 66
    },
    {
      "epoch": 0.7486033519553073,
      "grad_norm": 2.5445780754089355,
      "learning_rate": 9.420211310767533e-05,
      "loss": 1.8375,
      "step": 67
    },
    {
      "epoch": 0.7597765363128491,
      "grad_norm": 1.9894568920135498,
      "learning_rate": 9.389110615965102e-05,
      "loss": 1.5472,
      "step": 68
    },
    {
      "epoch": 0.770949720670391,
      "grad_norm": 1.5543500185012817,
      "learning_rate": 9.35725156326063e-05,
      "loss": 1.5146,
      "step": 69
    },
    {
      "epoch": 0.7821229050279329,
      "grad_norm": 1.6577750444412231,
      "learning_rate": 9.324639657314742e-05,
      "loss": 1.5323,
      "step": 70
    },
    {
      "epoch": 0.7932960893854749,
      "grad_norm": 1.6345881223678589,
      "learning_rate": 9.291280532867302e-05,
      "loss": 1.6763,
      "step": 71
    },
    {
      "epoch": 0.8044692737430168,
      "grad_norm": 1.576483130455017,
      "learning_rate": 9.257179953763845e-05,
      "loss": 1.6414,
      "step": 72
    },
    {
      "epoch": 0.8156424581005587,
      "grad_norm": 1.661361813545227,
      "learning_rate": 9.222343811959693e-05,
      "loss": 1.6203,
      "step": 73
    },
    {
      "epoch": 0.8268156424581006,
      "grad_norm": 1.7419025897979736,
      "learning_rate": 9.186778126501916e-05,
      "loss": 1.4892,
      "step": 74
    },
    {
      "epoch": 0.8379888268156425,
      "grad_norm": 1.5462729930877686,
      "learning_rate": 9.150489042489367e-05,
      "loss": 1.6116,
      "step": 75
    },
    {
      "epoch": 0.8491620111731844,
      "grad_norm": 1.5897197723388672,
      "learning_rate": 9.113482830010918e-05,
      "loss": 1.5627,
      "step": 76
    },
    {
      "epoch": 0.8603351955307262,
      "grad_norm": 1.6474636793136597,
      "learning_rate": 9.075765883062093e-05,
      "loss": 1.5695,
      "step": 77
    },
    {
      "epoch": 0.8715083798882681,
      "grad_norm": 1.5242871046066284,
      "learning_rate": 9.037344718440322e-05,
      "loss": 1.5686,
      "step": 78
    },
    {
      "epoch": 0.88268156424581,
      "grad_norm": 1.4273405075073242,
      "learning_rate": 8.99822597461894e-05,
      "loss": 1.2552,
      "step": 79
    },
    {
      "epoch": 0.8938547486033519,
      "grad_norm": 1.63188636302948,
      "learning_rate": 8.958416410600187e-05,
      "loss": 1.379,
      "step": 80
    },
    {
      "epoch": 0.9050279329608939,
      "grad_norm": 1.5915851593017578,
      "learning_rate": 8.917922904747384e-05,
      "loss": 1.5448,
      "step": 81
    },
    {
      "epoch": 0.9162011173184358,
      "grad_norm": 1.4518282413482666,
      "learning_rate": 8.876752453596462e-05,
      "loss": 1.3964,
      "step": 82
    },
    {
      "epoch": 0.9273743016759777,
      "grad_norm": 1.5592926740646362,
      "learning_rate": 8.834912170647101e-05,
      "loss": 1.3952,
      "step": 83
    },
    {
      "epoch": 0.9385474860335196,
      "grad_norm": 1.836943507194519,
      "learning_rate": 8.792409285133642e-05,
      "loss": 1.542,
      "step": 84
    },
    {
      "epoch": 0.9497206703910615,
      "grad_norm": 1.6657090187072754,
      "learning_rate": 8.749251140776016e-05,
      "loss": 1.4903,
      "step": 85
    },
    {
      "epoch": 0.9608938547486033,
      "grad_norm": 1.8024177551269531,
      "learning_rate": 8.705445194510868e-05,
      "loss": 1.4487,
      "step": 86
    },
    {
      "epoch": 0.9720670391061452,
      "grad_norm": 1.9545961618423462,
      "learning_rate": 8.66099901520315e-05,
      "loss": 1.5293,
      "step": 87
    },
    {
      "epoch": 0.9832402234636871,
      "grad_norm": 2.1410531997680664,
      "learning_rate": 8.615920282338355e-05,
      "loss": 1.4279,
      "step": 88
    },
    {
      "epoch": 0.994413407821229,
      "grad_norm": 1.8293300867080688,
      "learning_rate": 8.570216784695637e-05,
      "loss": 1.5349,
      "step": 89
    },
    {
      "epoch": 1.005586592178771,
      "grad_norm": 2.8246774673461914,
      "learning_rate": 8.52389641900206e-05,
      "loss": 2.6159,
      "step": 90
    },
    {
      "epoch": 1.0167597765363128,
      "grad_norm": 1.4292484521865845,
      "learning_rate": 8.476967188568188e-05,
      "loss": 1.6759,
      "step": 91
    },
    {
      "epoch": 1.0279329608938548,
      "grad_norm": 1.2621123790740967,
      "learning_rate": 8.429437201905254e-05,
      "loss": 1.4449,
      "step": 92
    },
    {
      "epoch": 1.0391061452513966,
      "grad_norm": 1.4738688468933105,
      "learning_rate": 8.381314671324159e-05,
      "loss": 1.4352,
      "step": 93
    },
    {
      "epoch": 1.0502793296089385,
      "grad_norm": 1.4630380868911743,
      "learning_rate": 8.332607911516545e-05,
      "loss": 1.4337,
      "step": 94
    },
    {
      "epoch": 1.0614525139664805,
      "grad_norm": 1.4351942539215088,
      "learning_rate": 8.283325338118153e-05,
      "loss": 1.4363,
      "step": 95
    },
    {
      "epoch": 1.0726256983240223,
      "grad_norm": 1.1830869913101196,
      "learning_rate": 8.233475466254765e-05,
      "loss": 1.4274,
      "step": 96
    },
    {
      "epoch": 1.0837988826815643,
      "grad_norm": 1.3410512208938599,
      "learning_rate": 8.183066909070947e-05,
      "loss": 1.3677,
      "step": 97
    },
    {
      "epoch": 1.094972067039106,
      "grad_norm": 1.4120603799819946,
      "learning_rate": 8.132108376241849e-05,
      "loss": 1.3283,
      "step": 98
    },
    {
      "epoch": 1.106145251396648,
      "grad_norm": 1.2132238149642944,
      "learning_rate": 8.08060867246834e-05,
      "loss": 1.3961,
      "step": 99
    },
    {
      "epoch": 1.1173184357541899,
      "grad_norm": 1.466185450553894,
      "learning_rate": 8.028576695955711e-05,
      "loss": 1.3326,
      "step": 100
    },
    {
      "epoch": 1.1173184357541899,
      "eval_loss": 1.4376939535140991,
      "eval_runtime": 3.541,
      "eval_samples_per_second": 42.644,
      "eval_steps_per_second": 21.463,
      "step": 100
    },
    {
      "epoch": 1.1284916201117319,
      "grad_norm": 1.4509429931640625,
      "learning_rate": 7.97602143687623e-05,
      "loss": 1.3226,
      "step": 101
    },
    {
      "epoch": 1.1396648044692737,
      "grad_norm": 1.453986406326294,
      "learning_rate": 7.922951975815811e-05,
      "loss": 1.2928,
      "step": 102
    },
    {
      "epoch": 1.1508379888268156,
      "grad_norm": 1.3562222719192505,
      "learning_rate": 7.869377482205042e-05,
      "loss": 1.2992,
      "step": 103
    },
    {
      "epoch": 1.1620111731843576,
      "grad_norm": 1.3154160976409912,
      "learning_rate": 7.815307212734888e-05,
      "loss": 1.2855,
      "step": 104
    },
    {
      "epoch": 1.1731843575418994,
      "grad_norm": 1.438166856765747,
      "learning_rate": 7.760750509757298e-05,
      "loss": 1.1333,
      "step": 105
    },
    {
      "epoch": 1.1843575418994414,
      "grad_norm": 1.4575860500335693,
      "learning_rate": 7.705716799671019e-05,
      "loss": 1.2916,
      "step": 106
    },
    {
      "epoch": 1.1955307262569832,
      "grad_norm": 1.525395154953003,
      "learning_rate": 7.650215591292888e-05,
      "loss": 1.2428,
      "step": 107
    },
    {
      "epoch": 1.2067039106145252,
      "grad_norm": 1.7797048091888428,
      "learning_rate": 7.594256474214882e-05,
      "loss": 1.3893,
      "step": 108
    },
    {
      "epoch": 1.217877094972067,
      "grad_norm": 1.5947117805480957,
      "learning_rate": 7.537849117147212e-05,
      "loss": 1.3861,
      "step": 109
    },
    {
      "epoch": 1.229050279329609,
      "grad_norm": 1.948651671409607,
      "learning_rate": 7.481003266247744e-05,
      "loss": 1.3417,
      "step": 110
    },
    {
      "epoch": 1.2402234636871508,
      "grad_norm": 1.8325766324996948,
      "learning_rate": 7.423728743438048e-05,
      "loss": 1.361,
      "step": 111
    },
    {
      "epoch": 1.2513966480446927,
      "grad_norm": 1.443632960319519,
      "learning_rate": 7.366035444706347e-05,
      "loss": 1.3431,
      "step": 112
    },
    {
      "epoch": 1.2625698324022347,
      "grad_norm": 1.3943829536437988,
      "learning_rate": 7.307933338397667e-05,
      "loss": 1.3545,
      "step": 113
    },
    {
      "epoch": 1.2737430167597765,
      "grad_norm": 1.4311888217926025,
      "learning_rate": 7.249432463491498e-05,
      "loss": 1.3241,
      "step": 114
    },
    {
      "epoch": 1.2849162011173183,
      "grad_norm": 1.2513173818588257,
      "learning_rate": 7.190542927867234e-05,
      "loss": 1.4081,
      "step": 115
    },
    {
      "epoch": 1.2960893854748603,
      "grad_norm": 1.2229642868041992,
      "learning_rate": 7.131274906557725e-05,
      "loss": 1.5218,
      "step": 116
    },
    {
      "epoch": 1.3072625698324023,
      "grad_norm": 1.3140783309936523,
      "learning_rate": 7.071638639991207e-05,
      "loss": 1.4195,
      "step": 117
    },
    {
      "epoch": 1.318435754189944,
      "grad_norm": 1.3129864931106567,
      "learning_rate": 7.011644432221958e-05,
      "loss": 1.4635,
      "step": 118
    },
    {
      "epoch": 1.329608938547486,
      "grad_norm": 1.3215774297714233,
      "learning_rate": 6.95130264914993e-05,
      "loss": 1.447,
      "step": 119
    },
    {
      "epoch": 1.3407821229050279,
      "grad_norm": 1.2216203212738037,
      "learning_rate": 6.890623716729724e-05,
      "loss": 1.1941,
      "step": 120
    },
    {
      "epoch": 1.3519553072625698,
      "grad_norm": 1.3087091445922852,
      "learning_rate": 6.82961811916917e-05,
      "loss": 1.4038,
      "step": 121
    },
    {
      "epoch": 1.3631284916201118,
      "grad_norm": 1.1932587623596191,
      "learning_rate": 6.768296397117848e-05,
      "loss": 1.3638,
      "step": 122
    },
    {
      "epoch": 1.3743016759776536,
      "grad_norm": 1.3358490467071533,
      "learning_rate": 6.706669145845863e-05,
      "loss": 1.3817,
      "step": 123
    },
    {
      "epoch": 1.3854748603351954,
      "grad_norm": 1.1948127746582031,
      "learning_rate": 6.644747013413168e-05,
      "loss": 1.2415,
      "step": 124
    },
    {
      "epoch": 1.3966480446927374,
      "grad_norm": 1.1789873838424683,
      "learning_rate": 6.582540698829781e-05,
      "loss": 1.0901,
      "step": 125
    },
    {
      "epoch": 1.4078212290502794,
      "grad_norm": 1.2974153757095337,
      "learning_rate": 6.520060950207185e-05,
      "loss": 1.2798,
      "step": 126
    },
    {
      "epoch": 1.4189944134078212,
      "grad_norm": 1.408875584602356,
      "learning_rate": 6.457318562901256e-05,
      "loss": 1.3745,
      "step": 127
    },
    {
      "epoch": 1.4301675977653632,
      "grad_norm": 1.4454888105392456,
      "learning_rate": 6.394324377647028e-05,
      "loss": 1.3028,
      "step": 128
    },
    {
      "epoch": 1.441340782122905,
      "grad_norm": 1.549099087715149,
      "learning_rate": 6.331089278685599e-05,
      "loss": 1.3293,
      "step": 129
    },
    {
      "epoch": 1.452513966480447,
      "grad_norm": 1.4389153718948364,
      "learning_rate": 6.26762419188355e-05,
      "loss": 1.1637,
      "step": 130
    },
    {
      "epoch": 1.463687150837989,
      "grad_norm": 1.408833622932434,
      "learning_rate": 6.203940082845144e-05,
      "loss": 1.1842,
      "step": 131
    },
    {
      "epoch": 1.4748603351955307,
      "grad_norm": 1.5742685794830322,
      "learning_rate": 6.140047955017671e-05,
      "loss": 1.2201,
      "step": 132
    },
    {
      "epoch": 1.4860335195530725,
      "grad_norm": 1.8778325319290161,
      "learning_rate": 6.075958847790262e-05,
      "loss": 1.1914,
      "step": 133
    },
    {
      "epoch": 1.4972067039106145,
      "grad_norm": 1.5008203983306885,
      "learning_rate": 6.011683834586473e-05,
      "loss": 1.2746,
      "step": 134
    },
    {
      "epoch": 1.5083798882681565,
      "grad_norm": 1.3712409734725952,
      "learning_rate": 5.947234020951015e-05,
      "loss": 1.244,
      "step": 135
    },
    {
      "epoch": 1.5195530726256983,
      "grad_norm": 1.298775315284729,
      "learning_rate": 5.882620542630901e-05,
      "loss": 1.213,
      "step": 136
    },
    {
      "epoch": 1.5307262569832403,
      "grad_norm": 1.2305636405944824,
      "learning_rate": 5.8178545636514145e-05,
      "loss": 1.3607,
      "step": 137
    },
    {
      "epoch": 1.541899441340782,
      "grad_norm": 1.1469392776489258,
      "learning_rate": 5.752947274387147e-05,
      "loss": 1.349,
      "step": 138
    },
    {
      "epoch": 1.553072625698324,
      "grad_norm": 1.1323195695877075,
      "learning_rate": 5.687909889628529e-05,
      "loss": 1.3132,
      "step": 139
    },
    {
      "epoch": 1.564245810055866,
      "grad_norm": 1.0949897766113281,
      "learning_rate": 5.622753646644102e-05,
      "loss": 1.2518,
      "step": 140
    },
    {
      "epoch": 1.5754189944134078,
      "grad_norm": 1.1683642864227295,
      "learning_rate": 5.557489803238933e-05,
      "loss": 1.3319,
      "step": 141
    },
    {
      "epoch": 1.5865921787709496,
      "grad_norm": 1.2218724489212036,
      "learning_rate": 5.492129635809473e-05,
      "loss": 1.3427,
      "step": 142
    },
    {
      "epoch": 1.5977653631284916,
      "grad_norm": 1.133186936378479,
      "learning_rate": 5.426684437395196e-05,
      "loss": 1.2725,
      "step": 143
    },
    {
      "epoch": 1.6089385474860336,
      "grad_norm": 1.2256780862808228,
      "learning_rate": 5.361165515727374e-05,
      "loss": 1.2708,
      "step": 144
    },
    {
      "epoch": 1.6201117318435754,
      "grad_norm": 1.2726577520370483,
      "learning_rate": 5.295584191275308e-05,
      "loss": 1.3795,
      "step": 145
    },
    {
      "epoch": 1.6312849162011172,
      "grad_norm": 1.2547286748886108,
      "learning_rate": 5.229951795290353e-05,
      "loss": 1.3061,
      "step": 146
    },
    {
      "epoch": 1.6424581005586592,
      "grad_norm": 1.2261368036270142,
      "learning_rate": 5.164279667848094e-05,
      "loss": 1.245,
      "step": 147
    },
    {
      "epoch": 1.6536312849162011,
      "grad_norm": 1.2689229249954224,
      "learning_rate": 5.0985791558889785e-05,
      "loss": 1.3172,
      "step": 148
    },
    {
      "epoch": 1.6648044692737431,
      "grad_norm": 1.2573802471160889,
      "learning_rate": 5.032861611257783e-05,
      "loss": 1.2967,
      "step": 149
    },
    {
      "epoch": 1.675977653631285,
      "grad_norm": 1.4018381834030151,
      "learning_rate": 4.967138388742218e-05,
      "loss": 1.2735,
      "step": 150
    },
    {
      "epoch": 1.675977653631285,
      "eval_loss": 1.326945424079895,
      "eval_runtime": 3.5376,
      "eval_samples_per_second": 42.685,
      "eval_steps_per_second": 21.484,
      "step": 150
    },
    {
      "epoch": 1.6871508379888267,
      "grad_norm": 1.5333133935928345,
      "learning_rate": 4.901420844111021e-05,
      "loss": 1.3852,
      "step": 151
    },
    {
      "epoch": 1.6983240223463687,
      "grad_norm": 1.3856674432754517,
      "learning_rate": 4.835720332151907e-05,
      "loss": 1.1126,
      "step": 152
    },
    {
      "epoch": 1.7094972067039107,
      "grad_norm": 1.6033830642700195,
      "learning_rate": 4.770048204709648e-05,
      "loss": 1.3011,
      "step": 153
    },
    {
      "epoch": 1.7206703910614525,
      "grad_norm": 1.6162670850753784,
      "learning_rate": 4.7044158087246926e-05,
      "loss": 1.2179,
      "step": 154
    },
    {
      "epoch": 1.7318435754189943,
      "grad_norm": 1.744236946105957,
      "learning_rate": 4.6388344842726264e-05,
      "loss": 1.1941,
      "step": 155
    },
    {
      "epoch": 1.7430167597765363,
      "grad_norm": 1.5789906978607178,
      "learning_rate": 4.5733155626048036e-05,
      "loss": 1.3045,
      "step": 156
    },
    {
      "epoch": 1.7541899441340782,
      "grad_norm": 1.226691722869873,
      "learning_rate": 4.507870364190527e-05,
      "loss": 1.3675,
      "step": 157
    },
    {
      "epoch": 1.7653631284916202,
      "grad_norm": 1.3142743110656738,
      "learning_rate": 4.4425101967610674e-05,
      "loss": 1.4387,
      "step": 158
    },
    {
      "epoch": 1.776536312849162,
      "grad_norm": 1.0727818012237549,
      "learning_rate": 4.377246353355899e-05,
      "loss": 1.2175,
      "step": 159
    },
    {
      "epoch": 1.7877094972067038,
      "grad_norm": 1.1169428825378418,
      "learning_rate": 4.312090110371473e-05,
      "loss": 1.365,
      "step": 160
    },
    {
      "epoch": 1.7988826815642458,
      "grad_norm": 1.0804024934768677,
      "learning_rate": 4.247052725612852e-05,
      "loss": 1.3312,
      "step": 161
    },
    {
      "epoch": 1.8100558659217878,
      "grad_norm": 1.2123122215270996,
      "learning_rate": 4.1821454363485866e-05,
      "loss": 1.3837,
      "step": 162
    },
    {
      "epoch": 1.8212290502793296,
      "grad_norm": 1.1932836771011353,
      "learning_rate": 4.1173794573690996e-05,
      "loss": 1.3426,
      "step": 163
    },
    {
      "epoch": 1.8324022346368714,
      "grad_norm": 1.0667084455490112,
      "learning_rate": 4.052765979048986e-05,
      "loss": 1.223,
      "step": 164
    },
    {
      "epoch": 1.8435754189944134,
      "grad_norm": 1.1312137842178345,
      "learning_rate": 3.988316165413528e-05,
      "loss": 1.2159,
      "step": 165
    },
    {
      "epoch": 1.8547486033519553,
      "grad_norm": 1.135401725769043,
      "learning_rate": 3.924041152209739e-05,
      "loss": 1.1931,
      "step": 166
    },
    {
      "epoch": 1.8659217877094973,
      "grad_norm": 1.2866289615631104,
      "learning_rate": 3.859952044982329e-05,
      "loss": 1.3266,
      "step": 167
    },
    {
      "epoch": 1.8770949720670391,
      "grad_norm": 1.3166481256484985,
      "learning_rate": 3.7960599171548574e-05,
      "loss": 1.2229,
      "step": 168
    },
    {
      "epoch": 1.888268156424581,
      "grad_norm": 1.2265543937683105,
      "learning_rate": 3.732375808116451e-05,
      "loss": 1.2899,
      "step": 169
    },
    {
      "epoch": 1.899441340782123,
      "grad_norm": 1.2559617757797241,
      "learning_rate": 3.668910721314402e-05,
      "loss": 1.13,
      "step": 170
    },
    {
      "epoch": 1.910614525139665,
      "grad_norm": 1.2486090660095215,
      "learning_rate": 3.605675622352973e-05,
      "loss": 1.2083,
      "step": 171
    },
    {
      "epoch": 1.9217877094972067,
      "grad_norm": 1.406841516494751,
      "learning_rate": 3.542681437098745e-05,
      "loss": 1.2352,
      "step": 172
    },
    {
      "epoch": 1.9329608938547485,
      "grad_norm": 1.4469358921051025,
      "learning_rate": 3.479939049792817e-05,
      "loss": 1.2662,
      "step": 173
    },
    {
      "epoch": 1.9441340782122905,
      "grad_norm": 1.4758639335632324,
      "learning_rate": 3.417459301170219e-05,
      "loss": 1.2432,
      "step": 174
    },
    {
      "epoch": 1.9553072625698324,
      "grad_norm": 1.3574011325836182,
      "learning_rate": 3.355252986586832e-05,
      "loss": 1.1044,
      "step": 175
    },
    {
      "epoch": 1.9664804469273744,
      "grad_norm": 1.8598283529281616,
      "learning_rate": 3.293330854154136e-05,
      "loss": 1.3518,
      "step": 176
    },
    {
      "epoch": 1.9776536312849162,
      "grad_norm": 1.7371652126312256,
      "learning_rate": 3.2317036028821523e-05,
      "loss": 1.2855,
      "step": 177
    },
    {
      "epoch": 1.988826815642458,
      "grad_norm": 1.4092578887939453,
      "learning_rate": 3.1703818808308324e-05,
      "loss": 1.3558,
      "step": 178
    },
    {
      "epoch": 2.0,
      "grad_norm": 2.122119188308716,
      "learning_rate": 3.109376283270277e-05,
      "loss": 1.6615,
      "step": 179
    },
    {
      "epoch": 2.011173184357542,
      "grad_norm": 1.219295859336853,
      "learning_rate": 3.0486973508500727e-05,
      "loss": 1.4017,
      "step": 180
    },
    {
      "epoch": 2.022346368715084,
      "grad_norm": 1.1538641452789307,
      "learning_rate": 2.988355567778043e-05,
      "loss": 1.2616,
      "step": 181
    },
    {
      "epoch": 2.0335195530726256,
      "grad_norm": 1.0700401067733765,
      "learning_rate": 2.9283613600087933e-05,
      "loss": 1.2648,
      "step": 182
    },
    {
      "epoch": 2.0446927374301676,
      "grad_norm": 0.9880838394165039,
      "learning_rate": 2.8687250934422772e-05,
      "loss": 1.1885,
      "step": 183
    },
    {
      "epoch": 2.0558659217877095,
      "grad_norm": 1.051222801208496,
      "learning_rate": 2.8094570721327662e-05,
      "loss": 1.1746,
      "step": 184
    },
    {
      "epoch": 2.0670391061452515,
      "grad_norm": 1.0639920234680176,
      "learning_rate": 2.750567536508504e-05,
      "loss": 1.2354,
      "step": 185
    },
    {
      "epoch": 2.078212290502793,
      "grad_norm": 1.0196352005004883,
      "learning_rate": 2.6920666616023327e-05,
      "loss": 1.2534,
      "step": 186
    },
    {
      "epoch": 2.089385474860335,
      "grad_norm": 1.0681345462799072,
      "learning_rate": 2.6339645552936536e-05,
      "loss": 1.1096,
      "step": 187
    },
    {
      "epoch": 2.100558659217877,
      "grad_norm": 1.196368932723999,
      "learning_rate": 2.5762712565619528e-05,
      "loss": 1.1021,
      "step": 188
    },
    {
      "epoch": 2.111731843575419,
      "grad_norm": 1.0771485567092896,
      "learning_rate": 2.5189967337522573e-05,
      "loss": 1.0798,
      "step": 189
    },
    {
      "epoch": 2.122905027932961,
      "grad_norm": 1.11617910861969,
      "learning_rate": 2.46215088285279e-05,
      "loss": 1.1074,
      "step": 190
    },
    {
      "epoch": 2.1340782122905027,
      "grad_norm": 1.1497374773025513,
      "learning_rate": 2.4057435257851175e-05,
      "loss": 1.1183,
      "step": 191
    },
    {
      "epoch": 2.1452513966480447,
      "grad_norm": 1.1393492221832275,
      "learning_rate": 2.349784408707112e-05,
      "loss": 1.0711,
      "step": 192
    },
    {
      "epoch": 2.1564245810055866,
      "grad_norm": 1.0959515571594238,
      "learning_rate": 2.2942832003289823e-05,
      "loss": 1.0437,
      "step": 193
    },
    {
      "epoch": 2.1675977653631286,
      "grad_norm": 1.155680775642395,
      "learning_rate": 2.2392494902427025e-05,
      "loss": 1.047,
      "step": 194
    },
    {
      "epoch": 2.17877094972067,
      "grad_norm": 1.157484531402588,
      "learning_rate": 2.1846927872651137e-05,
      "loss": 1.0649,
      "step": 195
    },
    {
      "epoch": 2.189944134078212,
      "grad_norm": 1.2944828271865845,
      "learning_rate": 2.1306225177949585e-05,
      "loss": 1.1103,
      "step": 196
    },
    {
      "epoch": 2.201117318435754,
      "grad_norm": 1.5556319952011108,
      "learning_rate": 2.07704802418419e-05,
      "loss": 1.2251,
      "step": 197
    },
    {
      "epoch": 2.212290502793296,
      "grad_norm": 1.4562413692474365,
      "learning_rate": 2.0239785631237705e-05,
      "loss": 1.0627,
      "step": 198
    },
    {
      "epoch": 2.223463687150838,
      "grad_norm": 1.393690824508667,
      "learning_rate": 1.9714233040442915e-05,
      "loss": 0.9858,
      "step": 199
    },
    {
      "epoch": 2.2346368715083798,
      "grad_norm": 1.6887412071228027,
      "learning_rate": 1.9193913275316626e-05,
      "loss": 1.1138,
      "step": 200
    },
    {
      "epoch": 2.2346368715083798,
      "eval_loss": 1.2823649644851685,
      "eval_runtime": 3.5435,
      "eval_samples_per_second": 42.613,
      "eval_steps_per_second": 21.448,
      "step": 200
    },
    {
      "epoch": 2.2458100558659218,
      "grad_norm": 2.0551750659942627,
      "learning_rate": 1.8678916237581522e-05,
      "loss": 1.0606,
      "step": 201
    },
    {
      "epoch": 2.2569832402234637,
      "grad_norm": 1.3848985433578491,
      "learning_rate": 1.816933090929055e-05,
      "loss": 1.2715,
      "step": 202
    },
    {
      "epoch": 2.2681564245810057,
      "grad_norm": 1.1580418348312378,
      "learning_rate": 1.7665245337452368e-05,
      "loss": 1.1072,
      "step": 203
    },
    {
      "epoch": 2.2793296089385473,
      "grad_norm": 1.1287815570831299,
      "learning_rate": 1.716674661881848e-05,
      "loss": 1.1187,
      "step": 204
    },
    {
      "epoch": 2.2905027932960893,
      "grad_norm": 1.1857539415359497,
      "learning_rate": 1.667392088483456e-05,
      "loss": 1.3102,
      "step": 205
    },
    {
      "epoch": 2.3016759776536313,
      "grad_norm": 1.0973467826843262,
      "learning_rate": 1.6186853286758397e-05,
      "loss": 1.2696,
      "step": 206
    },
    {
      "epoch": 2.3128491620111733,
      "grad_norm": 1.0390197038650513,
      "learning_rate": 1.570562798094747e-05,
      "loss": 1.2218,
      "step": 207
    },
    {
      "epoch": 2.3240223463687153,
      "grad_norm": 1.2470310926437378,
      "learning_rate": 1.5230328114318127e-05,
      "loss": 1.3041,
      "step": 208
    },
    {
      "epoch": 2.335195530726257,
      "grad_norm": 1.1841509342193604,
      "learning_rate": 1.4761035809979395e-05,
      "loss": 1.2008,
      "step": 209
    },
    {
      "epoch": 2.346368715083799,
      "grad_norm": 1.027030110359192,
      "learning_rate": 1.4297832153043656e-05,
      "loss": 1.0652,
      "step": 210
    },
    {
      "epoch": 2.357541899441341,
      "grad_norm": 1.1887884140014648,
      "learning_rate": 1.3840797176616466e-05,
      "loss": 1.1927,
      "step": 211
    },
    {
      "epoch": 2.368715083798883,
      "grad_norm": 1.1091079711914062,
      "learning_rate": 1.3390009847968504e-05,
      "loss": 1.157,
      "step": 212
    },
    {
      "epoch": 2.3798882681564244,
      "grad_norm": 1.1894493103027344,
      "learning_rate": 1.2945548054891321e-05,
      "loss": 1.1287,
      "step": 213
    },
    {
      "epoch": 2.3910614525139664,
      "grad_norm": 1.2934393882751465,
      "learning_rate": 1.2507488592239847e-05,
      "loss": 1.127,
      "step": 214
    },
    {
      "epoch": 2.4022346368715084,
      "grad_norm": 1.1570240259170532,
      "learning_rate": 1.2075907148663579e-05,
      "loss": 1.0553,
      "step": 215
    },
    {
      "epoch": 2.4134078212290504,
      "grad_norm": 1.172692060470581,
      "learning_rate": 1.1650878293528994e-05,
      "loss": 1.0155,
      "step": 216
    },
    {
      "epoch": 2.4245810055865924,
      "grad_norm": 1.197474718093872,
      "learning_rate": 1.1232475464035385e-05,
      "loss": 1.135,
      "step": 217
    },
    {
      "epoch": 2.435754189944134,
      "grad_norm": 1.1875431537628174,
      "learning_rate": 1.0820770952526155e-05,
      "loss": 1.1129,
      "step": 218
    },
    {
      "epoch": 2.446927374301676,
      "grad_norm": 1.4438897371292114,
      "learning_rate": 1.0415835893998116e-05,
      "loss": 1.2491,
      "step": 219
    },
    {
      "epoch": 2.458100558659218,
      "grad_norm": 1.3826245069503784,
      "learning_rate": 1.0017740253810609e-05,
      "loss": 1.1198,
      "step": 220
    },
    {
      "epoch": 2.46927374301676,
      "grad_norm": 1.3730559349060059,
      "learning_rate": 9.62655281559679e-06,
      "loss": 1.0396,
      "step": 221
    },
    {
      "epoch": 2.4804469273743015,
      "grad_norm": 1.6272450685501099,
      "learning_rate": 9.242341169379076e-06,
      "loss": 1.1171,
      "step": 222
    },
    {
      "epoch": 2.4916201117318435,
      "grad_norm": 1.9734584093093872,
      "learning_rate": 8.865171699890834e-06,
      "loss": 1.1374,
      "step": 223
    },
    {
      "epoch": 2.5027932960893855,
      "grad_norm": 1.3264243602752686,
      "learning_rate": 8.49510957510633e-06,
      "loss": 1.3275,
      "step": 224
    },
    {
      "epoch": 2.5139664804469275,
      "grad_norm": 1.3713715076446533,
      "learning_rate": 8.132218734980852e-06,
      "loss": 1.2514,
      "step": 225
    },
    {
      "epoch": 2.5251396648044695,
      "grad_norm": 1.2266292572021484,
      "learning_rate": 7.776561880403072e-06,
      "loss": 1.1443,
      "step": 226
    },
    {
      "epoch": 2.536312849162011,
      "grad_norm": 1.2601666450500488,
      "learning_rate": 7.4282004623615396e-06,
      "loss": 1.2684,
      "step": 227
    },
    {
      "epoch": 2.547486033519553,
      "grad_norm": 1.222358226776123,
      "learning_rate": 7.0871946713269856e-06,
      "loss": 1.1953,
      "step": 228
    },
    {
      "epoch": 2.558659217877095,
      "grad_norm": 1.1266566514968872,
      "learning_rate": 6.753603426852589e-06,
      "loss": 1.1542,
      "step": 229
    },
    {
      "epoch": 2.5698324022346366,
      "grad_norm": 1.035483479499817,
      "learning_rate": 6.427484367393699e-06,
      "loss": 1.1903,
      "step": 230
    },
    {
      "epoch": 2.5810055865921786,
      "grad_norm": 1.0014451742172241,
      "learning_rate": 6.108893840348995e-06,
      "loss": 1.0823,
      "step": 231
    },
    {
      "epoch": 2.5921787709497206,
      "grad_norm": 1.1314361095428467,
      "learning_rate": 5.797886892324694e-06,
      "loss": 1.2311,
      "step": 232
    },
    {
      "epoch": 2.6033519553072626,
      "grad_norm": 1.0532621145248413,
      "learning_rate": 5.494517259623477e-06,
      "loss": 1.1181,
      "step": 233
    },
    {
      "epoch": 2.6145251396648046,
      "grad_norm": 1.0458093881607056,
      "learning_rate": 5.198837358959901e-06,
      "loss": 1.1474,
      "step": 234
    },
    {
      "epoch": 2.6256983240223466,
      "grad_norm": 1.1469138860702515,
      "learning_rate": 4.910898278403669e-06,
      "loss": 1.1677,
      "step": 235
    },
    {
      "epoch": 2.636871508379888,
      "grad_norm": 1.135291337966919,
      "learning_rate": 4.630749768552589e-06,
      "loss": 1.1351,
      "step": 236
    },
    {
      "epoch": 2.64804469273743,
      "grad_norm": 1.09344482421875,
      "learning_rate": 4.358440233936617e-06,
      "loss": 1.1221,
      "step": 237
    },
    {
      "epoch": 2.659217877094972,
      "grad_norm": 1.190507411956787,
      "learning_rate": 4.094016724654359e-06,
      "loss": 1.0893,
      "step": 238
    },
    {
      "epoch": 2.6703910614525137,
      "grad_norm": 1.1962109804153442,
      "learning_rate": 3.837524928243774e-06,
      "loss": 1.0827,
      "step": 239
    },
    {
      "epoch": 2.6815642458100557,
      "grad_norm": 1.3094158172607422,
      "learning_rate": 3.589009161788104e-06,
      "loss": 1.0457,
      "step": 240
    },
    {
      "epoch": 2.6927374301675977,
      "grad_norm": 1.3989758491516113,
      "learning_rate": 3.3485123642587658e-06,
      "loss": 1.0812,
      "step": 241
    },
    {
      "epoch": 2.7039106145251397,
      "grad_norm": 1.4350848197937012,
      "learning_rate": 3.116076089096265e-06,
      "loss": 1.0437,
      "step": 242
    },
    {
      "epoch": 2.7150837988826817,
      "grad_norm": 1.4926385879516602,
      "learning_rate": 2.8917404970305097e-06,
      "loss": 1.0832,
      "step": 243
    },
    {
      "epoch": 2.7262569832402237,
      "grad_norm": 1.7393081188201904,
      "learning_rate": 2.675544349141779e-06,
      "loss": 1.0668,
      "step": 244
    },
    {
      "epoch": 2.7374301675977653,
      "grad_norm": 1.9382210969924927,
      "learning_rate": 2.4675250001635232e-06,
      "loss": 1.0107,
      "step": 245
    },
    {
      "epoch": 2.7486033519553073,
      "grad_norm": 0.7568252086639404,
      "learning_rate": 2.2677183920281343e-06,
      "loss": 1.0928,
      "step": 246
    },
    {
      "epoch": 2.7597765363128492,
      "grad_norm": 0.9040629267692566,
      "learning_rate": 2.076159047656889e-06,
      "loss": 1.3023,
      "step": 247
    },
    {
      "epoch": 2.770949720670391,
      "grad_norm": 0.9287111163139343,
      "learning_rate": 1.892880064994934e-06,
      "loss": 1.1539,
      "step": 248
    },
    {
      "epoch": 2.782122905027933,
      "grad_norm": 0.9362537860870361,
      "learning_rate": 1.7179131112926627e-06,
      "loss": 1.1163,
      "step": 249
    },
    {
      "epoch": 2.793296089385475,
      "grad_norm": 0.9989909529685974,
      "learning_rate": 1.551288417634106e-06,
      "loss": 1.1343,
      "step": 250
    },
    {
      "epoch": 2.793296089385475,
      "eval_loss": 1.2524751424789429,
      "eval_runtime": 3.5444,
      "eval_samples_per_second": 42.603,
      "eval_steps_per_second": 21.442,
      "step": 250
    },
    {
      "epoch": 2.804469273743017,
      "grad_norm": 0.999933660030365,
      "learning_rate": 1.3930347737136196e-06,
      "loss": 1.1728,
      "step": 251
    },
    {
      "epoch": 2.815642458100559,
      "grad_norm": 1.084299921989441,
      "learning_rate": 1.2431795228615372e-06,
      "loss": 1.1947,
      "step": 252
    },
    {
      "epoch": 2.826815642458101,
      "grad_norm": 1.0853486061096191,
      "learning_rate": 1.101748557319715e-06,
      "loss": 1.1514,
      "step": 253
    },
    {
      "epoch": 2.8379888268156424,
      "grad_norm": 1.0919440984725952,
      "learning_rate": 9.687663137678604e-07,
      "loss": 1.1536,
      "step": 254
    },
    {
      "epoch": 2.8491620111731844,
      "grad_norm": 1.0306580066680908,
      "learning_rate": 8.442557691013043e-07,
      "loss": 1.1416,
      "step": 255
    },
    {
      "epoch": 2.8603351955307263,
      "grad_norm": 1.1208820343017578,
      "learning_rate": 7.282384364610206e-07,
      "loss": 1.1873,
      "step": 256
    },
    {
      "epoch": 2.871508379888268,
      "grad_norm": 1.0364559888839722,
      "learning_rate": 6.207343615165561e-07,
      "loss": 1.0184,
      "step": 257
    },
    {
      "epoch": 2.88268156424581,
      "grad_norm": 0.9816321730613708,
      "learning_rate": 5.217621190024779e-07,
      "loss": 1.017,
      "step": 258
    },
    {
      "epoch": 2.893854748603352,
      "grad_norm": 1.1331318616867065,
      "learning_rate": 4.3133880950905205e-07,
      "loss": 1.1131,
      "step": 259
    },
    {
      "epoch": 2.905027932960894,
      "grad_norm": 1.209434151649475,
      "learning_rate": 3.494800565275125e-07,
      "loss": 1.18,
      "step": 260
    },
    {
      "epoch": 2.916201117318436,
      "grad_norm": 1.2905470132827759,
      "learning_rate": 2.762000037506485e-07,
      "loss": 1.1171,
      "step": 261
    },
    {
      "epoch": 2.927374301675978,
      "grad_norm": 1.216124176979065,
      "learning_rate": 2.115113126290258e-07,
      "loss": 1.0804,
      "step": 262
    },
    {
      "epoch": 2.9385474860335195,
      "grad_norm": 1.300574541091919,
      "learning_rate": 1.554251601833201e-07,
      "loss": 1.0849,
      "step": 263
    },
    {
      "epoch": 2.9497206703910615,
      "grad_norm": 1.376492977142334,
      "learning_rate": 1.0795123707312283e-07,
      "loss": 1.0931,
      "step": 264
    },
    {
      "epoch": 2.9608938547486034,
      "grad_norm": 1.507287859916687,
      "learning_rate": 6.909774592258056e-08,
      "loss": 1.1502,
      "step": 265
    },
    {
      "epoch": 2.972067039106145,
      "grad_norm": 1.6715766191482544,
      "learning_rate": 3.8871399903134265e-08,
      "loss": 1.0179,
      "step": 266
    },
    {
      "epoch": 2.983240223463687,
      "grad_norm": 2.2043421268463135,
      "learning_rate": 1.7277421573608232e-08,
      "loss": 0.9667,
      "step": 267
    },
    {
      "epoch": 2.994413407821229,
      "grad_norm": 1.0078221559524536,
      "learning_rate": 4.319541977831909e-09,
      "loss": 1.1912,
      "step": 268
    },
    {
      "epoch": 3.005586592178771,
      "grad_norm": 2.012565851211548,
      "learning_rate": 0.0,
      "loss": 1.8972,
      "step": 269
    }
  ],
  "logging_steps": 1,
  "max_steps": 269,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 5.738167723150541e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}