|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.5078125,
  "eval_steps": 39,
  "global_step": 195,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0026041666666666665,
      "grad_norm": 0.3952034496250024,
      "learning_rate": 5e-06,
      "loss": 2.4903,
      "step": 1
    },
    {
      "epoch": 0.0026041666666666665,
      "eval_loss": 2.554558753967285,
      "eval_runtime": 65.1161,
      "eval_samples_per_second": 1.229,
      "eval_steps_per_second": 0.154,
      "step": 1
    },
    {
      "epoch": 0.005208333333333333,
      "grad_norm": 0.3885918424668888,
      "learning_rate": 1e-05,
      "loss": 2.4825,
      "step": 2
    },
    {
      "epoch": 0.0078125,
      "grad_norm": 0.3847099050493279,
      "learning_rate": 1.5e-05,
      "loss": 2.5035,
      "step": 3
    },
    {
      "epoch": 0.010416666666666666,
      "grad_norm": 0.4235545463515579,
      "learning_rate": 2e-05,
      "loss": 2.5577,
      "step": 4
    },
    {
      "epoch": 0.013020833333333334,
      "grad_norm": 0.3801745772810421,
      "learning_rate": 2.5e-05,
      "loss": 2.5297,
      "step": 5
    },
    {
      "epoch": 0.015625,
      "grad_norm": 0.3512277500716236,
      "learning_rate": 3e-05,
      "loss": 2.4777,
      "step": 6
    },
    {
      "epoch": 0.018229166666666668,
      "grad_norm": 0.28888015786994375,
      "learning_rate": 3.5e-05,
      "loss": 2.4556,
      "step": 7
    },
    {
      "epoch": 0.020833333333333332,
      "grad_norm": 0.25350975750236143,
      "learning_rate": 4e-05,
      "loss": 2.5022,
      "step": 8
    },
    {
      "epoch": 0.0234375,
      "grad_norm": 0.22691977505165736,
      "learning_rate": 4.5e-05,
      "loss": 2.4143,
      "step": 9
    },
    {
      "epoch": 0.026041666666666668,
      "grad_norm": 0.20864896327745933,
      "learning_rate": 5e-05,
      "loss": 2.4545,
      "step": 10
    },
    {
      "epoch": 0.028645833333333332,
      "grad_norm": 0.20008113121425344,
      "learning_rate": 5.500000000000001e-05,
      "loss": 2.4939,
      "step": 11
    },
    {
      "epoch": 0.03125,
      "grad_norm": 0.24785418789288546,
      "learning_rate": 6e-05,
      "loss": 2.3184,
      "step": 12
    },
    {
      "epoch": 0.033854166666666664,
      "grad_norm": 0.2153715911656377,
      "learning_rate": 6.500000000000001e-05,
      "loss": 2.4738,
      "step": 13
    },
    {
      "epoch": 0.036458333333333336,
      "grad_norm": 0.22249287774026938,
      "learning_rate": 7e-05,
      "loss": 2.4814,
      "step": 14
    },
    {
      "epoch": 0.0390625,
      "grad_norm": 0.2076426443768558,
      "learning_rate": 7.500000000000001e-05,
      "loss": 2.3529,
      "step": 15
    },
    {
      "epoch": 0.041666666666666664,
      "grad_norm": 0.2246776060857838,
      "learning_rate": 8e-05,
      "loss": 2.2371,
      "step": 16
    },
    {
      "epoch": 0.044270833333333336,
      "grad_norm": 0.18383862266336323,
      "learning_rate": 8.5e-05,
      "loss": 2.4787,
      "step": 17
    },
    {
      "epoch": 0.046875,
      "grad_norm": 0.20631688287583885,
      "learning_rate": 9e-05,
      "loss": 2.6178,
      "step": 18
    },
    {
      "epoch": 0.049479166666666664,
      "grad_norm": 0.18016643348558348,
      "learning_rate": 9.5e-05,
      "loss": 2.4891,
      "step": 19
    },
    {
      "epoch": 0.052083333333333336,
      "grad_norm": 0.14534618682097772,
      "learning_rate": 0.0001,
      "loss": 2.4522,
      "step": 20
    },
    {
      "epoch": 0.0546875,
      "grad_norm": 0.15428537805246717,
      "learning_rate": 9.999832398924833e-05,
      "loss": 2.4111,
      "step": 21
    },
    {
      "epoch": 0.057291666666666664,
      "grad_norm": 0.1631103718894274,
      "learning_rate": 9.999329608183822e-05,
      "loss": 2.387,
      "step": 22
    },
    {
      "epoch": 0.059895833333333336,
      "grad_norm": 0.1552602669275131,
      "learning_rate": 9.998491665229539e-05,
      "loss": 2.3648,
      "step": 23
    },
    {
      "epoch": 0.0625,
      "grad_norm": 0.1704183704935776,
      "learning_rate": 9.997318632479817e-05,
      "loss": 2.4997,
      "step": 24
    },
    {
      "epoch": 0.06510416666666667,
      "grad_norm": 0.1585395745370806,
      "learning_rate": 9.995810597313128e-05,
      "loss": 2.5326,
      "step": 25
    },
    {
      "epoch": 0.06770833333333333,
      "grad_norm": 0.1344528253347993,
      "learning_rate": 9.993967672062052e-05,
      "loss": 2.5174,
      "step": 26
    },
    {
      "epoch": 0.0703125,
      "grad_norm": 0.14705487245235002,
      "learning_rate": 9.991789994004929e-05,
      "loss": 2.3446,
      "step": 27
    },
    {
      "epoch": 0.07291666666666667,
      "grad_norm": 0.1461054029585455,
      "learning_rate": 9.989277725355615e-05,
      "loss": 2.4945,
      "step": 28
    },
    {
      "epoch": 0.07552083333333333,
      "grad_norm": 0.14508526627249885,
      "learning_rate": 9.986431053251411e-05,
      "loss": 2.3801,
      "step": 29
    },
    {
      "epoch": 0.078125,
      "grad_norm": 0.14473824067661442,
      "learning_rate": 9.983250189739119e-05,
      "loss": 2.4877,
      "step": 30
    },
    {
      "epoch": 0.08072916666666667,
      "grad_norm": 0.18945595196350265,
      "learning_rate": 9.97973537175925e-05,
      "loss": 2.5833,
      "step": 31
    },
    {
      "epoch": 0.08333333333333333,
      "grad_norm": 0.1541697712272958,
      "learning_rate": 9.975886861128368e-05,
      "loss": 2.4619,
      "step": 32
    },
    {
      "epoch": 0.0859375,
      "grad_norm": 0.16052736172586615,
      "learning_rate": 9.971704944519594e-05,
      "loss": 2.5037,
      "step": 33
    },
    {
      "epoch": 0.08854166666666667,
      "grad_norm": 0.15543243937875972,
      "learning_rate": 9.967189933441243e-05,
      "loss": 2.4374,
      "step": 34
    },
    {
      "epoch": 0.09114583333333333,
      "grad_norm": 0.16705335802827087,
      "learning_rate": 9.962342164213639e-05,
      "loss": 2.5955,
      "step": 35
    },
    {
      "epoch": 0.09375,
      "grad_norm": 0.14789348505066385,
      "learning_rate": 9.957161997944034e-05,
      "loss": 2.4322,
      "step": 36
    },
    {
      "epoch": 0.09635416666666667,
      "grad_norm": 0.15495422007897772,
      "learning_rate": 9.95164982049974e-05,
      "loss": 2.4307,
      "step": 37
    },
    {
      "epoch": 0.09895833333333333,
      "grad_norm": 0.15049524925558916,
      "learning_rate": 9.94580604247936e-05,
      "loss": 2.3423,
      "step": 38
    },
    {
      "epoch": 0.1015625,
      "grad_norm": 0.1444251980172912,
      "learning_rate": 9.939631099182219e-05,
      "loss": 2.2882,
      "step": 39
    },
    {
      "epoch": 0.1015625,
      "eval_loss": 2.4301750659942627,
      "eval_runtime": 65.3323,
      "eval_samples_per_second": 1.225,
      "eval_steps_per_second": 0.153,
      "step": 39
    },
    {
      "epoch": 0.10416666666666667,
      "grad_norm": 0.1475367612200888,
      "learning_rate": 9.933125450575932e-05,
      "loss": 2.4509,
      "step": 40
    },
    {
      "epoch": 0.10677083333333333,
      "grad_norm": 0.16356380426266176,
      "learning_rate": 9.926289581262147e-05,
      "loss": 2.5307,
      "step": 41
    },
    {
      "epoch": 0.109375,
      "grad_norm": 0.166302003601292,
      "learning_rate": 9.919124000440438e-05,
      "loss": 2.526,
      "step": 42
    },
    {
      "epoch": 0.11197916666666667,
      "grad_norm": 0.14645402434215465,
      "learning_rate": 9.91162924187038e-05,
      "loss": 2.2958,
      "step": 43
    },
    {
      "epoch": 0.11458333333333333,
      "grad_norm": 0.2119539720051403,
      "learning_rate": 9.903805863831799e-05,
      "loss": 2.3299,
      "step": 44
    },
    {
      "epoch": 0.1171875,
      "grad_norm": 0.151653634781893,
      "learning_rate": 9.895654449083166e-05,
      "loss": 2.4133,
      "step": 45
    },
    {
      "epoch": 0.11979166666666667,
      "grad_norm": 0.1547044553169797,
      "learning_rate": 9.887175604818206e-05,
      "loss": 2.5952,
      "step": 46
    },
    {
      "epoch": 0.12239583333333333,
      "grad_norm": 0.16664262013704687,
      "learning_rate": 9.87836996262066e-05,
      "loss": 2.4714,
      "step": 47
    },
    {
      "epoch": 0.125,
      "grad_norm": 0.15551192268579045,
      "learning_rate": 9.869238178417235e-05,
      "loss": 2.3432,
      "step": 48
    },
    {
      "epoch": 0.12760416666666666,
      "grad_norm": 0.14968525834314664,
      "learning_rate": 9.859780932428756e-05,
      "loss": 2.4037,
      "step": 49
    },
    {
      "epoch": 0.13020833333333334,
      "grad_norm": 0.1685655914159011,
      "learning_rate": 9.849998929119488e-05,
      "loss": 2.4032,
      "step": 50
    },
    {
      "epoch": 0.1328125,
      "grad_norm": 0.13644233785328275,
      "learning_rate": 9.839892897144663e-05,
      "loss": 2.4095,
      "step": 51
    },
    {
      "epoch": 0.13541666666666666,
      "grad_norm": 0.14364815720124613,
      "learning_rate": 9.829463589296203e-05,
      "loss": 2.4398,
      "step": 52
    },
    {
      "epoch": 0.13802083333333334,
      "grad_norm": 0.14915929369602085,
      "learning_rate": 9.818711782446645e-05,
      "loss": 2.4816,
      "step": 53
    },
    {
      "epoch": 0.140625,
      "grad_norm": 0.14935461072359194,
      "learning_rate": 9.80763827749127e-05,
      "loss": 2.3426,
      "step": 54
    },
    {
      "epoch": 0.14322916666666666,
      "grad_norm": 0.15190176296605723,
      "learning_rate": 9.796243899288456e-05,
      "loss": 2.4705,
      "step": 55
    },
    {
      "epoch": 0.14583333333333334,
      "grad_norm": 0.14303795896023264,
      "learning_rate": 9.784529496598214e-05,
      "loss": 2.5108,
      "step": 56
    },
    {
      "epoch": 0.1484375,
      "grad_norm": 0.1556316812368993,
      "learning_rate": 9.772495942018985e-05,
      "loss": 2.3112,
      "step": 57
    },
    {
      "epoch": 0.15104166666666666,
      "grad_norm": 0.1483632196038671,
      "learning_rate": 9.760144131922628e-05,
      "loss": 2.2325,
      "step": 58
    },
    {
      "epoch": 0.15364583333333334,
      "grad_norm": 0.12410345584803052,
      "learning_rate": 9.747474986387654e-05,
      "loss": 2.3749,
      "step": 59
    },
    {
      "epoch": 0.15625,
      "grad_norm": 0.16335694957610425,
      "learning_rate": 9.734489449130695e-05,
      "loss": 2.4645,
      "step": 60
    },
    {
      "epoch": 0.15885416666666666,
      "grad_norm": 0.1479375196060614,
      "learning_rate": 9.721188487436195e-05,
      "loss": 2.3834,
      "step": 61
    },
    {
      "epoch": 0.16145833333333334,
      "grad_norm": 0.13750323689269844,
      "learning_rate": 9.707573092084368e-05,
      "loss": 2.3592,
      "step": 62
    },
    {
      "epoch": 0.1640625,
      "grad_norm": 0.16027916915470333,
      "learning_rate": 9.693644277277391e-05,
      "loss": 2.4954,
      "step": 63
    },
    {
      "epoch": 0.16666666666666666,
      "grad_norm": 0.14229365410212697,
      "learning_rate": 9.679403080563861e-05,
      "loss": 2.4404,
      "step": 64
    },
    {
      "epoch": 0.16927083333333334,
      "grad_norm": 0.153686499622388,
      "learning_rate": 9.6648505627615e-05,
      "loss": 2.2282,
      "step": 65
    },
    {
      "epoch": 0.171875,
      "grad_norm": 0.16590644992700104,
      "learning_rate": 9.649987807878148e-05,
      "loss": 2.5126,
      "step": 66
    },
    {
      "epoch": 0.17447916666666666,
      "grad_norm": 0.15502972088699477,
      "learning_rate": 9.634815923030997e-05,
      "loss": 2.3749,
      "step": 67
    },
    {
      "epoch": 0.17708333333333334,
      "grad_norm": 0.14756465898580834,
      "learning_rate": 9.61933603836415e-05,
      "loss": 2.3476,
      "step": 68
    },
    {
      "epoch": 0.1796875,
      "grad_norm": 0.14919525754908758,
      "learning_rate": 9.603549306964407e-05,
      "loss": 2.3509,
      "step": 69
    },
    {
      "epoch": 0.18229166666666666,
      "grad_norm": 0.13861494211266717,
      "learning_rate": 9.587456904775393e-05,
      "loss": 2.2449,
      "step": 70
    },
    {
      "epoch": 0.18489583333333334,
      "grad_norm": 0.14821518394829722,
      "learning_rate": 9.57106003050996e-05,
      "loss": 2.3813,
      "step": 71
    },
    {
      "epoch": 0.1875,
      "grad_norm": 0.16324527196573113,
      "learning_rate": 9.554359905560886e-05,
      "loss": 2.477,
      "step": 72
    },
    {
      "epoch": 0.19010416666666666,
      "grad_norm": 0.14892748528676938,
      "learning_rate": 9.537357773909906e-05,
      "loss": 2.264,
      "step": 73
    },
    {
      "epoch": 0.19270833333333334,
      "grad_norm": 0.1284546434475265,
      "learning_rate": 9.520054902035035e-05,
      "loss": 2.4267,
      "step": 74
    },
    {
      "epoch": 0.1953125,
      "grad_norm": 0.14293551215488642,
      "learning_rate": 9.502452578816244e-05,
      "loss": 2.5131,
      "step": 75
    },
    {
      "epoch": 0.19791666666666666,
      "grad_norm": 0.14775108859068506,
      "learning_rate": 9.484552115439445e-05,
      "loss": 2.4076,
      "step": 76
    },
    {
      "epoch": 0.20052083333333334,
      "grad_norm": 0.15478800535673032,
      "learning_rate": 9.466354845298817e-05,
      "loss": 2.5762,
      "step": 77
    },
    {
      "epoch": 0.203125,
      "grad_norm": 0.16835921991226063,
      "learning_rate": 9.44786212389749e-05,
      "loss": 2.3752,
      "step": 78
    },
    {
      "epoch": 0.203125,
      "eval_loss": 2.4171054363250732,
      "eval_runtime": 65.4109,
      "eval_samples_per_second": 1.223,
      "eval_steps_per_second": 0.153,
      "step": 78
    },
    {
      "epoch": 0.20572916666666666,
      "grad_norm": 0.14467889640535875,
      "learning_rate": 9.42907532874657e-05,
      "loss": 2.4769,
      "step": 79
    },
    {
      "epoch": 0.20833333333333334,
      "grad_norm": 0.13503613492125413,
      "learning_rate": 9.40999585926253e-05,
      "loss": 2.4346,
      "step": 80
    },
    {
      "epoch": 0.2109375,
      "grad_norm": 0.1517728973900508,
      "learning_rate": 9.390625136662972e-05,
      "loss": 2.261,
      "step": 81
    },
    {
      "epoch": 0.21354166666666666,
      "grad_norm": 0.1466563383653839,
      "learning_rate": 9.370964603860753e-05,
      "loss": 2.5261,
      "step": 82
    },
    {
      "epoch": 0.21614583333333334,
      "grad_norm": 0.14618631155344838,
      "learning_rate": 9.351015725356514e-05,
      "loss": 2.4269,
      "step": 83
    },
    {
      "epoch": 0.21875,
      "grad_norm": 0.15601390292939937,
      "learning_rate": 9.33077998712958e-05,
      "loss": 2.3773,
      "step": 84
    },
    {
      "epoch": 0.22135416666666666,
      "grad_norm": 0.14415725413526403,
      "learning_rate": 9.310258896527278e-05,
      "loss": 2.3473,
      "step": 85
    },
    {
      "epoch": 0.22395833333333334,
      "grad_norm": 0.13470412653239092,
      "learning_rate": 9.289453982152653e-05,
      "loss": 2.3472,
      "step": 86
    },
    {
      "epoch": 0.2265625,
      "grad_norm": 0.15570776289510233,
      "learning_rate": 9.2683667937506e-05,
      "loss": 2.4414,
      "step": 87
    },
    {
      "epoch": 0.22916666666666666,
      "grad_norm": 0.14612626399487072,
      "learning_rate": 9.246998902092428e-05,
      "loss": 2.4655,
      "step": 88
    },
    {
      "epoch": 0.23177083333333334,
      "grad_norm": 0.18267625807633459,
      "learning_rate": 9.22535189885886e-05,
      "loss": 2.2771,
      "step": 89
    },
    {
      "epoch": 0.234375,
      "grad_norm": 0.16065010725957032,
      "learning_rate": 9.203427396521454e-05,
      "loss": 2.4194,
      "step": 90
    },
    {
      "epoch": 0.23697916666666666,
      "grad_norm": 0.18321820032984756,
      "learning_rate": 9.181227028222508e-05,
      "loss": 2.4456,
      "step": 91
    },
    {
      "epoch": 0.23958333333333334,
      "grad_norm": 0.13929464732035163,
      "learning_rate": 9.158752447653397e-05,
      "loss": 2.3989,
      "step": 92
    },
    {
      "epoch": 0.2421875,
      "grad_norm": 0.15585898969003706,
      "learning_rate": 9.136005328931395e-05,
      "loss": 2.4777,
      "step": 93
    },
    {
      "epoch": 0.24479166666666666,
      "grad_norm": 0.14793073723102618,
      "learning_rate": 9.112987366474972e-05,
      "loss": 2.379,
      "step": 94
    },
    {
      "epoch": 0.24739583333333334,
      "grad_norm": 0.14075868568691305,
      "learning_rate": 9.089700274877574e-05,
      "loss": 2.3281,
      "step": 95
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.15327339815586916,
      "learning_rate": 9.066145788779908e-05,
      "loss": 2.4517,
      "step": 96
    },
    {
      "epoch": 0.2526041666666667,
      "grad_norm": 0.15505433166839452,
      "learning_rate": 9.042325662740726e-05,
      "loss": 2.2919,
      "step": 97
    },
    {
      "epoch": 0.2552083333333333,
      "grad_norm": 0.1468856057426848,
      "learning_rate": 9.018241671106134e-05,
      "loss": 2.3889,
      "step": 98
    },
    {
      "epoch": 0.2578125,
      "grad_norm": 0.1478488710337695,
      "learning_rate": 8.993895607877418e-05,
      "loss": 2.3595,
      "step": 99
    },
    {
      "epoch": 0.2604166666666667,
      "grad_norm": 0.175359520279898,
      "learning_rate": 8.969289286577408e-05,
      "loss": 2.553,
      "step": 100
    },
    {
      "epoch": 0.2630208333333333,
      "grad_norm": 0.14503182442700818,
      "learning_rate": 8.9444245401154e-05,
      "loss": 2.3573,
      "step": 101
    },
    {
      "epoch": 0.265625,
      "grad_norm": 0.14177870868430487,
      "learning_rate": 8.919303220650606e-05,
      "loss": 2.3274,
      "step": 102
    },
    {
      "epoch": 0.2682291666666667,
      "grad_norm": 0.1779007031093982,
      "learning_rate": 8.893927199454207e-05,
      "loss": 2.2008,
      "step": 103
    },
    {
      "epoch": 0.2708333333333333,
      "grad_norm": 0.14578869126284813,
      "learning_rate": 8.868298366769954e-05,
      "loss": 2.444,
      "step": 104
    },
    {
      "epoch": 0.2734375,
      "grad_norm": 0.14046914929392473,
      "learning_rate": 8.842418631673365e-05,
      "loss": 2.2844,
      "step": 105
    },
    {
      "epoch": 0.2760416666666667,
      "grad_norm": 0.13720708755401478,
      "learning_rate": 8.816289921929516e-05,
      "loss": 2.5047,
      "step": 106
    },
    {
      "epoch": 0.2786458333333333,
      "grad_norm": 0.13901967393067377,
      "learning_rate": 8.789914183849449e-05,
      "loss": 2.4602,
      "step": 107
    },
    {
      "epoch": 0.28125,
      "grad_norm": 0.14165507438668587,
      "learning_rate": 8.763293382145195e-05,
      "loss": 2.3135,
      "step": 108
    },
    {
      "epoch": 0.2838541666666667,
      "grad_norm": 0.1588574639328753,
      "learning_rate": 8.73642949978341e-05,
      "loss": 2.3731,
      "step": 109
    },
    {
      "epoch": 0.2864583333333333,
      "grad_norm": 0.1905241886636867,
      "learning_rate": 8.709324537837684e-05,
      "loss": 2.4152,
      "step": 110
    },
    {
      "epoch": 0.2890625,
      "grad_norm": 0.16919809232424363,
      "learning_rate": 8.681980515339464e-05,
      "loss": 2.2948,
      "step": 111
    },
    {
      "epoch": 0.2916666666666667,
      "grad_norm": 0.15114015395920236,
      "learning_rate": 8.654399469127673e-05,
      "loss": 2.4309,
      "step": 112
    },
    {
      "epoch": 0.2942708333333333,
      "grad_norm": 0.13890547261939354,
      "learning_rate": 8.626583453696976e-05,
      "loss": 2.4866,
      "step": 113
    },
    {
      "epoch": 0.296875,
      "grad_norm": 0.1490334985258922,
      "learning_rate": 8.598534541044747e-05,
      "loss": 2.4748,
      "step": 114
    },
    {
      "epoch": 0.2994791666666667,
      "grad_norm": 0.15113023404371403,
      "learning_rate": 8.570254820516728e-05,
      "loss": 2.3583,
      "step": 115
    },
    {
      "epoch": 0.3020833333333333,
      "grad_norm": 0.14358757342098682,
      "learning_rate": 8.541746398651395e-05,
      "loss": 2.4005,
      "step": 116
    },
    {
      "epoch": 0.3046875,
      "grad_norm": 0.1494340804949735,
      "learning_rate": 8.513011399023036e-05,
      "loss": 2.3249,
      "step": 117
    },
    {
      "epoch": 0.3046875,
      "eval_loss": 2.4119415283203125,
      "eval_runtime": 65.4329,
      "eval_samples_per_second": 1.223,
      "eval_steps_per_second": 0.153,
      "step": 117
    },
    {
      "epoch": 0.3072916666666667,
      "grad_norm": 0.14857253164474957,
      "learning_rate": 8.484051962083579e-05,
      "loss": 2.3431,
      "step": 118
    },
    {
      "epoch": 0.3098958333333333,
      "grad_norm": 0.14191988741229736,
      "learning_rate": 8.454870245003141e-05,
      "loss": 2.3861,
      "step": 119
    },
    {
      "epoch": 0.3125,
      "grad_norm": 0.1516024307266096,
      "learning_rate": 8.425468421509349e-05,
      "loss": 2.411,
      "step": 120
    },
    {
      "epoch": 0.3151041666666667,
      "grad_norm": 0.15002982823803926,
      "learning_rate": 8.395848681725416e-05,
      "loss": 2.49,
      "step": 121
    },
    {
      "epoch": 0.3177083333333333,
      "grad_norm": 0.14904411125984457,
      "learning_rate": 8.366013232007002e-05,
      "loss": 2.2998,
      "step": 122
    },
    {
      "epoch": 0.3203125,
      "grad_norm": 0.15513959182528284,
      "learning_rate": 8.335964294777862e-05,
      "loss": 2.4748,
      "step": 123
    },
    {
      "epoch": 0.3229166666666667,
      "grad_norm": 0.16028597088352084,
      "learning_rate": 8.305704108364301e-05,
      "loss": 2.5415,
      "step": 124
    },
    {
      "epoch": 0.3255208333333333,
      "grad_norm": 0.1382845005964039,
      "learning_rate": 8.275234926828446e-05,
      "loss": 2.3863,
      "step": 125
    },
    {
      "epoch": 0.328125,
      "grad_norm": 0.13146327268638525,
      "learning_rate": 8.244559019800328e-05,
      "loss": 2.2718,
      "step": 126
    },
    {
      "epoch": 0.3307291666666667,
      "grad_norm": 0.14918540441469405,
      "learning_rate": 8.213678672308841e-05,
      "loss": 2.29,
      "step": 127
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 0.14572964949235084,
      "learning_rate": 8.182596184611514e-05,
      "loss": 2.3865,
      "step": 128
    },
    {
      "epoch": 0.3359375,
      "grad_norm": 0.1467709610422986,
      "learning_rate": 8.151313872023172e-05,
      "loss": 2.3566,
      "step": 129
    },
    {
      "epoch": 0.3385416666666667,
      "grad_norm": 0.1696428030171741,
      "learning_rate": 8.119834064743469e-05,
      "loss": 2.4145,
      "step": 130
    },
    {
      "epoch": 0.3411458333333333,
      "grad_norm": 0.1567212274267596,
      "learning_rate": 8.088159107683314e-05,
      "loss": 2.3996,
      "step": 131
    },
    {
      "epoch": 0.34375,
      "grad_norm": 0.16059171688564705,
      "learning_rate": 8.056291360290201e-05,
      "loss": 2.4796,
      "step": 132
    },
    {
      "epoch": 0.3463541666666667,
      "grad_norm": 0.17528470844982455,
      "learning_rate": 8.024233196372453e-05,
      "loss": 2.3711,
      "step": 133
    },
    {
      "epoch": 0.3489583333333333,
      "grad_norm": 0.15733889602776618,
      "learning_rate": 7.9919870039224e-05,
      "loss": 2.2677,
      "step": 134
    },
    {
      "epoch": 0.3515625,
      "grad_norm": 0.14873870088112393,
      "learning_rate": 7.959555184938495e-05,
      "loss": 2.4515,
      "step": 135
    },
    {
      "epoch": 0.3541666666666667,
      "grad_norm": 0.15417965735327782,
      "learning_rate": 7.926940155246397e-05,
      "loss": 2.4285,
      "step": 136
    },
    {
      "epoch": 0.3567708333333333,
      "grad_norm": 0.1431617281765595,
      "learning_rate": 7.894144344319014e-05,
      "loss": 2.2096,
      "step": 137
    },
    {
      "epoch": 0.359375,
      "grad_norm": 0.1582812354896834,
      "learning_rate": 7.861170195095537e-05,
      "loss": 2.5397,
      "step": 138
    },
    {
      "epoch": 0.3619791666666667,
      "grad_norm": 0.17372786439002758,
      "learning_rate": 7.828020163799455e-05,
      "loss": 2.4293,
      "step": 139
    },
    {
      "epoch": 0.3645833333333333,
      "grad_norm": 0.12985922414383083,
      "learning_rate": 7.794696719755612e-05,
      "loss": 2.3064,
      "step": 140
    },
    {
      "epoch": 0.3671875,
      "grad_norm": 0.14444453543114258,
      "learning_rate": 7.761202345206249e-05,
      "loss": 2.4924,
      "step": 141
    },
    {
      "epoch": 0.3697916666666667,
      "grad_norm": 0.14361378320872797,
      "learning_rate": 7.727539535126118e-05,
      "loss": 2.485,
      "step": 142
    },
    {
      "epoch": 0.3723958333333333,
      "grad_norm": 0.14506765397834698,
      "learning_rate": 7.69371079703662e-05,
      "loss": 2.4081,
      "step": 143
    },
    {
      "epoch": 0.375,
      "grad_norm": 0.15082242594669004,
      "learning_rate": 7.65971865081904e-05,
      "loss": 2.4042,
      "step": 144
    },
    {
      "epoch": 0.3776041666666667,
      "grad_norm": 0.14811116555183387,
      "learning_rate": 7.625565628526818e-05,
      "loss": 2.5335,
      "step": 145
    },
    {
      "epoch": 0.3802083333333333,
      "grad_norm": 0.14726265807121236,
      "learning_rate": 7.591254274196959e-05,
      "loss": 2.2424,
      "step": 146
    },
    {
      "epoch": 0.3828125,
      "grad_norm": 0.1582839530592079,
      "learning_rate": 7.556787143660521e-05,
      "loss": 2.3499,
      "step": 147
    },
    {
      "epoch": 0.3854166666666667,
      "grad_norm": 0.15670436913091046,
      "learning_rate": 7.522166804352226e-05,
      "loss": 2.4295,
      "step": 148
    },
    {
      "epoch": 0.3880208333333333,
      "grad_norm": 0.14730612216140554,
      "learning_rate": 7.487395835119231e-05,
      "loss": 2.3101,
      "step": 149
    },
    {
      "epoch": 0.390625,
      "grad_norm": 0.15120580103738476,
      "learning_rate": 7.452476826029011e-05,
      "loss": 2.4888,
      "step": 150
    },
    {
      "epoch": 0.3932291666666667,
      "grad_norm": 0.15081791485599214,
      "learning_rate": 7.417412378176446e-05,
      "loss": 2.3946,
      "step": 151
    },
    {
      "epoch": 0.3958333333333333,
      "grad_norm": 0.13244742107918075,
      "learning_rate": 7.382205103490043e-05,
      "loss": 2.2704,
      "step": 152
    },
    {
      "epoch": 0.3984375,
      "grad_norm": 0.14571632653572605,
      "learning_rate": 7.346857624537407e-05,
      "loss": 2.4644,
      "step": 153
    },
    {
      "epoch": 0.4010416666666667,
      "grad_norm": 0.15252862923031207,
      "learning_rate": 7.311372574329854e-05,
      "loss": 2.554,
      "step": 154
    },
    {
      "epoch": 0.4036458333333333,
      "grad_norm": 0.14927980854491554,
      "learning_rate": 7.275752596126308e-05,
      "loss": 2.3804,
      "step": 155
    },
    {
      "epoch": 0.40625,
      "grad_norm": 0.1406590744757482,
      "learning_rate": 7.240000343236385e-05,
      "loss": 2.2504,
      "step": 156
    },
    {
      "epoch": 0.40625,
      "eval_loss": 2.408146619796753,
      "eval_runtime": 65.4856,
      "eval_samples_per_second": 1.222,
      "eval_steps_per_second": 0.153,
      "step": 156
    },
    {
      "epoch": 0.4088541666666667,
      "grad_norm": 0.13753178031418029,
      "learning_rate": 7.204118478822766e-05,
      "loss": 2.3063,
      "step": 157
    },
    {
      "epoch": 0.4114583333333333,
      "grad_norm": 0.13719355865812055,
      "learning_rate": 7.168109675702806e-05,
      "loss": 2.1826,
      "step": 158
    },
    {
      "epoch": 0.4140625,
      "grad_norm": 0.14916916897375052,
      "learning_rate": 7.131976616149445e-05,
      "loss": 2.3635,
      "step": 159
    },
    {
      "epoch": 0.4166666666666667,
      "grad_norm": 0.1544520288957668,
      "learning_rate": 7.095721991691411e-05,
      "loss": 2.6067,
      "step": 160
    },
    {
      "epoch": 0.4192708333333333,
      "grad_norm": 0.1466811081471814,
      "learning_rate": 7.05934850291272e-05,
      "loss": 2.2636,
      "step": 161
    },
    {
      "epoch": 0.421875,
      "grad_norm": 0.14245354657490636,
      "learning_rate": 7.022858859251517e-05,
      "loss": 2.3278,
      "step": 162
    },
    {
      "epoch": 0.4244791666666667,
      "grad_norm": 0.14310350882179557,
      "learning_rate": 6.986255778798253e-05,
      "loss": 2.2951,
      "step": 163
    },
    {
      "epoch": 0.4270833333333333,
      "grad_norm": 0.15028525220062852,
      "learning_rate": 6.949541988093208e-05,
      "loss": 2.3647,
      "step": 164
    },
    {
      "epoch": 0.4296875,
      "grad_norm": 0.13536590220383313,
      "learning_rate": 6.912720221923405e-05,
      "loss": 2.3635,
      "step": 165
    },
    {
      "epoch": 0.4322916666666667,
      "grad_norm": 0.14965390120450028,
      "learning_rate": 6.875793223118888e-05,
      "loss": 2.3191,
      "step": 166
    },
    {
      "epoch": 0.4348958333333333,
      "grad_norm": 0.15696846625410496,
      "learning_rate": 6.838763742348415e-05,
      "loss": 2.4342,
      "step": 167
    },
    {
      "epoch": 0.4375,
      "grad_norm": 0.16266249005182767,
      "learning_rate": 6.801634537914555e-05,
      "loss": 2.4487,
      "step": 168
    },
    {
      "epoch": 0.4401041666666667,
      "grad_norm": 0.15099304315918932,
      "learning_rate": 6.764408375548237e-05,
      "loss": 2.4774,
      "step": 169
    },
    {
      "epoch": 0.4427083333333333,
      "grad_norm": 0.1437502300792439,
      "learning_rate": 6.727088028202723e-05,
      "loss": 2.4369,
      "step": 170
    },
    {
      "epoch": 0.4453125,
      "grad_norm": 0.13339677555582233,
      "learning_rate": 6.68967627584705e-05,
      "loss": 2.3039,
      "step": 171
    },
    {
      "epoch": 0.4479166666666667,
      "grad_norm": 0.1416720700803862,
      "learning_rate": 6.652175905258963e-05,
      "loss": 2.2844,
      "step": 172
    },
    {
      "epoch": 0.4505208333333333,
      "grad_norm": 0.14502909820943563,
      "learning_rate": 6.614589709817317e-05,
      "loss": 2.4192,
      "step": 173
    },
    {
      "epoch": 0.453125,
      "grad_norm": 0.14499193575249233,
      "learning_rate": 6.576920489294011e-05,
      "loss": 2.3057,
      "step": 174
    },
    {
      "epoch": 0.4557291666666667,
      "grad_norm": 0.15038851044223103,
      "learning_rate": 6.539171049645426e-05,
      "loss": 2.4237,
      "step": 175
    },
    {
      "epoch": 0.4583333333333333,
      "grad_norm": 0.14553942382157312,
      "learning_rate": 6.501344202803414e-05,
      "loss": 2.4642,
      "step": 176
    },
    {
      "epoch": 0.4609375,
      "grad_norm": 0.13896484063095102,
      "learning_rate": 6.463442766465847e-05,
      "loss": 2.4132,
      "step": 177
    },
    {
      "epoch": 0.4635416666666667,
      "grad_norm": 0.14978646915320232,
      "learning_rate": 6.425469563886715e-05,
      "loss": 2.3692,
      "step": 178
    },
    {
      "epoch": 0.4661458333333333,
      "grad_norm": 0.13794418734075914,
      "learning_rate": 6.387427423665829e-05,
      "loss": 2.3524,
      "step": 179
    },
    {
      "epoch": 0.46875,
      "grad_norm": 0.1467143240226456,
      "learning_rate": 6.349319179538126e-05,
      "loss": 2.359,
      "step": 180
    },
    {
      "epoch": 0.4713541666666667,
      "grad_norm": 0.14326937356779176,
      "learning_rate": 6.311147670162576e-05,
      "loss": 2.5547,
      "step": 181
    },
    {
      "epoch": 0.4739583333333333,
      "grad_norm": 0.141220074236358,
      "learning_rate": 6.272915738910743e-05,
      "loss": 2.4299,
      "step": 182
    },
    {
      "epoch": 0.4765625,
      "grad_norm": 0.148901889828269,
      "learning_rate": 6.234626233654973e-05,
      "loss": 2.5296,
      "step": 183
    },
    {
      "epoch": 0.4791666666666667,
      "grad_norm": 0.15238094995154358,
      "learning_rate": 6.196282006556266e-05,
      "loss": 2.2642,
      "step": 184
    },
    {
      "epoch": 0.4817708333333333,
      "grad_norm": 0.14562487124583376,
      "learning_rate": 6.157885913851818e-05,
      "loss": 2.2485,
      "step": 185
    },
    {
      "epoch": 0.484375,
      "grad_norm": 0.1557482340822006,
      "learning_rate": 6.119440815642258e-05,
      "loss": 2.5018,
      "step": 186
    },
    {
      "epoch": 0.4869791666666667,
      "grad_norm": 0.15557904288598579,
      "learning_rate": 6.080949575678606e-05,
      "loss": 2.3121,
      "step": 187
    },
    {
      "epoch": 0.4895833333333333,
      "grad_norm": 0.132624820989215,
      "learning_rate": 6.042415061148954e-05,
      "loss": 2.4456,
      "step": 188
    },
    {
      "epoch": 0.4921875,
      "grad_norm": 0.14540093060758663,
      "learning_rate": 6.003840142464886e-05,
      "loss": 2.4166,
      "step": 189
    },
    {
      "epoch": 0.4947916666666667,
      "grad_norm": 0.13698817848218522,
      "learning_rate": 5.9652276930476656e-05,
      "loss": 2.4569,
      "step": 190
    },
    {
      "epoch": 0.4973958333333333,
      "grad_norm": 0.13530133541335626,
      "learning_rate": 5.926580589114201e-05,
      "loss": 2.521,
      "step": 191
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.13985911131684764,
      "learning_rate": 5.8879017094627874e-05,
      "loss": 2.4112,
      "step": 192
    },
    {
      "epoch": 0.5026041666666666,
      "grad_norm": 0.13203633038116566,
      "learning_rate": 5.849193935258679e-05,
      "loss": 2.1839,
      "step": 193
    },
    {
      "epoch": 0.5052083333333334,
      "grad_norm": 0.16221179568567085,
      "learning_rate": 5.810460149819462e-05,
      "loss": 2.2793,
      "step": 194
    },
    {
      "epoch": 0.5078125,
      "grad_norm": 0.15402021583934355,
      "learning_rate": 5.771703238400288e-05,
      "loss": 2.3905,
      "step": 195
    },
    {
      "epoch": 0.5078125,
      "eval_loss": 2.4029593467712402,
      "eval_runtime": 65.2935,
      "eval_samples_per_second": 1.225,
      "eval_steps_per_second": 0.153,
      "step": 195
    }
  ],
  "logging_steps": 1,
  "max_steps": 384,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 39,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 64462761492480.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}