{
  "best_metric": 0.7339931726455688,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.11228070175438597,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0005614035087719298,
      "grad_norm": 0.4464113712310791,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 1.0783,
      "step": 1
    },
    {
      "epoch": 0.0005614035087719298,
      "eval_loss": 1.550947904586792,
      "eval_runtime": 122.3583,
      "eval_samples_per_second": 24.518,
      "eval_steps_per_second": 12.259,
      "step": 1
    },
    {
      "epoch": 0.0011228070175438596,
      "grad_norm": 0.4096737205982208,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.7808,
      "step": 2
    },
    {
      "epoch": 0.0016842105263157896,
      "grad_norm": 0.3668423891067505,
      "learning_rate": 1e-05,
      "loss": 0.7447,
      "step": 3
    },
    {
      "epoch": 0.002245614035087719,
      "grad_norm": 0.39215508103370667,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 0.8963,
      "step": 4
    },
    {
      "epoch": 0.002807017543859649,
      "grad_norm": 0.43132394552230835,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.8422,
      "step": 5
    },
    {
      "epoch": 0.003368421052631579,
      "grad_norm": 0.41981956362724304,
      "learning_rate": 2e-05,
      "loss": 0.9042,
      "step": 6
    },
    {
      "epoch": 0.003929824561403509,
      "grad_norm": 0.3964403569698334,
      "learning_rate": 2.3333333333333336e-05,
      "loss": 0.9516,
      "step": 7
    },
    {
      "epoch": 0.004491228070175438,
      "grad_norm": 0.33498990535736084,
      "learning_rate": 2.6666666666666667e-05,
      "loss": 0.779,
      "step": 8
    },
    {
      "epoch": 0.0050526315789473685,
      "grad_norm": 0.29876577854156494,
      "learning_rate": 3e-05,
      "loss": 0.7478,
      "step": 9
    },
    {
      "epoch": 0.005614035087719298,
      "grad_norm": 0.29719725251197815,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 0.8966,
      "step": 10
    },
    {
      "epoch": 0.006175438596491228,
      "grad_norm": 0.24591504037380219,
      "learning_rate": 3.6666666666666666e-05,
      "loss": 0.9361,
      "step": 11
    },
    {
      "epoch": 0.006736842105263158,
      "grad_norm": 0.28394192457199097,
      "learning_rate": 4e-05,
      "loss": 0.7647,
      "step": 12
    },
    {
      "epoch": 0.007298245614035088,
      "grad_norm": 0.38288387656211853,
      "learning_rate": 4.3333333333333334e-05,
      "loss": 0.7759,
      "step": 13
    },
    {
      "epoch": 0.007859649122807018,
      "grad_norm": 0.37495705485343933,
      "learning_rate": 4.666666666666667e-05,
      "loss": 0.816,
      "step": 14
    },
    {
      "epoch": 0.008421052631578947,
      "grad_norm": 0.40812909603118896,
      "learning_rate": 5e-05,
      "loss": 0.8382,
      "step": 15
    },
    {
      "epoch": 0.008982456140350877,
      "grad_norm": 0.38002440333366394,
      "learning_rate": 5.333333333333333e-05,
      "loss": 0.8044,
      "step": 16
    },
    {
      "epoch": 0.009543859649122808,
      "grad_norm": 0.33160534501075745,
      "learning_rate": 5.666666666666667e-05,
      "loss": 0.823,
      "step": 17
    },
    {
      "epoch": 0.010105263157894737,
      "grad_norm": 0.41194266080856323,
      "learning_rate": 6e-05,
      "loss": 0.7865,
      "step": 18
    },
    {
      "epoch": 0.010666666666666666,
      "grad_norm": 0.35672563314437866,
      "learning_rate": 6.333333333333333e-05,
      "loss": 0.8629,
      "step": 19
    },
    {
      "epoch": 0.011228070175438596,
      "grad_norm": 0.3618670701980591,
      "learning_rate": 6.666666666666667e-05,
      "loss": 0.8547,
      "step": 20
    },
    {
      "epoch": 0.011789473684210527,
      "grad_norm": 0.3907402753829956,
      "learning_rate": 7e-05,
      "loss": 0.8765,
      "step": 21
    },
    {
      "epoch": 0.012350877192982456,
      "grad_norm": 0.40214502811431885,
      "learning_rate": 7.333333333333333e-05,
      "loss": 0.9029,
      "step": 22
    },
    {
      "epoch": 0.012912280701754385,
      "grad_norm": 0.4234139025211334,
      "learning_rate": 7.666666666666667e-05,
      "loss": 1.0402,
      "step": 23
    },
    {
      "epoch": 0.013473684210526317,
      "grad_norm": 0.4222796559333801,
      "learning_rate": 8e-05,
      "loss": 1.0914,
      "step": 24
    },
    {
      "epoch": 0.014035087719298246,
      "grad_norm": 0.49873098731040955,
      "learning_rate": 8.333333333333334e-05,
      "loss": 1.144,
      "step": 25
    },
    {
      "epoch": 0.014596491228070175,
      "grad_norm": 0.5681342482566833,
      "learning_rate": 8.666666666666667e-05,
      "loss": 1.1029,
      "step": 26
    },
    {
      "epoch": 0.015157894736842105,
      "grad_norm": 0.5003555417060852,
      "learning_rate": 9e-05,
      "loss": 1.1661,
      "step": 27
    },
    {
      "epoch": 0.015719298245614036,
      "grad_norm": 0.5160950422286987,
      "learning_rate": 9.333333333333334e-05,
      "loss": 1.1766,
      "step": 28
    },
    {
      "epoch": 0.016280701754385965,
      "grad_norm": 0.5502928495407104,
      "learning_rate": 9.666666666666667e-05,
      "loss": 1.1296,
      "step": 29
    },
    {
      "epoch": 0.016842105263157894,
      "grad_norm": 0.5528545379638672,
      "learning_rate": 0.0001,
      "loss": 1.0385,
      "step": 30
    },
    {
      "epoch": 0.017403508771929824,
      "grad_norm": 0.6849154233932495,
      "learning_rate": 9.999146252290264e-05,
      "loss": 0.8271,
      "step": 31
    },
    {
      "epoch": 0.017964912280701753,
      "grad_norm": 0.692389965057373,
      "learning_rate": 9.996585300715116e-05,
      "loss": 0.8881,
      "step": 32
    },
    {
      "epoch": 0.018526315789473686,
      "grad_norm": 0.9055421352386475,
      "learning_rate": 9.99231801983717e-05,
      "loss": 0.8995,
      "step": 33
    },
    {
      "epoch": 0.019087719298245615,
      "grad_norm": 0.861546516418457,
      "learning_rate": 9.986345866928941e-05,
      "loss": 0.6501,
      "step": 34
    },
    {
      "epoch": 0.019649122807017545,
      "grad_norm": 1.2089025974273682,
      "learning_rate": 9.978670881475172e-05,
      "loss": 0.6468,
      "step": 35
    },
    {
      "epoch": 0.020210526315789474,
      "grad_norm": 1.254661202430725,
      "learning_rate": 9.96929568447637e-05,
      "loss": 0.859,
      "step": 36
    },
    {
      "epoch": 0.020771929824561403,
      "grad_norm": 0.9925673604011536,
      "learning_rate": 9.958223477553714e-05,
      "loss": 0.7599,
      "step": 37
    },
    {
      "epoch": 0.021333333333333333,
      "grad_norm": 0.929550051689148,
      "learning_rate": 9.94545804185573e-05,
      "loss": 0.8544,
      "step": 38
    },
    {
      "epoch": 0.021894736842105262,
      "grad_norm": 1.0358238220214844,
      "learning_rate": 9.931003736767013e-05,
      "loss": 0.9166,
      "step": 39
    },
    {
      "epoch": 0.02245614035087719,
      "grad_norm": 1.7126189470291138,
      "learning_rate": 9.91486549841951e-05,
      "loss": 1.4532,
      "step": 40
    },
    {
      "epoch": 0.023017543859649124,
      "grad_norm": 2.377314567565918,
      "learning_rate": 9.89704883800683e-05,
      "loss": 2.2551,
      "step": 41
    },
    {
      "epoch": 0.023578947368421053,
      "grad_norm": 1.5144628286361694,
      "learning_rate": 9.877559839902184e-05,
      "loss": 1.5257,
      "step": 42
    },
    {
      "epoch": 0.024140350877192983,
      "grad_norm": 1.193682074546814,
      "learning_rate": 9.85640515958057e-05,
      "loss": 1.1618,
      "step": 43
    },
    {
      "epoch": 0.024701754385964912,
      "grad_norm": 1.2293355464935303,
      "learning_rate": 9.833592021345937e-05,
      "loss": 1.1371,
      "step": 44
    },
    {
      "epoch": 0.02526315789473684,
      "grad_norm": 1.7391295433044434,
      "learning_rate": 9.809128215864097e-05,
      "loss": 0.9594,
      "step": 45
    },
    {
      "epoch": 0.02582456140350877,
      "grad_norm": 1.3019633293151855,
      "learning_rate": 9.783022097502204e-05,
      "loss": 0.9934,
      "step": 46
    },
    {
      "epoch": 0.0263859649122807,
      "grad_norm": 1.7207362651824951,
      "learning_rate": 9.755282581475769e-05,
      "loss": 1.0664,
      "step": 47
    },
    {
      "epoch": 0.026947368421052633,
      "grad_norm": 1.6913920640945435,
      "learning_rate": 9.725919140804099e-05,
      "loss": 1.4405,
      "step": 48
    },
    {
      "epoch": 0.027508771929824562,
      "grad_norm": 2.155933380126953,
      "learning_rate": 9.694941803075283e-05,
      "loss": 1.8355,
      "step": 49
    },
    {
      "epoch": 0.028070175438596492,
      "grad_norm": 3.120522975921631,
      "learning_rate": 9.662361147021779e-05,
      "loss": 2.4255,
      "step": 50
    },
    {
      "epoch": 0.028070175438596492,
      "eval_loss": 0.9342920184135437,
      "eval_runtime": 122.4157,
      "eval_samples_per_second": 24.507,
      "eval_steps_per_second": 12.253,
      "step": 50
    },
    {
      "epoch": 0.02863157894736842,
      "grad_norm": 0.5891591310501099,
      "learning_rate": 9.628188298907782e-05,
      "loss": 1.0629,
      "step": 51
    },
    {
      "epoch": 0.02919298245614035,
      "grad_norm": 0.39871206879615784,
      "learning_rate": 9.592434928729616e-05,
      "loss": 0.7556,
      "step": 52
    },
    {
      "epoch": 0.02975438596491228,
      "grad_norm": 0.37089481949806213,
      "learning_rate": 9.555113246230442e-05,
      "loss": 0.7065,
      "step": 53
    },
    {
      "epoch": 0.03031578947368421,
      "grad_norm": 0.38295838236808777,
      "learning_rate": 9.516235996730645e-05,
      "loss": 0.7221,
      "step": 54
    },
    {
      "epoch": 0.030877192982456142,
      "grad_norm": 0.29920482635498047,
      "learning_rate": 9.475816456775313e-05,
      "loss": 0.7926,
      "step": 55
    },
    {
      "epoch": 0.03143859649122807,
      "grad_norm": 0.23950375616550446,
      "learning_rate": 9.43386842960031e-05,
      "loss": 0.793,
      "step": 56
    },
    {
      "epoch": 0.032,
      "grad_norm": 0.27886685729026794,
      "learning_rate": 9.39040624041849e-05,
      "loss": 0.8602,
      "step": 57
    },
    {
      "epoch": 0.03256140350877193,
      "grad_norm": 0.317699134349823,
      "learning_rate": 9.345444731527642e-05,
      "loss": 1.0003,
      "step": 58
    },
    {
      "epoch": 0.03312280701754386,
      "grad_norm": 0.22910217940807343,
      "learning_rate": 9.298999257241863e-05,
      "loss": 0.8108,
      "step": 59
    },
    {
      "epoch": 0.03368421052631579,
      "grad_norm": 0.26130184531211853,
      "learning_rate": 9.251085678648072e-05,
      "loss": 0.9345,
      "step": 60
    },
    {
      "epoch": 0.03424561403508772,
      "grad_norm": 0.2834413945674896,
      "learning_rate": 9.201720358189464e-05,
      "loss": 0.885,
      "step": 61
    },
    {
      "epoch": 0.03480701754385965,
      "grad_norm": 0.2586613595485687,
      "learning_rate": 9.150920154077754e-05,
      "loss": 0.7197,
      "step": 62
    },
    {
      "epoch": 0.03536842105263158,
      "grad_norm": 0.2519567906856537,
      "learning_rate": 9.098702414536107e-05,
      "loss": 0.6892,
      "step": 63
    },
    {
      "epoch": 0.035929824561403506,
      "grad_norm": 0.29119372367858887,
      "learning_rate": 9.045084971874738e-05,
      "loss": 0.7979,
      "step": 64
    },
    {
      "epoch": 0.036491228070175435,
      "grad_norm": 0.26617690920829773,
      "learning_rate": 8.9900861364012e-05,
      "loss": 0.7058,
      "step": 65
    },
    {
      "epoch": 0.03705263157894737,
      "grad_norm": 0.3442516326904297,
      "learning_rate": 8.933724690167417e-05,
      "loss": 0.8994,
      "step": 66
    },
    {
      "epoch": 0.0376140350877193,
      "grad_norm": 0.2873605787754059,
      "learning_rate": 8.876019880555649e-05,
      "loss": 0.7479,
      "step": 67
    },
    {
      "epoch": 0.03817543859649123,
      "grad_norm": 0.29819053411483765,
      "learning_rate": 8.816991413705516e-05,
      "loss": 0.7455,
      "step": 68
    },
    {
      "epoch": 0.03873684210526316,
      "grad_norm": 0.3056037425994873,
      "learning_rate": 8.756659447784368e-05,
      "loss": 0.8467,
      "step": 69
    },
    {
      "epoch": 0.03929824561403509,
      "grad_norm": 0.3602258861064911,
      "learning_rate": 8.695044586103296e-05,
      "loss": 0.8658,
      "step": 70
    },
    {
      "epoch": 0.03985964912280702,
      "grad_norm": 0.40999269485473633,
      "learning_rate": 8.632167870081121e-05,
      "loss": 0.7375,
      "step": 71
    },
    {
      "epoch": 0.04042105263157895,
      "grad_norm": 0.3525683879852295,
      "learning_rate": 8.568050772058762e-05,
      "loss": 0.8655,
      "step": 72
    },
    {
      "epoch": 0.04098245614035088,
      "grad_norm": 0.4073101282119751,
      "learning_rate": 8.502715187966455e-05,
      "loss": 0.9714,
      "step": 73
    },
    {
      "epoch": 0.04154385964912281,
      "grad_norm": 0.4740641713142395,
      "learning_rate": 8.436183429846313e-05,
      "loss": 0.7718,
      "step": 74
    },
    {
      "epoch": 0.042105263157894736,
      "grad_norm": 0.4567849934101105,
      "learning_rate": 8.368478218232787e-05,
      "loss": 0.899,
      "step": 75
    },
    {
      "epoch": 0.042666666666666665,
      "grad_norm": 0.439581036567688,
      "learning_rate": 8.299622674393614e-05,
      "loss": 0.9626,
      "step": 76
    },
    {
      "epoch": 0.043228070175438595,
      "grad_norm": 0.537564218044281,
      "learning_rate": 8.229640312433937e-05,
      "loss": 0.8511,
      "step": 77
    },
    {
      "epoch": 0.043789473684210524,
      "grad_norm": 0.5121639370918274,
      "learning_rate": 8.158555031266254e-05,
      "loss": 0.9197,
      "step": 78
    },
    {
      "epoch": 0.04435087719298245,
      "grad_norm": 0.5545501112937927,
      "learning_rate": 8.086391106448965e-05,
      "loss": 0.8807,
      "step": 79
    },
    {
      "epoch": 0.04491228070175438,
      "grad_norm": 0.5333970189094543,
      "learning_rate": 8.013173181896283e-05,
      "loss": 0.6605,
      "step": 80
    },
    {
      "epoch": 0.04547368421052632,
      "grad_norm": 0.5599662065505981,
      "learning_rate": 7.938926261462366e-05,
      "loss": 0.5374,
      "step": 81
    },
    {
      "epoch": 0.04603508771929825,
      "grad_norm": 0.599372148513794,
      "learning_rate": 7.863675700402526e-05,
      "loss": 0.7062,
      "step": 82
    },
    {
      "epoch": 0.04659649122807018,
      "grad_norm": 0.6970666646957397,
      "learning_rate": 7.787447196714427e-05,
      "loss": 0.5079,
      "step": 83
    },
    {
      "epoch": 0.04715789473684211,
      "grad_norm": 0.5597441792488098,
      "learning_rate": 7.710266782362247e-05,
      "loss": 0.6036,
      "step": 84
    },
    {
      "epoch": 0.047719298245614036,
      "grad_norm": 0.6313605904579163,
      "learning_rate": 7.63216081438678e-05,
      "loss": 0.4645,
      "step": 85
    },
    {
      "epoch": 0.048280701754385966,
      "grad_norm": 0.8942999839782715,
      "learning_rate": 7.553155965904535e-05,
      "loss": 0.5454,
      "step": 86
    },
    {
      "epoch": 0.048842105263157895,
      "grad_norm": 0.6143205165863037,
      "learning_rate": 7.473279216998895e-05,
      "loss": 0.5586,
      "step": 87
    },
    {
      "epoch": 0.049403508771929824,
      "grad_norm": 1.0239964723587036,
      "learning_rate": 7.392557845506432e-05,
      "loss": 0.6921,
      "step": 88
    },
    {
      "epoch": 0.049964912280701754,
      "grad_norm": 1.09725022315979,
      "learning_rate": 7.311019417701566e-05,
      "loss": 1.0064,
      "step": 89
    },
    {
      "epoch": 0.05052631578947368,
      "grad_norm": 1.4291660785675049,
      "learning_rate": 7.228691778882693e-05,
      "loss": 0.9842,
      "step": 90
    },
    {
      "epoch": 0.05108771929824561,
      "grad_norm": 1.2897967100143433,
      "learning_rate": 7.145603043863045e-05,
      "loss": 0.9226,
      "step": 91
    },
    {
      "epoch": 0.05164912280701754,
      "grad_norm": 0.8228785991668701,
      "learning_rate": 7.061781587369519e-05,
      "loss": 0.789,
      "step": 92
    },
    {
      "epoch": 0.05221052631578947,
      "grad_norm": 0.937737226486206,
      "learning_rate": 6.977256034352712e-05,
      "loss": 0.6699,
      "step": 93
    },
    {
      "epoch": 0.0527719298245614,
      "grad_norm": 1.0278384685516357,
      "learning_rate": 6.892055250211552e-05,
      "loss": 0.6023,
      "step": 94
    },
    {
      "epoch": 0.05333333333333334,
      "grad_norm": 0.9393802881240845,
      "learning_rate": 6.806208330935766e-05,
      "loss": 0.7863,
      "step": 95
    },
    {
      "epoch": 0.053894736842105266,
      "grad_norm": 1.0484689474105835,
      "learning_rate": 6.719744593169641e-05,
      "loss": 0.7728,
      "step": 96
    },
    {
      "epoch": 0.054456140350877195,
      "grad_norm": 1.1387441158294678,
      "learning_rate": 6.632693564200416e-05,
      "loss": 0.8077,
      "step": 97
    },
    {
      "epoch": 0.055017543859649125,
      "grad_norm": 1.6136704683303833,
      "learning_rate": 6.545084971874738e-05,
      "loss": 1.1051,
      "step": 98
    },
    {
      "epoch": 0.055578947368421054,
      "grad_norm": 1.555825114250183,
      "learning_rate": 6.456948734446624e-05,
      "loss": 1.1191,
      "step": 99
    },
    {
      "epoch": 0.056140350877192984,
      "grad_norm": 2.404278516769409,
      "learning_rate": 6.368314950360415e-05,
      "loss": 1.9483,
      "step": 100
    },
    {
      "epoch": 0.056140350877192984,
      "eval_loss": 0.8115013241767883,
      "eval_runtime": 122.6214,
      "eval_samples_per_second": 24.466,
      "eval_steps_per_second": 12.233,
      "step": 100
    },
    {
      "epoch": 0.05670175438596491,
      "grad_norm": 0.4372669458389282,
      "learning_rate": 6.279213887972179e-05,
      "loss": 1.0286,
      "step": 101
    },
    {
      "epoch": 0.05726315789473684,
      "grad_norm": 0.4397050738334656,
      "learning_rate": 6.189675975213094e-05,
      "loss": 0.8791,
      "step": 102
    },
    {
      "epoch": 0.05782456140350877,
      "grad_norm": 0.32385021448135376,
      "learning_rate": 6.099731789198344e-05,
      "loss": 0.6672,
      "step": 103
    },
    {
      "epoch": 0.0583859649122807,
      "grad_norm": 0.2907547950744629,
      "learning_rate": 6.009412045785051e-05,
      "loss": 0.6619,
      "step": 104
    },
    {
      "epoch": 0.05894736842105263,
      "grad_norm": 0.31022536754608154,
      "learning_rate": 5.918747589082853e-05,
      "loss": 0.6714,
      "step": 105
    },
    {
      "epoch": 0.05950877192982456,
      "grad_norm": 0.2706989347934723,
      "learning_rate": 5.82776938092065e-05,
      "loss": 0.7354,
      "step": 106
    },
    {
      "epoch": 0.06007017543859649,
      "grad_norm": 0.2617419362068176,
      "learning_rate": 5.736508490273188e-05,
      "loss": 0.7251,
      "step": 107
    },
    {
      "epoch": 0.06063157894736842,
      "grad_norm": 0.2399427741765976,
      "learning_rate": 5.644996082651017e-05,
      "loss": 0.7174,
      "step": 108
    },
    {
      "epoch": 0.06119298245614035,
      "grad_norm": 0.2220396101474762,
      "learning_rate": 5.553263409457504e-05,
      "loss": 0.6251,
      "step": 109
    },
    {
      "epoch": 0.061754385964912284,
      "grad_norm": 0.23411913216114044,
      "learning_rate": 5.4613417973165106e-05,
      "loss": 0.6905,
      "step": 110
    },
    {
      "epoch": 0.06231578947368421,
      "grad_norm": 0.2873174250125885,
      "learning_rate": 5.3692626373743706e-05,
      "loss": 0.6993,
      "step": 111
    },
    {
      "epoch": 0.06287719298245614,
      "grad_norm": 0.24679617583751678,
      "learning_rate": 5.27705737457985e-05,
      "loss": 0.7005,
      "step": 112
    },
    {
      "epoch": 0.06343859649122807,
      "grad_norm": 0.23201341927051544,
      "learning_rate": 5.184757496945726e-05,
      "loss": 0.6078,
      "step": 113
    },
    {
      "epoch": 0.064,
      "grad_norm": 0.2771409749984741,
      "learning_rate": 5.092394524795649e-05,
      "loss": 0.77,
      "step": 114
    },
    {
      "epoch": 0.06456140350877193,
      "grad_norm": 0.2589285373687744,
      "learning_rate": 5e-05,
      "loss": 0.6893,
      "step": 115
    },
    {
      "epoch": 0.06512280701754386,
      "grad_norm": 0.2741367816925049,
      "learning_rate": 4.907605475204352e-05,
      "loss": 0.7777,
      "step": 116
    },
    {
      "epoch": 0.06568421052631579,
      "grad_norm": 0.331800639629364,
      "learning_rate": 4.8152425030542766e-05,
      "loss": 0.7478,
      "step": 117
    },
    {
      "epoch": 0.06624561403508772,
      "grad_norm": 0.34980151057243347,
      "learning_rate": 4.72294262542015e-05,
      "loss": 0.6952,
      "step": 118
    },
    {
      "epoch": 0.06680701754385965,
      "grad_norm": 0.32478198409080505,
      "learning_rate": 4.6307373626256306e-05,
      "loss": 0.7286,
      "step": 119
    },
    {
      "epoch": 0.06736842105263158,
      "grad_norm": 0.36208805441856384,
      "learning_rate": 4.5386582026834906e-05,
      "loss": 0.8807,
      "step": 120
    },
    {
      "epoch": 0.0679298245614035,
      "grad_norm": 0.37494510412216187,
      "learning_rate": 4.446736590542497e-05,
      "loss": 0.7629,
      "step": 121
    },
    {
      "epoch": 0.06849122807017544,
      "grad_norm": 0.3828965723514557,
      "learning_rate": 4.3550039173489845e-05,
      "loss": 0.7838,
      "step": 122
    },
    {
      "epoch": 0.06905263157894737,
      "grad_norm": 0.464336097240448,
      "learning_rate": 4.2634915097268115e-05,
      "loss": 0.8454,
      "step": 123
    },
    {
      "epoch": 0.0696140350877193,
      "grad_norm": 0.414644718170166,
      "learning_rate": 4.1722306190793495e-05,
      "loss": 0.753,
      "step": 124
    },
    {
      "epoch": 0.07017543859649122,
      "grad_norm": 0.4704475402832031,
      "learning_rate": 4.0812524109171476e-05,
      "loss": 0.8551,
      "step": 125
    },
    {
      "epoch": 0.07073684210526315,
      "grad_norm": 0.5488113760948181,
      "learning_rate": 3.99058795421495e-05,
      "loss": 0.8103,
      "step": 126
    },
    {
      "epoch": 0.07129824561403508,
      "grad_norm": 0.5949589014053345,
      "learning_rate": 3.9002682108016585e-05,
      "loss": 0.676,
      "step": 127
    },
    {
      "epoch": 0.07185964912280701,
      "grad_norm": 0.7315034866333008,
      "learning_rate": 3.8103240247869075e-05,
      "loss": 0.899,
      "step": 128
    },
    {
      "epoch": 0.07242105263157894,
      "grad_norm": 0.5491634011268616,
      "learning_rate": 3.720786112027822e-05,
      "loss": 0.5835,
      "step": 129
    },
    {
      "epoch": 0.07298245614035087,
      "grad_norm": 0.5259478092193604,
      "learning_rate": 3.631685049639586e-05,
      "loss": 0.6361,
      "step": 130
    },
    {
      "epoch": 0.0735438596491228,
      "grad_norm": 0.5950855016708374,
      "learning_rate": 3.543051265553377e-05,
      "loss": 0.8745,
      "step": 131
    },
    {
      "epoch": 0.07410526315789474,
      "grad_norm": 0.5459845662117004,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 0.5167,
      "step": 132
    },
    {
      "epoch": 0.07466666666666667,
      "grad_norm": 0.5504123568534851,
      "learning_rate": 3.367306435799584e-05,
      "loss": 0.729,
      "step": 133
    },
    {
      "epoch": 0.0752280701754386,
      "grad_norm": 0.5301131010055542,
      "learning_rate": 3.2802554068303596e-05,
      "loss": 0.54,
      "step": 134
    },
    {
      "epoch": 0.07578947368421053,
      "grad_norm": 0.6610441207885742,
      "learning_rate": 3.1937916690642356e-05,
      "loss": 0.545,
      "step": 135
    },
    {
      "epoch": 0.07635087719298246,
      "grad_norm": 0.4902953803539276,
      "learning_rate": 3.107944749788449e-05,
      "loss": 0.5694,
      "step": 136
    },
    {
      "epoch": 0.07691228070175439,
      "grad_norm": 0.702129065990448,
      "learning_rate": 3.0227439656472877e-05,
      "loss": 0.4447,
      "step": 137
    },
    {
      "epoch": 0.07747368421052632,
      "grad_norm": 0.7584903836250305,
      "learning_rate": 2.9382184126304834e-05,
      "loss": 0.589,
      "step": 138
    },
    {
      "epoch": 0.07803508771929825,
      "grad_norm": 1.4466608762741089,
      "learning_rate": 2.8543969561369556e-05,
      "loss": 0.5408,
      "step": 139
    },
    {
      "epoch": 0.07859649122807018,
      "grad_norm": 1.2781710624694824,
      "learning_rate": 2.771308221117309e-05,
      "loss": 0.5487,
      "step": 140
    },
    {
      "epoch": 0.07915789473684211,
      "grad_norm": 0.9161723256111145,
      "learning_rate": 2.688980582298435e-05,
      "loss": 0.5468,
      "step": 141
    },
    {
      "epoch": 0.07971929824561404,
      "grad_norm": 0.7036490440368652,
      "learning_rate": 2.607442154493568e-05,
      "loss": 0.631,
      "step": 142
    },
    {
      "epoch": 0.08028070175438597,
      "grad_norm": 0.7849149703979492,
      "learning_rate": 2.5267207830011068e-05,
      "loss": 0.5692,
      "step": 143
    },
    {
      "epoch": 0.0808421052631579,
      "grad_norm": 0.8792390823364258,
      "learning_rate": 2.446844034095466e-05,
      "loss": 0.4891,
      "step": 144
    },
    {
      "epoch": 0.08140350877192983,
      "grad_norm": 0.8697530031204224,
      "learning_rate": 2.3678391856132204e-05,
      "loss": 0.7373,
      "step": 145
    },
    {
      "epoch": 0.08196491228070175,
      "grad_norm": 0.8269962668418884,
      "learning_rate": 2.2897332176377528e-05,
      "loss": 0.539,
      "step": 146
    },
    {
      "epoch": 0.08252631578947368,
      "grad_norm": 1.1171008348464966,
      "learning_rate": 2.2125528032855724e-05,
      "loss": 0.9844,
      "step": 147
    },
    {
      "epoch": 0.08308771929824561,
      "grad_norm": 1.0775487422943115,
      "learning_rate": 2.136324299597474e-05,
      "loss": 1.0029,
      "step": 148
    },
    {
      "epoch": 0.08364912280701754,
      "grad_norm": 1.273962378501892,
      "learning_rate": 2.061073738537635e-05,
      "loss": 1.2998,
      "step": 149
    },
    {
      "epoch": 0.08421052631578947,
      "grad_norm": 1.928801417350769,
      "learning_rate": 1.9868268181037185e-05,
      "loss": 1.7007,
      "step": 150
    },
    {
      "epoch": 0.08421052631578947,
      "eval_loss": 0.7458124756813049,
      "eval_runtime": 122.7727,
      "eval_samples_per_second": 24.435,
      "eval_steps_per_second": 12.218,
      "step": 150
    },
    {
      "epoch": 0.0847719298245614,
      "grad_norm": 0.3122590482234955,
      "learning_rate": 1.9136088935510362e-05,
      "loss": 0.8948,
      "step": 151
    },
    {
      "epoch": 0.08533333333333333,
      "grad_norm": 0.3245391845703125,
      "learning_rate": 1.8414449687337464e-05,
      "loss": 0.7903,
      "step": 152
    },
    {
      "epoch": 0.08589473684210526,
      "grad_norm": 0.28587210178375244,
      "learning_rate": 1.7703596875660645e-05,
      "loss": 0.7252,
      "step": 153
    },
    {
      "epoch": 0.08645614035087719,
      "grad_norm": 0.3171529173851013,
      "learning_rate": 1.700377325606388e-05,
      "loss": 0.6687,
      "step": 154
    },
    {
      "epoch": 0.08701754385964912,
      "grad_norm": 0.2921241223812103,
      "learning_rate": 1.631521781767214e-05,
      "loss": 0.6201,
      "step": 155
    },
    {
      "epoch": 0.08757894736842105,
      "grad_norm": 0.26473841071128845,
      "learning_rate": 1.5638165701536868e-05,
      "loss": 0.6518,
      "step": 156
    },
    {
      "epoch": 0.08814035087719298,
      "grad_norm": 0.33115243911743164,
      "learning_rate": 1.4972848120335453e-05,
      "loss": 0.8362,
      "step": 157
    },
    {
      "epoch": 0.0887017543859649,
      "grad_norm": 0.2714337110519409,
      "learning_rate": 1.4319492279412388e-05,
      "loss": 0.6746,
      "step": 158
    },
    {
      "epoch": 0.08926315789473684,
      "grad_norm": 0.2988300025463104,
      "learning_rate": 1.3678321299188801e-05,
      "loss": 0.7037,
      "step": 159
    },
    {
      "epoch": 0.08982456140350877,
      "grad_norm": 0.31018441915512085,
      "learning_rate": 1.3049554138967051e-05,
      "loss": 0.7861,
      "step": 160
    },
    {
      "epoch": 0.09038596491228071,
      "grad_norm": 0.2746640145778656,
      "learning_rate": 1.2433405522156332e-05,
      "loss": 0.7249,
      "step": 161
    },
    {
      "epoch": 0.09094736842105264,
      "grad_norm": 0.2654082179069519,
      "learning_rate": 1.183008586294485e-05,
      "loss": 0.7455,
      "step": 162
    },
    {
      "epoch": 0.09150877192982457,
      "grad_norm": 0.24473796784877777,
      "learning_rate": 1.1239801194443506e-05,
      "loss": 0.6227,
      "step": 163
    },
    {
      "epoch": 0.0920701754385965,
      "grad_norm": 0.2504828870296478,
      "learning_rate": 1.066275309832584e-05,
      "loss": 0.7073,
      "step": 164
    },
    {
      "epoch": 0.09263157894736843,
      "grad_norm": 0.30534666776657104,
      "learning_rate": 1.0099138635988026e-05,
      "loss": 0.6981,
      "step": 165
    },
    {
      "epoch": 0.09319298245614036,
      "grad_norm": 0.30247023701667786,
      "learning_rate": 9.549150281252633e-06,
      "loss": 0.6736,
      "step": 166
    },
    {
      "epoch": 0.09375438596491228,
      "grad_norm": 0.4104022979736328,
      "learning_rate": 9.012975854638949e-06,
      "loss": 0.754,
      "step": 167
    },
    {
      "epoch": 0.09431578947368421,
      "grad_norm": 0.3200322091579437,
      "learning_rate": 8.490798459222476e-06,
      "loss": 0.5872,
      "step": 168
    },
    {
      "epoch": 0.09487719298245614,
      "grad_norm": 0.353659063577652,
      "learning_rate": 7.982796418105371e-06,
      "loss": 0.8617,
      "step": 169
    },
    {
      "epoch": 0.09543859649122807,
      "grad_norm": 0.3442855179309845,
      "learning_rate": 7.489143213519301e-06,
      "loss": 0.7441,
      "step": 170
    },
    {
      "epoch": 0.096,
      "grad_norm": 0.3479902744293213,
      "learning_rate": 7.010007427581378e-06,
      "loss": 0.7429,
      "step": 171
    },
    {
      "epoch": 0.09656140350877193,
      "grad_norm": 0.3734584152698517,
      "learning_rate": 6.5455526847235825e-06,
      "loss": 0.7839,
      "step": 172
    },
    {
      "epoch": 0.09712280701754386,
      "grad_norm": 0.3731192648410797,
      "learning_rate": 6.0959375958151045e-06,
      "loss": 0.8681,
      "step": 173
    },
    {
      "epoch": 0.09768421052631579,
      "grad_norm": 0.4046560525894165,
      "learning_rate": 5.6613157039969055e-06,
      "loss": 0.9747,
      "step": 174
    },
    {
      "epoch": 0.09824561403508772,
      "grad_norm": 0.5003874897956848,
      "learning_rate": 5.241835432246889e-06,
      "loss": 0.7606,
      "step": 175
    },
    {
      "epoch": 0.09880701754385965,
      "grad_norm": 0.5039113759994507,
      "learning_rate": 4.837640032693558e-06,
      "loss": 0.7671,
      "step": 176
    },
    {
      "epoch": 0.09936842105263158,
      "grad_norm": 0.5854450464248657,
      "learning_rate": 4.448867537695578e-06,
      "loss": 0.7763,
      "step": 177
    },
    {
      "epoch": 0.09992982456140351,
      "grad_norm": 0.5254133343696594,
      "learning_rate": 4.075650712703849e-06,
      "loss": 0.7363,
      "step": 178
    },
    {
      "epoch": 0.10049122807017544,
      "grad_norm": 0.5278907418251038,
      "learning_rate": 3.71811701092219e-06,
      "loss": 0.7107,
      "step": 179
    },
    {
      "epoch": 0.10105263157894737,
      "grad_norm": 0.6159325242042542,
      "learning_rate": 3.376388529782215e-06,
      "loss": 0.6757,
      "step": 180
    },
    {
      "epoch": 0.1016140350877193,
      "grad_norm": 0.6477799415588379,
      "learning_rate": 3.0505819692471792e-06,
      "loss": 0.6614,
      "step": 181
    },
    {
      "epoch": 0.10217543859649122,
      "grad_norm": 0.5958523154258728,
      "learning_rate": 2.7408085919590264e-06,
      "loss": 0.7575,
      "step": 182
    },
    {
      "epoch": 0.10273684210526315,
      "grad_norm": 0.6421895027160645,
      "learning_rate": 2.4471741852423237e-06,
      "loss": 0.4513,
      "step": 183
    },
    {
      "epoch": 0.10329824561403508,
      "grad_norm": 0.7550863027572632,
      "learning_rate": 2.1697790249779636e-06,
      "loss": 0.6119,
      "step": 184
    },
    {
      "epoch": 0.10385964912280701,
      "grad_norm": 0.7374166250228882,
      "learning_rate": 1.908717841359048e-06,
      "loss": 0.5968,
      "step": 185
    },
    {
      "epoch": 0.10442105263157894,
      "grad_norm": 0.755599319934845,
      "learning_rate": 1.6640797865406288e-06,
      "loss": 0.5619,
      "step": 186
    },
    {
      "epoch": 0.10498245614035087,
      "grad_norm": 0.8369899988174438,
      "learning_rate": 1.4359484041943038e-06,
      "loss": 0.5447,
      "step": 187
    },
    {
      "epoch": 0.1055438596491228,
      "grad_norm": 0.875713050365448,
      "learning_rate": 1.2244016009781701e-06,
      "loss": 0.5324,
      "step": 188
    },
    {
      "epoch": 0.10610526315789473,
      "grad_norm": 0.8864160776138306,
      "learning_rate": 1.0295116199317057e-06,
      "loss": 0.5216,
      "step": 189
    },
    {
      "epoch": 0.10666666666666667,
      "grad_norm": 0.9906832575798035,
      "learning_rate": 8.513450158049108e-07,
      "loss": 0.6048,
      "step": 190
    },
    {
      "epoch": 0.1072280701754386,
      "grad_norm": 1.3534337282180786,
      "learning_rate": 6.899626323298713e-07,
      "loss": 0.5319,
      "step": 191
    },
    {
      "epoch": 0.10778947368421053,
      "grad_norm": 1.093711256980896,
      "learning_rate": 5.454195814427021e-07,
      "loss": 0.4393,
      "step": 192
    },
    {
      "epoch": 0.10835087719298246,
      "grad_norm": 0.874680757522583,
      "learning_rate": 4.177652244628627e-07,
      "loss": 0.5225,
      "step": 193
    },
    {
      "epoch": 0.10891228070175439,
      "grad_norm": 0.8974077105522156,
      "learning_rate": 3.0704315523631953e-07,
      "loss": 0.7351,
      "step": 194
    },
    {
      "epoch": 0.10947368421052632,
      "grad_norm": 0.8559723496437073,
      "learning_rate": 2.1329118524827662e-07,
      "loss": 0.7442,
      "step": 195
    },
    {
      "epoch": 0.11003508771929825,
      "grad_norm": 0.8494490385055542,
      "learning_rate": 1.3654133071059893e-07,
      "loss": 0.638,
      "step": 196
    },
    {
      "epoch": 0.11059649122807018,
      "grad_norm": 0.8759176731109619,
      "learning_rate": 7.681980162830282e-08,
      "loss": 0.8537,
      "step": 197
    },
    {
      "epoch": 0.11115789473684211,
      "grad_norm": 1.0472686290740967,
      "learning_rate": 3.4146992848854695e-08,
      "loss": 0.909,
      "step": 198
    },
    {
      "epoch": 0.11171929824561404,
      "grad_norm": 1.3545598983764648,
      "learning_rate": 8.537477097364522e-09,
      "loss": 1.1257,
      "step": 199
    },
    {
      "epoch": 0.11228070175438597,
      "grad_norm": 1.787885069847107,
      "learning_rate": 0.0,
      "loss": 1.7459,
      "step": 200
    },
    {
      "epoch": 0.11228070175438597,
      "eval_loss": 0.7339931726455688,
      "eval_runtime": 122.6295,
      "eval_samples_per_second": 24.464,
      "eval_steps_per_second": 12.232,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 6.579098864320512e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}