Training in progress, step 1125, checkpoint
last-checkpoint/adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:8ee0ed64be9b747a78bcf1b178e9490c1d478b78f187b2d0f71171e083954d26
 size 161714
last-checkpoint/optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:e8dab045fdf4a53ad60be1294c2e1db0fb862e9afd88c55745e7fabaff95bbb9
 size 64814
last-checkpoint/rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:c91ecadd3c2a743ed912676ea330151768380e4c0c6579b4a23444cdbba33b06
 size 14244
last-checkpoint/scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:966cc7c77768098c8af029c048689ac1de1ed0de938435a2a7a5601075335dbd
 size 1064
last-checkpoint/trainer_state.json
CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 0.
+  "epoch": 0.14076466494099615,
   "eval_steps": 375,
-  "global_step":
+  "global_step": 1125,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -5273,6 +5273,2639 @@
       "eval_samples_per_second": 443.0,
       "eval_steps_per_second": 221.516,
       "step": 750
     }
   ],
   "logging_steps": 1,
@@ -5292,7 +7925,7 @@
       "attributes": {}
     }
   },
-  "total_flos":
   "train_batch_size": 2,
   "trial_name": null,
   "trial_params": null

       "eval_samples_per_second": 443.0,
       "eval_steps_per_second": 221.516,
       "step": 750
+    },
+    {
+      "epoch": 0.09396823410727831,
+      "grad_norm": 0.12596431374549866,
+      "learning_rate": 0.00010084337058003303,
+      "loss": 6.8612,
+      "step": 751
+    },
+    {
+      "epoch": 0.09409335825389253,
+      "grad_norm": 0.09819183498620987,
+      "learning_rate": 0.00010063253121564868,
+      "loss": 6.8665,
+      "step": 752
+    },
+    {
+      "epoch": 0.09421848240050676,
+      "grad_norm": 0.08606826514005661,
+      "learning_rate": 0.00010042168903930514,
+      "loss": 6.8698,
+      "step": 753
+    },
+    {
+      "epoch": 0.09434360654712097,
+      "grad_norm": 0.08439600467681885,
+      "learning_rate": 0.00010021084498831522,
+      "loss": 6.8662,
+      "step": 754
+    },
+    {
+      "epoch": 0.0944687306937352,
+      "grad_norm": 0.10085678100585938,
+      "learning_rate": 0.0001,
+      "loss": 6.8609,
+      "step": 755
+    },
+    {
+      "epoch": 0.0945938548403494,
+      "grad_norm": 0.06968796998262405,
+      "learning_rate": 9.97891550116848e-05,
+      "loss": 6.8704,
+      "step": 756
+    },
+    {
+      "epoch": 0.09471897898696363,
+      "grad_norm": 0.08123313635587692,
+      "learning_rate": 9.957831096069488e-05,
+      "loss": 6.8705,
+      "step": 757
+    },
+    {
+      "epoch": 0.09484410313357784,
+      "grad_norm": 0.06296410411596298,
+      "learning_rate": 9.936746878435136e-05,
+      "loss": 6.8656,
+      "step": 758
+    },
+    {
+      "epoch": 0.09496922728019207,
+      "grad_norm": 0.07284566015005112,
+      "learning_rate": 9.915662941996699e-05,
+      "loss": 6.8718,
+      "step": 759
+    },
+    {
+      "epoch": 0.09509435142680628,
+      "grad_norm": 0.0826282799243927,
+      "learning_rate": 9.894579380484204e-05,
+      "loss": 6.8661,
+      "step": 760
+    },
+    {
+      "epoch": 0.0952194755734205,
+      "grad_norm": 0.051579512655735016,
+      "learning_rate": 9.873496287626019e-05,
+      "loss": 6.8656,
+      "step": 761
+    },
+    {
+      "epoch": 0.09534459972003473,
+      "grad_norm": 0.05063299462199211,
+      "learning_rate": 9.852413757148417e-05,
+      "loss": 6.8611,
+      "step": 762
+    },
+    {
+      "epoch": 0.09546972386664894,
+      "grad_norm": 0.0630897656083107,
+      "learning_rate": 9.831331882775178e-05,
+      "loss": 6.8632,
+      "step": 763
+    },
+    {
+      "epoch": 0.09559484801326316,
+      "grad_norm": 0.05601769685745239,
+      "learning_rate": 9.81025075822716e-05,
+      "loss": 6.8573,
+      "step": 764
+    },
+    {
+      "epoch": 0.09571997215987738,
+      "grad_norm": 0.0629180446267128,
+      "learning_rate": 9.789170477221891e-05,
+      "loss": 6.86,
+      "step": 765
+    },
+    {
+      "epoch": 0.0958450963064916,
+      "grad_norm": 0.06247006729245186,
+      "learning_rate": 9.76809113347315e-05,
+      "loss": 6.8551,
+      "step": 766
+    },
+    {
+      "epoch": 0.09597022045310581,
+      "grad_norm": 0.07091257721185684,
+      "learning_rate": 9.747012820690543e-05,
+      "loss": 6.858,
+      "step": 767
+    },
+    {
+      "epoch": 0.09609534459972004,
+      "grad_norm": 0.05969550460577011,
+      "learning_rate": 9.725935632579104e-05,
+      "loss": 6.8518,
+      "step": 768
+    },
+    {
+      "epoch": 0.09622046874633425,
+      "grad_norm": 0.07379163801670074,
+      "learning_rate": 9.704859662838855e-05,
+      "loss": 6.8564,
+      "step": 769
+    },
+    {
+      "epoch": 0.09634559289294847,
+      "grad_norm": 0.057554204016923904,
+      "learning_rate": 9.683785005164411e-05,
+      "loss": 6.852,
+      "step": 770
+    },
+    {
+      "epoch": 0.0964707170395627,
+      "grad_norm": 0.05670370161533356,
+      "learning_rate": 9.662711753244551e-05,
+      "loss": 6.8473,
+      "step": 771
+    },
+    {
+      "epoch": 0.09659584118617691,
+      "grad_norm": 0.060861892998218536,
+      "learning_rate": 9.641640000761802e-05,
+      "loss": 6.8475,
+      "step": 772
+    },
+    {
+      "epoch": 0.09672096533279113,
+      "grad_norm": 0.06020105630159378,
+      "learning_rate": 9.620569841392029e-05,
+      "loss": 6.8497,
+      "step": 773
+    },
+    {
+      "epoch": 0.09684608947940535,
+      "grad_norm": 0.051159922033548355,
+      "learning_rate": 9.59950136880401e-05,
+      "loss": 6.8491,
+      "step": 774
+    },
+    {
+      "epoch": 0.09697121362601957,
+      "grad_norm": 0.056930724531412125,
+      "learning_rate": 9.57843467665903e-05,
+      "loss": 6.8431,
+      "step": 775
+    },
+    {
+      "epoch": 0.09709633777263378,
+      "grad_norm": 0.05780670419335365,
+      "learning_rate": 9.557369858610453e-05,
+      "loss": 6.8491,
+      "step": 776
+    },
+    {
+      "epoch": 0.097221461919248,
+      "grad_norm": 0.06336930394172668,
+      "learning_rate": 9.53630700830332e-05,
+      "loss": 6.844,
+      "step": 777
+    },
+    {
+      "epoch": 0.09734658606586222,
+      "grad_norm": 0.06081291288137436,
+      "learning_rate": 9.51524621937391e-05,
+      "loss": 6.8431,
+      "step": 778
+    },
+    {
+      "epoch": 0.09747171021247644,
+      "grad_norm": 0.10846126079559326,
+      "learning_rate": 9.494187585449358e-05,
+      "loss": 6.8397,
+      "step": 779
+    },
+    {
+      "epoch": 0.09759683435909065,
+      "grad_norm": 0.0634780079126358,
+      "learning_rate": 9.473131200147205e-05,
+      "loss": 6.8395,
+      "step": 780
+    },
+    {
+      "epoch": 0.09772195850570488,
+      "grad_norm": 0.05900888890028,
+      "learning_rate": 9.452077157074994e-05,
+      "loss": 6.8366,
+      "step": 781
+    },
+    {
+      "epoch": 0.0978470826523191,
+      "grad_norm": 0.08791916072368622,
+      "learning_rate": 9.431025549829862e-05,
+      "loss": 6.8367,
+      "step": 782
+    },
+    {
+      "epoch": 0.09797220679893331,
+      "grad_norm": 0.06639428436756134,
+      "learning_rate": 9.409976471998118e-05,
+      "loss": 6.8359,
+      "step": 783
+    },
+    {
+      "epoch": 0.09809733094554754,
+      "grad_norm": 0.0607585683465004,
+      "learning_rate": 9.388930017154819e-05,
+      "loss": 6.8365,
+      "step": 784
+    },
+    {
+      "epoch": 0.09822245509216175,
+      "grad_norm": 0.08171333372592926,
+      "learning_rate": 9.367886278863366e-05,
+      "loss": 6.8346,
+      "step": 785
+    },
+    {
+      "epoch": 0.09834757923877598,
+      "grad_norm": 0.07056614011526108,
+      "learning_rate": 9.346845350675088e-05,
+      "loss": 6.8287,
+      "step": 786
+    },
+    {
+      "epoch": 0.09847270338539019,
+      "grad_norm": 0.07537488639354706,
+      "learning_rate": 9.325807326128814e-05,
+      "loss": 6.8314,
+      "step": 787
+    },
+    {
+      "epoch": 0.09859782753200441,
+      "grad_norm": 0.058308299630880356,
+      "learning_rate": 9.304772298750463e-05,
+      "loss": 6.8287,
+      "step": 788
+    },
+    {
+      "epoch": 0.09872295167861862,
+      "grad_norm": 0.07489283382892609,
+      "learning_rate": 9.283740362052642e-05,
+      "loss": 6.8237,
+      "step": 789
+    },
+    {
+      "epoch": 0.09884807582523285,
+      "grad_norm": 0.06404101848602295,
+      "learning_rate": 9.26271160953421e-05,
+      "loss": 6.8209,
+      "step": 790
+    },
+    {
+      "epoch": 0.09897319997184707,
+      "grad_norm": 0.076097272336483,
+      "learning_rate": 9.241686134679867e-05,
+      "loss": 6.8184,
+      "step": 791
+    },
+    {
+      "epoch": 0.09909832411846128,
+      "grad_norm": 0.09229526668787003,
+      "learning_rate": 9.220664030959749e-05,
+      "loss": 6.8274,
+      "step": 792
+    },
+    {
+      "epoch": 0.09922344826507551,
+      "grad_norm": 0.09321905672550201,
+      "learning_rate": 9.199645391828999e-05,
+      "loss": 6.8178,
+      "step": 793
+    },
+    {
+      "epoch": 0.09934857241168972,
+      "grad_norm": 0.07489687949419022,
+      "learning_rate": 9.178630310727365e-05,
+      "loss": 6.8181,
+      "step": 794
+    },
+    {
+      "epoch": 0.09947369655830395,
+      "grad_norm": 0.07067129760980606,
+      "learning_rate": 9.157618881078772e-05,
+      "loss": 6.8117,
+      "step": 795
+    },
+    {
+      "epoch": 0.09959882070491816,
+      "grad_norm": 0.08925637602806091,
+      "learning_rate": 9.136611196290915e-05,
+      "loss": 6.8088,
+      "step": 796
+    },
+    {
+      "epoch": 0.09972394485153238,
+      "grad_norm": 0.08182400465011597,
+      "learning_rate": 9.115607349754834e-05,
+      "loss": 6.8032,
+      "step": 797
+    },
+    {
+      "epoch": 0.09984906899814659,
+      "grad_norm": 0.07875595986843109,
+      "learning_rate": 9.094607434844523e-05,
+      "loss": 6.7973,
+      "step": 798
+    },
+    {
+      "epoch": 0.09997419314476082,
+      "grad_norm": 0.09395304322242737,
+      "learning_rate": 9.07361154491648e-05,
+      "loss": 6.7894,
+      "step": 799
+    },
+    {
+      "epoch": 0.10009931729137504,
+      "grad_norm": 0.17146538197994232,
+      "learning_rate": 9.052619773309317e-05,
+      "loss": 6.7584,
+      "step": 800
+    },
+    {
+      "epoch": 0.10022444143798925,
+      "grad_norm": 0.14705929160118103,
+      "learning_rate": 9.031632213343339e-05,
+      "loss": 6.8753,
+      "step": 801
+    },
+    {
+      "epoch": 0.10034956558460348,
+      "grad_norm": 0.0928153544664383,
+      "learning_rate": 9.01064895832012e-05,
+      "loss": 6.8757,
+      "step": 802
+    },
+    {
+      "epoch": 0.10047468973121769,
+      "grad_norm": 0.08452393114566803,
+      "learning_rate": 8.98967010152211e-05,
+      "loss": 6.8669,
+      "step": 803
+    },
+    {
+      "epoch": 0.10059981387783191,
+      "grad_norm": 0.11025939881801605,
+      "learning_rate": 8.968695736212193e-05,
+      "loss": 6.8577,
+      "step": 804
+    },
+    {
+      "epoch": 0.10072493802444613,
+      "grad_norm": 0.07015882432460785,
+      "learning_rate": 8.947725955633294e-05,
+      "loss": 6.8713,
+      "step": 805
+    },
+    {
+      "epoch": 0.10085006217106035,
+      "grad_norm": 0.06923877447843552,
+      "learning_rate": 8.926760853007946e-05,
+      "loss": 6.8689,
+      "step": 806
+    },
+    {
+      "epoch": 0.10097518631767456,
+      "grad_norm": 0.062181293964385986,
+      "learning_rate": 8.905800521537905e-05,
+      "loss": 6.8692,
+      "step": 807
+    },
+    {
+      "epoch": 0.10110031046428879,
+      "grad_norm": 0.06040557101368904,
+      "learning_rate": 8.884845054403699e-05,
+      "loss": 6.8717,
+      "step": 808
+    },
+    {
+      "epoch": 0.10122543461090301,
+      "grad_norm": 0.05984716862440109,
+      "learning_rate": 8.863894544764236e-05,
+      "loss": 6.8587,
+      "step": 809
+    },
+    {
+      "epoch": 0.10135055875751722,
+      "grad_norm": 0.05124804750084877,
+      "learning_rate": 8.84294908575639e-05,
+      "loss": 6.8682,
+      "step": 810
+    },
+    {
+      "epoch": 0.10147568290413145,
+      "grad_norm": 0.0511602908372879,
+      "learning_rate": 8.822008770494572e-05,
+      "loss": 6.8637,
+      "step": 811
+    },
+    {
+      "epoch": 0.10160080705074566,
+      "grad_norm": 0.06778772175312042,
+      "learning_rate": 8.801073692070337e-05,
+      "loss": 6.8608,
+      "step": 812
+    },
+    {
+      "epoch": 0.10172593119735988,
+      "grad_norm": 0.05513143539428711,
+      "learning_rate": 8.780143943551954e-05,
+      "loss": 6.8618,
+      "step": 813
+    },
+    {
+      "epoch": 0.1018510553439741,
+      "grad_norm": 0.07517508417367935,
+      "learning_rate": 8.759219617983999e-05,
+      "loss": 6.8585,
+      "step": 814
+    },
+    {
+      "epoch": 0.10197617949058832,
+      "grad_norm": 0.072541743516922,
+      "learning_rate": 8.738300808386935e-05,
+      "loss": 6.8633,
+      "step": 815
+    },
+    {
+      "epoch": 0.10210130363720253,
+      "grad_norm": 0.06264056265354156,
+      "learning_rate": 8.717387607756713e-05,
+      "loss": 6.8559,
+      "step": 816
+    },
+    {
+      "epoch": 0.10222642778381676,
+      "grad_norm": 0.05450925976037979,
+      "learning_rate": 8.696480109064342e-05,
+      "loss": 6.8502,
+      "step": 817
+    },
+    {
+      "epoch": 0.10235155193043098,
+      "grad_norm": 0.06095590069890022,
+      "learning_rate": 8.675578405255485e-05,
+      "loss": 6.8539,
+      "step": 818
+    },
+    {
+      "epoch": 0.10247667607704519,
+      "grad_norm": 0.05517008900642395,
+      "learning_rate": 8.654682589250038e-05,
+      "loss": 6.8462,
+      "step": 819
+    },
+    {
+      "epoch": 0.10260180022365942,
+      "grad_norm": 0.05329270660877228,
+      "learning_rate": 8.633792753941733e-05,
+      "loss": 6.8496,
+      "step": 820
+    },
+    {
+      "epoch": 0.10272692437027363,
+      "grad_norm": 0.05850008502602577,
+      "learning_rate": 8.612908992197705e-05,
+      "loss": 6.8485,
+      "step": 821
+    },
+    {
+      "epoch": 0.10285204851688785,
+      "grad_norm": 0.06142275780439377,
+      "learning_rate": 8.592031396858093e-05,
+      "loss": 6.8488,
+      "step": 822
+    },
+    {
+      "epoch": 0.10297717266350206,
+      "grad_norm": 0.06849534064531326,
+      "learning_rate": 8.571160060735624e-05,
+      "loss": 6.8497,
+      "step": 823
+    },
+    {
+      "epoch": 0.10310229681011629,
+      "grad_norm": 0.048923857510089874,
+      "learning_rate": 8.550295076615188e-05,
+      "loss": 6.8435,
+      "step": 824
+    },
+    {
+      "epoch": 0.1032274209567305,
+      "grad_norm": 0.06683506071567535,
+      "learning_rate": 8.529436537253458e-05,
+      "loss": 6.8449,
+      "step": 825
+    },
+    {
+      "epoch": 0.10335254510334473,
+      "grad_norm": 0.05200260132551193,
+      "learning_rate": 8.508584535378439e-05,
+      "loss": 6.8427,
+      "step": 826
+    },
+    {
+      "epoch": 0.10347766924995894,
+      "grad_norm": 0.05982294678688049,
+      "learning_rate": 8.487739163689079e-05,
+      "loss": 6.8404,
+      "step": 827
+    },
+    {
+      "epoch": 0.10360279339657316,
+      "grad_norm": 0.06280147284269333,
+      "learning_rate": 8.466900514854847e-05,
+      "loss": 6.8409,
+      "step": 828
+    },
+    {
+      "epoch": 0.10372791754318739,
+      "grad_norm": 0.059844985604286194,
+      "learning_rate": 8.446068681515334e-05,
+      "loss": 6.839,
+      "step": 829
+    },
+    {
+      "epoch": 0.1038530416898016,
+      "grad_norm": 0.07303334772586823,
+      "learning_rate": 8.425243756279824e-05,
+      "loss": 6.8399,
+      "step": 830
+    },
+    {
+      "epoch": 0.10397816583641582,
+      "grad_norm": 0.06451032310724258,
+      "learning_rate": 8.404425831726894e-05,
+      "loss": 6.8385,
+      "step": 831
+    },
+    {
+      "epoch": 0.10410328998303003,
+      "grad_norm": 0.05930513143539429,
+      "learning_rate": 8.383615000404e-05,
+      "loss": 6.8362,
+      "step": 832
+    },
+    {
+      "epoch": 0.10422841412964426,
+      "grad_norm": 0.06087719649076462,
+      "learning_rate": 8.362811354827059e-05,
+      "loss": 6.8368,
+      "step": 833
+    },
+    {
+      "epoch": 0.10435353827625847,
+      "grad_norm": 0.05968770384788513,
+      "learning_rate": 8.342014987480047e-05,
+      "loss": 6.8324,
+      "step": 834
+    },
+    {
+      "epoch": 0.1044786624228727,
+      "grad_norm": 0.07309002429246902,
+      "learning_rate": 8.321225990814591e-05,
+      "loss": 6.8302,
+      "step": 835
+    },
+    {
+      "epoch": 0.1046037865694869,
+      "grad_norm": 0.06117624789476395,
+      "learning_rate": 8.300444457249543e-05,
+      "loss": 6.832,
+      "step": 836
+    },
+    {
+      "epoch": 0.10472891071610113,
+      "grad_norm": 0.0783989205956459,
+      "learning_rate": 8.279670479170573e-05,
+      "loss": 6.8292,
+      "step": 837
+    },
+    {
+      "epoch": 0.10485403486271536,
+      "grad_norm": 0.07800403982400894,
+      "learning_rate": 8.258904148929775e-05,
+      "loss": 6.8277,
+      "step": 838
+    },
+    {
+      "epoch": 0.10497915900932957,
+      "grad_norm": 0.07050147652626038,
+      "learning_rate": 8.238145558845235e-05,
+      "loss": 6.8252,
+      "step": 839
+    },
+    {
+      "epoch": 0.10510428315594379,
+      "grad_norm": 0.08124469965696335,
+      "learning_rate": 8.217394801200631e-05,
+      "loss": 6.821,
+      "step": 840
+    },
+    {
+      "epoch": 0.105229407302558,
+      "grad_norm": 0.10209415853023529,
+      "learning_rate": 8.196651968244826e-05,
+      "loss": 6.8257,
+      "step": 841
+    },
+    {
+      "epoch": 0.10535453144917223,
+      "grad_norm": 0.07131823152303696,
+      "learning_rate": 8.175917152191447e-05,
+      "loss": 6.8218,
+      "step": 842
+    },
+    {
+      "epoch": 0.10547965559578644,
+      "grad_norm": 0.0662199854850769,
+      "learning_rate": 8.15519044521848e-05,
+      "loss": 6.8211,
+      "step": 843
+    },
+    {
+      "epoch": 0.10560477974240066,
+      "grad_norm": 0.07491844892501831,
+      "learning_rate": 8.134471939467874e-05,
+      "loss": 6.8125,
+      "step": 844
+    },
+    {
+      "epoch": 0.10572990388901488,
+      "grad_norm": 0.07353292405605316,
+      "learning_rate": 8.113761727045105e-05,
+      "loss": 6.812,
+      "step": 845
+    },
+    {
+      "epoch": 0.1058550280356291,
+      "grad_norm": 0.07755854725837708,
+      "learning_rate": 8.093059900018792e-05,
+      "loss": 6.8107,
+      "step": 846
+    },
+    {
+      "epoch": 0.10598015218224333,
+      "grad_norm": 0.09446351230144501,
+      "learning_rate": 8.072366550420266e-05,
+      "loss": 6.8037,
+      "step": 847
+    },
+    {
+      "epoch": 0.10610527632885754,
+      "grad_norm": 0.09002508223056793,
+      "learning_rate": 8.051681770243175e-05,
+      "loss": 6.7908,
+      "step": 848
+    },
+    {
+      "epoch": 0.10623040047547176,
+      "grad_norm": 0.10826987028121948,
+      "learning_rate": 8.031005651443073e-05,
+      "loss": 6.7755,
+      "step": 849
+    },
+    {
+      "epoch": 0.10635552462208597,
+      "grad_norm": 0.1577070951461792,
+      "learning_rate": 8.010338285937006e-05,
+      "loss": 6.7446,
+      "step": 850
+    },
+    {
+      "epoch": 0.1064806487687002,
+      "grad_norm": 0.10693579912185669,
+      "learning_rate": 7.989679765603108e-05,
+      "loss": 6.8654,
+      "step": 851
+    },
+    {
+      "epoch": 0.10660577291531441,
+      "grad_norm": 0.10050234198570251,
+      "learning_rate": 7.969030182280192e-05,
+      "loss": 6.8684,
+      "step": 852
+    },
+    {
+      "epoch": 0.10673089706192863,
+      "grad_norm": 0.09752437472343445,
+      "learning_rate": 7.948389627767343e-05,
+      "loss": 6.8641,
+      "step": 853
+    },
+    {
+      "epoch": 0.10685602120854285,
+      "grad_norm": 0.0903589129447937,
+      "learning_rate": 7.927758193823501e-05,
+      "loss": 6.8666,
+      "step": 854
+    },
+    {
+      "epoch": 0.10698114535515707,
+      "grad_norm": 0.09941129386425018,
+      "learning_rate": 7.907135972167069e-05,
+      "loss": 6.858,
+      "step": 855
+    },
+    {
+      "epoch": 0.1071062695017713,
+      "grad_norm": 0.08500877022743225,
+      "learning_rate": 7.88652305447549e-05,
+      "loss": 6.868,
+      "step": 856
+    },
+    {
+      "epoch": 0.1072313936483855,
+      "grad_norm": 0.06359398365020752,
+      "learning_rate": 7.865919532384844e-05,
+      "loss": 6.8696,
+      "step": 857
+    },
+    {
+      "epoch": 0.10735651779499973,
+      "grad_norm": 0.06349831819534302,
+      "learning_rate": 7.845325497489449e-05,
+      "loss": 6.8735,
+      "step": 858
+    },
+    {
+      "epoch": 0.10748164194161394,
+      "grad_norm": 0.05805711820721626,
+      "learning_rate": 7.82474104134144e-05,
+      "loss": 6.8621,
+      "step": 859
+    },
+    {
+      "epoch": 0.10760676608822817,
+      "grad_norm": 0.060704994946718216,
+      "learning_rate": 7.804166255450373e-05,
+      "loss": 6.8656,
+      "step": 860
+    },
+    {
+      "epoch": 0.10773189023484238,
+      "grad_norm": 0.0630919337272644,
+      "learning_rate": 7.783601231282812e-05,
+      "loss": 6.8645,
+      "step": 861
+    },
+    {
+      "epoch": 0.1078570143814566,
+      "grad_norm": 0.0664263442158699,
+      "learning_rate": 7.763046060261932e-05,
+      "loss": 6.8656,
+      "step": 862
+    },
+    {
+      "epoch": 0.10798213852807081,
+      "grad_norm": 0.05371001362800598,
+      "learning_rate": 7.742500833767094e-05,
+      "loss": 6.864,
+      "step": 863
+    },
+    {
+      "epoch": 0.10810726267468504,
+      "grad_norm": 0.05414309725165367,
+      "learning_rate": 7.721965643133458e-05,
+      "loss": 6.859,
+      "step": 864
+    },
+    {
+      "epoch": 0.10823238682129926,
+      "grad_norm": 0.05882842093706131,
+      "learning_rate": 7.701440579651566e-05,
+      "loss": 6.8571,
+      "step": 865
+    },
+    {
+      "epoch": 0.10835751096791348,
+      "grad_norm": 0.06309391558170319,
+      "learning_rate": 7.680925734566937e-05,
+      "loss": 6.8543,
+      "step": 866
+    },
+    {
+      "epoch": 0.1084826351145277,
+      "grad_norm": 0.05783820152282715,
+      "learning_rate": 7.660421199079669e-05,
+      "loss": 6.8538,
+      "step": 867
+    },
+    {
+      "epoch": 0.10860775926114191,
+      "grad_norm": 0.0630047544836998,
+      "learning_rate": 7.639927064344022e-05,
+      "loss": 6.8532,
+      "step": 868
+    },
+    {
+      "epoch": 0.10873288340775614,
+      "grad_norm": 0.07220378518104553,
+      "learning_rate": 7.619443421468021e-05,
+      "loss": 6.8497,
+      "step": 869
+    },
+    {
+      "epoch": 0.10885800755437035,
+      "grad_norm": 0.06430362164974213,
+      "learning_rate": 7.598970361513051e-05,
+      "loss": 6.8488,
+      "step": 870
+    },
+    {
+      "epoch": 0.10898313170098457,
+      "grad_norm": 0.053624700754880905,
+      "learning_rate": 7.578507975493448e-05,
+      "loss": 6.8483,
+      "step": 871
+    },
+    {
+      "epoch": 0.10910825584759878,
+      "grad_norm": 0.07166837900876999,
+      "learning_rate": 7.558056354376098e-05,
+      "loss": 6.8457,
+      "step": 872
+    },
+    {
+      "epoch": 0.10923337999421301,
+      "grad_norm": 0.05962124094367027,
+      "learning_rate": 7.537615589080027e-05,
+      "loss": 6.8397,
+      "step": 873
+    },
+    {
+      "epoch": 0.10935850414082722,
+      "grad_norm": 0.054664015769958496,
+      "learning_rate": 7.517185770476006e-05,
+      "loss": 6.8446,
+      "step": 874
+    },
+    {
+      "epoch": 0.10948362828744145,
+      "grad_norm": 0.04989203065633774,
+      "learning_rate": 7.496766989386136e-05,
+      "loss": 6.8444,
+      "step": 875
+    },
+    {
+      "epoch": 0.10960875243405567,
+      "grad_norm": 0.05883051082491875,
+      "learning_rate": 7.476359336583454e-05,
+      "loss": 6.8424,
+      "step": 876
+    },
+    {
+      "epoch": 0.10973387658066988,
+      "grad_norm": 0.06765749305486679,
+      "learning_rate": 7.455962902791522e-05,
+      "loss": 6.8373,
+      "step": 877
+    },
+    {
+      "epoch": 0.1098590007272841,
+      "grad_norm": 0.05092870071530342,
+      "learning_rate": 7.435577778684033e-05,
+      "loss": 6.8393,
+      "step": 878
+    },
+    {
+      "epoch": 0.10998412487389832,
+      "grad_norm": 0.05673045665025711,
+      "learning_rate": 7.415204054884399e-05,
+      "loss": 6.8382,
+      "step": 879
+    },
+    {
+      "epoch": 0.11010924902051254,
+      "grad_norm": 0.0606488436460495,
+      "learning_rate": 7.394841821965345e-05,
+      "loss": 6.8382,
+      "step": 880
+    },
+    {
+      "epoch": 0.11023437316712675,
+      "grad_norm": 0.05659674480557442,
+      "learning_rate": 7.374491170448525e-05,
+      "loss": 6.8359,
+      "step": 881
+    },
+    {
+      "epoch": 0.11035949731374098,
+      "grad_norm": 0.052246998995542526,
+      "learning_rate": 7.3541521908041e-05,
+      "loss": 6.8362,
+      "step": 882
+    },
+    {
+      "epoch": 0.11048462146035519,
+      "grad_norm": 0.0668715238571167,
+      "learning_rate": 7.33382497345034e-05,
+      "loss": 6.8337,
+      "step": 883
+    },
+    {
+      "epoch": 0.11060974560696941,
+      "grad_norm": 0.06160235404968262,
+      "learning_rate": 7.313509608753231e-05,
+      "loss": 6.8335,
+      "step": 884
+    },
+    {
+      "epoch": 0.11073486975358364,
+      "grad_norm": 0.07179737091064453,
+      "learning_rate": 7.293206187026061e-05,
+      "loss": 6.8288,
+      "step": 885
+    },
+    {
+      "epoch": 0.11085999390019785,
+      "grad_norm": 0.0652439147233963,
+      "learning_rate": 7.27291479852903e-05,
+      "loss": 6.8263,
+      "step": 886
+    },
+    {
+      "epoch": 0.11098511804681208,
+      "grad_norm": 0.058492496609687805,
+      "learning_rate": 7.252635533468843e-05,
+      "loss": 6.8291,
+      "step": 887
+    },
+    {
+      "epoch": 0.11111024219342629,
+      "grad_norm": 0.05899550020694733,
+      "learning_rate": 7.232368481998309e-05,
+      "loss": 6.8276,
+      "step": 888
+    },
+    {
+      "epoch": 0.11123536634004051,
+      "grad_norm": 0.060387495905160904,
+      "learning_rate": 7.212113734215932e-05,
+      "loss": 6.8274,
+      "step": 889
+    },
+    {
+      "epoch": 0.11136049048665472,
+      "grad_norm": 0.06588352471590042,
+      "learning_rate": 7.191871380165538e-05,
+      "loss": 6.8265,
+      "step": 890
+    },
+    {
+      "epoch": 0.11148561463326895,
+      "grad_norm": 0.07027008384466171,
+      "learning_rate": 7.17164150983584e-05,
+      "loss": 6.8171,
+      "step": 891
+    },
+    {
+      "epoch": 0.11161073877988316,
+      "grad_norm": 0.07204768806695938,
+      "learning_rate": 7.151424213160061e-05,
+      "loss": 6.816,
+      "step": 892
+    },
+    {
+      "epoch": 0.11173586292649738,
+      "grad_norm": 0.07449936866760254,
+      "learning_rate": 7.131219580015521e-05,
+      "loss": 6.8186,
+      "step": 893
+    },
+    {
+      "epoch": 0.11186098707311161,
+      "grad_norm": 0.07239895313978195,
+      "learning_rate": 7.11102770022325e-05,
+      "loss": 6.8172,
+      "step": 894
+    },
+    {
+      "epoch": 0.11198611121972582,
+      "grad_norm": 0.08143356442451477,
+      "learning_rate": 7.090848663547574e-05,
+      "loss": 6.8129,
+      "step": 895
+    },
+    {
+      "epoch": 0.11211123536634005,
+      "grad_norm": 0.06850866973400116,
+      "learning_rate": 7.070682559695736e-05,
+      "loss": 6.8124,
+      "step": 896
+    },
+    {
+      "epoch": 0.11223635951295426,
+      "grad_norm": 0.07549439370632172,
+      "learning_rate": 7.050529478317476e-05,
+      "loss": 6.7967,
+      "step": 897
+    },
+    {
+      "epoch": 0.11236148365956848,
+      "grad_norm": 0.16005338728427887,
+      "learning_rate": 7.03038950900464e-05,
+      "loss": 6.7838,
+      "step": 898
+    },
+    {
+      "epoch": 0.11248660780618269,
+      "grad_norm": 0.12860530614852905,
+      "learning_rate": 7.010262741290798e-05,
+      "loss": 6.7815,
+      "step": 899
+    },
+    {
+      "epoch": 0.11261173195279692,
+      "grad_norm": 0.14462625980377197,
+      "learning_rate": 6.990149264650814e-05,
+      "loss": 6.7578,
+      "step": 900
+    },
+    {
+      "epoch": 0.11273685609941113,
+      "grad_norm": 0.09840188175439835,
+      "learning_rate": 6.970049168500474e-05,
+      "loss": 6.867,
+      "step": 901
+    },
+    {
+      "epoch": 0.11286198024602535,
+      "grad_norm": 0.0897248163819313,
+      "learning_rate": 6.94996254219608e-05,
+      "loss": 6.8693,
+      "step": 902
+    },
+    {
+      "epoch": 0.11298710439263958,
+      "grad_norm": 0.08066925406455994,
+      "learning_rate": 6.929889475034048e-05,
+      "loss": 6.8709,
+      "step": 903
+    },
+    {
+      "epoch": 0.11311222853925379,
+      "grad_norm": 0.13347402215003967,
+      "learning_rate": 6.909830056250527e-05,
+      "loss": 6.8459,
+      "step": 904
+    },
+    {
+      "epoch": 0.11323735268586801,
+      "grad_norm": 0.07866514474153519,
+      "learning_rate": 6.889784375020978e-05,
+      "loss": 6.8647,
+      "step": 905
+    },
+    {
+      "epoch": 0.11336247683248223,
+      "grad_norm": 0.0862560123205185,
+      "learning_rate": 6.869752520459803e-05,
+      "loss": 6.8689,
+      "step": 906
+    },
+    {
+      "epoch": 0.11348760097909645,
+      "grad_norm": 0.09121212363243103,
+      "learning_rate": 6.849734581619918e-05,
+      "loss": 6.862,
+      "step": 907
+    },
+    {
+      "epoch": 0.11361272512571066,
+      "grad_norm": 0.07277815043926239,
+      "learning_rate": 6.829730647492404e-05,
+      "loss": 6.8675,
+      "step": 908
+    },
+    {
+      "epoch": 0.11373784927232489,
+      "grad_norm": 0.0751807689666748,
+      "learning_rate": 6.80974080700606e-05,
+      "loss": 6.8619,
+      "step": 909
+    },
+    {
+      "epoch": 0.1138629734189391,
+      "grad_norm": 0.0807303935289383,
+      "learning_rate": 6.789765149027039e-05,
+      "loss": 6.8671,
+      "step": 910
+    },
+    {
+      "epoch": 0.11398809756555332,
+      "grad_norm": 0.06756477802991867,
+      "learning_rate": 6.769803762358443e-05,
+      "loss": 6.8642,
+      "step": 911
+    },
+    {
+      "epoch": 0.11411322171216755,
+      "grad_norm": 0.06978469341993332,
+      "learning_rate": 6.749856735739928e-05,
+      "loss": 6.8667,
+      "step": 912
+    },
+    {
+      "epoch": 0.11423834585878176,
+      "grad_norm": 0.06968016922473907,
+      "learning_rate": 6.729924157847323e-05,
+      "loss": 6.8616,
+      "step": 913
+    },
+    {
+      "epoch": 0.11436347000539598,
+      "grad_norm": 0.06229887902736664,
+      "learning_rate": 6.710006117292209e-05,
+      "loss": 6.8622,
+      "step": 914
+    },
+    {
+      "epoch": 0.1144885941520102,
+      "grad_norm": 0.05655563250184059,
+      "learning_rate": 6.690102702621548e-05,
+      "loss": 6.8623,
+      "step": 915
+    },
+    {
+      "epoch": 0.11461371829862442,
+      "grad_norm": 0.0545574352145195,
+      "learning_rate": 6.670214002317278e-05,
+      "loss": 6.8566,
+      "step": 916
+    },
+    {
+      "epoch": 0.11473884244523863,
+      "grad_norm": 0.07182218134403229,
+      "learning_rate": 6.650340104795932e-05,
+      "loss": 6.8585,
+      "step": 917
+    },
+    {
+      "epoch": 0.11486396659185286,
+      "grad_norm": 0.05521298944950104,
+      "learning_rate": 6.630481098408228e-05,
+      "loss": 6.8544,
+      "step": 918
+    },
+    {
+      "epoch": 0.11498909073846707,
+      "grad_norm": 0.06467194110155106,
+      "learning_rate": 6.610637071438686e-05,
+      "loss": 6.8552,
+      "step": 919
+    },
+    {
+      "epoch": 0.11511421488508129,
+      "grad_norm": 0.06465508043766022,
+      "learning_rate": 6.590808112105232e-05,
+      "loss": 6.8476,
+      "step": 920
+    },
+    {
+      "epoch": 0.1152393390316955,
+      "grad_norm": 0.05993629992008209,
+      "learning_rate": 6.570994308558812e-05,
+      "loss": 6.8448,
+      "step": 921
+    },
+    {
+      "epoch": 0.11536446317830973,
+      "grad_norm": 0.07816801220178604,
+      "learning_rate": 6.551195748882997e-05,
+      "loss": 6.8491,
+      "step": 922
+    },
+    {
+      "epoch": 0.11548958732492395,
+      "grad_norm": 0.05899886414408684,
+      "learning_rate": 6.531412521093586e-05,
+      "loss": 6.8463,
+      "step": 923
+    },
+    {
+      "epoch": 0.11561471147153816,
+      "grad_norm": 0.07259286195039749,
+      "learning_rate": 6.51164471313822e-05,
+      "loss": 6.8459,
+      "step": 924
+    },
+    {
+      "epoch": 0.11573983561815239,
+      "grad_norm": 0.06900156289339066,
+      "learning_rate": 6.491892412895995e-05,
+      "loss": 6.8445,
+      "step": 925
+    },
+    {
+      "epoch": 0.1158649597647666,
+      "grad_norm": 0.05701984837651253,
+      "learning_rate": 6.472155708177052e-05,
+      "loss": 6.8416,
+      "step": 926
+    },
+    {
+      "epoch": 0.11599008391138083,
+      "grad_norm": 0.048884131014347076,
+      "learning_rate": 6.452434686722224e-05,
+      "loss": 6.8378,
+      "step": 927
+    },
+    {
+      "epoch": 0.11611520805799504,
+      "grad_norm": 0.0714973509311676,
+      "learning_rate": 6.432729436202604e-05,
+      "loss": 6.8423,
+      "step": 928
+    },
+    {
+      "epoch": 0.11624033220460926,
+      "grad_norm": 0.07560869306325912,
+      "learning_rate": 6.41304004421918e-05,
+      "loss": 6.8416,
+      "step": 929
+    },
+    {
+      "epoch": 0.11636545635122347,
+      "grad_norm": 0.06673278659582138,
+      "learning_rate": 6.393366598302446e-05,
+      "loss": 6.8384,
+      "step": 930
+    },
+    {
+      "epoch": 0.1164905804978377,
+      "grad_norm": 0.055350061506032944,
+      "learning_rate": 6.373709185911998e-05,
+      "loss": 6.8377,
+      "step": 931
+    },
+    {
+      "epoch": 0.11661570464445192,
+      "grad_norm": 0.057414110749959946,
+      "learning_rate": 6.354067894436155e-05,
+      "loss": 6.8346,
+      "step": 932
+    },
+    {
+      "epoch": 0.11674082879106613,
+      "grad_norm": 0.05158224701881409,
+      "learning_rate": 6.334442811191576e-05,
+      "loss": 6.8316,
+      "step": 933
+    },
+    {
+      "epoch": 0.11686595293768036,
+      "grad_norm": 0.06667657196521759,
+      "learning_rate": 6.314834023422858e-05,
+      "loss": 6.8341,
+      "step": 934
+    },
+    {
+      "epoch": 0.11699107708429457,
+      "grad_norm": 0.060989443212747574,
+      "learning_rate": 6.295241618302156e-05,
+      "loss": 6.8293,
+      "step": 935
+    },
+    {
+      "epoch": 0.1171162012309088,
+      "grad_norm": 0.08631948381662369,
+      "learning_rate": 6.275665682928803e-05,
+      "loss": 6.8301,
+      "step": 936
+    },
+    {
+      "epoch": 0.117241325377523,
+      "grad_norm": 0.08441098034381866,
+      "learning_rate": 6.256106304328905e-05,
+      "loss": 6.8262,
+      "step": 937
+    },
+    {
+      "epoch": 0.11736644952413723,
+      "grad_norm": 0.08572442829608917,
+      "learning_rate": 6.23656356945497e-05,
+      "loss": 6.8278,
+      "step": 938
+    },
+    {
+      "epoch": 0.11749157367075144,
+      "grad_norm": 0.06193273887038231,
+      "learning_rate": 6.21703756518551e-05,
+      "loss": 6.8282,
+      "step": 939
+    },
+    {
+      "epoch": 0.11761669781736567,
+      "grad_norm": 0.07838264852762222,
+      "learning_rate": 6.197528378324665e-05,
+      "loss": 6.8229,
+      "step": 940
+    },
+    {
+      "epoch": 0.11774182196397989,
+      "grad_norm": 0.07304549217224121,
+      "learning_rate": 6.17803609560181e-05,
+      "loss": 6.8224,
+      "step": 941
+    },
+    {
+      "epoch": 0.1178669461105941,
+      "grad_norm": 0.0821477547287941,
+      "learning_rate": 6.158560803671168e-05,
+      "loss": 6.8177,
+      "step": 942
+    },
+    {
+      "epoch": 0.11799207025720833,
+      "grad_norm": 0.06880045682191849,
+      "learning_rate": 6.139102589111435e-05,
+      "loss": 6.8189,
+      "step": 943
+    },
+    {
+      "epoch": 0.11811719440382254,
+      "grad_norm": 0.07983653247356415,
+      "learning_rate": 6.119661538425381e-05,
+      "loss": 6.8113,
+      "step": 944
+    },
+    {
+      "epoch": 0.11824231855043676,
+      "grad_norm": 0.07099959999322891,
+      "learning_rate": 6.100237738039484e-05,
+      "loss": 6.8141,
+      "step": 945
+    },
+    {
+      "epoch": 0.11836744269705098,
+      "grad_norm": 0.1001226007938385,
+      "learning_rate": 6.0808312743035236e-05,
+      "loss": 6.8078,
+      "step": 946
+    },
+    {
+      "epoch": 0.1184925668436652,
+      "grad_norm": 0.07789657264947891,
+      "learning_rate": 6.061442233490211e-05,
+      "loss": 6.8047,
+      "step": 947
+    },
+    {
+      "epoch": 0.11861769099027941,
+      "grad_norm": 0.10231375694274902,
+      "learning_rate": 6.042070701794806e-05,
+      "loss": 6.7926,
+      "step": 948
+    },
+    {
+      "epoch": 0.11874281513689364,
+      "grad_norm": 0.09192590415477753,
+      "learning_rate": 6.0227167653347305e-05,
+      "loss": 6.7759,
+      "step": 949
+    },
+    {
+      "epoch": 0.11886793928350786,
+      "grad_norm": 0.16595888137817383,
+      "learning_rate": 6.0033805101491794e-05,
+      "loss": 6.7533,
+      "step": 950
+    },
+    {
+      "epoch": 0.11899306343012207,
+      "grad_norm": 0.11045640707015991,
+      "learning_rate": 5.98406202219875e-05,
+      "loss": 6.8629,
+      "step": 951
+    },
+    {
+      "epoch": 0.1191181875767363,
+      "grad_norm": 0.08538468182086945,
+      "learning_rate": 5.964761387365052e-05,
+      "loss": 6.8838,
+      "step": 952
+    },
+    {
+      "epoch": 0.11924331172335051,
+      "grad_norm": 0.0835360661149025,
+      "learning_rate": 5.9454786914503255e-05,
+      "loss": 6.8766,
+      "step": 953
+    },
+    {
+      "epoch": 0.11936843586996473,
+      "grad_norm": 0.1049657091498375,
+      "learning_rate": 5.926214020177074e-05,
+      "loss": 6.8536,
+      "step": 954
+    },
+    {
+      "epoch": 0.11949356001657895,
+      "grad_norm": 0.085104800760746,
+      "learning_rate": 5.9069674591876534e-05,
+      "loss": 6.864,
+      "step": 955
+    },
+    {
+      "epoch": 0.11961868416319317,
+      "grad_norm": 0.05738501250743866,
+      "learning_rate": 5.887739094043923e-05,
+      "loss": 6.8682,
+      "step": 956
+    },
+    {
+      "epoch": 0.11974380830980738,
+      "grad_norm": 0.087504543364048,
+      "learning_rate": 5.868529010226845e-05,
+      "loss": 6.8728,
+      "step": 957
+    },
+    {
+      "epoch": 0.1198689324564216,
+      "grad_norm": 0.061660073697566986,
+      "learning_rate": 5.849337293136112e-05,
+      "loss": 6.864,
+      "step": 958
+    },
+    {
+      "epoch": 0.11999405660303583,
+      "grad_norm": 0.06358201056718826,
+      "learning_rate": 5.830164028089766e-05,
+      "loss": 6.8676,
+      "step": 959
+    },
+    {
+      "epoch": 0.12011918074965004,
+      "grad_norm": 0.06568393856287003,
+      "learning_rate": 5.811009300323818e-05,
+      "loss": 6.8548,
+      "step": 960
+    },
+    {
+      "epoch": 0.12024430489626427,
+      "grad_norm": 0.055418290197849274,
+      "learning_rate": 5.791873194991872e-05,
+      "loss": 6.8622,
+      "step": 961
+    },
+    {
+      "epoch": 0.12036942904287848,
+      "grad_norm": 0.0675298199057579,
+      "learning_rate": 5.7727557971647427e-05,
+      "loss": 6.8638,
+      "step": 962
+    },
+    {
+      "epoch": 0.1204945531894927,
+      "grad_norm": 0.06013267859816551,
+      "learning_rate": 5.7536571918300864e-05,
+      "loss": 6.8618,
+      "step": 963
+    },
+    {
+      "epoch": 0.12061967733610691,
+      "grad_norm": 0.06305553019046783,
+      "learning_rate": 5.734577463892008e-05,
+      "loss": 6.8594,
+      "step": 964
+    },
+    {
+      "epoch": 0.12074480148272114,
+      "grad_norm": 0.05761939287185669,
+      "learning_rate": 5.7155166981706956e-05,
+      "loss": 6.8571,
+      "step": 965
+    },
+    {
+      "epoch": 0.12086992562933535,
+      "grad_norm": 0.060438401997089386,
+      "learning_rate": 5.6964749794020354e-05,
+      "loss": 6.8574,
+      "step": 966
+    },
+    {
+      "epoch": 0.12099504977594958,
+      "grad_norm": 0.06701270490884781,
+      "learning_rate": 5.6774523922372394e-05,
+      "loss": 6.8553,
+      "step": 967
+    },
+    {
+      "epoch": 0.12112017392256379,
+      "grad_norm": 0.06402300298213959,
+      "learning_rate": 5.6584490212424804e-05,
+      "loss": 6.8507,
+      "step": 968
+    },
+    {
+      "epoch": 0.12124529806917801,
+      "grad_norm": 0.0751137062907219,
+      "learning_rate": 5.639464950898491e-05,
+      "loss": 6.8517,
+      "step": 969
+    },
+    {
+      "epoch": 0.12137042221579224,
+      "grad_norm": 0.06476029753684998,
+      "learning_rate": 5.620500265600206e-05,
+      "loss": 6.8511,
+      "step": 970
+    },
+    {
+      "epoch": 0.12149554636240645,
+      "grad_norm": 0.05707380548119545,
+      "learning_rate": 5.601555049656382e-05,
+      "loss": 6.8485,
+      "step": 971
+    },
+    {
+      "epoch": 0.12162067050902067,
+      "grad_norm": 0.06374350935220718,
+      "learning_rate": 5.58262938728922e-05,
+      "loss": 6.8468,
+      "step": 972
+    },
+    {
+      "epoch": 0.12174579465563488,
+      "grad_norm": 0.05225362256169319,
+      "learning_rate": 5.563723362634008e-05,
6835 |
+
"loss": 6.845,
|
6836 |
+
"step": 973
|
6837 |
+
},
|
6838 |
+
{
|
6839 |
+
"epoch": 0.12187091880224911,
|
6840 |
+
"grad_norm": 0.06538752466440201,
|
6841 |
+
"learning_rate": 5.544837059738719e-05,
|
6842 |
+
"loss": 6.8445,
|
6843 |
+
"step": 974
|
6844 |
+
},
|
6845 |
+
{
|
6846 |
+
"epoch": 0.12199604294886332,
|
6847 |
+
"grad_norm": 0.057842180132865906,
|
6848 |
+
"learning_rate": 5.525970562563656e-05,
|
6849 |
+
"loss": 6.8452,
|
6850 |
+
"step": 975
|
6851 |
+
},
|
6852 |
+
{
|
6853 |
+
"epoch": 0.12212116709547755,
|
6854 |
+
"grad_norm": 0.05999337136745453,
|
6855 |
+
"learning_rate": 5.507123954981073e-05,
|
6856 |
+
"loss": 6.8448,
|
6857 |
+
"step": 976
|
6858 |
+
},
|
6859 |
+
{
|
6860 |
+
"epoch": 0.12224629124209176,
|
6861 |
+
"grad_norm": 0.06252874433994293,
|
6862 |
+
"learning_rate": 5.488297320774807e-05,
|
6863 |
+
"loss": 6.8421,
|
6864 |
+
"step": 977
|
6865 |
+
},
|
6866 |
+
{
|
6867 |
+
"epoch": 0.12237141538870598,
|
6868 |
+
"grad_norm": 0.06220080703496933,
|
6869 |
+
"learning_rate": 5.4694907436399e-05,
|
6870 |
+
"loss": 6.8408,
|
6871 |
+
"step": 978
|
6872 |
+
},
|
6873 |
+
{
|
6874 |
+
"epoch": 0.1224965395353202,
|
6875 |
+
"grad_norm": 0.05970214679837227,
|
6876 |
+
"learning_rate": 5.4507043071822284e-05,
|
6877 |
+
"loss": 6.8397,
|
6878 |
+
"step": 979
|
6879 |
+
},
|
6880 |
+
{
|
6881 |
+
"epoch": 0.12262166368193442,
|
6882 |
+
"grad_norm": 0.06806926429271698,
|
6883 |
+
"learning_rate": 5.431938094918132e-05,
|
6884 |
+
"loss": 6.8399,
|
6885 |
+
"step": 980
|
6886 |
+
},
|
6887 |
+
{
|
6888 |
+
"epoch": 0.12274678782854864,
|
6889 |
+
"grad_norm": 0.06218835338950157,
|
6890 |
+
"learning_rate": 5.41319219027404e-05,
|
6891 |
+
"loss": 6.8363,
|
6892 |
+
"step": 981
|
6893 |
+
},
|
6894 |
+
{
|
6895 |
+
"epoch": 0.12287191197516285,
|
6896 |
+
"grad_norm": 0.0615081824362278,
|
6897 |
+
"learning_rate": 5.394466676586114e-05,
|
6898 |
+
"loss": 6.8348,
|
6899 |
+
"step": 982
|
6900 |
+
},
|
6901 |
+
{
|
6902 |
+
"epoch": 0.12299703612177708,
|
6903 |
+
"grad_norm": 0.05896433815360069,
|
6904 |
+
"learning_rate": 5.375761637099854e-05,
|
6905 |
+
"loss": 6.8335,
|
6906 |
+
"step": 983
|
6907 |
+
},
|
6908 |
+
{
|
6909 |
+
"epoch": 0.12312216026839129,
|
6910 |
+
"grad_norm": 0.063568115234375,
|
6911 |
+
"learning_rate": 5.357077154969742e-05,
|
6912 |
+
"loss": 6.8362,
|
6913 |
+
"step": 984
|
6914 |
+
},
|
6915 |
+
{
|
6916 |
+
"epoch": 0.12324728441500551,
|
6917 |
+
"grad_norm": 0.06212873011827469,
|
6918 |
+
"learning_rate": 5.3384133132588784e-05,
|
6919 |
+
"loss": 6.8325,
|
6920 |
+
"step": 985
|
6921 |
+
},
|
6922 |
+
{
|
6923 |
+
"epoch": 0.12337240856161973,
|
6924 |
+
"grad_norm": 0.07271798700094223,
|
6925 |
+
"learning_rate": 5.3197701949386e-05,
|
6926 |
+
"loss": 6.8326,
|
6927 |
+
"step": 986
|
6928 |
+
},
|
6929 |
+
{
|
6930 |
+
"epoch": 0.12349753270823395,
|
6931 |
+
"grad_norm": 0.06554724276065826,
|
6932 |
+
"learning_rate": 5.301147882888116e-05,
|
6933 |
+
"loss": 6.8327,
|
6934 |
+
"step": 987
|
6935 |
+
},
|
6936 |
+
{
|
6937 |
+
"epoch": 0.12362265685484818,
|
6938 |
+
"grad_norm": 0.07189320772886276,
|
6939 |
+
"learning_rate": 5.28254645989414e-05,
|
6940 |
+
"loss": 6.8263,
|
6941 |
+
"step": 988
|
6942 |
+
},
|
6943 |
+
{
|
6944 |
+
"epoch": 0.12374778100146239,
|
6945 |
+
"grad_norm": 0.062407851219177246,
|
6946 |
+
"learning_rate": 5.2639660086505226e-05,
|
6947 |
+
"loss": 6.8253,
|
6948 |
+
"step": 989
|
6949 |
+
},
|
6950 |
+
{
|
6951 |
+
"epoch": 0.12387290514807661,
|
6952 |
+
"grad_norm": 0.06889786571264267,
|
6953 |
+
"learning_rate": 5.2454066117578815e-05,
|
6954 |
+
"loss": 6.8229,
|
6955 |
+
"step": 990
|
6956 |
+
},
|
6957 |
+
{
|
6958 |
+
"epoch": 0.12399802929469082,
|
6959 |
+
"grad_norm": 0.06396161764860153,
|
6960 |
+
"learning_rate": 5.226868351723244e-05,
|
6961 |
+
"loss": 6.8219,
|
6962 |
+
"step": 991
|
6963 |
+
},
|
6964 |
+
{
|
6965 |
+
"epoch": 0.12412315344130505,
|
6966 |
+
"grad_norm": 0.05977191403508186,
|
6967 |
+
"learning_rate": 5.2083513109596616e-05,
|
6968 |
+
"loss": 6.823,
|
6969 |
+
"step": 992
|
6970 |
+
},
|
6971 |
+
{
|
6972 |
+
"epoch": 0.12424827758791926,
|
6973 |
+
"grad_norm": 0.07001759111881256,
|
6974 |
+
"learning_rate": 5.189855571785859e-05,
|
6975 |
+
"loss": 6.8181,
|
6976 |
+
"step": 993
|
6977 |
+
},
|
6978 |
+
{
|
6979 |
+
"epoch": 0.12437340173453348,
|
6980 |
+
"grad_norm": 0.076796755194664,
|
6981 |
+
"learning_rate": 5.171381216425863e-05,
|
6982 |
+
"loss": 6.8115,
|
6983 |
+
"step": 994
|
6984 |
+
},
|
6985 |
+
{
|
6986 |
+
"epoch": 0.1244985258811477,
|
6987 |
+
"grad_norm": 0.08506302535533905,
|
6988 |
+
"learning_rate": 5.152928327008635e-05,
|
6989 |
+
"loss": 6.811,
|
6990 |
+
"step": 995
|
6991 |
+
},
|
6992 |
+
{
|
6993 |
+
"epoch": 0.12462365002776192,
|
6994 |
+
"grad_norm": 0.07255339622497559,
|
6995 |
+
"learning_rate": 5.134496985567714e-05,
|
6996 |
+
"loss": 6.8082,
|
6997 |
+
"step": 996
|
6998 |
+
},
|
6999 |
+
{
|
7000 |
+
"epoch": 0.12474877417437615,
|
7001 |
+
"grad_norm": 0.08383986353874207,
|
7002 |
+
"learning_rate": 5.116087274040837e-05,
|
7003 |
+
"loss": 6.8022,
|
7004 |
+
"step": 997
|
7005 |
+
},
|
7006 |
+
{
|
7007 |
+
"epoch": 0.12487389832099036,
|
7008 |
+
"grad_norm": 0.12946462631225586,
|
7009 |
+
"learning_rate": 5.0976992742695925e-05,
|
7010 |
+
"loss": 6.7992,
|
7011 |
+
"step": 998
|
7012 |
+
},
|
7013 |
+
{
|
7014 |
+
"epoch": 0.12499902246760458,
|
7015 |
+
"grad_norm": 0.10370881855487823,
|
7016 |
+
"learning_rate": 5.07933306799904e-05,
|
7017 |
+
"loss": 6.7887,
|
7018 |
+
"step": 999
|
7019 |
+
},
|
7020 |
+
{
|
7021 |
+
"epoch": 0.1251241466142188,
|
7022 |
+
"grad_norm": 0.11729452013969421,
|
7023 |
+
"learning_rate": 5.060988736877366e-05,
|
7024 |
+
"loss": 6.7613,
|
7025 |
+
"step": 1000
|
7026 |
+
},
|
7027 |
+
{
|
7028 |
+
"epoch": 0.125249270760833,
|
7029 |
+
"grad_norm": 0.12090456485748291,
|
7030 |
+
"learning_rate": 5.042666362455498e-05,
|
7031 |
+
"loss": 6.8519,
|
7032 |
+
"step": 1001
|
7033 |
+
},
|
7034 |
+
{
|
7035 |
+
"epoch": 0.12537439490744723,
|
7036 |
+
"grad_norm": 0.09242799878120422,
|
7037 |
+
"learning_rate": 5.024366026186755e-05,
|
7038 |
+
"loss": 6.8678,
|
7039 |
+
"step": 1002
|
7040 |
+
},
|
7041 |
+
{
|
7042 |
+
"epoch": 0.12549951905406145,
|
7043 |
+
"grad_norm": 0.07915151119232178,
|
7044 |
+
"learning_rate": 5.006087809426496e-05,
|
7045 |
+
"loss": 6.8749,
|
7046 |
+
"step": 1003
|
7047 |
+
},
|
7048 |
+
{
|
7049 |
+
"epoch": 0.12562464320067568,
|
7050 |
+
"grad_norm": 0.10022272914648056,
|
7051 |
+
"learning_rate": 4.987831793431731e-05,
|
7052 |
+
"loss": 6.8535,
|
7053 |
+
"step": 1004
|
7054 |
+
},
|
7055 |
+
{
|
7056 |
+
"epoch": 0.12574976734728988,
|
7057 |
+
"grad_norm": 0.08195165544748306,
|
7058 |
+
"learning_rate": 4.9695980593607817e-05,
|
7059 |
+
"loss": 6.866,
|
7060 |
+
"step": 1005
|
7061 |
+
},
|
7062 |
+
{
|
7063 |
+
"epoch": 0.1258748914939041,
|
7064 |
+
"grad_norm": 0.07860789448022842,
|
7065 |
+
"learning_rate": 4.9513866882729146e-05,
|
7066 |
+
"loss": 6.8531,
|
7067 |
+
"step": 1006
|
7068 |
+
},
|
7069 |
+
{
|
7070 |
+
"epoch": 0.12600001564051833,
|
7071 |
+
"grad_norm": 0.06036710739135742,
|
7072 |
+
"learning_rate": 4.9331977611279777e-05,
|
7073 |
+
"loss": 6.8647,
|
7074 |
+
"step": 1007
|
7075 |
+
},
|
7076 |
+
{
|
7077 |
+
"epoch": 0.12612513978713255,
|
7078 |
+
"grad_norm": 0.06800255179405212,
|
7079 |
+
"learning_rate": 4.9150313587860433e-05,
|
7080 |
+
"loss": 6.8689,
|
7081 |
+
"step": 1008
|
7082 |
+
},
|
7083 |
+
{
|
7084 |
+
"epoch": 0.12625026393374678,
|
7085 |
+
"grad_norm": 0.06008204072713852,
|
7086 |
+
"learning_rate": 4.896887562007054e-05,
|
7087 |
+
"loss": 6.8642,
|
7088 |
+
"step": 1009
|
7089 |
+
},
|
7090 |
+
{
|
7091 |
+
"epoch": 0.12637538808036097,
|
7092 |
+
"grad_norm": 0.059629153460264206,
|
7093 |
+
"learning_rate": 4.8787664514504504e-05,
|
7094 |
+
"loss": 6.8625,
|
7095 |
+
"step": 1010
|
7096 |
+
},
|
7097 |
+
{
|
7098 |
+
"epoch": 0.1265005122269752,
|
7099 |
+
"grad_norm": 0.06875478476285934,
|
7100 |
+
"learning_rate": 4.860668107674823e-05,
|
7101 |
+
"loss": 6.8644,
|
7102 |
+
"step": 1011
|
7103 |
+
},
|
7104 |
+
{
|
7105 |
+
"epoch": 0.12662563637358942,
|
7106 |
+
"grad_norm": 0.05550326406955719,
|
7107 |
+
"learning_rate": 4.8425926111375506e-05,
|
7108 |
+
"loss": 6.8572,
|
7109 |
+
"step": 1012
|
7110 |
+
},
|
7111 |
+
{
|
7112 |
+
"epoch": 0.12675076052020365,
|
7113 |
+
"grad_norm": 0.05815112590789795,
|
7114 |
+
"learning_rate": 4.824540042194443e-05,
|
7115 |
+
"loss": 6.8634,
|
7116 |
+
"step": 1013
|
7117 |
+
},
|
7118 |
+
{
|
7119 |
+
"epoch": 0.12687588466681785,
|
7120 |
+
"grad_norm": 0.06269997358322144,
|
7121 |
+
"learning_rate": 4.8065104810993856e-05,
|
7122 |
+
"loss": 6.8576,
|
7123 |
+
"step": 1014
|
7124 |
+
},
|
7125 |
+
{
|
7126 |
+
"epoch": 0.12700100881343207,
|
7127 |
+
"grad_norm": 0.06365355849266052,
|
7128 |
+
"learning_rate": 4.788504008003978e-05,
|
7129 |
+
"loss": 6.8597,
|
7130 |
+
"step": 1015
|
7131 |
+
},
|
7132 |
+
{
|
7133 |
+
"epoch": 0.1271261329600463,
|
7134 |
+
"grad_norm": 0.06040506809949875,
|
7135 |
+
"learning_rate": 4.770520702957182e-05,
|
7136 |
+
"loss": 6.8565,
|
7137 |
+
"step": 1016
|
7138 |
+
},
|
7139 |
+
{
|
7140 |
+
"epoch": 0.12725125710666052,
|
7141 |
+
"grad_norm": 0.05758778750896454,
|
7142 |
+
"learning_rate": 4.752560645904962e-05,
|
7143 |
+
"loss": 6.8554,
|
7144 |
+
"step": 1017
|
7145 |
+
},
|
7146 |
+
{
|
7147 |
+
"epoch": 0.12737638125327475,
|
7148 |
+
"grad_norm": 0.05948428809642792,
|
7149 |
+
"learning_rate": 4.734623916689941e-05,
|
7150 |
+
"loss": 6.8545,
|
7151 |
+
"step": 1018
|
7152 |
+
},
|
7153 |
+
{
|
7154 |
+
"epoch": 0.12750150539988894,
|
7155 |
+
"grad_norm": 0.04706620052456856,
|
7156 |
+
"learning_rate": 4.716710595051022e-05,
|
7157 |
+
"loss": 6.8528,
|
7158 |
+
"step": 1019
|
7159 |
+
},
|
7160 |
+
{
|
7161 |
+
"epoch": 0.12762662954650317,
|
7162 |
+
"grad_norm": 0.058379221707582474,
|
7163 |
+
"learning_rate": 4.698820760623064e-05,
|
7164 |
+
"loss": 6.8506,
|
7165 |
+
"step": 1020
|
7166 |
+
},
|
7167 |
+
{
|
7168 |
+
"epoch": 0.1277517536931174,
|
7169 |
+
"grad_norm": 0.04983724653720856,
|
7170 |
+
"learning_rate": 4.6809544929365004e-05,
|
7171 |
+
"loss": 6.8472,
|
7172 |
+
"step": 1021
|
7173 |
+
},
|
7174 |
+
{
|
7175 |
+
"epoch": 0.12787687783973162,
|
7176 |
+
"grad_norm": 0.06882207095623016,
|
7177 |
+
"learning_rate": 4.663111871417e-05,
|
7178 |
+
"loss": 6.8441,
|
7179 |
+
"step": 1022
|
7180 |
+
},
|
7181 |
+
{
|
7182 |
+
"epoch": 0.12800200198634581,
|
7183 |
+
"grad_norm": 0.05252334475517273,
|
7184 |
+
"learning_rate": 4.645292975385111e-05,
|
7185 |
+
"loss": 6.8487,
|
7186 |
+
"step": 1023
|
7187 |
+
},
|
7188 |
+
{
|
7189 |
+
"epoch": 0.12812712613296004,
|
7190 |
+
"grad_norm": 0.05272122099995613,
|
7191 |
+
"learning_rate": 4.627497884055912e-05,
|
7192 |
+
"loss": 6.8447,
|
7193 |
+
"step": 1024
|
7194 |
+
},
|
7195 |
+
{
|
7196 |
+
"epoch": 0.12825225027957426,
|
7197 |
+
"grad_norm": 0.05039634928107262,
|
7198 |
+
"learning_rate": 4.609726676538652e-05,
|
7199 |
+
"loss": 6.8434,
|
7200 |
+
"step": 1025
|
7201 |
+
},
|
7202 |
+
{
|
7203 |
+
"epoch": 0.1283773744261885,
|
7204 |
+
"grad_norm": 0.06072818860411644,
|
7205 |
+
"learning_rate": 4.591979431836402e-05,
|
7206 |
+
"loss": 6.8387,
|
7207 |
+
"step": 1026
|
7208 |
+
},
|
7209 |
+
{
|
7210 |
+
"epoch": 0.12850249857280271,
|
7211 |
+
"grad_norm": 0.052245255559682846,
|
7212 |
+
"learning_rate": 4.574256228845706e-05,
|
7213 |
+
"loss": 6.8429,
|
7214 |
+
"step": 1027
|
7215 |
+
},
|
7216 |
+
{
|
7217 |
+
"epoch": 0.1286276227194169,
|
7218 |
+
"grad_norm": 0.05823485553264618,
|
7219 |
+
"learning_rate": 4.5565571463562365e-05,
|
7220 |
+
"loss": 6.8427,
|
7221 |
+
"step": 1028
|
7222 |
+
},
|
7223 |
+
{
|
7224 |
+
"epoch": 0.12875274686603114,
|
7225 |
+
"grad_norm": 0.06420707702636719,
|
7226 |
+
"learning_rate": 4.5388822630504256e-05,
|
7227 |
+
"loss": 6.8364,
|
7228 |
+
"step": 1029
|
7229 |
+
},
|
7230 |
+
{
|
7231 |
+
"epoch": 0.12887787101264536,
|
7232 |
+
"grad_norm": 0.05722834914922714,
|
7233 |
+
"learning_rate": 4.521231657503132e-05,
|
7234 |
+
"loss": 6.837,
|
7235 |
+
"step": 1030
|
7236 |
+
},
|
7237 |
+
{
|
7238 |
+
"epoch": 0.1290029951592596,
|
7239 |
+
"grad_norm": 0.06068609282374382,
|
7240 |
+
"learning_rate": 4.503605408181286e-05,
|
7241 |
+
"loss": 6.8384,
|
7242 |
+
"step": 1031
|
7243 |
+
},
|
7244 |
+
{
|
7245 |
+
"epoch": 0.12912811930587378,
|
7246 |
+
"grad_norm": 0.05515401065349579,
|
7247 |
+
"learning_rate": 4.486003593443537e-05,
|
7248 |
+
"loss": 6.8369,
|
7249 |
+
"step": 1032
|
7250 |
+
},
|
7251 |
+
{
|
7252 |
+
"epoch": 0.129253243452488,
|
7253 |
+
"grad_norm": 0.05642779543995857,
|
7254 |
+
"learning_rate": 4.468426291539914e-05,
|
7255 |
+
"loss": 6.8333,
|
7256 |
+
"step": 1033
|
7257 |
+
},
|
7258 |
+
{
|
7259 |
+
"epoch": 0.12937836759910223,
|
7260 |
+
"grad_norm": 0.07462010532617569,
|
7261 |
+
"learning_rate": 4.4508735806114654e-05,
|
7262 |
+
"loss": 6.8316,
|
7263 |
+
"step": 1034
|
7264 |
+
},
|
7265 |
+
{
|
7266 |
+
"epoch": 0.12950349174571646,
|
7267 |
+
"grad_norm": 0.0689237043261528,
|
7268 |
+
"learning_rate": 4.433345538689929e-05,
|
7269 |
+
"loss": 6.8344,
|
7270 |
+
"step": 1035
|
7271 |
+
},
|
7272 |
+
{
|
7273 |
+
"epoch": 0.12962861589233068,
|
7274 |
+
"grad_norm": 0.0574100986123085,
|
7275 |
+
"learning_rate": 4.415842243697369e-05,
|
7276 |
+
"loss": 6.8293,
|
7277 |
+
"step": 1036
|
7278 |
+
},
|
7279 |
+
{
|
7280 |
+
"epoch": 0.12975374003894488,
|
7281 |
+
"grad_norm": 0.07493086904287338,
|
7282 |
+
"learning_rate": 4.39836377344583e-05,
|
7283 |
+
"loss": 6.8341,
|
7284 |
+
"step": 1037
|
7285 |
+
},
|
7286 |
+
{
|
7287 |
+
"epoch": 0.1298788641855591,
|
7288 |
+
"grad_norm": 0.060792844742536545,
|
7289 |
+
"learning_rate": 4.380910205637012e-05,
|
7290 |
+
"loss": 6.8281,
|
7291 |
+
"step": 1038
|
7292 |
+
},
|
7293 |
+
{
|
7294 |
+
"epoch": 0.13000398833217333,
|
7295 |
+
"grad_norm": 0.08996855467557907,
|
7296 |
+
"learning_rate": 4.363481617861893e-05,
|
7297 |
+
"loss": 6.8238,
|
7298 |
+
"step": 1039
|
7299 |
+
},
|
7300 |
+
{
|
7301 |
+
"epoch": 0.13012911247878756,
|
7302 |
+
"grad_norm": 0.09788423031568527,
|
7303 |
+
"learning_rate": 4.346078087600412e-05,
|
7304 |
+
"loss": 6.8225,
|
7305 |
+
"step": 1040
|
7306 |
+
},
|
7307 |
+
{
|
7308 |
+
"epoch": 0.13025423662540175,
|
7309 |
+
"grad_norm": 0.062340155243873596,
|
7310 |
+
"learning_rate": 4.3286996922211034e-05,
|
7311 |
+
"loss": 6.8259,
|
7312 |
+
"step": 1041
|
7313 |
+
},
|
7314 |
+
{
|
7315 |
+
"epoch": 0.13037936077201598,
|
7316 |
+
"grad_norm": 0.07115781307220459,
|
7317 |
+
"learning_rate": 4.311346508980772e-05,
|
7318 |
+
"loss": 6.8176,
|
7319 |
+
"step": 1042
|
7320 |
+
},
|
7321 |
+
{
|
7322 |
+
"epoch": 0.1305044849186302,
|
7323 |
+
"grad_norm": 0.0779058188199997,
|
7324 |
+
"learning_rate": 4.2940186150241365e-05,
|
7325 |
+
"loss": 6.812,
|
7326 |
+
"step": 1043
|
7327 |
+
},
|
7328 |
+
{
|
7329 |
+
"epoch": 0.13062960906524443,
|
7330 |
+
"grad_norm": 0.0738992691040039,
|
7331 |
+
"learning_rate": 4.27671608738349e-05,
|
7332 |
+
"loss": 6.8132,
|
7333 |
+
"step": 1044
|
7334 |
+
},
|
7335 |
+
{
|
7336 |
+
"epoch": 0.13075473321185865,
|
7337 |
+
"grad_norm": 0.08018433302640915,
|
7338 |
+
"learning_rate": 4.2594390029783534e-05,
|
7339 |
+
"loss": 6.8091,
|
7340 |
+
"step": 1045
|
7341 |
+
},
|
7342 |
+
{
|
7343 |
+
"epoch": 0.13087985735847285,
|
7344 |
+
"grad_norm": 0.08278074860572815,
|
7345 |
+
"learning_rate": 4.242187438615153e-05,
|
7346 |
+
"loss": 6.8058,
|
7347 |
+
"step": 1046
|
7348 |
+
},
|
7349 |
+
{
|
7350 |
+
"epoch": 0.13100498150508708,
|
7351 |
+
"grad_norm": 0.08070545643568039,
|
7352 |
+
"learning_rate": 4.224961470986849e-05,
|
7353 |
+
"loss": 6.8039,
|
7354 |
+
"step": 1047
|
7355 |
+
},
|
7356 |
+
{
|
7357 |
+
"epoch": 0.1311301056517013,
|
7358 |
+
"grad_norm": 0.07522248476743698,
|
7359 |
+
"learning_rate": 4.207761176672614e-05,
|
7360 |
+
"loss": 6.7942,
|
7361 |
+
"step": 1048
|
7362 |
+
},
|
7363 |
+
{
|
7364 |
+
"epoch": 0.13125522979831553,
|
7365 |
+
"grad_norm": 0.09361255913972855,
|
7366 |
+
"learning_rate": 4.190586632137491e-05,
|
7367 |
+
"loss": 6.7863,
|
7368 |
+
"step": 1049
|
7369 |
+
},
|
7370 |
+
{
|
7371 |
+
"epoch": 0.13138035394492972,
|
7372 |
+
"grad_norm": 0.1457311362028122,
|
7373 |
+
"learning_rate": 4.173437913732048e-05,
|
7374 |
+
"loss": 6.7532,
|
7375 |
+
"step": 1050
|
7376 |
+
},
|
7377 |
+
{
|
7378 |
+
"epoch": 0.13150547809154395,
|
7379 |
+
"grad_norm": 0.11023912578821182,
|
7380 |
+
"learning_rate": 4.156315097692037e-05,
|
7381 |
+
"loss": 6.8567,
|
7382 |
+
"step": 1051
|
7383 |
+
},
|
7384 |
+
{
|
7385 |
+
"epoch": 0.13163060223815817,
|
7386 |
+
"grad_norm": 0.11056804656982422,
|
7387 |
+
"learning_rate": 4.139218260138074e-05,
|
7388 |
+
"loss": 6.856,
|
7389 |
+
"step": 1052
|
7390 |
+
},
|
7391 |
+
{
|
7392 |
+
"epoch": 0.1317557263847724,
|
7393 |
+
"grad_norm": 0.0829029530286789,
|
7394 |
+
"learning_rate": 4.12214747707527e-05,
|
7395 |
+
"loss": 6.864,
|
7396 |
+
"step": 1053
|
7397 |
+
},
|
7398 |
+
{
|
7399 |
+
"epoch": 0.13188085053138662,
|
7400 |
+
"grad_norm": 0.0777166411280632,
|
7401 |
+
"learning_rate": 4.1051028243929125e-05,
|
7402 |
+
"loss": 6.8561,
|
7403 |
+
"step": 1054
|
7404 |
+
},
|
7405 |
+
{
|
7406 |
+
"epoch": 0.13200597467800082,
|
7407 |
+
"grad_norm": 0.06604031473398209,
|
7408 |
+
"learning_rate": 4.088084377864135e-05,
|
7409 |
+
"loss": 6.8584,
|
7410 |
+
"step": 1055
|
7411 |
+
},
|
7412 |
+
{
|
7413 |
+
"epoch": 0.13213109882461505,
|
7414 |
+
"grad_norm": 0.07673287391662598,
|
7415 |
+
"learning_rate": 4.07109221314556e-05,
|
7416 |
+
"loss": 6.8644,
|
7417 |
+
"step": 1056
|
7418 |
+
},
|
7419 |
+
{
|
7420 |
+
"epoch": 0.13225622297122927,
|
7421 |
+
"grad_norm": 0.07796303927898407,
|
7422 |
+
"learning_rate": 4.054126405776971e-05,
|
7423 |
+
"loss": 6.861,
|
7424 |
+
"step": 1057
|
7425 |
+
},
|
7426 |
+
{
|
7427 |
+
"epoch": 0.1323813471178435,
|
7428 |
+
"grad_norm": 0.061380449682474136,
|
7429 |
+
"learning_rate": 4.037187031180985e-05,
|
7430 |
+
"loss": 6.8683,
|
7431 |
+
"step": 1058
|
7432 |
+
},
|
7433 |
+
{
|
7434 |
+
"epoch": 0.1325064712644577,
|
7435 |
+
"grad_norm": 0.06435754150152206,
|
7436 |
+
"learning_rate": 4.020274164662707e-05,
|
7437 |
+
"loss": 6.8672,
|
7438 |
+
"step": 1059
|
7439 |
+
},
|
7440 |
+
{
|
7441 |
+
"epoch": 0.13263159541107192,
|
7442 |
+
"grad_norm": 0.069906085729599,
|
7443 |
+
"learning_rate": 4.003387881409397e-05,
|
7444 |
+
"loss": 6.8546,
|
7445 |
+
"step": 1060
|
7446 |
+
},
|
7447 |
+
{
|
7448 |
+
"epoch": 0.13275671955768614,
|
7449 |
+
"grad_norm": 0.05770441144704819,
|
7450 |
+
"learning_rate": 3.986528256490141e-05,
|
7451 |
+
"loss": 6.8672,
|
7452 |
+
"step": 1061
|
7453 |
+
},
|
7454 |
+
{
|
7455 |
+
"epoch": 0.13288184370430037,
|
7456 |
+
"grad_norm": 0.06263688951730728,
|
7457 |
+
"learning_rate": 3.969695364855511e-05,
|
7458 |
+
"loss": 6.86,
|
7459 |
+
"step": 1062
|
7460 |
+
},
|
7461 |
+
{
|
7462 |
+
"epoch": 0.1330069678509146,
|
7463 |
+
"grad_norm": 0.057067472487688065,
|
7464 |
+
"learning_rate": 3.952889281337235e-05,
|
7465 |
+
"loss": 6.8575,
|
7466 |
+
"step": 1063
|
7467 |
+
},
|
7468 |
+
{
|
7469 |
+
"epoch": 0.1331320919975288,
|
7470 |
+
"grad_norm": 0.058888472616672516,
|
7471 |
+
"learning_rate": 3.93611008064786e-05,
|
7472 |
+
"loss": 6.8564,
|
7473 |
+
"step": 1064
|
7474 |
+
},
|
7475 |
+
{
|
7476 |
+
"epoch": 0.13325721614414301,
|
7477 |
+
"grad_norm": 0.06526350229978561,
|
7478 |
+
"learning_rate": 3.9193578373804364e-05,
|
7479 |
+
"loss": 6.8573,
|
7480 |
+
"step": 1065
|
7481 |
+
},
|
7482 |
+
{
|
7483 |
+
"epoch": 0.13338234029075724,
|
7484 |
+
"grad_norm": 0.053352899849414825,
|
7485 |
+
"learning_rate": 3.90263262600816e-05,
|
7486 |
+
"loss": 6.8556,
|
7487 |
+
"step": 1066
|
7488 |
+
},
|
7489 |
+
{
|
7490 |
+
"epoch": 0.13350746443737146,
|
7491 |
+
"grad_norm": 0.05884247645735741,
|
7492 |
+
"learning_rate": 3.88593452088406e-05,
|
7493 |
+
"loss": 6.8505,
|
7494 |
+
"step": 1067
|
7495 |
+
},
|
7496 |
+
{
|
7497 |
+
"epoch": 0.13363258858398566,
|
7498 |
+
"grad_norm": 0.053777825087308884,
|
7499 |
+
"learning_rate": 3.869263596240661e-05,
|
7500 |
+
"loss": 6.8505,
|
7501 |
+
"step": 1068
|
7502 |
+
},
|
7503 |
+
{
|
7504 |
+
"epoch": 0.1337577127305999,
|
7505 |
+
"grad_norm": 0.04613254964351654,
|
7506 |
+
"learning_rate": 3.8526199261896544e-05,
|
7507 |
+
"loss": 6.8507,
|
7508 |
+
"step": 1069
|
7509 |
+
},
|
7510 |
+
{
|
7511 |
+
"epoch": 0.1338828368772141,
|
7512 |
+
"grad_norm": 0.05576649308204651,
|
7513 |
+
"learning_rate": 3.836003584721577e-05,
|
7514 |
+
"loss": 6.8476,
|
7515 |
+
"step": 1070
|
7516 |
+
},
|
7517 |
+
{
|
7518 |
+
"epoch": 0.13400796102382834,
|
7519 |
+
"grad_norm": 0.05065496265888214,
|
7520 |
+
"learning_rate": 3.8194146457054655e-05,
|
7521 |
+
"loss": 6.8444,
|
7522 |
+
"step": 1071
|
7523 |
+
},
|
7524 |
+
{
|
7525 |
+
"epoch": 0.13413308517044256,
|
7526 |
+
"grad_norm": 0.06763933598995209,
|
7527 |
+
"learning_rate": 3.802853182888543e-05,
|
7528 |
+
"loss": 6.8466,
|
7529 |
+
"step": 1072
|
7530 |
+
},
|
7531 |
+
{
|
7532 |
+
"epoch": 0.13425820931705676,
|
7533 |
+
"grad_norm": 0.0658731460571289,
|
7534 |
+
"learning_rate": 3.786319269895877e-05,
|
7535 |
+
"loss": 6.8461,
|
7536 |
+
"step": 1073
|
7537 |
+
},
|
7538 |
+
{
|
7539 |
+
"epoch": 0.13438333346367098,
|
7540 |
+
"grad_norm": 0.058000680059194565,
|
7541 |
+
"learning_rate": 3.769812980230074e-05,
|
7542 |
+
"loss": 6.8445,
|
7543 |
+
"step": 1074
|
7544 |
+
},
|
7545 |
+
{
|
7546 |
+
"epoch": 0.1345084576102852,
|
7547 |
+
"grad_norm": 0.05953332036733627,
|
7548 |
+
"learning_rate": 3.7533343872709294e-05,
|
7549 |
+
"loss": 6.8419,
|
7550 |
+
"step": 1075
|
7551 |
+
},
|
7552 |
+
{
|
7553 |
+
"epoch": 0.13463358175689943,
|
7554 |
+
"grad_norm": 0.06550874561071396,
|
7555 |
+
"learning_rate": 3.736883564275112e-05,
|
7556 |
+
"loss": 6.8379,
|
7557 |
+
"step": 1076
|
7558 |
+
},
|
7559 |
+
{
|
7560 |
+
"epoch": 0.13475870590351363,
|
7561 |
+
"grad_norm": 0.05789563059806824,
|
7562 |
+
"learning_rate": 3.7204605843758386e-05,
|
7563 |
+
"loss": 6.8419,
|
7564 |
+
"step": 1077
|
7565 |
+
},
|
7566 |
+
{
|
7567 |
+
"epoch": 0.13488383005012786,
|
7568 |
+
"grad_norm": 0.05452827364206314,
|
7569 |
+
"learning_rate": 3.704065520582549e-05,
|
7570 |
+
"loss": 6.8405,
|
7571 |
+
"step": 1078
|
7572 |
+
},
|
7573 |
+
{
|
7574 |
+
"epoch": 0.13500895419674208,
|
7575 |
+
"grad_norm": 0.06073261424899101,
|
7576 |
+
"learning_rate": 3.6876984457805786e-05,
|
7577 |
+
"loss": 6.8416,
|
7578 |
+
"step": 1079
|
7579 |
+
},
|
7580 |
+
{
|
7581 |
+
"epoch": 0.1351340783433563,
|
7582 |
+
"grad_norm": 0.04960978776216507,
|
7583 |
+
"learning_rate": 3.671359432730834e-05,
|
7584 |
+
"loss": 6.836,
|
7585 |
+
"step": 1080
|
7586 |
+
},
|
7587 |
+
{
|
7588 |
+
"epoch": 0.13525920248997053,
|
7589 |
+
"grad_norm": 0.06479854136705399,
|
7590 |
+
"learning_rate": 3.655048554069478e-05,
|
7591 |
+
"loss": 6.836,
|
7592 |
+
"step": 1081
|
7593 |
+
},
|
7594 |
+
{
|
7595 |
+
"epoch": 0.13538432663658473,
|
7596 |
+
"grad_norm": 0.057061318308115005,
|
7597 |
+
"learning_rate": 3.638765882307589e-05,
|
7598 |
+
"loss": 6.8331,
|
7599 |
+
"step": 1082
|
7600 |
+
},
|
7601 |
+
{
|
7602 |
+
"epoch": 0.13550945078319895,
|
7603 |
+
"grad_norm": 0.05493835359811783,
|
7604 |
+
"learning_rate": 3.6225114898308634e-05,
|
7605 |
+
"loss": 6.8354,
|
7606 |
+
"step": 1083
|
7607 |
+
},
|
7608 |
+
{
|
7609 |
+
"epoch": 0.13563457492981318,
|
7610 |
+
"grad_norm": 0.06502033025026321,
|
7611 |
+
"learning_rate": 3.6062854488992714e-05,
|
7612 |
+
"loss": 6.8291,
|
7613 |
+
"step": 1084
|
7614 |
+
},
|
7615 |
+
{
|
7616 |
+
"epoch": 0.1357596990764274,
|
7617 |
+
"grad_norm": 0.05362612009048462,
|
7618 |
+
"learning_rate": 3.5900878316467454e-05,
|
7619 |
+
"loss": 6.8302,
|
7620 |
+
"step": 1085
|
7621 |
+
},
|
7622 |
+
{
|
7623 |
+
"epoch": 0.1358848232230416,
|
7624 |
+
"grad_norm": 0.053463976830244064,
|
7625 |
+
"learning_rate": 3.573918710080857e-05,
|
7626 |
+
"loss": 6.8263,
|
7627 |
+
"step": 1086
|
7628 |
+
},
|
7629 |
+
{
|
7630 |
+
"epoch": 0.13600994736965583,
|
7631 |
+
"grad_norm": 0.06826691329479218,
|
7632 |
+
"learning_rate": 3.5577781560825066e-05,
|
7633 |
+
"loss": 6.8255,
|
7634 |
+
"step": 1087
|
7635 |
+
},
|
7636 |
+
{
|
7637 |
+
"epoch": 0.13613507151627005,
|
7638 |
+
"grad_norm": 0.07258055359125137,
|
7639 |
+
"learning_rate": 3.541666241405588e-05,
|
7640 |
+
"loss": 6.8246,
|
7641 |
+
"step": 1088
|
7642 |
+
},
|
7643 |
+
{
|
7644 |
+
"epoch": 0.13626019566288428,
|
7645 |
+
"grad_norm": 0.0681692585349083,
|
7646 |
+
"learning_rate": 3.5255830376766764e-05,
|
7647 |
+
"loss": 6.821,
|
7648 |
+
"step": 1089
|
7649 |
+
},
|
7650 |
+
{
|
7651 |
+
"epoch": 0.13638531980949847,
|
7652 |
+
"grad_norm": 0.08735627681016922,
|
7653 |
+
"learning_rate": 3.509528616394716e-05,
|
7654 |
+
"loss": 6.8246,
|
7655 |
+
"step": 1090
|
7656 |
+
},
|
7657 |
+
{
|
7658 |
+
"epoch": 0.1365104439561127,
|
7659 |
+
"grad_norm": 0.0646921768784523,
|
7660 |
+
"learning_rate": 3.4935030489306883e-05,
|
7661 |
+
"loss": 6.822,
|
7662 |
+
"step": 1091
|
7663 |
+
},
|
7664 |
+
{
|
7665 |
+
"epoch": 0.13663556810272692,
|
7666 |
+
"grad_norm": 0.07364804297685623,
|
7667 |
+
"learning_rate": 3.4775064065273165e-05,
|
7668 |
+
"loss": 6.8199,
|
7669 |
+
"step": 1092
|
7670 |
+
},
|
7671 |
+
{
|
7672 |
+
"epoch": 0.13676069224934115,
|
7673 |
+
"grad_norm": 0.07897388935089111,
|
7674 |
+
"learning_rate": 3.4615387602987236e-05,
|
7675 |
+
"loss": 6.8132,
|
7676 |
+
"step": 1093
|
7677 |
+
},
|
7678 |
+
{
|
7679 |
+
"epoch": 0.13688581639595537,
|
7680 |
+
"grad_norm": 0.09701348096132278,
|
7681 |
+
"learning_rate": 3.445600181230134e-05,
|
7682 |
+
"loss": 6.8149,
|
7683 |
+
"step": 1094
|
7684 |
+
},
|
7685 |
+
{
|
7686 |
+
"epoch": 0.13701094054256957,
|
7687 |
+
"grad_norm": 0.08542627096176147,
|
7688 |
+
"learning_rate": 3.429690740177549e-05,
|
7689 |
+
"loss": 6.8097,
|
7690 |
+
"step": 1095
|
7691 |
+
},
|
7692 |
+
{
|
7693 |
+
"epoch": 0.1371360646891838,
|
7694 |
+
"grad_norm": 0.07645785808563232,
|
7695 |
+
"learning_rate": 3.413810507867436e-05,
|
7696 |
+
"loss": 6.8076,
|
7697 |
+
"step": 1096
|
7698 |
+
},
|
7699 |
+
{
|
7700 |
+
"epoch": 0.13726118883579802,
|
7701 |
+
"grad_norm": 0.12249205261468887,
|
7702 |
+
"learning_rate": 3.397959554896415e-05,
|
7703 |
+
"loss": 6.8004,
|
7704 |
+
"step": 1097
|
7705 |
+
},
|
7706 |
+
{
|
7707 |
+
"epoch": 0.13738631298241225,
|
7708 |
+
"grad_norm": 0.09605488181114197,
|
7709 |
+
"learning_rate": 3.3821379517309405e-05,
|
7710 |
+
"loss": 6.7911,
|
7711 |
+
"step": 1098
|
7712 |
+
},
|
7713 |
+
{
|
7714 |
+
"epoch": 0.13751143712902644,
|
7715 |
+
"grad_norm": 0.14662940800189972,
|
7716 |
+
"learning_rate": 3.3663457687069924e-05,
|
7717 |
+
"loss": 6.7794,
|
7718 |
+
"step": 1099
|
7719 |
+
},
|
7720 |
+
{
|
7721 |
+
"epoch": 0.13763656127564067,
|
7722 |
+
"grad_norm": 0.1348690390586853,
|
7723 |
+
"learning_rate": 3.350583076029754e-05,
|
7724 |
+
"loss": 6.7481,
|
7725 |
+
"step": 1100
|
7726 |
+
},
|
7727 |
+
{
|
7728 |
+
"epoch": 0.1377616854222549,
|
7729 |
+
"grad_norm": 0.08334768563508987,
|
7730 |
+
"learning_rate": 3.334849943773323e-05,
|
7731 |
+
"loss": 6.8782,
|
7732 |
+
"step": 1101
|
7733 |
+
},
|
7734 |
+
{
|
7735 |
+
"epoch": 0.13788680956886912,
|
7736 |
+
"grad_norm": 0.07065068930387497,
|
7737 |
+
"learning_rate": 3.319146441880371e-05,
|
7738 |
+
"loss": 6.8769,
|
7739 |
+
"step": 1102
|
7740 |
+
},
|
7741 |
+
{
|
7742 |
+
"epoch": 0.13801193371548334,
|
7743 |
+
"grad_norm": 0.07668652385473251,
|
7744 |
+
"learning_rate": 3.3034726401618444e-05,
|
7745 |
+
"loss": 6.8576,
|
7746 |
+
"step": 1103
|
7747 |
+
},
|
7748 |
+
{
|
7749 |
+
"epoch": 0.13813705786209754,
|
7750 |
+
"grad_norm": 0.09782318025827408,
|
7751 |
+
"learning_rate": 3.28782860829667e-05,
|
7752 |
+
"loss": 6.8516,
|
7753 |
+
"step": 1104
|
7754 |
+
},
|
7755 |
+
{
|
7756 |
+
"epoch": 0.13826218200871176,
|
7757 |
+
"grad_norm": 0.07026124000549316,
|
7758 |
+
"learning_rate": 3.272214415831418e-05,
|
7759 |
+
"loss": 6.8561,
|
7760 |
+
"step": 1105
|
7761 |
+
},
|
7762 |
+
{
|
7763 |
+
"epoch": 0.138387306155326,
|
7764 |
+
"grad_norm": 0.05975576490163803,
|
7765 |
+
"learning_rate": 3.2566301321800085e-05,
|
7766 |
+
"loss": 6.8604,
|
7767 |
+
"step": 1106
|
7768 |
+
},
|
7769 |
+
{
|
7770 |
+
"epoch": 0.13851243030194021,
|
7771 |
+
"grad_norm": 0.07976236194372177,
|
7772 |
+
"learning_rate": 3.241075826623401e-05,
|
7773 |
+
"loss": 6.8588,
|
7774 |
+
"step": 1107
|
7775 |
+
},
|
7776 |
+
{
|
7777 |
+
"epoch": 0.1386375544485544,
|
7778 |
+
"grad_norm": 0.06251887232065201,
|
7779 |
+
"learning_rate": 3.225551568309284e-05,
|
7780 |
+
"loss": 6.8608,
|
7781 |
+
"step": 1108
|
7782 |
+
},
|
7783 |
+
{
|
7784 |
+
"epoch": 0.13876267859516864,
|
7785 |
+
"grad_norm": 0.06219517067074776,
|
7786 |
+
"learning_rate": 3.210057426251773e-05,
|
7787 |
+
"loss": 6.8616,
|
7788 |
+
"step": 1109
|
7789 |
+
},
|
7790 |
+
{
|
7791 |
+
"epoch": 0.13888780274178286,
|
7792 |
+
"grad_norm": 0.06574711948633194,
|
7793 |
+
"learning_rate": 3.1945934693310896e-05,
|
7794 |
+
"loss": 6.8637,
|
7795 |
+
"step": 1110
|
7796 |
+
},
|
7797 |
+
{
|
7798 |
+
"epoch": 0.1390129268883971,
|
7799 |
+
"grad_norm": 0.07629784196615219,
|
7800 |
+
"learning_rate": 3.179159766293282e-05,
|
7801 |
+
"loss": 6.8643,
|
7802 |
+
"step": 1111
|
7803 |
+
},
|
7804 |
+
{
|
7805 |
+
"epoch": 0.1391380510350113,
|
7806 |
+
"grad_norm": 0.062084611505270004,
|
7807 |
+
"learning_rate": 3.163756385749889e-05,
|
7808 |
+
"loss": 6.862,
|
7809 |
+
"step": 1112
|
7810 |
+
},
|
7811 |
+
{
|
7812 |
+
"epoch": 0.1392631751816255,
|
7813 |
+
"grad_norm": 0.0663914680480957,
|
7814 |
+
"learning_rate": 3.148383396177653e-05,
|
7815 |
+
"loss": 6.8568,
|
7816 |
+
"step": 1113
|
7817 |
+
},
|
7818 |
+
{
|
7819 |
+
"epoch": 0.13938829932823973,
|
7820 |
+
"grad_norm": 0.06513141095638275,
|
7821 |
+
"learning_rate": 3.133040865918213e-05,
|
7822 |
+
"loss": 6.8629,
|
7823 |
+
"step": 1114
|
7824 |
+
},
|
7825 |
+
{
|
7826 |
+
"epoch": 0.13951342347485396,
|
7827 |
+
"grad_norm": 0.06291124224662781,
|
7828 |
+
"learning_rate": 3.117728863177796e-05,
|
7829 |
+
"loss": 6.8559,
|
7830 |
+
"step": 1115
|
7831 |
+
},
|
7832 |
+
{
|
7833 |
+
"epoch": 0.13963854762146818,
|
7834 |
+
"grad_norm": 0.05424998328089714,
|
7835 |
+
"learning_rate": 3.102447456026919e-05,
|
7836 |
+
"loss": 6.8577,
|
7837 |
+
"step": 1116
|
7838 |
+
},
|
7839 |
+
{
|
7840 |
+
"epoch": 0.13976367176808238,
|
7841 |
+
"grad_norm": 0.06632808595895767,
|
7842 |
+
"learning_rate": 3.0871967124000834e-05,
|
7843 |
+
"loss": 6.8552,
|
7844 |
+
"step": 1117
|
7845 |
+
},
|
7846 |
+
{
|
7847 |
+
"epoch": 0.1398887959146966,
|
7848 |
+
"grad_norm": 0.06717648357152939,
|
7849 |
+
"learning_rate": 3.0719767000954714e-05,
|
7850 |
+
"loss": 6.8552,
|
7851 |
+
"step": 1118
|
7852 |
+
},
|
7853 |
+
{
|
7854 |
+
"epoch": 0.14001392006131083,
|
7855 |
+
"grad_norm": 0.05267615616321564,
|
7856 |
+
"learning_rate": 3.056787486774656e-05,
|
7857 |
+
"loss": 6.8514,
|
7858 |
+
"step": 1119
|
7859 |
+
},
|
7860 |
+
{
|
7861 |
+
"epoch": 0.14013904420792506,
|
7862 |
+
"grad_norm": 0.05303286761045456,
|
7863 |
+
"learning_rate": 3.041629139962283e-05,
|
7864 |
+
"loss": 6.8497,
|
7865 |
+
"step": 1120
|
7866 |
+
},
|
7867 |
+
{
|
7868 |
+
"epoch": 0.14026416835453928,
|
7869 |
+
"grad_norm": 0.06788763403892517,
|
7870 |
+
"learning_rate": 3.0265017270457775e-05,
|
7871 |
+
"loss": 6.8512,
|
7872 |
+
"step": 1121
|
7873 |
+
},
|
7874 |
+
{
|
7875 |
+
"epoch": 0.14038929250115348,
|
7876 |
+
"grad_norm": 0.0560276061296463,
|
7877 |
+
"learning_rate": 3.0114053152750556e-05,
|
7878 |
+
"loss": 6.8478,
|
7879 |
+
"step": 1122
|
7880 |
+
},
|
7881 |
+
{
|
7882 |
+
"epoch": 0.1405144166477677,
|
7883 |
+
"grad_norm": 0.055129554122686386,
|
7884 |
+
"learning_rate": 2.9963399717622077e-05,
|
7885 |
+
"loss": 6.849,
|
7886 |
+
"step": 1123
|
7887 |
+
},
|
7888 |
+
{
|
7889 |
+
"epoch": 0.14063954079438193,
|
7890 |
+
"grad_norm": 0.06273508071899414,
|
7891 |
+
"learning_rate": 2.98130576348121e-05,
|
7892 |
+
"loss": 6.8461,
|
7893 |
+
"step": 1124
|
7894 |
+
},
|
7895 |
+
{
|
7896 |
+
"epoch": 0.14076466494099615,
|
7897 |
+
"grad_norm": 0.05553919076919556,
|
7898 |
+
"learning_rate": 2.966302757267625e-05,
|
7899 |
+
"loss": 6.8436,
|
7900 |
+
"step": 1125
|
7901 |
+
},
|
7902 |
+
{
|
7903 |
+
"epoch": 0.14076466494099615,
|
7904 |
+
"eval_loss": 6.835322380065918,
|
7905 |
+
"eval_runtime": 30.0947,
|
7906 |
+
"eval_samples_per_second": 447.288,
|
7907 |
+
"eval_steps_per_second": 223.661,
|
7908 |
+
"step": 1125
|
7909 |
}
|
7910 |
],
|
7911 |
"logging_steps": 1,
|
|
|
7925 |
"attributes": {}
|
7926 |
}
|
7927 |
},
|
7928 |
+
"total_flos": 12255858266112.0,
|
7929 |
"train_batch_size": 2,
|
7930 |
"trial_name": null,
|
7931 |
"trial_params": null
|
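For reference, a minimal sketch of how the updated `trainer_state.json` can be inspected after downloading the checkpoint; this is not part of the commit, and it assumes a local copy under `last-checkpoint/` with the standard Trainer layout shown above:

```python
# Minimal sketch: summarize the log history recorded in trainer_state.json.
# Assumes last-checkpoint/trainer_state.json has been downloaded locally.
import json

with open("last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

# log_history holds one dict per logged step ("epoch", "grad_norm",
# "learning_rate", "loss", "step") plus periodic eval entries
# ("eval_loss", "eval_runtime", ...) every eval_steps steps.
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]

print(f"global step {state['global_step']}, epoch {state['epoch']:.4f}")
print(f"latest train loss: {train_logs[-1]['loss']}")
if eval_logs:
    last = eval_logs[-1]
    print(f"latest eval loss: {last['eval_loss']} at step {last['step']}")
```

Run against this checkpoint, the sketch would report global step 1125 and the eval loss of 6.8353 logged above.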