cutelemonlili committed
Commit 167c59b · verified · 1 Parent(s): e8355b2

Add files using upload-large-folder tool
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,64 @@
+ ---
+ library_name: transformers
+ license: other
+ base_model: Qwen/Qwen2.5-Coder-7B-Instruct
+ tags:
+ - llama-factory
+ - full
+ - generated_from_trainer
+ model-index:
+ - name: MATH_training_response_Qwen2.5-32B-Instruct_common_correct_level
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # MATH_training_response_Qwen2.5-32B-Instruct_common_correct_level
+
+ This model is a fine-tuned version of [Qwen/Qwen2.5-Coder-7B-Instruct](https://huggingface.co/Qwen/Qwen2.5-Coder-7B-Instruct) on the MATH_training_response_Qwen2.5-32B-Instruct_common_correct_level dataset.
+ It achieves the following results on the evaluation set:
+ - Loss: 0.1231
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 1e-05
+ - train_batch_size: 4
+ - eval_batch_size: 1
+ - seed: 42
+ - distributed_type: multi-GPU
+ - num_devices: 4
+ - total_train_batch_size: 16
+ - total_eval_batch_size: 4
+ - optimizer: adamw_torch with betas=(0.9, 0.999), epsilon=1e-08, and no additional optimizer arguments
+ - lr_scheduler_type: cosine
+ - num_epochs: 2
+
+ ### Training results
+
+ | Training Loss | Epoch  | Step | Validation Loss |
+ |:-------------:|:------:|:----:|:---------------:|
+ | 0.0233        | 1.3699 | 200  | 0.1278          |
+
+
+ ### Framework versions
+
+ - Transformers 4.46.1
+ - Pytorch 2.5.1+cu124
+ - Datasets 3.1.0
+ - Tokenizers 0.20.3
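The generated card ships no usage snippet; below is a minimal loading sketch using the Transformers version listed above. The repo id is an assumption pieced together from the committer name and the model-index name, not something this commit states.

```python
# Minimal loading sketch; the repo id below is assumed, not confirmed by the commit.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "cutelemonlili/MATH_training_response_Qwen2.5-32B-Instruct_common_correct_level"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype="auto", device_map="auto")

messages = [{"role": "user", "content": "What is 12 * 17? Show your work."}]
inputs = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)
output = model.generate(inputs, max_new_tokens=256)
# Decode only the newly generated tokens, skipping the prompt.
print(tokenizer.decode(output[0][inputs.shape[-1]:], skip_special_tokens=True))
```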
added_tokens.json ADDED
@@ -0,0 +1,24 @@
+ {
+ "</tool_call>": 151658,
+ "<tool_call>": 151657,
+ "<|box_end|>": 151649,
+ "<|box_start|>": 151648,
+ "<|endoftext|>": 151643,
+ "<|file_sep|>": 151664,
+ "<|fim_middle|>": 151660,
+ "<|fim_pad|>": 151662,
+ "<|fim_prefix|>": 151659,
+ "<|fim_suffix|>": 151661,
+ "<|im_end|>": 151645,
+ "<|im_start|>": 151644,
+ "<|image_pad|>": 151655,
+ "<|object_ref_end|>": 151647,
+ "<|object_ref_start|>": 151646,
+ "<|quad_end|>": 151651,
+ "<|quad_start|>": 151650,
+ "<|repo_name|>": 151663,
+ "<|video_pad|>": 151656,
+ "<|vision_end|>": 151653,
+ "<|vision_pad|>": 151654,
+ "<|vision_start|>": 151652
+ }
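These entries pin the Qwen2.5 special tokens to fixed vocabulary ids. A quick sanity-check sketch, assuming the files from this commit are checked out in the current directory:

```python
# Verify that a loaded tokenizer resolves the added tokens to the ids above.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(".")
for token in ["<|endoftext|>", "<|im_start|>", "<|im_end|>", "<tool_call>"]:
    print(token, tok.convert_tokens_to_ids(token))
# expected: 151643, 151644, 151645, 151657
```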
all_results.json ADDED
@@ -0,0 +1,12 @@
+ {
+ "epoch": 2.0,
+ "eval_loss": 0.12309124320745468,
+ "eval_runtime": 3.5999,
+ "eval_samples_per_second": 6.667,
+ "eval_steps_per_second": 1.667,
+ "total_flos": 8378343751680.0,
+ "train_loss": 0.06280981206449948,
+ "train_runtime": 833.6474,
+ "train_samples_per_second": 5.587,
+ "train_steps_per_second": 0.35
+ }
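The aggregates are consistent with the card's hyperparameters; a quick arithmetic check using only the values above:

```python
# Cross-check all_results.json against the card's total_train_batch_size of 16.
train_runtime = 833.6474            # seconds, from all_results.json
samples_per_second = 5.587
steps_per_second = 0.35

samples = train_runtime * samples_per_second   # ~4658 samples seen over 2 epochs
steps = train_runtime * steps_per_second       # ~292 optimizer steps
print(round(samples), round(steps), round(samples / steps, 2))  # ratio ~ 16
```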
config.json ADDED
@@ -0,0 +1,29 @@
+ {
+ "_name_or_path": "Qwen/Qwen2.5-Coder-7B-Instruct",
+ "architectures": [
+ "Qwen2ForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "bos_token_id": 151643,
+ "eos_token_id": 151645,
+ "hidden_act": "silu",
+ "hidden_size": 3584,
+ "initializer_range": 0.02,
+ "intermediate_size": 18944,
+ "max_position_embeddings": 32768,
+ "max_window_layers": 28,
+ "model_type": "qwen2",
+ "num_attention_heads": 28,
+ "num_hidden_layers": 28,
+ "num_key_value_heads": 4,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 1000000.0,
+ "sliding_window": null,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.46.1",
+ "use_cache": false,
+ "use_sliding_window": false,
+ "vocab_size": 152064
+ }
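The config implies the attention geometry even though it is not spelled out: 3584 hidden units over 28 heads give a per-head dimension of 128, and 4 KV heads mean grouped-query attention with 7 query heads sharing each KV head. A sketch of the derivation, with values copied from the file above:

```python
# Attention geometry derived from config.json.
hidden_size = 3584
num_attention_heads = 28
num_key_value_heads = 4

head_dim = hidden_size // num_attention_heads            # 128
gqa_group = num_attention_heads // num_key_value_heads   # 7 query heads per KV head
print(head_dim, gqa_group)
```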
eval_results.json ADDED
@@ -0,0 +1,7 @@
+ {
+ "epoch": 2.0,
+ "eval_loss": 0.12309124320745468,
+ "eval_runtime": 3.5999,
+ "eval_samples_per_second": 6.667,
+ "eval_steps_per_second": 1.667
+ }
generation_config.json ADDED
@@ -0,0 +1,14 @@
+ {
+ "bos_token_id": 151643,
+ "do_sample": true,
+ "eos_token_id": [
+ 151645,
+ 151643
+ ],
+ "pad_token_id": 151643,
+ "repetition_penalty": 1.1,
+ "temperature": 0.7,
+ "top_k": 20,
+ "top_p": 0.8,
+ "transformers_version": "4.46.1"
+ }
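These sampling defaults travel with the checkpoint and are picked up by `generate()` unless overridden. A sketch of inspecting and overriding them, assuming the repo is checked out locally:

```python
# generation_config.json is loaded automatically alongside the model;
# it can also be inspected or overridden explicitly.
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained(".")
print(gen_cfg.do_sample, gen_cfg.temperature, gen_cfg.top_p, gen_cfg.top_k)
# True 0.7 0.8 20

gen_cfg.do_sample = False  # hypothetical override: switch to greedy decoding
# then pass it along: model.generate(..., generation_config=gen_cfg)
```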
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model-00001-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1e9939398160d6ad810e731cc2b4292985cb3c74ab09f8451720ad81390a526b
+ size 4877660776
model-00002-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fe3129555d733f8e81fe739766f91e2d7e0e679f887b957680c04a467ac9e18c
+ size 4932751008
model-00003-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d44c599db585cbb83c1a0bb90e09e4657297f93315ed6d050742bc632db2f68f
+ size 4330865200
model-00004-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bcbeda28782a275fe7ddc5cf500462b99c0ed632ca46648a9dd1fe34d58d31f6
+ size 1089994880
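The four `.safetensors` entries above are Git LFS pointer files, not the weights themselves; each records the sha256 and byte size of the shard that LFS fetches. A sketch of verifying a downloaded shard against its pointer:

```python
# Verify a downloaded shard against the sha256 oid and size in its LFS pointer.
import hashlib
import os

def verify_shard(path: str, expected_oid: str, expected_size: int) -> bool:
    if os.path.getsize(path) != expected_size:
        return False
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
            digest.update(chunk)
    return digest.hexdigest() == expected_oid

print(verify_shard(
    "model-00004-of-00004.safetensors",
    "bcbeda28782a275fe7ddc5cf500462b99c0ed632ca46648a9dd1fe34d58d31f6",
    1089994880,
))
```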
model.safetensors.index.json ADDED
@@ -0,0 +1,346 @@
+ {
+ "metadata": {
+ "total_size": 15231233024
+ },
+ "weight_map": {
+ "lm_head.weight": "model-00004-of-00004.safetensors",
+ "model.embed_tokens.weight": "model-00001-of-00004.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.10.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.10.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.10.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.11.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.11.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.11.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.12.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.12.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.12.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.13.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.13.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.13.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.14.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.14.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.14.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.15.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.15.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.15.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.16.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.16.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.16.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.16.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.16.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.17.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.17.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.17.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.17.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.17.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.17.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.18.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.18.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.18.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.18.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.18.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.18.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.19.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.19.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.19.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.19.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.19.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.20.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.20.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.20.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.21.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.21.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.21.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.22.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.22.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.22.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.23.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.23.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.23.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.24.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.24.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.24.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.25.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.25.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.25.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.26.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.26.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.26.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.26.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.26.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.27.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.27.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.27.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.27.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.27.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.3.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.3.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.3.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.4.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.4.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.4.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.5.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.5.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.5.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.6.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.6.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.6.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.7.input_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.7.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.7.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.7.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.7.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.8.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.8.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.8.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.8.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.8.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.9.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.9.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.9.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "model.norm.weight": "model-00003-of-00004.safetensors"
+ }
+ }
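`from_pretrained` uses this index to pull each tensor from the right shard; `weight_map` is a flat name-to-file lookup. A sketch of the same lookup done by hand, assuming the shards and index are present locally:

```python
# Resolve one tensor name to its shard via the index, then read only that tensor.
import json
from safetensors import safe_open

with open("model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.layers.0.mlp.down_proj.weight"
shard = index["weight_map"][name]   # "model-00001-of-00004.safetensors"
with safe_open(shard, framework="pt") as f:
    print(name, tuple(f.get_tensor(name).shape))  # expected (3584, 18944)
```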
special_tokens_map.json ADDED
@@ -0,0 +1,31 @@
+ {
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>",
+ "<|object_ref_start|>",
+ "<|object_ref_end|>",
+ "<|box_start|>",
+ "<|box_end|>",
+ "<|quad_start|>",
+ "<|quad_end|>",
+ "<|vision_start|>",
+ "<|vision_end|>",
+ "<|vision_pad|>",
+ "<|image_pad|>",
+ "<|video_pad|>"
+ ],
+ "eos_token": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
+ size 11421896
tokenizer_config.json ADDED
@@ -0,0 +1,208 @@
+ {
+ "add_bos_token": false,
+ "add_prefix_space": false,
+ "added_tokens_decoder": {
+ "151643": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151644": {
+ "content": "<|im_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151645": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151646": {
+ "content": "<|object_ref_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151647": {
+ "content": "<|object_ref_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151648": {
+ "content": "<|box_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151649": {
+ "content": "<|box_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151650": {
+ "content": "<|quad_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151651": {
+ "content": "<|quad_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151652": {
+ "content": "<|vision_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151653": {
+ "content": "<|vision_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151654": {
+ "content": "<|vision_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151655": {
+ "content": "<|image_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151656": {
+ "content": "<|video_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151657": {
+ "content": "<tool_call>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151658": {
+ "content": "</tool_call>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151659": {
+ "content": "<|fim_prefix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151660": {
+ "content": "<|fim_middle|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151661": {
+ "content": "<|fim_suffix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151662": {
+ "content": "<|fim_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151663": {
+ "content": "<|repo_name|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151664": {
+ "content": "<|file_sep|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ }
+ },
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>",
+ "<|object_ref_start|>",
+ "<|object_ref_end|>",
+ "<|box_start|>",
+ "<|box_end|>",
+ "<|quad_start|>",
+ "<|quad_end|>",
+ "<|vision_start|>",
+ "<|vision_end|>",
+ "<|vision_pad|>",
+ "<|image_pad|>",
+ "<|video_pad|>"
+ ],
+ "bos_token": null,
+ "chat_template": "{%- if tools %}\n    {{- '<|im_start|>system\\n' }}\n    {%- if messages[0]['role'] == 'system' %}\n        {{- messages[0]['content'] }}\n    {%- else %}\n        {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}\n    {%- endif %}\n    {{- \"\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n    {%- for tool in tools %}\n        {{- \"\\n\" }}\n        {{- tool | tojson }}\n    {%- endfor %}\n    {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}\\n</tool_call><|im_end|>\\n\" }}\n{%- else %}\n    {%- if messages[0]['role'] == 'system' %}\n        {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n    {%- else %}\n        {{- '<|im_start|>system\\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\\n' }}\n    {%- endif %}\n{%- endif %}\n{%- for message in messages %}\n    {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\n        {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n    {%- elif message.role == \"assistant\" %}\n        {{- '<|im_start|>' + message.role }}\n        {%- if message.content %}\n            {{- '\\n' + message.content }}\n        {%- endif %}\n        {%- for tool_call in message.tool_calls %}\n            {%- if tool_call.function is defined %}\n                {%- set tool_call = tool_call.function %}\n            {%- endif %}\n            {{- '\\n<tool_call>\\n{\"name\": \"' }}\n            {{- tool_call.name }}\n            {{- '\", \"arguments\": ' }}\n            {{- tool_call.arguments | tojson }}\n            {{- '}\\n</tool_call>' }}\n        {%- endfor %}\n        {{- '<|im_end|>\\n' }}\n    {%- elif message.role == \"tool\" %}\n        {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n            {{- '<|im_start|>user' }}\n        {%- endif %}\n        {{- '\\n<tool_response>\\n' }}\n        {{- message.content }}\n        {{- '\\n</tool_response>' }}\n        {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n            {{- '<|im_end|>\\n' }}\n        {%- endif %}\n    {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n    {{- '<|im_start|>assistant\\n' }}\n{%- endif %}\n",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "<|im_end|>",
+ "errors": "replace",
+ "model_max_length": 32768,
+ "pad_token": "<|endoftext|>",
+ "padding_side": "right",
+ "split_special_tokens": false,
+ "tokenizer_class": "Qwen2Tokenizer",
+ "unk_token": null
+ }
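The `chat_template` field is the Jinja source that `apply_chat_template` renders into the ChatML prompt format. A sketch of rendering it without tokenizing, assuming the repo is checked out locally:

```python
# Render the embedded chat template to see the exact prompt shape the model expects.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(".")
messages = [{"role": "user", "content": "hi"}]
print(tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True))
# <|im_start|>system
# You are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>
# <|im_start|>user
# hi<|im_end|>
# <|im_start|>assistant
```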
train_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "epoch": 2.0,
+ "total_flos": 8378343751680.0,
+ "train_loss": 0.06280981206449948,
+ "train_runtime": 833.6474,
+ "train_samples_per_second": 5.587,
+ "train_steps_per_second": 0.35
+ }
trainer_log.jsonl ADDED
@@ -0,0 +1,294 @@
+ {"current_steps": 1, "total_steps": 292, "loss": 0.1231, "lr": 9.999710619100732e-06, "epoch": 0.00684931506849315, "percentage": 0.34, "elapsed_time": "0:00:04", "remaining_time": "0:20:00"}
+ {"current_steps": 2, "total_steps": 292, "loss": 0.1599, "lr": 9.998842509899456e-06, "epoch": 0.0136986301369863, "percentage": 0.68, "elapsed_time": "0:00:06", "remaining_time": "0:16:16"}
+ {"current_steps": 3, "total_steps": 292, "loss": 0.1042, "lr": 9.997395772881853e-06, "epoch": 0.02054794520547945, "percentage": 1.03, "elapsed_time": "0:00:09", "remaining_time": "0:15:03"}
+ {"current_steps": 4, "total_steps": 292, "loss": 0.114, "lr": 9.995370575511151e-06, "epoch": 0.0273972602739726, "percentage": 1.37, "elapsed_time": "0:00:11", "remaining_time": "0:14:20"}
+ {"current_steps": 5, "total_steps": 292, "loss": 0.0993, "lr": 9.992767152208724e-06, "epoch": 0.03424657534246575, "percentage": 1.71, "elapsed_time": "0:00:14", "remaining_time": "0:14:12"}
+ {"current_steps": 6, "total_steps": 292, "loss": 0.0887, "lr": 9.989585804326963e-06, "epoch": 0.0410958904109589, "percentage": 2.05, "elapsed_time": "0:00:17", "remaining_time": "0:13:55"}
+ {"current_steps": 7, "total_steps": 292, "loss": 0.0992, "lr": 9.985826900114391e-06, "epoch": 0.04794520547945205, "percentage": 2.4, "elapsed_time": "0:00:19", "remaining_time": "0:13:33"}
+ {"current_steps": 8, "total_steps": 292, "loss": 0.0936, "lr": 9.98149087467304e-06, "epoch": 0.0547945205479452, "percentage": 2.74, "elapsed_time": "0:00:22", "remaining_time": "0:13:19"}
+ {"current_steps": 9, "total_steps": 292, "loss": 0.1073, "lr": 9.97657822990809e-06, "epoch": 0.06164383561643835, "percentage": 3.08, "elapsed_time": "0:00:25", "remaining_time": "0:13:11"}
+ {"current_steps": 10, "total_steps": 292, "loss": 0.0993, "lr": 9.97108953446976e-06, "epoch": 0.0684931506849315, "percentage": 3.42, "elapsed_time": "0:00:27", "remaining_time": "0:13:04"}
+ {"current_steps": 11, "total_steps": 292, "loss": 0.1084, "lr": 9.965025423687505e-06, "epoch": 0.07534246575342465, "percentage": 3.77, "elapsed_time": "0:00:30", "remaining_time": "0:12:55"}
+ {"current_steps": 12, "total_steps": 292, "loss": 0.0843, "lr": 9.95838659949645e-06, "epoch": 0.0821917808219178, "percentage": 4.11, "elapsed_time": "0:00:32", "remaining_time": "0:12:49"}
+ {"current_steps": 13, "total_steps": 292, "loss": 0.0894, "lr": 9.951173830356168e-06, "epoch": 0.08904109589041095, "percentage": 4.45, "elapsed_time": "0:00:35", "remaining_time": "0:12:47"}
+ {"current_steps": 14, "total_steps": 292, "loss": 0.1161, "lr": 9.943387951161702e-06, "epoch": 0.0958904109589041, "percentage": 4.79, "elapsed_time": "0:00:38", "remaining_time": "0:12:39"}
+ {"current_steps": 15, "total_steps": 292, "loss": 0.0872, "lr": 9.935029863146946e-06, "epoch": 0.10273972602739725, "percentage": 5.14, "elapsed_time": "0:00:40", "remaining_time": "0:12:33"}
+ {"current_steps": 16, "total_steps": 292, "loss": 0.0697, "lr": 9.926100533780304e-06, "epoch": 0.1095890410958904, "percentage": 5.48, "elapsed_time": "0:00:43", "remaining_time": "0:12:34"}
+ {"current_steps": 17, "total_steps": 292, "loss": 0.1001, "lr": 9.916600996652726e-06, "epoch": 0.11643835616438356, "percentage": 5.82, "elapsed_time": "0:00:46", "remaining_time": "0:12:27"}
+ {"current_steps": 18, "total_steps": 292, "loss": 0.1142, "lr": 9.906532351358047e-06, "epoch": 0.1232876712328767, "percentage": 6.16, "elapsed_time": "0:00:48", "remaining_time": "0:12:22"}
+ {"current_steps": 19, "total_steps": 292, "loss": 0.0951, "lr": 9.895895763365722e-06, "epoch": 0.13013698630136986, "percentage": 6.51, "elapsed_time": "0:00:51", "remaining_time": "0:12:18"}
+ {"current_steps": 20, "total_steps": 292, "loss": 0.1102, "lr": 9.88469246388591e-06, "epoch": 0.136986301369863, "percentage": 6.85, "elapsed_time": "0:00:54", "remaining_time": "0:12:15"}
+ {"current_steps": 21, "total_steps": 292, "loss": 0.1102, "lr": 9.872923749726959e-06, "epoch": 0.14383561643835616, "percentage": 7.19, "elapsed_time": "0:00:56", "remaining_time": "0:12:10"}
+ {"current_steps": 22, "total_steps": 292, "loss": 0.0889, "lr": 9.860590983145307e-06, "epoch": 0.1506849315068493, "percentage": 7.53, "elapsed_time": "0:00:59", "remaining_time": "0:12:08"}
+ {"current_steps": 23, "total_steps": 292, "loss": 0.1128, "lr": 9.847695591687788e-06, "epoch": 0.15753424657534246, "percentage": 7.88, "elapsed_time": "0:01:01", "remaining_time": "0:12:04"}
+ {"current_steps": 24, "total_steps": 292, "loss": 0.099, "lr": 9.834239068026388e-06, "epoch": 0.1643835616438356, "percentage": 8.22, "elapsed_time": "0:01:04", "remaining_time": "0:12:02"}
+ {"current_steps": 25, "total_steps": 292, "loss": 0.0931, "lr": 9.82022296978548e-06, "epoch": 0.17123287671232876, "percentage": 8.56, "elapsed_time": "0:01:07", "remaining_time": "0:11:57"}
+ {"current_steps": 26, "total_steps": 292, "loss": 0.0875, "lr": 9.805648919361505e-06, "epoch": 0.1780821917808219, "percentage": 8.9, "elapsed_time": "0:01:09", "remaining_time": "0:11:53"}
+ {"current_steps": 27, "total_steps": 292, "loss": 0.1036, "lr": 9.790518603735191e-06, "epoch": 0.18493150684931506, "percentage": 9.25, "elapsed_time": "0:01:12", "remaining_time": "0:11:48"}
+ {"current_steps": 28, "total_steps": 292, "loss": 0.0994, "lr": 9.774833774276278e-06, "epoch": 0.1917808219178082, "percentage": 9.59, "elapsed_time": "0:01:14", "remaining_time": "0:11:44"}
+ {"current_steps": 29, "total_steps": 292, "loss": 0.1192, "lr": 9.758596246540782e-06, "epoch": 0.19863013698630136, "percentage": 9.93, "elapsed_time": "0:01:17", "remaining_time": "0:11:41"}
+ {"current_steps": 30, "total_steps": 292, "loss": 0.0928, "lr": 9.741807900060858e-06, "epoch": 0.2054794520547945, "percentage": 10.27, "elapsed_time": "0:01:19", "remaining_time": "0:11:36"}
+ {"current_steps": 31, "total_steps": 292, "loss": 0.1115, "lr": 9.724470678127226e-06, "epoch": 0.21232876712328766, "percentage": 10.62, "elapsed_time": "0:01:22", "remaining_time": "0:11:34"}
+ {"current_steps": 32, "total_steps": 292, "loss": 0.109, "lr": 9.706586587564236e-06, "epoch": 0.2191780821917808, "percentage": 10.96, "elapsed_time": "0:01:25", "remaining_time": "0:11:30"}
+ {"current_steps": 33, "total_steps": 292, "loss": 0.1025, "lr": 9.68815769849757e-06, "epoch": 0.22602739726027396, "percentage": 11.3, "elapsed_time": "0:01:27", "remaining_time": "0:11:27"}
+ {"current_steps": 34, "total_steps": 292, "loss": 0.1096, "lr": 9.669186144114627e-06, "epoch": 0.2328767123287671, "percentage": 11.64, "elapsed_time": "0:01:30", "remaining_time": "0:11:23"}
+ {"current_steps": 35, "total_steps": 292, "loss": 0.1092, "lr": 9.649674120417591e-06, "epoch": 0.23972602739726026, "percentage": 11.99, "elapsed_time": "0:01:32", "remaining_time": "0:11:20"}
+ {"current_steps": 36, "total_steps": 292, "loss": 0.1106, "lr": 9.62962388596925e-06, "epoch": 0.2465753424657534, "percentage": 12.33, "elapsed_time": "0:01:35", "remaining_time": "0:11:16"}
+ {"current_steps": 37, "total_steps": 292, "loss": 0.1164, "lr": 9.609037761631552e-06, "epoch": 0.2534246575342466, "percentage": 12.67, "elapsed_time": "0:01:37", "remaining_time": "0:11:13"}
+ {"current_steps": 38, "total_steps": 292, "loss": 0.1041, "lr": 9.587918130296969e-06, "epoch": 0.2602739726027397, "percentage": 13.01, "elapsed_time": "0:01:40", "remaining_time": "0:11:09"}
+ {"current_steps": 39, "total_steps": 292, "loss": 0.0961, "lr": 9.566267436612662e-06, "epoch": 0.2671232876712329, "percentage": 13.36, "elapsed_time": "0:01:42", "remaining_time": "0:11:07"}
+ {"current_steps": 40, "total_steps": 292, "loss": 0.1033, "lr": 9.544088186697515e-06, "epoch": 0.273972602739726, "percentage": 13.7, "elapsed_time": "0:01:45", "remaining_time": "0:11:05"}
+ {"current_steps": 41, "total_steps": 292, "loss": 0.0692, "lr": 9.521382947852042e-06, "epoch": 0.2808219178082192, "percentage": 14.04, "elapsed_time": "0:01:48", "remaining_time": "0:11:01"}
+ {"current_steps": 42, "total_steps": 292, "loss": 0.1024, "lr": 9.498154348261217e-06, "epoch": 0.2876712328767123, "percentage": 14.38, "elapsed_time": "0:01:50", "remaining_time": "0:10:59"}
+ {"current_steps": 43, "total_steps": 292, "loss": 0.1045, "lr": 9.474405076690257e-06, "epoch": 0.2945205479452055, "percentage": 14.73, "elapsed_time": "0:01:53", "remaining_time": "0:10:56"}
+ {"current_steps": 44, "total_steps": 292, "loss": 0.088, "lr": 9.450137882173385e-06, "epoch": 0.3013698630136986, "percentage": 15.07, "elapsed_time": "0:01:56", "remaining_time": "0:10:54"}
+ {"current_steps": 45, "total_steps": 292, "loss": 0.089, "lr": 9.425355573695628e-06, "epoch": 0.3082191780821918, "percentage": 15.41, "elapsed_time": "0:01:58", "remaining_time": "0:10:52"}
+ {"current_steps": 46, "total_steps": 292, "loss": 0.1076, "lr": 9.40006101986768e-06, "epoch": 0.3150684931506849, "percentage": 15.75, "elapsed_time": "0:02:01", "remaining_time": "0:10:49"}
+ {"current_steps": 47, "total_steps": 292, "loss": 0.0997, "lr": 9.374257148593824e-06, "epoch": 0.3219178082191781, "percentage": 16.1, "elapsed_time": "0:02:04", "remaining_time": "0:10:47"}
+ {"current_steps": 48, "total_steps": 292, "loss": 0.1089, "lr": 9.347946946733055e-06, "epoch": 0.3287671232876712, "percentage": 16.44, "elapsed_time": "0:02:06", "remaining_time": "0:10:44"}
+ {"current_steps": 49, "total_steps": 292, "loss": 0.1028, "lr": 9.321133459753322e-06, "epoch": 0.3356164383561644, "percentage": 16.78, "elapsed_time": "0:02:09", "remaining_time": "0:10:41"}
+ {"current_steps": 50, "total_steps": 292, "loss": 0.1166, "lr": 9.293819791379016e-06, "epoch": 0.3424657534246575, "percentage": 17.12, "elapsed_time": "0:02:11", "remaining_time": "0:10:38"}
+ {"current_steps": 51, "total_steps": 292, "loss": 0.1008, "lr": 9.266009103231702e-06, "epoch": 0.3493150684931507, "percentage": 17.47, "elapsed_time": "0:02:14", "remaining_time": "0:10:35"}
+ {"current_steps": 52, "total_steps": 292, "loss": 0.1126, "lr": 9.237704614464157e-06, "epoch": 0.3561643835616438, "percentage": 17.81, "elapsed_time": "0:02:16", "remaining_time": "0:10:31"}
+ {"current_steps": 53, "total_steps": 292, "loss": 0.1081, "lr": 9.208909601387748e-06, "epoch": 0.363013698630137, "percentage": 18.15, "elapsed_time": "0:02:19", "remaining_time": "0:10:28"}
+ {"current_steps": 54, "total_steps": 292, "loss": 0.0901, "lr": 9.179627397093184e-06, "epoch": 0.3698630136986301, "percentage": 18.49, "elapsed_time": "0:02:21", "remaining_time": "0:10:24"}
+ {"current_steps": 55, "total_steps": 292, "loss": 0.1017, "lr": 9.149861391064714e-06, "epoch": 0.3767123287671233, "percentage": 18.84, "elapsed_time": "0:02:24", "remaining_time": "0:10:22"}
+ {"current_steps": 56, "total_steps": 292, "loss": 0.1018, "lr": 9.119615028787771e-06, "epoch": 0.3835616438356164, "percentage": 19.18, "elapsed_time": "0:02:27", "remaining_time": "0:10:20"}
+ {"current_steps": 57, "total_steps": 292, "loss": 0.1049, "lr": 9.088891811350164e-06, "epoch": 0.3904109589041096, "percentage": 19.52, "elapsed_time": "0:02:29", "remaining_time": "0:10:17"}
+ {"current_steps": 58, "total_steps": 292, "loss": 0.1042, "lr": 9.057695295036806e-06, "epoch": 0.3972602739726027, "percentage": 19.86, "elapsed_time": "0:02:32", "remaining_time": "0:10:14"}
+ {"current_steps": 59, "total_steps": 292, "loss": 0.0888, "lr": 9.026029090918076e-06, "epoch": 0.4041095890410959, "percentage": 20.21, "elapsed_time": "0:02:35", "remaining_time": "0:10:12"}
+ {"current_steps": 60, "total_steps": 292, "loss": 0.1002, "lr": 8.993896864431825e-06, "epoch": 0.410958904109589, "percentage": 20.55, "elapsed_time": "0:02:37", "remaining_time": "0:10:09"}
+ {"current_steps": 61, "total_steps": 292, "loss": 0.0881, "lr": 8.96130233495909e-06, "epoch": 0.4178082191780822, "percentage": 20.89, "elapsed_time": "0:02:40", "remaining_time": "0:10:06"}
+ {"current_steps": 62, "total_steps": 292, "loss": 0.0944, "lr": 8.928249275393572e-06, "epoch": 0.4246575342465753, "percentage": 21.23, "elapsed_time": "0:02:43", "remaining_time": "0:10:04"}
+ {"current_steps": 63, "total_steps": 292, "loss": 0.0874, "lr": 8.894741511704911e-06, "epoch": 0.4315068493150685, "percentage": 21.58, "elapsed_time": "0:02:45", "remaining_time": "0:10:02"}
+ {"current_steps": 64, "total_steps": 292, "loss": 0.1053, "lr": 8.860782922495821e-06, "epoch": 0.4383561643835616, "percentage": 21.92, "elapsed_time": "0:02:48", "remaining_time": "0:10:00"}
+ {"current_steps": 65, "total_steps": 292, "loss": 0.1098, "lr": 8.826377438553138e-06, "epoch": 0.4452054794520548, "percentage": 22.26, "elapsed_time": "0:02:51", "remaining_time": "0:09:57"}
+ {"current_steps": 66, "total_steps": 292, "loss": 0.0983, "lr": 8.791529042392813e-06, "epoch": 0.4520547945205479, "percentage": 22.6, "elapsed_time": "0:02:53", "remaining_time": "0:09:54"}
+ {"current_steps": 67, "total_steps": 292, "loss": 0.0922, "lr": 8.756241767798934e-06, "epoch": 0.4589041095890411, "percentage": 22.95, "elapsed_time": "0:02:56", "remaining_time": "0:09:51"}
+ {"current_steps": 68, "total_steps": 292, "loss": 0.1009, "lr": 8.720519699356804e-06, "epoch": 0.4657534246575342, "percentage": 23.29, "elapsed_time": "0:02:58", "remaining_time": "0:09:48"}
+ {"current_steps": 69, "total_steps": 292, "loss": 0.1136, "lr": 8.684366971980139e-06, "epoch": 0.4726027397260274, "percentage": 23.63, "elapsed_time": "0:03:01", "remaining_time": "0:09:45"}
+ {"current_steps": 70, "total_steps": 292, "loss": 0.106, "lr": 8.647787770432439e-06, "epoch": 0.4794520547945205, "percentage": 23.97, "elapsed_time": "0:03:04", "remaining_time": "0:09:43"}
+ {"current_steps": 71, "total_steps": 292, "loss": 0.0781, "lr": 8.610786328842602e-06, "epoch": 0.4863013698630137, "percentage": 24.32, "elapsed_time": "0:03:06", "remaining_time": "0:09:41"}
+ {"current_steps": 72, "total_steps": 292, "loss": 0.0953, "lr": 8.573366930214807e-06, "epoch": 0.4931506849315068, "percentage": 24.66, "elapsed_time": "0:03:09", "remaining_time": "0:09:38"}
+ {"current_steps": 73, "total_steps": 292, "loss": 0.0977, "lr": 8.535533905932739e-06, "epoch": 0.5, "percentage": 25.0, "elapsed_time": "0:03:11", "remaining_time": "0:09:35"}
+ {"current_steps": 74, "total_steps": 292, "loss": 0.0986, "lr": 8.497291635258235e-06, "epoch": 0.5068493150684932, "percentage": 25.34, "elapsed_time": "0:03:14", "remaining_time": "0:09:33"}
+ {"current_steps": 75, "total_steps": 292, "loss": 0.1066, "lr": 8.458644544824371e-06, "epoch": 0.5136986301369864, "percentage": 25.68, "elapsed_time": "0:03:16", "remaining_time": "0:09:29"}
+ {"current_steps": 76, "total_steps": 292, "loss": 0.0778, "lr": 8.419597108123054e-06, "epoch": 0.5205479452054794, "percentage": 26.03, "elapsed_time": "0:03:19", "remaining_time": "0:09:27"}
+ {"current_steps": 77, "total_steps": 292, "loss": 0.0885, "lr": 8.380153844987225e-06, "epoch": 0.5273972602739726, "percentage": 26.37, "elapsed_time": "0:03:22", "remaining_time": "0:09:24"}
+ {"current_steps": 78, "total_steps": 292, "loss": 0.0784, "lr": 8.340319321067668e-06, "epoch": 0.5342465753424658, "percentage": 26.71, "elapsed_time": "0:03:24", "remaining_time": "0:09:22"}
+ {"current_steps": 79, "total_steps": 292, "loss": 0.1051, "lr": 8.300098147304523e-06, "epoch": 0.541095890410959, "percentage": 27.05, "elapsed_time": "0:03:27", "remaining_time": "0:09:19"}
+ {"current_steps": 80, "total_steps": 292, "loss": 0.1233, "lr": 8.259494979393563e-06, "epoch": 0.547945205479452, "percentage": 27.4, "elapsed_time": "0:03:30", "remaining_time": "0:09:16"}
+ {"current_steps": 81, "total_steps": 292, "loss": 0.1009, "lr": 8.218514517247287e-06, "epoch": 0.5547945205479452, "percentage": 27.74, "elapsed_time": "0:03:32", "remaining_time": "0:09:14"}
82
+ {"current_steps": 82, "total_steps": 292, "loss": 0.0909, "lr": 8.177161504450887e-06, "epoch": 0.5616438356164384, "percentage": 28.08, "elapsed_time": "0:03:35", "remaining_time": "0:09:11"}
83
+ {"current_steps": 83, "total_steps": 292, "loss": 0.0965, "lr": 8.135440727713179e-06, "epoch": 0.5684931506849316, "percentage": 28.42, "elapsed_time": "0:03:37", "remaining_time": "0:09:07"}
84
+ {"current_steps": 84, "total_steps": 292, "loss": 0.1382, "lr": 8.093357016312518e-06, "epoch": 0.5753424657534246, "percentage": 28.77, "elapsed_time": "0:03:40", "remaining_time": "0:09:04"}
85
+ {"current_steps": 85, "total_steps": 292, "loss": 0.111, "lr": 8.050915241537802e-06, "epoch": 0.5821917808219178, "percentage": 29.11, "elapsed_time": "0:03:42", "remaining_time": "0:09:01"}
86
+ {"current_steps": 86, "total_steps": 292, "loss": 0.0903, "lr": 8.008120316124612e-06, "epoch": 0.589041095890411, "percentage": 29.45, "elapsed_time": "0:03:45", "remaining_time": "0:08:59"}
87
+ {"current_steps": 87, "total_steps": 292, "loss": 0.0889, "lr": 7.964977193686551e-06, "epoch": 0.5958904109589042, "percentage": 29.79, "elapsed_time": "0:03:47", "remaining_time": "0:08:56"}
88
+ {"current_steps": 88, "total_steps": 292, "loss": 0.1025, "lr": 7.921490868141843e-06, "epoch": 0.6027397260273972, "percentage": 30.14, "elapsed_time": "0:03:50", "remaining_time": "0:08:53"}
89
+ {"current_steps": 89, "total_steps": 292, "loss": 0.0872, "lr": 7.877666373135287e-06, "epoch": 0.6095890410958904, "percentage": 30.48, "elapsed_time": "0:03:52", "remaining_time": "0:08:50"}
90
+ {"current_steps": 90, "total_steps": 292, "loss": 0.0976, "lr": 7.83350878145559e-06, "epoch": 0.6164383561643836, "percentage": 30.82, "elapsed_time": "0:03:55", "remaining_time": "0:08:48"}
91
+ {"current_steps": 91, "total_steps": 292, "loss": 0.0829, "lr": 7.789023204448189e-06, "epoch": 0.6232876712328768, "percentage": 31.16, "elapsed_time": "0:03:58", "remaining_time": "0:08:46"}
92
+ {"current_steps": 92, "total_steps": 292, "loss": 0.0862, "lr": 7.744214791423597e-06, "epoch": 0.6301369863013698, "percentage": 31.51, "elapsed_time": "0:04:01", "remaining_time": "0:08:44"}
93
+ {"current_steps": 93, "total_steps": 292, "loss": 0.1227, "lr": 7.699088729061355e-06, "epoch": 0.636986301369863, "percentage": 31.85, "elapsed_time": "0:04:03", "remaining_time": "0:08:41"}
94
+ {"current_steps": 94, "total_steps": 292, "loss": 0.0939, "lr": 7.653650240809667e-06, "epoch": 0.6438356164383562, "percentage": 32.19, "elapsed_time": "0:04:06", "remaining_time": "0:08:38"}
95
+ {"current_steps": 95, "total_steps": 292, "loss": 0.0957, "lr": 7.60790458628077e-06, "epoch": 0.6506849315068494, "percentage": 32.53, "elapsed_time": "0:04:08", "remaining_time": "0:08:36"}
96
+ {"current_steps": 96, "total_steps": 292, "loss": 0.0975, "lr": 7.56185706064212e-06, "epoch": 0.6575342465753424, "percentage": 32.88, "elapsed_time": "0:04:11", "remaining_time": "0:08:33"}
97
+ {"current_steps": 97, "total_steps": 292, "loss": 0.0857, "lr": 7.5155129940034675e-06, "epoch": 0.6643835616438356, "percentage": 33.22, "elapsed_time": "0:04:14", "remaining_time": "0:08:31"}
98
+ {"current_steps": 98, "total_steps": 292, "loss": 0.0995, "lr": 7.468877750799887e-06, "epoch": 0.6712328767123288, "percentage": 33.56, "elapsed_time": "0:04:17", "remaining_time": "0:08:29"}
99
+ {"current_steps": 99, "total_steps": 292, "loss": 0.0779, "lr": 7.421956729170823e-06, "epoch": 0.678082191780822, "percentage": 33.9, "elapsed_time": "0:04:19", "remaining_time": "0:08:26"}
100
+ {"current_steps": 100, "total_steps": 292, "loss": 0.0908, "lr": 7.374755360335253e-06, "epoch": 0.684931506849315, "percentage": 34.25, "elapsed_time": "0:04:22", "remaining_time": "0:08:23"}
101
+ {"current_steps": 101, "total_steps": 292, "loss": 0.094, "lr": 7.327279107962995e-06, "epoch": 0.6917808219178082, "percentage": 34.59, "elapsed_time": "0:04:24", "remaining_time": "0:08:20"}
102
+ {"current_steps": 102, "total_steps": 292, "loss": 0.1122, "lr": 7.279533467542295e-06, "epoch": 0.6986301369863014, "percentage": 34.93, "elapsed_time": "0:04:27", "remaining_time": "0:08:17"}
103
+ {"current_steps": 103, "total_steps": 292, "loss": 0.094, "lr": 7.2315239657436955e-06, "epoch": 0.7054794520547946, "percentage": 35.27, "elapsed_time": "0:04:29", "remaining_time": "0:08:15"}
104
+ {"current_steps": 104, "total_steps": 292, "loss": 0.1022, "lr": 7.183256159780321e-06, "epoch": 0.7123287671232876, "percentage": 35.62, "elapsed_time": "0:04:32", "remaining_time": "0:08:12"}
105
+ {"current_steps": 105, "total_steps": 292, "loss": 0.0998, "lr": 7.134735636764606e-06, "epoch": 0.7191780821917808, "percentage": 35.96, "elapsed_time": "0:04:35", "remaining_time": "0:08:11"}
106
+ {"current_steps": 106, "total_steps": 292, "loss": 0.1122, "lr": 7.085968013061585e-06, "epoch": 0.726027397260274, "percentage": 36.3, "elapsed_time": "0:04:38", "remaining_time": "0:08:08"}
107
+ {"current_steps": 107, "total_steps": 292, "loss": 0.074, "lr": 7.036958933638779e-06, "epoch": 0.7328767123287672, "percentage": 36.64, "elapsed_time": "0:04:41", "remaining_time": "0:08:05"}
108
+ {"current_steps": 108, "total_steps": 292, "loss": 0.1103, "lr": 6.987714071412781e-06, "epoch": 0.7397260273972602, "percentage": 36.99, "elapsed_time": "0:04:43", "remaining_time": "0:08:02"}
109
+ {"current_steps": 109, "total_steps": 292, "loss": 0.1014, "lr": 6.938239126592592e-06, "epoch": 0.7465753424657534, "percentage": 37.33, "elapsed_time": "0:04:46", "remaining_time": "0:08:00"}
110
+ {"current_steps": 110, "total_steps": 292, "loss": 0.0908, "lr": 6.888539826019824e-06, "epoch": 0.7534246575342466, "percentage": 37.67, "elapsed_time": "0:04:48", "remaining_time": "0:07:57"}
111
+ {"current_steps": 111, "total_steps": 292, "loss": 0.0973, "lr": 6.8386219225057945e-06, "epoch": 0.7602739726027398, "percentage": 38.01, "elapsed_time": "0:04:51", "remaining_time": "0:07:54"}
112
+ {"current_steps": 112, "total_steps": 292, "loss": 0.0983, "lr": 6.788491194165629e-06, "epoch": 0.7671232876712328, "percentage": 38.36, "elapsed_time": "0:04:53", "remaining_time": "0:07:52"}
113
+ {"current_steps": 113, "total_steps": 292, "loss": 0.101, "lr": 6.738153443749421e-06, "epoch": 0.773972602739726, "percentage": 38.7, "elapsed_time": "0:04:56", "remaining_time": "0:07:49"}
114
+ {"current_steps": 114, "total_steps": 292, "loss": 0.0949, "lr": 6.687614497970567e-06, "epoch": 0.7808219178082192, "percentage": 39.04, "elapsed_time": "0:04:59", "remaining_time": "0:07:47"}
115
+ {"current_steps": 115, "total_steps": 292, "loss": 0.1011, "lr": 6.636880206831298e-06, "epoch": 0.7876712328767124, "percentage": 39.38, "elapsed_time": "0:05:01", "remaining_time": "0:07:44"}
116
+ {"current_steps": 116, "total_steps": 292, "loss": 0.0872, "lr": 6.585956442945531e-06, "epoch": 0.7945205479452054, "percentage": 39.73, "elapsed_time": "0:05:04", "remaining_time": "0:07:41"}
117
+ {"current_steps": 117, "total_steps": 292, "loss": 0.0859, "lr": 6.534849100859101e-06, "epoch": 0.8013698630136986, "percentage": 40.07, "elapsed_time": "0:05:07", "remaining_time": "0:07:40"}
118
+ {"current_steps": 118, "total_steps": 292, "loss": 0.1142, "lr": 6.483564096367452e-06, "epoch": 0.8082191780821918, "percentage": 40.41, "elapsed_time": "0:05:10", "remaining_time": "0:07:37"}
119
+ {"current_steps": 119, "total_steps": 292, "loss": 0.1047, "lr": 6.432107365830872e-06, "epoch": 0.815068493150685, "percentage": 40.75, "elapsed_time": "0:05:12", "remaining_time": "0:07:34"}
120
+ {"current_steps": 120, "total_steps": 292, "loss": 0.1174, "lr": 6.380484865487346e-06, "epoch": 0.821917808219178, "percentage": 41.1, "elapsed_time": "0:05:15", "remaining_time": "0:07:32"}
121
+ {"current_steps": 121, "total_steps": 292, "loss": 0.1014, "lr": 6.328702570763098e-06, "epoch": 0.8287671232876712, "percentage": 41.44, "elapsed_time": "0:05:17", "remaining_time": "0:07:29"}
122
+ {"current_steps": 122, "total_steps": 292, "loss": 0.0888, "lr": 6.276766475580935e-06, "epoch": 0.8356164383561644, "percentage": 41.78, "elapsed_time": "0:05:20", "remaining_time": "0:07:26"}
123
+ {"current_steps": 123, "total_steps": 292, "loss": 0.1194, "lr": 6.224682591666431e-06, "epoch": 0.8424657534246576, "percentage": 42.12, "elapsed_time": "0:05:22", "remaining_time": "0:07:23"}
124
+ {"current_steps": 124, "total_steps": 292, "loss": 0.0766, "lr": 6.1724569478520495e-06, "epoch": 0.8493150684931506, "percentage": 42.47, "elapsed_time": "0:05:26", "remaining_time": "0:07:21"}
125
+ {"current_steps": 125, "total_steps": 292, "loss": 0.0999, "lr": 6.120095589379299e-06, "epoch": 0.8561643835616438, "percentage": 42.81, "elapsed_time": "0:05:28", "remaining_time": "0:07:18"}
126
+ {"current_steps": 126, "total_steps": 292, "loss": 0.1026, "lr": 6.067604577198981e-06, "epoch": 0.863013698630137, "percentage": 43.15, "elapsed_time": "0:05:31", "remaining_time": "0:07:17"}
127
+ {"current_steps": 127, "total_steps": 292, "loss": 0.1177, "lr": 6.014989987269617e-06, "epoch": 0.8698630136986302, "percentage": 43.49, "elapsed_time": "0:05:34", "remaining_time": "0:07:14"}
128
+ {"current_steps": 128, "total_steps": 292, "loss": 0.0841, "lr": 5.96225790985415e-06, "epoch": 0.8767123287671232, "percentage": 43.84, "elapsed_time": "0:05:36", "remaining_time": "0:07:11"}
129
+ {"current_steps": 129, "total_steps": 292, "loss": 0.0668, "lr": 5.909414448814971e-06, "epoch": 0.8835616438356164, "percentage": 44.18, "elapsed_time": "0:05:39", "remaining_time": "0:07:08"}
130
+ {"current_steps": 130, "total_steps": 292, "loss": 0.1175, "lr": 5.856465720907388e-06, "epoch": 0.8904109589041096, "percentage": 44.52, "elapsed_time": "0:05:41", "remaining_time": "0:07:05"}
131
+ {"current_steps": 131, "total_steps": 292, "loss": 0.1083, "lr": 5.803417855071603e-06, "epoch": 0.8972602739726028, "percentage": 44.86, "elapsed_time": "0:05:44", "remaining_time": "0:07:03"}
132
+ {"current_steps": 132, "total_steps": 292, "loss": 0.1048, "lr": 5.7502769917232635e-06, "epoch": 0.9041095890410958, "percentage": 45.21, "elapsed_time": "0:05:46", "remaining_time": "0:07:00"}
133
+ {"current_steps": 133, "total_steps": 292, "loss": 0.0984, "lr": 5.6970492820426994e-06, "epoch": 0.910958904109589, "percentage": 45.55, "elapsed_time": "0:05:49", "remaining_time": "0:06:57"}
134
+ {"current_steps": 134, "total_steps": 292, "loss": 0.0909, "lr": 5.643740887262905e-06, "epoch": 0.9178082191780822, "percentage": 45.89, "elapsed_time": "0:05:52", "remaining_time": "0:06:55"}
135
+ {"current_steps": 135, "total_steps": 292, "loss": 0.1065, "lr": 5.59035797795637e-06, "epoch": 0.9246575342465754, "percentage": 46.23, "elapsed_time": "0:05:54", "remaining_time": "0:06:52"}
136
+ {"current_steps": 136, "total_steps": 292, "loss": 0.0923, "lr": 5.536906733320816e-06, "epoch": 0.9315068493150684, "percentage": 46.58, "elapsed_time": "0:05:57", "remaining_time": "0:06:49"}
137
+ {"current_steps": 137, "total_steps": 292, "loss": 0.0903, "lr": 5.483393340463938e-06, "epoch": 0.9383561643835616, "percentage": 46.92, "elapsed_time": "0:05:59", "remaining_time": "0:06:47"}
138
+ {"current_steps": 138, "total_steps": 292, "loss": 0.1001, "lr": 5.429823993687234e-06, "epoch": 0.9452054794520548, "percentage": 47.26, "elapsed_time": "0:06:02", "remaining_time": "0:06:44"}
139
+ {"current_steps": 139, "total_steps": 292, "loss": 0.0828, "lr": 5.376204893769e-06, "epoch": 0.952054794520548, "percentage": 47.6, "elapsed_time": "0:06:04", "remaining_time": "0:06:41"}
140
+ {"current_steps": 140, "total_steps": 292, "loss": 0.0895, "lr": 5.322542247246583e-06, "epoch": 0.958904109589041, "percentage": 47.95, "elapsed_time": "0:06:07", "remaining_time": "0:06:38"}
141
+ {"current_steps": 141, "total_steps": 292, "loss": 0.0877, "lr": 5.26884226569794e-06, "epoch": 0.9657534246575342, "percentage": 48.29, "elapsed_time": "0:06:10", "remaining_time": "0:06:36"}
142
+ {"current_steps": 142, "total_steps": 292, "loss": 0.0935, "lr": 5.215111165022653e-06, "epoch": 0.9726027397260274, "percentage": 48.63, "elapsed_time": "0:06:12", "remaining_time": "0:06:33"}
143
+ {"current_steps": 143, "total_steps": 292, "loss": 0.105, "lr": 5.161355164722416e-06, "epoch": 0.9794520547945206, "percentage": 48.97, "elapsed_time": "0:06:15", "remaining_time": "0:06:31"}
144
+ {"current_steps": 144, "total_steps": 292, "loss": 0.0811, "lr": 5.107580487181112e-06, "epoch": 0.9863013698630136, "percentage": 49.32, "elapsed_time": "0:06:18", "remaining_time": "0:06:28"}
145
+ {"current_steps": 145, "total_steps": 292, "loss": 0.0776, "lr": 5.0537933569445585e-06, "epoch": 0.9931506849315068, "percentage": 49.66, "elapsed_time": "0:06:21", "remaining_time": "0:06:26"}
146
+ {"current_steps": 146, "total_steps": 292, "loss": 0.0813, "lr": 5e-06, "epoch": 1.0, "percentage": 50.0, "elapsed_time": "0:06:23", "remaining_time": "0:06:23"}
147
+ {"current_steps": 147, "total_steps": 292, "loss": 0.035, "lr": 4.946206643055443e-06, "epoch": 1.0068493150684932, "percentage": 50.34, "elapsed_time": "0:06:26", "remaining_time": "0:06:21"}
148
+ {"current_steps": 148, "total_steps": 292, "loss": 0.0242, "lr": 4.89241951281889e-06, "epoch": 1.0136986301369864, "percentage": 50.68, "elapsed_time": "0:06:29", "remaining_time": "0:06:18"}
149
+ {"current_steps": 149, "total_steps": 292, "loss": 0.0393, "lr": 4.838644835277585e-06, "epoch": 1.0205479452054795, "percentage": 51.03, "elapsed_time": "0:06:31", "remaining_time": "0:06:15"}
150
+ {"current_steps": 150, "total_steps": 292, "loss": 0.0299, "lr": 4.784888834977347e-06, "epoch": 1.0273972602739727, "percentage": 51.37, "elapsed_time": "0:06:34", "remaining_time": "0:06:13"}
151
+ {"current_steps": 151, "total_steps": 292, "loss": 0.0408, "lr": 4.731157734302063e-06, "epoch": 1.0342465753424657, "percentage": 51.71, "elapsed_time": "0:06:36", "remaining_time": "0:06:10"}
152
+ {"current_steps": 152, "total_steps": 292, "loss": 0.0294, "lr": 4.6774577527534195e-06, "epoch": 1.0410958904109588, "percentage": 52.05, "elapsed_time": "0:06:39", "remaining_time": "0:06:08"}
153
+ {"current_steps": 153, "total_steps": 292, "loss": 0.0402, "lr": 4.623795106231001e-06, "epoch": 1.047945205479452, "percentage": 52.4, "elapsed_time": "0:06:42", "remaining_time": "0:06:05"}
154
+ {"current_steps": 154, "total_steps": 292, "loss": 0.0306, "lr": 4.570176006312769e-06, "epoch": 1.0547945205479452, "percentage": 52.74, "elapsed_time": "0:06:44", "remaining_time": "0:06:02"}
155
+ {"current_steps": 155, "total_steps": 292, "loss": 0.0325, "lr": 4.516606659536063e-06, "epoch": 1.0616438356164384, "percentage": 53.08, "elapsed_time": "0:06:47", "remaining_time": "0:06:00"}
156
+ {"current_steps": 156, "total_steps": 292, "loss": 0.041, "lr": 4.463093266679185e-06, "epoch": 1.0684931506849316, "percentage": 53.42, "elapsed_time": "0:06:50", "remaining_time": "0:05:57"}
157
+ {"current_steps": 157, "total_steps": 292, "loss": 0.0278, "lr": 4.40964202204363e-06, "epoch": 1.0753424657534247, "percentage": 53.77, "elapsed_time": "0:06:53", "remaining_time": "0:05:55"}
158
+ {"current_steps": 158, "total_steps": 292, "loss": 0.032, "lr": 4.356259112737096e-06, "epoch": 1.0821917808219177, "percentage": 54.11, "elapsed_time": "0:06:55", "remaining_time": "0:05:52"}
159
+ {"current_steps": 159, "total_steps": 292, "loss": 0.0281, "lr": 4.302950717957304e-06, "epoch": 1.0890410958904109, "percentage": 54.45, "elapsed_time": "0:06:58", "remaining_time": "0:05:50"}
160
+ {"current_steps": 160, "total_steps": 292, "loss": 0.0263, "lr": 4.249723008276737e-06, "epoch": 1.095890410958904, "percentage": 54.79, "elapsed_time": "0:07:00", "remaining_time": "0:05:47"}
161
+ {"current_steps": 161, "total_steps": 292, "loss": 0.0326, "lr": 4.196582144928398e-06, "epoch": 1.1027397260273972, "percentage": 55.14, "elapsed_time": "0:07:03", "remaining_time": "0:05:44"}
162
+ {"current_steps": 162, "total_steps": 292, "loss": 0.0292, "lr": 4.143534279092613e-06, "epoch": 1.1095890410958904, "percentage": 55.48, "elapsed_time": "0:07:05", "remaining_time": "0:05:41"}
163
+ {"current_steps": 163, "total_steps": 292, "loss": 0.0408, "lr": 4.090585551185031e-06, "epoch": 1.1164383561643836, "percentage": 55.82, "elapsed_time": "0:07:08", "remaining_time": "0:05:39"}
164
+ {"current_steps": 164, "total_steps": 292, "loss": 0.0261, "lr": 4.037742090145851e-06, "epoch": 1.1232876712328768, "percentage": 56.16, "elapsed_time": "0:07:10", "remaining_time": "0:05:36"}
165
+ {"current_steps": 165, "total_steps": 292, "loss": 0.023, "lr": 3.985010012730382e-06, "epoch": 1.13013698630137, "percentage": 56.51, "elapsed_time": "0:07:13", "remaining_time": "0:05:33"}
166
+ {"current_steps": 166, "total_steps": 292, "loss": 0.0289, "lr": 3.93239542280102e-06, "epoch": 1.1369863013698631, "percentage": 56.85, "elapsed_time": "0:07:16", "remaining_time": "0:05:31"}
167
+ {"current_steps": 167, "total_steps": 292, "loss": 0.0254, "lr": 3.879904410620703e-06, "epoch": 1.143835616438356, "percentage": 57.19, "elapsed_time": "0:07:18", "remaining_time": "0:05:28"}
168
+ {"current_steps": 168, "total_steps": 292, "loss": 0.0401, "lr": 3.827543052147952e-06, "epoch": 1.1506849315068493, "percentage": 57.53, "elapsed_time": "0:07:21", "remaining_time": "0:05:25"}
169
+ {"current_steps": 169, "total_steps": 292, "loss": 0.041, "lr": 3.775317408333571e-06, "epoch": 1.1575342465753424, "percentage": 57.88, "elapsed_time": "0:07:24", "remaining_time": "0:05:23"}
170
+ {"current_steps": 170, "total_steps": 292, "loss": 0.0304, "lr": 3.7232335244190656e-06, "epoch": 1.1643835616438356, "percentage": 58.22, "elapsed_time": "0:07:27", "remaining_time": "0:05:20"}
171
+ {"current_steps": 171, "total_steps": 292, "loss": 0.026, "lr": 3.6712974292369035e-06, "epoch": 1.1712328767123288, "percentage": 58.56, "elapsed_time": "0:07:29", "remaining_time": "0:05:18"}
172
+ {"current_steps": 172, "total_steps": 292, "loss": 0.0248, "lr": 3.6195151345126556e-06, "epoch": 1.178082191780822, "percentage": 58.9, "elapsed_time": "0:07:32", "remaining_time": "0:05:15"}
173
+ {"current_steps": 173, "total_steps": 292, "loss": 0.0327, "lr": 3.5678926341691283e-06, "epoch": 1.1849315068493151, "percentage": 59.25, "elapsed_time": "0:07:34", "remaining_time": "0:05:12"}
174
+ {"current_steps": 174, "total_steps": 292, "loss": 0.0255, "lr": 3.5164359036325483e-06, "epoch": 1.191780821917808, "percentage": 59.59, "elapsed_time": "0:07:37", "remaining_time": "0:05:10"}
175
+ {"current_steps": 175, "total_steps": 292, "loss": 0.0273, "lr": 3.4651508991409016e-06, "epoch": 1.1986301369863013, "percentage": 59.93, "elapsed_time": "0:07:39", "remaining_time": "0:05:07"}
176
+ {"current_steps": 176, "total_steps": 292, "loss": 0.0334, "lr": 3.4140435570544708e-06, "epoch": 1.2054794520547945, "percentage": 60.27, "elapsed_time": "0:07:42", "remaining_time": "0:05:04"}
177
+ {"current_steps": 177, "total_steps": 292, "loss": 0.0258, "lr": 3.363119793168704e-06, "epoch": 1.2123287671232876, "percentage": 60.62, "elapsed_time": "0:07:45", "remaining_time": "0:05:02"}
178
+ {"current_steps": 178, "total_steps": 292, "loss": 0.0336, "lr": 3.3123855020294344e-06, "epoch": 1.2191780821917808, "percentage": 60.96, "elapsed_time": "0:07:48", "remaining_time": "0:04:59"}
179
+ {"current_steps": 179, "total_steps": 292, "loss": 0.0241, "lr": 3.26184655625058e-06, "epoch": 1.226027397260274, "percentage": 61.3, "elapsed_time": "0:07:50", "remaining_time": "0:04:57"}
180
+ {"current_steps": 180, "total_steps": 292, "loss": 0.0198, "lr": 3.2115088058343725e-06, "epoch": 1.2328767123287672, "percentage": 61.64, "elapsed_time": "0:07:53", "remaining_time": "0:04:54"}
181
+ {"current_steps": 181, "total_steps": 292, "loss": 0.0203, "lr": 3.161378077494205e-06, "epoch": 1.2397260273972603, "percentage": 61.99, "elapsed_time": "0:07:55", "remaining_time": "0:04:51"}
182
+ {"current_steps": 182, "total_steps": 292, "loss": 0.0247, "lr": 3.111460173980175e-06, "epoch": 1.2465753424657535, "percentage": 62.33, "elapsed_time": "0:07:58", "remaining_time": "0:04:49"}
183
+ {"current_steps": 183, "total_steps": 292, "loss": 0.0212, "lr": 3.06176087340741e-06, "epoch": 1.2534246575342465, "percentage": 62.67, "elapsed_time": "0:08:01", "remaining_time": "0:04:46"}
184
+ {"current_steps": 184, "total_steps": 292, "loss": 0.028, "lr": 3.0122859285872214e-06, "epoch": 1.2602739726027397, "percentage": 63.01, "elapsed_time": "0:08:03", "remaining_time": "0:04:44"}
185
+ {"current_steps": 185, "total_steps": 292, "loss": 0.0272, "lr": 2.9630410663612226e-06, "epoch": 1.2671232876712328, "percentage": 63.36, "elapsed_time": "0:08:06", "remaining_time": "0:04:41"}
186
+ {"current_steps": 186, "total_steps": 292, "loss": 0.0301, "lr": 2.914031986938417e-06, "epoch": 1.273972602739726, "percentage": 63.7, "elapsed_time": "0:08:09", "remaining_time": "0:04:38"}
187
+ {"current_steps": 187, "total_steps": 292, "loss": 0.028, "lr": 2.865264363235396e-06, "epoch": 1.2808219178082192, "percentage": 64.04, "elapsed_time": "0:08:11", "remaining_time": "0:04:36"}
188
+ {"current_steps": 188, "total_steps": 292, "loss": 0.0287, "lr": 2.816743840219681e-06, "epoch": 1.2876712328767124, "percentage": 64.38, "elapsed_time": "0:08:14", "remaining_time": "0:04:33"}
189
+ {"current_steps": 189, "total_steps": 292, "loss": 0.0266, "lr": 2.7684760342563045e-06, "epoch": 1.2945205479452055, "percentage": 64.73, "elapsed_time": "0:08:17", "remaining_time": "0:04:30"}
190
+ {"current_steps": 190, "total_steps": 292, "loss": 0.0221, "lr": 2.720466532457707e-06, "epoch": 1.3013698630136985, "percentage": 65.07, "elapsed_time": "0:08:19", "remaining_time": "0:04:28"}
191
+ {"current_steps": 191, "total_steps": 292, "loss": 0.0284, "lr": 2.6727208920370063e-06, "epoch": 1.308219178082192, "percentage": 65.41, "elapsed_time": "0:08:22", "remaining_time": "0:04:25"}
192
+ {"current_steps": 192, "total_steps": 292, "loss": 0.026, "lr": 2.6252446396647503e-06, "epoch": 1.3150684931506849, "percentage": 65.75, "elapsed_time": "0:08:24", "remaining_time": "0:04:22"}
193
+ {"current_steps": 193, "total_steps": 292, "loss": 0.0246, "lr": 2.578043270829178e-06, "epoch": 1.321917808219178, "percentage": 66.1, "elapsed_time": "0:08:27", "remaining_time": "0:04:20"}
194
+ {"current_steps": 194, "total_steps": 292, "loss": 0.03, "lr": 2.531122249200114e-06, "epoch": 1.3287671232876712, "percentage": 66.44, "elapsed_time": "0:08:29", "remaining_time": "0:04:17"}
195
+ {"current_steps": 195, "total_steps": 292, "loss": 0.0262, "lr": 2.4844870059965337e-06, "epoch": 1.3356164383561644, "percentage": 66.78, "elapsed_time": "0:08:32", "remaining_time": "0:04:14"}
196
+ {"current_steps": 196, "total_steps": 292, "loss": 0.0307, "lr": 2.438142939357882e-06, "epoch": 1.3424657534246576, "percentage": 67.12, "elapsed_time": "0:08:34", "remaining_time": "0:04:12"}
197
+ {"current_steps": 197, "total_steps": 292, "loss": 0.0316, "lr": 2.392095413719231e-06, "epoch": 1.3493150684931507, "percentage": 67.47, "elapsed_time": "0:08:37", "remaining_time": "0:04:09"}
198
+ {"current_steps": 198, "total_steps": 292, "loss": 0.0326, "lr": 2.346349759190332e-06, "epoch": 1.356164383561644, "percentage": 67.81, "elapsed_time": "0:08:40", "remaining_time": "0:04:06"}
199
+ {"current_steps": 199, "total_steps": 292, "loss": 0.0287, "lr": 2.3009112709386454e-06, "epoch": 1.3630136986301369, "percentage": 68.15, "elapsed_time": "0:08:43", "remaining_time": "0:04:04"}
200
+ {"current_steps": 200, "total_steps": 292, "loss": 0.0233, "lr": 2.2557852085764053e-06, "epoch": 1.36986301369863, "percentage": 68.49, "elapsed_time": "0:08:45", "remaining_time": "0:04:01"}
201
+ {"current_steps": 200, "total_steps": 292, "eval_loss": 0.12782636284828186, "epoch": 1.36986301369863, "percentage": 68.49, "elapsed_time": "0:08:49", "remaining_time": "0:04:03"}
202
+ {"current_steps": 201, "total_steps": 292, "loss": 0.0205, "lr": 2.2109767955518135e-06, "epoch": 1.3767123287671232, "percentage": 68.84, "elapsed_time": "0:08:51", "remaining_time": "0:04:00"}
203
+ {"current_steps": 202, "total_steps": 292, "loss": 0.0234, "lr": 2.1664912185444127e-06, "epoch": 1.3835616438356164, "percentage": 69.18, "elapsed_time": "0:08:54", "remaining_time": "0:03:58"}
204
+ {"current_steps": 203, "total_steps": 292, "loss": 0.0249, "lr": 2.1223336268647154e-06, "epoch": 1.3904109589041096, "percentage": 69.52, "elapsed_time": "0:08:57", "remaining_time": "0:03:55"}
205
+ {"current_steps": 204, "total_steps": 292, "loss": 0.0224, "lr": 2.0785091318581577e-06, "epoch": 1.3972602739726028, "percentage": 69.86, "elapsed_time": "0:08:59", "remaining_time": "0:03:52"}
206
+ {"current_steps": 205, "total_steps": 292, "loss": 0.0302, "lr": 2.035022806313449e-06, "epoch": 1.404109589041096, "percentage": 70.21, "elapsed_time": "0:09:02", "remaining_time": "0:03:50"}
207
+ {"current_steps": 206, "total_steps": 292, "loss": 0.0273, "lr": 1.991879683875386e-06, "epoch": 1.410958904109589, "percentage": 70.55, "elapsed_time": "0:09:04", "remaining_time": "0:03:47"}
208
+ {"current_steps": 207, "total_steps": 292, "loss": 0.022, "lr": 1.9490847584621993e-06, "epoch": 1.4178082191780823, "percentage": 70.89, "elapsed_time": "0:09:07", "remaining_time": "0:03:44"}
209
+ {"current_steps": 208, "total_steps": 292, "loss": 0.0216, "lr": 1.9066429836874844e-06, "epoch": 1.4246575342465753, "percentage": 71.23, "elapsed_time": "0:09:09", "remaining_time": "0:03:42"}
210
+ {"current_steps": 209, "total_steps": 292, "loss": 0.032, "lr": 1.8645592722868223e-06, "epoch": 1.4315068493150684, "percentage": 71.58, "elapsed_time": "0:09:12", "remaining_time": "0:03:39"}
211
+ {"current_steps": 210, "total_steps": 292, "loss": 0.0301, "lr": 1.8228384955491136e-06, "epoch": 1.4383561643835616, "percentage": 71.92, "elapsed_time": "0:09:14", "remaining_time": "0:03:36"}
212
+ {"current_steps": 211, "total_steps": 292, "loss": 0.0236, "lr": 1.7814854827527144e-06, "epoch": 1.4452054794520548, "percentage": 72.26, "elapsed_time": "0:09:17", "remaining_time": "0:03:34"}
213
+ {"current_steps": 212, "total_steps": 292, "loss": 0.0309, "lr": 1.7405050206064372e-06, "epoch": 1.452054794520548, "percentage": 72.6, "elapsed_time": "0:09:19", "remaining_time": "0:03:31"}
214
+ {"current_steps": 213, "total_steps": 292, "loss": 0.0187, "lr": 1.6999018526954775e-06, "epoch": 1.4589041095890412, "percentage": 72.95, "elapsed_time": "0:09:23", "remaining_time": "0:03:28"}
215
+ {"current_steps": 214, "total_steps": 292, "loss": 0.0241, "lr": 1.6596806789323317e-06, "epoch": 1.4657534246575343, "percentage": 73.29, "elapsed_time": "0:09:25", "remaining_time": "0:03:26"}
216
+ {"current_steps": 215, "total_steps": 292, "loss": 0.0239, "lr": 1.6198461550127758e-06, "epoch": 1.4726027397260273, "percentage": 73.63, "elapsed_time": "0:09:28", "remaining_time": "0:03:23"}
217
+ {"current_steps": 216, "total_steps": 292, "loss": 0.0231, "lr": 1.5804028918769488e-06, "epoch": 1.4794520547945205, "percentage": 73.97, "elapsed_time": "0:09:31", "remaining_time": "0:03:21"}
218
+ {"current_steps": 217, "total_steps": 292, "loss": 0.0282, "lr": 1.5413554551756321e-06, "epoch": 1.4863013698630136, "percentage": 74.32, "elapsed_time": "0:09:33", "remaining_time": "0:03:18"}
219
+ {"current_steps": 218, "total_steps": 292, "loss": 0.0292, "lr": 1.5027083647417657e-06, "epoch": 1.4931506849315068, "percentage": 74.66, "elapsed_time": "0:09:36", "remaining_time": "0:03:15"}
220
+ {"current_steps": 219, "total_steps": 292, "loss": 0.0277, "lr": 1.4644660940672628e-06, "epoch": 1.5, "percentage": 75.0, "elapsed_time": "0:09:38", "remaining_time": "0:03:12"}
221
+ {"current_steps": 220, "total_steps": 292, "loss": 0.0232, "lr": 1.4266330697851955e-06, "epoch": 1.5068493150684932, "percentage": 75.34, "elapsed_time": "0:09:41", "remaining_time": "0:03:10"}
222
+ {"current_steps": 221, "total_steps": 292, "loss": 0.0187, "lr": 1.3892136711573983e-06, "epoch": 1.5136986301369864, "percentage": 75.68, "elapsed_time": "0:09:44", "remaining_time": "0:03:07"}
223
+ {"current_steps": 222, "total_steps": 292, "loss": 0.0228, "lr": 1.3522122295675616e-06, "epoch": 1.5205479452054793, "percentage": 76.03, "elapsed_time": "0:09:46", "remaining_time": "0:03:05"}
224
+ {"current_steps": 223, "total_steps": 292, "loss": 0.0344, "lr": 1.3156330280198637e-06, "epoch": 1.5273972602739727, "percentage": 76.37, "elapsed_time": "0:09:49", "remaining_time": "0:03:02"}
225
+ {"current_steps": 224, "total_steps": 292, "loss": 0.0258, "lr": 1.2794803006431984e-06, "epoch": 1.5342465753424657, "percentage": 76.71, "elapsed_time": "0:09:51", "remaining_time": "0:02:59"}
226
+ {"current_steps": 225, "total_steps": 292, "loss": 0.0229, "lr": 1.2437582322010672e-06, "epoch": 1.541095890410959, "percentage": 77.05, "elapsed_time": "0:09:54", "remaining_time": "0:02:56"}
227
+ {"current_steps": 226, "total_steps": 292, "loss": 0.0306, "lr": 1.2084709576071885e-06, "epoch": 1.547945205479452, "percentage": 77.4, "elapsed_time": "0:09:56", "remaining_time": "0:02:54"}
228
+ {"current_steps": 227, "total_steps": 292, "loss": 0.0195, "lr": 1.1736225614468627e-06, "epoch": 1.5547945205479452, "percentage": 77.74, "elapsed_time": "0:09:59", "remaining_time": "0:02:51"}
229
+ {"current_steps": 228, "total_steps": 292, "loss": 0.0356, "lr": 1.1392170775041788e-06, "epoch": 1.5616438356164384, "percentage": 78.08, "elapsed_time": "0:10:02", "remaining_time": "0:02:48"}
230
+ {"current_steps": 229, "total_steps": 292, "loss": 0.0265, "lr": 1.1052584882950896e-06, "epoch": 1.5684931506849316, "percentage": 78.42, "elapsed_time": "0:10:04", "remaining_time": "0:02:46"}
231
+ {"current_steps": 230, "total_steps": 292, "loss": 0.0264, "lr": 1.0717507246064273e-06, "epoch": 1.5753424657534247, "percentage": 78.77, "elapsed_time": "0:10:07", "remaining_time": "0:02:43"}
232
+ {"current_steps": 231, "total_steps": 292, "loss": 0.0216, "lr": 1.0386976650409102e-06, "epoch": 1.5821917808219177, "percentage": 79.11, "elapsed_time": "0:10:10", "remaining_time": "0:02:41"}
233
+ {"current_steps": 232, "total_steps": 292, "loss": 0.0243, "lr": 1.0061031355681766e-06, "epoch": 1.589041095890411, "percentage": 79.45, "elapsed_time": "0:10:12", "remaining_time": "0:02:38"}
234
+ {"current_steps": 233, "total_steps": 292, "loss": 0.0267, "lr": 9.739709090819254e-07, "epoch": 1.595890410958904, "percentage": 79.79, "elapsed_time": "0:10:14", "remaining_time": "0:02:35"}
235
+ {"current_steps": 234, "total_steps": 292, "loss": 0.0216, "lr": 9.423047049631956e-07, "epoch": 1.6027397260273972, "percentage": 80.14, "elapsed_time": "0:10:18", "remaining_time": "0:02:33"}
236
+ {"current_steps": 235, "total_steps": 292, "loss": 0.0224, "lr": 9.111081886498374e-07, "epoch": 1.6095890410958904, "percentage": 80.48, "elapsed_time": "0:10:20", "remaining_time": "0:02:30"}
237
+ {"current_steps": 236, "total_steps": 292, "loss": 0.0189, "lr": 8.803849712122292e-07, "epoch": 1.6164383561643836, "percentage": 80.82, "elapsed_time": "0:10:23", "remaining_time": "0:02:27"}
238
+ {"current_steps": 237, "total_steps": 292, "loss": 0.025, "lr": 8.501386089352858e-07, "epoch": 1.6232876712328768, "percentage": 81.16, "elapsed_time": "0:10:26", "remaining_time": "0:02:25"}
239
+ {"current_steps": 238, "total_steps": 292, "loss": 0.0276, "lr": 8.203726029068149e-07, "epoch": 1.6301369863013697, "percentage": 81.51, "elapsed_time": "0:10:28", "remaining_time": "0:02:22"}
240
+ {"current_steps": 239, "total_steps": 292, "loss": 0.0219, "lr": 7.910903986122537e-07, "epoch": 1.6369863013698631, "percentage": 81.85, "elapsed_time": "0:10:31", "remaining_time": "0:02:19"}
241
+ {"current_steps": 240, "total_steps": 292, "loss": 0.0246, "lr": 7.622953855358456e-07, "epoch": 1.643835616438356, "percentage": 82.19, "elapsed_time": "0:10:33", "remaining_time": "0:02:17"}
242
+ {"current_steps": 241, "total_steps": 292, "loss": 0.0266, "lr": 7.339908967683007e-07, "epoch": 1.6506849315068495, "percentage": 82.53, "elapsed_time": "0:10:36", "remaining_time": "0:02:14"}
243
+ {"current_steps": 242, "total_steps": 292, "loss": 0.0231, "lr": 7.061802086209857e-07, "epoch": 1.6575342465753424, "percentage": 82.88, "elapsed_time": "0:10:38", "remaining_time": "0:02:11"}
244
+ {"current_steps": 243, "total_steps": 292, "loss": 0.0234, "lr": 6.788665402466782e-07, "epoch": 1.6643835616438356, "percentage": 83.22, "elapsed_time": "0:10:41", "remaining_time": "0:02:09"}
245
+ {"current_steps": 244, "total_steps": 292, "loss": 0.0228, "lr": 6.52053053266945e-07, "epoch": 1.6712328767123288, "percentage": 83.56, "elapsed_time": "0:10:44", "remaining_time": "0:02:06"}
246
+ {"current_steps": 245, "total_steps": 292, "loss": 0.0157, "lr": 6.257428514061764e-07, "epoch": 1.678082191780822, "percentage": 83.9, "elapsed_time": "0:10:47", "remaining_time": "0:02:04"}
247
+ {"current_steps": 246, "total_steps": 292, "loss": 0.0223, "lr": 5.999389801323219e-07, "epoch": 1.6849315068493151, "percentage": 84.25, "elapsed_time": "0:10:49", "remaining_time": "0:02:01"}
248
+ {"current_steps": 247, "total_steps": 292, "loss": 0.0265, "lr": 5.746444263043715e-07, "epoch": 1.691780821917808, "percentage": 84.59, "elapsed_time": "0:10:52", "remaining_time": "0:01:58"}
249
+ {"current_steps": 248, "total_steps": 292, "loss": 0.0213, "lr": 5.498621178266167e-07, "epoch": 1.6986301369863015, "percentage": 84.93, "elapsed_time": "0:10:54", "remaining_time": "0:01:56"}
250
+ {"current_steps": 249, "total_steps": 292, "loss": 0.0238, "lr": 5.255949233097451e-07, "epoch": 1.7054794520547945, "percentage": 85.27, "elapsed_time": "0:10:57", "remaining_time": "0:01:53"}
251
+ {"current_steps": 250, "total_steps": 292, "loss": 0.0254, "lr": 5.018456517387837e-07, "epoch": 1.7123287671232876, "percentage": 85.62, "elapsed_time": "0:11:00", "remaining_time": "0:01:50"}
252
+ {"current_steps": 251, "total_steps": 292, "loss": 0.0269, "lr": 4.786170521479588e-07, "epoch": 1.7191780821917808, "percentage": 85.96, "elapsed_time": "0:11:02", "remaining_time": "0:01:48"}
253
+ {"current_steps": 252, "total_steps": 292, "loss": 0.0337, "lr": 4.5591181330248534e-07, "epoch": 1.726027397260274, "percentage": 86.3, "elapsed_time": "0:11:05", "remaining_time": "0:01:45"}
254
+ {"current_steps": 253, "total_steps": 292, "loss": 0.0184, "lr": 4.3373256338733847e-07, "epoch": 1.7328767123287672, "percentage": 86.64, "elapsed_time": "0:11:07", "remaining_time": "0:01:42"}
255
+ {"current_steps": 254, "total_steps": 292, "loss": 0.0274, "lr": 4.1208186970303097e-07, "epoch": 1.7397260273972601, "percentage": 86.99, "elapsed_time": "0:11:10", "remaining_time": "0:01:40"}
256
+ {"current_steps": 255, "total_steps": 292, "loss": 0.0281, "lr": 3.90962238368448e-07, "epoch": 1.7465753424657535, "percentage": 87.33, "elapsed_time": "0:11:13", "remaining_time": "0:01:37"}
257
+ {"current_steps": 256, "total_steps": 292, "loss": 0.02, "lr": 3.70376114030751e-07, "epoch": 1.7534246575342465, "percentage": 87.67, "elapsed_time": "0:11:15", "remaining_time": "0:01:35"}
258
+ {"current_steps": 257, "total_steps": 292, "loss": 0.0207, "lr": 3.503258795824105e-07, "epoch": 1.7602739726027399, "percentage": 88.01, "elapsed_time": "0:11:18", "remaining_time": "0:01:32"}
259
+ {"current_steps": 258, "total_steps": 292, "loss": 0.0249, "lr": 3.308138558853746e-07, "epoch": 1.7671232876712328, "percentage": 88.36, "elapsed_time": "0:11:20", "remaining_time": "0:01:29"}
260
+ {"current_steps": 259, "total_steps": 292, "loss": 0.0221, "lr": 3.1184230150243025e-07, "epoch": 1.773972602739726, "percentage": 88.7, "elapsed_time": "0:11:23", "remaining_time": "0:01:27"}
261
+ {"current_steps": 260, "total_steps": 292, "loss": 0.0231, "lr": 2.934134124357646e-07, "epoch": 1.7808219178082192, "percentage": 89.04, "elapsed_time": "0:11:25", "remaining_time": "0:01:24"}
262
+ {"current_steps": 261, "total_steps": 292, "loss": 0.0247, "lr": 2.755293218727739e-07, "epoch": 1.7876712328767124, "percentage": 89.38, "elapsed_time": "0:11:28", "remaining_time": "0:01:21"}
263
+ {"current_steps": 262, "total_steps": 292, "loss": 0.0254, "lr": 2.5819209993914185e-07, "epoch": 1.7945205479452055, "percentage": 89.73, "elapsed_time": "0:11:30", "remaining_time": "0:01:19"}
264
+ {"current_steps": 263, "total_steps": 292, "loss": 0.0182, "lr": 2.4140375345921895e-07, "epoch": 1.8013698630136985, "percentage": 90.07, "elapsed_time": "0:11:33", "remaining_time": "0:01:16"}
265
+ {"current_steps": 264, "total_steps": 292, "loss": 0.0213, "lr": 2.2516622572372416e-07, "epoch": 1.808219178082192, "percentage": 90.41, "elapsed_time": "0:11:36", "remaining_time": "0:01:13"}
266
+ {"current_steps": 265, "total_steps": 292, "loss": 0.0286, "lr": 2.094813962648101e-07, "epoch": 1.8150684931506849, "percentage": 90.75, "elapsed_time": "0:11:38", "remaining_time": "0:01:11"}
267
+ {"current_steps": 266, "total_steps": 292, "loss": 0.0199, "lr": 1.9435108063849684e-07, "epoch": 1.821917808219178, "percentage": 91.1, "elapsed_time": "0:11:41", "remaining_time": "0:01:08"}
268
+ {"current_steps": 267, "total_steps": 292, "loss": 0.0282, "lr": 1.7977703021452185e-07, "epoch": 1.8287671232876712, "percentage": 91.44, "elapsed_time": "0:11:43", "remaining_time": "0:01:05"}
269
+ {"current_steps": 268, "total_steps": 292, "loss": 0.0243, "lr": 1.6576093197361253e-07, "epoch": 1.8356164383561644, "percentage": 91.78, "elapsed_time": "0:11:46", "remaining_time": "0:01:03"}
270
+ {"current_steps": 269, "total_steps": 292, "loss": 0.0244, "lr": 1.523044083122138e-07, "epoch": 1.8424657534246576, "percentage": 92.12, "elapsed_time": "0:11:49", "remaining_time": "0:01:00"}
271
+ {"current_steps": 270, "total_steps": 292, "loss": 0.028, "lr": 1.39409016854693e-07, "epoch": 1.8493150684931505, "percentage": 92.47, "elapsed_time": "0:11:51", "remaining_time": "0:00:57"}
272
+ {"current_steps": 271, "total_steps": 292, "loss": 0.0252, "lr": 1.2707625027304104e-07, "epoch": 1.856164383561644, "percentage": 92.81, "elapsed_time": "0:11:54", "remaining_time": "0:00:55"}
273
+ {"current_steps": 272, "total_steps": 292, "loss": 0.0268, "lr": 1.1530753611409151e-07, "epoch": 1.8630136986301369, "percentage": 93.15, "elapsed_time": "0:11:56", "remaining_time": "0:00:52"}
274
+ {"current_steps": 273, "total_steps": 292, "loss": 0.0256, "lr": 1.041042366342787e-07, "epoch": 1.8698630136986303, "percentage": 93.49, "elapsed_time": "0:11:59", "remaining_time": "0:00:50"}
275
+ {"current_steps": 274, "total_steps": 292, "loss": 0.0256, "lr": 9.346764864195335e-08, "epoch": 1.8767123287671232, "percentage": 93.84, "elapsed_time": "0:12:01", "remaining_time": "0:00:47"}
276
+ {"current_steps": 275, "total_steps": 292, "loss": 0.0315, "lr": 8.339900334727536e-08, "epoch": 1.8835616438356164, "percentage": 94.18, "elapsed_time": "0:12:04", "remaining_time": "0:00:44"}
277
+ {"current_steps": 276, "total_steps": 292, "loss": 0.0225, "lr": 7.389946621969679e-08, "epoch": 1.8904109589041096, "percentage": 94.52, "elapsed_time": "0:12:07", "remaining_time": "0:00:42"}
278
+ {"current_steps": 277, "total_steps": 292, "loss": 0.0205, "lr": 6.497013685305586e-08, "epoch": 1.8972602739726028, "percentage": 94.86, "elapsed_time": "0:12:10", "remaining_time": "0:00:39"}
279
+ {"current_steps": 278, "total_steps": 292, "loss": 0.0219, "lr": 5.661204883829763e-08, "epoch": 1.904109589041096, "percentage": 95.21, "elapsed_time": "0:12:12", "remaining_time": "0:00:36"}
280
+ {"current_steps": 279, "total_steps": 292, "loss": 0.0269, "lr": 4.8826169643832464e-08, "epoch": 1.910958904109589, "percentage": 95.55, "elapsed_time": "0:12:15", "remaining_time": "0:00:34"}
281
+ {"current_steps": 280, "total_steps": 292, "loss": 0.0267, "lr": 4.1613400503550114e-08, "epoch": 1.9178082191780823, "percentage": 95.89, "elapsed_time": "0:12:18", "remaining_time": "0:00:31"}
282
+ {"current_steps": 281, "total_steps": 292, "loss": 0.0272, "lr": 3.4974576312497564e-08, "epoch": 1.9246575342465753, "percentage": 96.23, "elapsed_time": "0:12:21", "remaining_time": "0:00:29"}
283
+ {"current_steps": 282, "total_steps": 292, "loss": 0.021, "lr": 2.8910465530240793e-08, "epoch": 1.9315068493150684, "percentage": 96.58, "elapsed_time": "0:12:23", "remaining_time": "0:00:26"}
284
+ {"current_steps": 283, "total_steps": 292, "loss": 0.0312, "lr": 2.3421770091912044e-08, "epoch": 1.9383561643835616, "percentage": 96.92, "elapsed_time": "0:12:26", "remaining_time": "0:00:23"}
285
+ {"current_steps": 284, "total_steps": 292, "loss": 0.0268, "lr": 1.850912532696092e-08, "epoch": 1.9452054794520548, "percentage": 97.26, "elapsed_time": "0:12:28", "remaining_time": "0:00:21"}
286
+ {"current_steps": 285, "total_steps": 292, "loss": 0.0244, "lr": 1.4173099885610997e-08, "epoch": 1.952054794520548, "percentage": 97.6, "elapsed_time": "0:12:31", "remaining_time": "0:00:18"}
287
+ {"current_steps": 286, "total_steps": 292, "loss": 0.0258, "lr": 1.041419567303914e-08, "epoch": 1.958904109589041, "percentage": 97.95, "elapsed_time": "0:12:35", "remaining_time": "0:00:15"}
288
+ {"current_steps": 287, "total_steps": 292, "loss": 0.0228, "lr": 7.2328477912769756e-09, "epoch": 1.9657534246575343, "percentage": 98.29, "elapsed_time": "0:12:37", "remaining_time": "0:00:13"}
289
+ {"current_steps": 288, "total_steps": 292, "loss": 0.0213, "lr": 4.629424488850065e-09, "epoch": 1.9726027397260273, "percentage": 98.63, "elapsed_time": "0:12:40", "remaining_time": "0:00:10"}
290
+ {"current_steps": 289, "total_steps": 292, "loss": 0.0232, "lr": 2.604227118148117e-09, "epoch": 1.9794520547945207, "percentage": 98.97, "elapsed_time": "0:12:42", "remaining_time": "0:00:07"}
291
+ {"current_steps": 290, "total_steps": 292, "loss": 0.0224, "lr": 1.1574901005456662e-09, "epoch": 1.9863013698630136, "percentage": 99.32, "elapsed_time": "0:12:45", "remaining_time": "0:00:05"}
292
+ {"current_steps": 291, "total_steps": 292, "loss": 0.0254, "lr": 2.89380899267111e-10, "epoch": 1.9931506849315068, "percentage": 99.66, "elapsed_time": "0:12:47", "remaining_time": "0:00:02"}
293
+ {"current_steps": 292, "total_steps": 292, "loss": 0.0212, "lr": 0.0, "epoch": 2.0, "percentage": 100.0, "elapsed_time": "0:12:50", "remaining_time": "0:00:00"}
294
+ {"current_steps": 292, "total_steps": 292, "epoch": 2.0, "percentage": 100.0, "elapsed_time": "0:13:53", "remaining_time": "0:00:00"}
trainer_state.json ADDED
@@ -0,0 +1,2094 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "best_metric": null,
3
+ "best_model_checkpoint": null,
4
+ "epoch": 2.0,
5
+ "eval_steps": 200,
6
+ "global_step": 292,
7
+ "is_hyper_param_search": false,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 0.00684931506849315,
13
+ "grad_norm": 0.9593834756973793,
14
+ "learning_rate": 9.999710619100732e-06,
15
+ "loss": 0.1231,
16
+ "step": 1
17
+ },
18
+ {
19
+ "epoch": 0.0136986301369863,
20
+ "grad_norm": 1.0366318450053902,
21
+ "learning_rate": 9.998842509899456e-06,
22
+ "loss": 0.1599,
23
+ "step": 2
24
+ },
25
+ {
26
+ "epoch": 0.02054794520547945,
27
+ "grad_norm": 0.6508585223819798,
28
+ "learning_rate": 9.997395772881853e-06,
29
+ "loss": 0.1042,
30
+ "step": 3
31
+ },
32
+ {
33
+ "epoch": 0.0273972602739726,
34
+ "grad_norm": 0.7657469202490956,
35
+ "learning_rate": 9.995370575511151e-06,
36
+ "loss": 0.114,
37
+ "step": 4
38
+ },
39
+ {
40
+ "epoch": 0.03424657534246575,
41
+ "grad_norm": 0.773450378447972,
42
+ "learning_rate": 9.992767152208724e-06,
43
+ "loss": 0.0993,
44
+ "step": 5
45
+ },
46
+ {
47
+ "epoch": 0.0410958904109589,
48
+ "grad_norm": 0.6264523234725071,
49
+ "learning_rate": 9.989585804326963e-06,
50
+ "loss": 0.0887,
51
+ "step": 6
52
+ },
53
+ {
54
+ "epoch": 0.04794520547945205,
55
+ "grad_norm": 0.7537995289017994,
56
+ "learning_rate": 9.985826900114391e-06,
57
+ "loss": 0.0992,
58
+ "step": 7
59
+ },
60
+ {
61
+ "epoch": 0.0547945205479452,
62
+ "grad_norm": 0.6902981451332041,
63
+ "learning_rate": 9.98149087467304e-06,
64
+ "loss": 0.0936,
65
+ "step": 8
66
+ },
67
+ {
68
+ "epoch": 0.06164383561643835,
69
+ "grad_norm": 0.7066295696196615,
70
+ "learning_rate": 9.97657822990809e-06,
71
+ "loss": 0.1073,
72
+ "step": 9
73
+ },
74
+ {
75
+ "epoch": 0.0684931506849315,
76
+ "grad_norm": 0.7056273280149921,
77
+ "learning_rate": 9.97108953446976e-06,
78
+ "loss": 0.0993,
79
+ "step": 10
80
+ },
81
+ {
82
+ "epoch": 0.07534246575342465,
83
+ "grad_norm": 0.8400214910429082,
84
+ "learning_rate": 9.965025423687505e-06,
85
+ "loss": 0.1084,
86
+ "step": 11
87
+ },
88
+ {
89
+ "epoch": 0.0821917808219178,
90
+ "grad_norm": 0.6548937347185184,
91
+ "learning_rate": 9.95838659949645e-06,
92
+ "loss": 0.0843,
93
+ "step": 12
94
+ },
95
+ {
96
+ "epoch": 0.08904109589041095,
97
+ "grad_norm": 0.6559408616855686,
98
+ "learning_rate": 9.951173830356168e-06,
99
+ "loss": 0.0894,
100
+ "step": 13
101
+ },
102
+ {
103
+ "epoch": 0.0958904109589041,
104
+ "grad_norm": 0.8356138855690183,
105
+ "learning_rate": 9.943387951161702e-06,
106
+ "loss": 0.1161,
107
+ "step": 14
108
+ },
109
+ {
110
+ "epoch": 0.10273972602739725,
111
+ "grad_norm": 0.6727072374629645,
112
+ "learning_rate": 9.935029863146946e-06,
113
+ "loss": 0.0872,
114
+ "step": 15
115
+ },
116
+ {
117
+ "epoch": 0.1095890410958904,
118
+ "grad_norm": 0.6205940232841655,
119
+ "learning_rate": 9.926100533780304e-06,
120
+ "loss": 0.0697,
121
+ "step": 16
122
+ },
123
+ {
124
+ "epoch": 0.11643835616438356,
125
+ "grad_norm": 0.8038247362280305,
126
+ "learning_rate": 9.916600996652726e-06,
127
+ "loss": 0.1001,
128
+ "step": 17
129
+ },
130
+ {
131
+ "epoch": 0.1232876712328767,
132
+ "grad_norm": 0.8703467157302192,
133
+ "learning_rate": 9.906532351358047e-06,
134
+ "loss": 0.1142,
135
+ "step": 18
136
+ },
137
+ {
138
+ "epoch": 0.13013698630136986,
139
+ "grad_norm": 0.749160596257599,
140
+ "learning_rate": 9.895895763365722e-06,
141
+ "loss": 0.0951,
142
+ "step": 19
143
+ },
144
+ {
145
+ "epoch": 0.136986301369863,
146
+ "grad_norm": 0.7754609595123414,
147
+ "learning_rate": 9.88469246388591e-06,
148
+ "loss": 0.1102,
149
+ "step": 20
150
+ },
151
+ {
152
+ "epoch": 0.14383561643835616,
153
+ "grad_norm": 0.7830298124590415,
154
+ "learning_rate": 9.872923749726959e-06,
155
+ "loss": 0.1102,
156
+ "step": 21
157
+ },
158
+ {
159
+ "epoch": 0.1506849315068493,
160
+ "grad_norm": 0.7078241134141917,
161
+ "learning_rate": 9.860590983145307e-06,
162
+ "loss": 0.0889,
163
+ "step": 22
164
+ },
165
+ {
166
+ "epoch": 0.15753424657534246,
167
+ "grad_norm": 0.8428672025857301,
168
+ "learning_rate": 9.847695591687788e-06,
169
+ "loss": 0.1128,
170
+ "step": 23
171
+ },
172
+ {
173
+ "epoch": 0.1643835616438356,
174
+ "grad_norm": 0.7220487174797594,
175
+ "learning_rate": 9.834239068026388e-06,
176
+ "loss": 0.099,
177
+ "step": 24
178
+ },
179
+ {
180
+ "epoch": 0.17123287671232876,
181
+ "grad_norm": 0.7064317613231897,
182
+ "learning_rate": 9.82022296978548e-06,
183
+ "loss": 0.0931,
184
+ "step": 25
185
+ },
186
+ {
187
+ "epoch": 0.1780821917808219,
188
+ "grad_norm": 0.6914687462455392,
189
+ "learning_rate": 9.805648919361505e-06,
190
+ "loss": 0.0875,
191
+ "step": 26
192
+ },
193
+ {
194
+ "epoch": 0.18493150684931506,
195
+ "grad_norm": 0.7424459388678591,
196
+ "learning_rate": 9.790518603735191e-06,
197
+ "loss": 0.1036,
198
+ "step": 27
199
+ },
200
+ {
201
+ "epoch": 0.1917808219178082,
202
+ "grad_norm": 0.735070163197544,
203
+ "learning_rate": 9.774833774276278e-06,
204
+ "loss": 0.0994,
205
+ "step": 28
206
+ },
207
+ {
208
+ "epoch": 0.19863013698630136,
209
+ "grad_norm": 0.8280959253160824,
210
+ "learning_rate": 9.758596246540782e-06,
211
+ "loss": 0.1192,
212
+ "step": 29
213
+ },
214
+ {
215
+ "epoch": 0.2054794520547945,
216
+ "grad_norm": 0.6597645775525501,
217
+ "learning_rate": 9.741807900060858e-06,
218
+ "loss": 0.0928,
219
+ "step": 30
220
+ },
221
+ {
222
+ "epoch": 0.21232876712328766,
223
+ "grad_norm": 0.763238425774405,
224
+ "learning_rate": 9.724470678127226e-06,
225
+ "loss": 0.1115,
226
+ "step": 31
227
+ },
228
+ {
229
+ "epoch": 0.2191780821917808,
230
+ "grad_norm": 0.704884344301177,
231
+ "learning_rate": 9.706586587564236e-06,
232
+ "loss": 0.109,
233
+ "step": 32
234
+ },
235
+ {
236
+ "epoch": 0.22602739726027396,
237
+ "grad_norm": 0.7005139949025764,
238
+ "learning_rate": 9.68815769849757e-06,
239
+ "loss": 0.1025,
240
+ "step": 33
241
+ },
242
+ {
243
+ "epoch": 0.2328767123287671,
244
+ "grad_norm": 0.805148188033095,
245
+ "learning_rate": 9.669186144114627e-06,
246
+ "loss": 0.1096,
247
+ "step": 34
248
+ },
249
+ {
250
+ "epoch": 0.23972602739726026,
251
+ "grad_norm": 0.7624408584402959,
252
+ "learning_rate": 9.649674120417591e-06,
253
+ "loss": 0.1092,
254
+ "step": 35
255
+ },
256
+ {
257
+ "epoch": 0.2465753424657534,
258
+ "grad_norm": 0.8131922614568783,
259
+ "learning_rate": 9.62962388596925e-06,
260
+ "loss": 0.1106,
261
+ "step": 36
262
+ },
263
+ {
264
+ "epoch": 0.2534246575342466,
265
+ "grad_norm": 0.8147548648435898,
266
+ "learning_rate": 9.609037761631552e-06,
267
+ "loss": 0.1164,
268
+ "step": 37
269
+ },
270
+ {
271
+ "epoch": 0.2602739726027397,
272
+ "grad_norm": 0.7389791528033364,
273
+ "learning_rate": 9.587918130296969e-06,
274
+ "loss": 0.1041,
275
+ "step": 38
276
+ },
277
+ {
278
+ "epoch": 0.2671232876712329,
279
+ "grad_norm": 0.7059065795605491,
280
+ "learning_rate": 9.566267436612662e-06,
281
+ "loss": 0.0961,
282
+ "step": 39
283
+ },
284
+ {
285
+ "epoch": 0.273972602739726,
286
+ "grad_norm": 0.6760733340665249,
287
+ "learning_rate": 9.544088186697515e-06,
288
+ "loss": 0.1033,
289
+ "step": 40
290
+ },
291
+ {
292
+ "epoch": 0.2808219178082192,
293
+ "grad_norm": 0.5673291242923014,
294
+ "learning_rate": 9.521382947852042e-06,
295
+ "loss": 0.0692,
296
+ "step": 41
297
+ },
298
+ {
299
+ "epoch": 0.2876712328767123,
300
+ "grad_norm": 0.7574437710944075,
301
+ "learning_rate": 9.498154348261217e-06,
302
+ "loss": 0.1024,
303
+ "step": 42
304
+ },
305
+ {
306
+ "epoch": 0.2945205479452055,
307
+ "grad_norm": 0.7041195553676743,
308
+ "learning_rate": 9.474405076690257e-06,
309
+ "loss": 0.1045,
310
+ "step": 43
311
+ },
312
+ {
313
+ "epoch": 0.3013698630136986,
314
+ "grad_norm": 0.6677385477116528,
315
+ "learning_rate": 9.450137882173385e-06,
316
+ "loss": 0.088,
317
+ "step": 44
318
+ },
319
+ {
320
+ "epoch": 0.3082191780821918,
321
+ "grad_norm": 0.6346381036971247,
322
+ "learning_rate": 9.425355573695628e-06,
323
+ "loss": 0.089,
324
+ "step": 45
325
+ },
326
+ {
327
+ "epoch": 0.3150684931506849,
328
+ "grad_norm": 0.8287038911978735,
329
+ "learning_rate": 9.40006101986768e-06,
330
+ "loss": 0.1076,
331
+ "step": 46
332
+ },
333
+ {
334
+ "epoch": 0.3219178082191781,
335
+ "grad_norm": 0.6982754394550266,
336
+ "learning_rate": 9.374257148593824e-06,
337
+ "loss": 0.0997,
338
+ "step": 47
339
+ },
340
+ {
341
+ "epoch": 0.3287671232876712,
342
+ "grad_norm": 0.7973278910995713,
343
+ "learning_rate": 9.347946946733055e-06,
344
+ "loss": 0.1089,
345
+ "step": 48
346
+ },
347
+ {
348
+ "epoch": 0.3356164383561644,
349
+ "grad_norm": 0.7330982797613164,
350
+ "learning_rate": 9.321133459753322e-06,
351
+ "loss": 0.1028,
352
+ "step": 49
353
+ },
354
+ {
355
+ "epoch": 0.3424657534246575,
356
+ "grad_norm": 0.7760454288407581,
357
+ "learning_rate": 9.293819791379016e-06,
358
+ "loss": 0.1166,
359
+ "step": 50
360
+ },
361
+ {
362
+ "epoch": 0.3493150684931507,
363
+ "grad_norm": 0.7600040395504281,
364
+ "learning_rate": 9.266009103231702e-06,
365
+ "loss": 0.1008,
366
+ "step": 51
367
+ },
368
+ {
369
+ "epoch": 0.3561643835616438,
370
+ "grad_norm": 0.8462820099684166,
371
+ "learning_rate": 9.237704614464157e-06,
372
+ "loss": 0.1126,
373
+ "step": 52
374
+ },
375
+ {
376
+ "epoch": 0.363013698630137,
377
+ "grad_norm": 0.7809644134536166,
378
+ "learning_rate": 9.208909601387748e-06,
379
+ "loss": 0.1081,
380
+ "step": 53
381
+ },
382
+ {
383
+ "epoch": 0.3698630136986301,
384
+ "grad_norm": 0.6806410480801305,
385
+ "learning_rate": 9.179627397093184e-06,
386
+ "loss": 0.0901,
387
+ "step": 54
388
+ },
389
+ {
390
+ "epoch": 0.3767123287671233,
391
+ "grad_norm": 0.6999141874681865,
392
+ "learning_rate": 9.149861391064714e-06,
393
+ "loss": 0.1017,
394
+ "step": 55
395
+ },
396
+ {
397
+ "epoch": 0.3835616438356164,
398
+ "grad_norm": 0.7362016061187906,
399
+ "learning_rate": 9.119615028787771e-06,
400
+ "loss": 0.1018,
401
+ "step": 56
402
+ },
403
+ {
404
+ "epoch": 0.3904109589041096,
405
+ "grad_norm": 0.796603408754819,
406
+ "learning_rate": 9.088891811350164e-06,
407
+ "loss": 0.1049,
408
+ "step": 57
409
+ },
410
+ {
411
+ "epoch": 0.3972602739726027,
412
+ "grad_norm": 0.7576527967226971,
413
+ "learning_rate": 9.057695295036806e-06,
414
+ "loss": 0.1042,
415
+ "step": 58
416
+ },
417
+ {
418
+ "epoch": 0.4041095890410959,
419
+ "grad_norm": 0.6427964831166068,
420
+ "learning_rate": 9.026029090918076e-06,
421
+ "loss": 0.0888,
422
+ "step": 59
423
+ },
424
+ {
425
+ "epoch": 0.410958904109589,
426
+ "grad_norm": 0.7481130379993173,
427
+ "learning_rate": 8.993896864431825e-06,
428
+ "loss": 0.1002,
429
+ "step": 60
430
+ },
431
+ {
432
+ "epoch": 0.4178082191780822,
433
+ "grad_norm": 0.6574786390576889,
434
+ "learning_rate": 8.96130233495909e-06,
435
+ "loss": 0.0881,
436
+ "step": 61
437
+ },
438
+ {
439
+ "epoch": 0.4246575342465753,
440
+ "grad_norm": 0.6754357749197657,
441
+ "learning_rate": 8.928249275393572e-06,
442
+ "loss": 0.0944,
443
+ "step": 62
444
+ },
445
+ {
446
+ "epoch": 0.4315068493150685,
447
+ "grad_norm": 0.6509678298973031,
448
+ "learning_rate": 8.894741511704911e-06,
449
+ "loss": 0.0874,
450
+ "step": 63
451
+ },
452
+ {
453
+ "epoch": 0.4383561643835616,
454
+ "grad_norm": 0.7125323018469738,
455
+ "learning_rate": 8.860782922495821e-06,
456
+ "loss": 0.1053,
457
+ "step": 64
458
+ },
459
+ {
460
+ "epoch": 0.4452054794520548,
461
+ "grad_norm": 0.7898553497859033,
462
+ "learning_rate": 8.826377438553138e-06,
463
+ "loss": 0.1098,
464
+ "step": 65
465
+ },
466
+ {
467
+ "epoch": 0.4520547945205479,
468
+ "grad_norm": 0.7270237572097952,
469
+ "learning_rate": 8.791529042392813e-06,
470
+ "loss": 0.0983,
471
+ "step": 66
472
+ },
473
+ {
474
+ "epoch": 0.4589041095890411,
475
+ "grad_norm": 0.6556709322157142,
476
+ "learning_rate": 8.756241767798934e-06,
477
+ "loss": 0.0922,
478
+ "step": 67
479
+ },
480
+ {
481
+ "epoch": 0.4657534246575342,
482
+ "grad_norm": 0.7330312516853001,
483
+ "learning_rate": 8.720519699356804e-06,
484
+ "loss": 0.1009,
485
+ "step": 68
486
+ },
487
+ {
488
+ "epoch": 0.4726027397260274,
489
+ "grad_norm": 0.7894192501819968,
490
+ "learning_rate": 8.684366971980139e-06,
491
+ "loss": 0.1136,
492
+ "step": 69
493
+ },
494
+ {
495
+ "epoch": 0.4794520547945205,
496
+ "grad_norm": 0.7620445217510202,
497
+ "learning_rate": 8.647787770432439e-06,
498
+ "loss": 0.106,
499
+ "step": 70
500
+ },
501
+ {
502
+ "epoch": 0.4863013698630137,
503
+ "grad_norm": 0.6309422495845969,
504
+ "learning_rate": 8.610786328842602e-06,
505
+ "loss": 0.0781,
506
+ "step": 71
507
+ },
508
+ {
509
+ "epoch": 0.4931506849315068,
510
+ "grad_norm": 0.7065600537534685,
511
+ "learning_rate": 8.573366930214807e-06,
512
+ "loss": 0.0953,
513
+ "step": 72
514
+ },
515
+ {
516
+ "epoch": 0.5,
517
+ "grad_norm": 0.7115736455959387,
518
+ "learning_rate": 8.535533905932739e-06,
519
+ "loss": 0.0977,
520
+ "step": 73
521
+ },
522
+ {
523
+ "epoch": 0.5068493150684932,
524
+ "grad_norm": 0.6999241584756221,
525
+ "learning_rate": 8.497291635258235e-06,
526
+ "loss": 0.0986,
527
+ "step": 74
528
+ },
529
+ {
530
+ "epoch": 0.5136986301369864,
531
+ "grad_norm": 0.8052347118427793,
532
+ "learning_rate": 8.458644544824371e-06,
533
+ "loss": 0.1066,
534
+ "step": 75
535
+ },
536
+ {
537
+ "epoch": 0.5205479452054794,
538
+ "grad_norm": 0.5684986456446331,
539
+ "learning_rate": 8.419597108123054e-06,
540
+ "loss": 0.0778,
541
+ "step": 76
542
+ },
543
+ {
544
+ "epoch": 0.5273972602739726,
545
+ "grad_norm": 0.6569153273607293,
546
+ "learning_rate": 8.380153844987225e-06,
547
+ "loss": 0.0885,
548
+ "step": 77
549
+ },
550
+ {
551
+ "epoch": 0.5342465753424658,
552
+ "grad_norm": 0.6543237226637773,
553
+ "learning_rate": 8.340319321067668e-06,
554
+ "loss": 0.0784,
555
+ "step": 78
556
+ },
557
+ {
558
+ "epoch": 0.541095890410959,
559
+ "grad_norm": 0.7091685827973174,
560
+ "learning_rate": 8.300098147304523e-06,
561
+ "loss": 0.1051,
562
+ "step": 79
563
+ },
564
+ {
565
+ "epoch": 0.547945205479452,
566
+ "grad_norm": 0.9118299925604293,
567
+ "learning_rate": 8.259494979393563e-06,
568
+ "loss": 0.1233,
569
+ "step": 80
570
+ },
571
+ {
572
+ "epoch": 0.5547945205479452,
573
+ "grad_norm": 0.6996397199985479,
574
+ "learning_rate": 8.218514517247287e-06,
575
+ "loss": 0.1009,
576
+ "step": 81
577
+ },
578
+ {
579
+ "epoch": 0.5616438356164384,
580
+ "grad_norm": 0.7319616286987115,
581
+ "learning_rate": 8.177161504450887e-06,
582
+ "loss": 0.0909,
583
+ "step": 82
584
+ },
585
+ {
586
+ "epoch": 0.5684931506849316,
587
+ "grad_norm": 0.7321313442715763,
588
+ "learning_rate": 8.135440727713179e-06,
589
+ "loss": 0.0965,
590
+ "step": 83
591
+ },
592
+ {
593
+ "epoch": 0.5753424657534246,
594
+ "grad_norm": 0.8922413248256611,
595
+ "learning_rate": 8.093357016312518e-06,
596
+ "loss": 0.1382,
597
+ "step": 84
598
+ },
599
+ {
600
+ "epoch": 0.5821917808219178,
601
+ "grad_norm": 0.8165751985420481,
602
+ "learning_rate": 8.050915241537802e-06,
603
+ "loss": 0.111,
604
+ "step": 85
605
+ },
606
+ {
607
+ "epoch": 0.589041095890411,
608
+ "grad_norm": 0.646721720523638,
609
+ "learning_rate": 8.008120316124612e-06,
610
+ "loss": 0.0903,
611
+ "step": 86
612
+ },
613
+ {
614
+ "epoch": 0.5958904109589042,
615
+ "grad_norm": 0.6836085431096287,
616
+ "learning_rate": 7.964977193686551e-06,
617
+ "loss": 0.0889,
618
+ "step": 87
619
+ },
620
+ {
621
+ "epoch": 0.6027397260273972,
622
+ "grad_norm": 0.7903980806529376,
623
+ "learning_rate": 7.921490868141843e-06,
624
+ "loss": 0.1025,
625
+ "step": 88
626
+ },
627
+ {
628
+ "epoch": 0.6095890410958904,
629
+ "grad_norm": 0.6568790448248424,
630
+ "learning_rate": 7.877666373135287e-06,
631
+ "loss": 0.0872,
632
+ "step": 89
633
+ },
634
+ {
635
+ "epoch": 0.6164383561643836,
636
+ "grad_norm": 0.7609820250058389,
637
+ "learning_rate": 7.83350878145559e-06,
638
+ "loss": 0.0976,
639
+ "step": 90
640
+ },
641
+ {
642
+ "epoch": 0.6232876712328768,
643
+ "grad_norm": 0.6136717465608816,
644
+ "learning_rate": 7.789023204448189e-06,
645
+ "loss": 0.0829,
646
+ "step": 91
647
+ },
648
+ {
649
+ "epoch": 0.6301369863013698,
650
+ "grad_norm": 0.6912288388438279,
651
+ "learning_rate": 7.744214791423597e-06,
652
+ "loss": 0.0862,
653
+ "step": 92
654
+ },
655
+ {
656
+ "epoch": 0.636986301369863,
657
+ "grad_norm": 0.8508509328110958,
658
+ "learning_rate": 7.699088729061355e-06,
659
+ "loss": 0.1227,
660
+ "step": 93
661
+ },
662
+ {
663
+ "epoch": 0.6438356164383562,
664
+ "grad_norm": 0.6878684979870703,
665
+ "learning_rate": 7.653650240809667e-06,
666
+ "loss": 0.0939,
667
+ "step": 94
668
+ },
669
+ {
670
+ "epoch": 0.6506849315068494,
671
+ "grad_norm": 0.7319689001661777,
672
+ "learning_rate": 7.60790458628077e-06,
673
+ "loss": 0.0957,
674
+ "step": 95
675
+ },
676
+ {
677
+ "epoch": 0.6575342465753424,
678
+ "grad_norm": 0.7472566372429058,
679
+ "learning_rate": 7.56185706064212e-06,
680
+ "loss": 0.0975,
681
+ "step": 96
682
+ },
683
+ {
684
+ "epoch": 0.6643835616438356,
685
+ "grad_norm": 0.6527988826637163,
686
+ "learning_rate": 7.5155129940034675e-06,
687
+ "loss": 0.0857,
688
+ "step": 97
689
+ },
690
+ {
691
+ "epoch": 0.6712328767123288,
692
+ "grad_norm": 0.7451882863373878,
693
+ "learning_rate": 7.468877750799887e-06,
694
+ "loss": 0.0995,
695
+ "step": 98
696
+ },
697
+ {
698
+ "epoch": 0.678082191780822,
699
+ "grad_norm": 0.6169267993802472,
700
+ "learning_rate": 7.421956729170823e-06,
701
+ "loss": 0.0779,
702
+ "step": 99
703
+ },
704
+ {
705
+ "epoch": 0.684931506849315,
706
+ "grad_norm": 0.6927338328840201,
707
+ "learning_rate": 7.374755360335253e-06,
708
+ "loss": 0.0908,
709
+ "step": 100
710
+ },
711
+ {
712
+ "epoch": 0.6917808219178082,
713
+ "grad_norm": 0.7461895323803881,
714
+ "learning_rate": 7.327279107962995e-06,
715
+ "loss": 0.094,
716
+ "step": 101
717
+ },
718
+ {
719
+ "epoch": 0.6986301369863014,
720
+ "grad_norm": 0.7678847568540728,
721
+ "learning_rate": 7.279533467542295e-06,
722
+ "loss": 0.1122,
723
+ "step": 102
724
+ },
725
+ {
726
+ "epoch": 0.7054794520547946,
727
+ "grad_norm": 0.747944270140034,
728
+ "learning_rate": 7.2315239657436955e-06,
729
+ "loss": 0.094,
730
+ "step": 103
731
+ },
732
+ {
733
+ "epoch": 0.7123287671232876,
734
+ "grad_norm": 0.8445562397107534,
735
+ "learning_rate": 7.183256159780321e-06,
736
+ "loss": 0.1022,
737
+ "step": 104
738
+ },
739
+ {
740
+ "epoch": 0.7191780821917808,
741
+ "grad_norm": 0.7483595593058716,
742
+ "learning_rate": 7.134735636764606e-06,
743
+ "loss": 0.0998,
744
+ "step": 105
745
+ },
746
+ {
747
+ "epoch": 0.726027397260274,
748
+ "grad_norm": 0.7776383870936369,
749
+ "learning_rate": 7.085968013061585e-06,
750
+ "loss": 0.1122,
751
+ "step": 106
752
+ },
753
+ {
754
+ "epoch": 0.7328767123287672,
755
+ "grad_norm": 0.6336882385097793,
756
+ "learning_rate": 7.036958933638779e-06,
757
+ "loss": 0.074,
758
+ "step": 107
759
+ },
760
+ {
761
+ "epoch": 0.7397260273972602,
762
+ "grad_norm": 0.8050211422597002,
763
+ "learning_rate": 6.987714071412781e-06,
764
+ "loss": 0.1103,
765
+ "step": 108
766
+ },
767
+ {
768
+ "epoch": 0.7465753424657534,
769
+ "grad_norm": 0.767920393441073,
770
+ "learning_rate": 6.938239126592592e-06,
771
+ "loss": 0.1014,
772
+ "step": 109
773
+ },
774
+ {
775
+ "epoch": 0.7534246575342466,
776
+ "grad_norm": 0.7736203031736444,
777
+ "learning_rate": 6.888539826019824e-06,
778
+ "loss": 0.0908,
779
+ "step": 110
780
+ },
781
+ {
782
+ "epoch": 0.7602739726027398,
783
+ "grad_norm": 0.7336376174406862,
784
+ "learning_rate": 6.8386219225057945e-06,
785
+ "loss": 0.0973,
786
+ "step": 111
787
+ },
788
+ {
789
+ "epoch": 0.7671232876712328,
790
+ "grad_norm": 0.7394637744781307,
791
+ "learning_rate": 6.788491194165629e-06,
792
+ "loss": 0.0983,
793
+ "step": 112
794
+ },
795
+ {
796
+ "epoch": 0.773972602739726,
797
+ "grad_norm": 0.657050477816448,
798
+ "learning_rate": 6.738153443749421e-06,
799
+ "loss": 0.101,
800
+ "step": 113
801
+ },
802
+ {
803
+ "epoch": 0.7808219178082192,
804
+ "grad_norm": 0.7903607521289585,
805
+ "learning_rate": 6.687614497970567e-06,
806
+ "loss": 0.0949,
807
+ "step": 114
808
+ },
809
+ {
810
+ "epoch": 0.7876712328767124,
811
+ "grad_norm": 0.742230209437882,
812
+ "learning_rate": 6.636880206831298e-06,
813
+ "loss": 0.1011,
814
+ "step": 115
815
+ },
816
+ {
817
+ "epoch": 0.7945205479452054,
818
+ "grad_norm": 0.6961952816223166,
819
+ "learning_rate": 6.585956442945531e-06,
820
+ "loss": 0.0872,
821
+ "step": 116
822
+ },
823
+ {
824
+ "epoch": 0.8013698630136986,
825
+ "grad_norm": 0.6312585843346639,
826
+ "learning_rate": 6.534849100859101e-06,
827
+ "loss": 0.0859,
828
+ "step": 117
829
+ },
830
+ {
831
+ "epoch": 0.8082191780821918,
832
+ "grad_norm": 0.7692870373605378,
833
+ "learning_rate": 6.483564096367452e-06,
834
+ "loss": 0.1142,
835
+ "step": 118
836
+ },
837
+ {
838
+ "epoch": 0.815068493150685,
839
+ "grad_norm": 0.6797050781586332,
840
+ "learning_rate": 6.432107365830872e-06,
841
+ "loss": 0.1047,
842
+ "step": 119
843
+ },
844
+ {
845
+ "epoch": 0.821917808219178,
846
+ "grad_norm": 0.7264370261717285,
847
+ "learning_rate": 6.380484865487346e-06,
848
+ "loss": 0.1174,
849
+ "step": 120
850
+ },
851
+ {
852
+ "epoch": 0.8287671232876712,
853
+ "grad_norm": 0.7138217749375094,
854
+ "learning_rate": 6.328702570763098e-06,
855
+ "loss": 0.1014,
856
+ "step": 121
857
+ },
858
+ {
859
+ "epoch": 0.8356164383561644,
860
+ "grad_norm": 0.6864241460283704,
861
+ "learning_rate": 6.276766475580935e-06,
862
+ "loss": 0.0888,
863
+ "step": 122
864
+ },
865
+ {
866
+ "epoch": 0.8424657534246576,
867
+ "grad_norm": 0.7861601718762463,
868
+ "learning_rate": 6.224682591666431e-06,
869
+ "loss": 0.1194,
870
+ "step": 123
871
+ },
872
+ {
873
+ "epoch": 0.8493150684931506,
874
+ "grad_norm": 0.6308802102336228,
875
+ "learning_rate": 6.1724569478520495e-06,
876
+ "loss": 0.0766,
877
+ "step": 124
878
+ },
879
+ {
880
+ "epoch": 0.8561643835616438,
881
+ "grad_norm": 0.7463619271167676,
882
+ "learning_rate": 6.120095589379299e-06,
883
+ "loss": 0.0999,
884
+ "step": 125
885
+ },
886
+ {
887
+ "epoch": 0.863013698630137,
888
+ "grad_norm": 0.723089306950006,
889
+ "learning_rate": 6.067604577198981e-06,
890
+ "loss": 0.1026,
891
+ "step": 126
892
+ },
893
+ {
894
+ "epoch": 0.8698630136986302,
895
+ "grad_norm": 0.8128427236232567,
896
+ "learning_rate": 6.014989987269617e-06,
897
+ "loss": 0.1177,
898
+ "step": 127
899
+ },
900
+ {
901
+ "epoch": 0.8767123287671232,
902
+ "grad_norm": 0.6557388016832486,
903
+ "learning_rate": 5.96225790985415e-06,
904
+ "loss": 0.0841,
905
+ "step": 128
906
+ },
907
+ {
908
+ "epoch": 0.8835616438356164,
909
+ "grad_norm": 0.5731463675088502,
910
+ "learning_rate": 5.909414448814971e-06,
911
+ "loss": 0.0668,
912
+ "step": 129
913
+ },
914
+ {
915
+ "epoch": 0.8904109589041096,
916
+ "grad_norm": 0.827794610936805,
917
+ "learning_rate": 5.856465720907388e-06,
918
+ "loss": 0.1175,
919
+ "step": 130
920
+ },
921
+ {
922
+ "epoch": 0.8972602739726028,
923
+ "grad_norm": 0.7900302418716827,
924
+ "learning_rate": 5.803417855071603e-06,
925
+ "loss": 0.1083,
926
+ "step": 131
927
+ },
928
+ {
929
+ "epoch": 0.9041095890410958,
930
+ "grad_norm": 0.6967481596548395,
931
+ "learning_rate": 5.7502769917232635e-06,
932
+ "loss": 0.1048,
933
+ "step": 132
934
+ },
935
+ {
936
+ "epoch": 0.910958904109589,
937
+ "grad_norm": 0.7431142462170286,
938
+ "learning_rate": 5.6970492820426994e-06,
939
+ "loss": 0.0984,
940
+ "step": 133
941
+ },
942
+ {
943
+ "epoch": 0.9178082191780822,
944
+ "grad_norm": 0.706859004886188,
945
+ "learning_rate": 5.643740887262905e-06,
946
+ "loss": 0.0909,
947
+ "step": 134
948
+ },
949
+ {
950
+ "epoch": 0.9246575342465754,
951
+ "grad_norm": 0.7439038100978435,
952
+ "learning_rate": 5.59035797795637e-06,
953
+ "loss": 0.1065,
954
+ "step": 135
955
+ },
956
+ {
957
+ "epoch": 0.9315068493150684,
958
+ "grad_norm": 0.7336777940517607,
959
+ "learning_rate": 5.536906733320816e-06,
960
+ "loss": 0.0923,
961
+ "step": 136
962
+ },
963
+ {
964
+ "epoch": 0.9383561643835616,
965
+ "grad_norm": 0.6486216523289474,
966
+ "learning_rate": 5.483393340463938e-06,
967
+ "loss": 0.0903,
968
+ "step": 137
969
+ },
970
+ {
971
+ "epoch": 0.9452054794520548,
972
+ "grad_norm": 0.738475458968018,
973
+ "learning_rate": 5.429823993687234e-06,
974
+ "loss": 0.1001,
975
+ "step": 138
976
+ },
977
+ {
978
+ "epoch": 0.952054794520548,
979
+ "grad_norm": 0.6195239351601184,
980
+ "learning_rate": 5.376204893769e-06,
981
+ "loss": 0.0828,
982
+ "step": 139
983
+ },
984
+ {
985
+ "epoch": 0.958904109589041,
986
+ "grad_norm": 0.6597631011909114,
987
+ "learning_rate": 5.322542247246583e-06,
988
+ "loss": 0.0895,
989
+ "step": 140
990
+ },
991
+ {
992
+ "epoch": 0.9657534246575342,
993
+ "grad_norm": 0.6247250694020478,
994
+ "learning_rate": 5.26884226569794e-06,
995
+ "loss": 0.0877,
996
+ "step": 141
997
+ },
998
+ {
999
+ "epoch": 0.9726027397260274,
1000
+ "grad_norm": 0.6907307523255068,
1001
+ "learning_rate": 5.215111165022653e-06,
1002
+ "loss": 0.0935,
1003
+ "step": 142
1004
+ },
1005
+ {
1006
+ "epoch": 0.9794520547945206,
1007
+ "grad_norm": 0.7157610963876196,
1008
+ "learning_rate": 5.161355164722416e-06,
1009
+ "loss": 0.105,
1010
+ "step": 143
1011
+ },
1012
+ {
1013
+ "epoch": 0.9863013698630136,
1014
+ "grad_norm": 0.6481915828513748,
1015
+ "learning_rate": 5.107580487181112e-06,
1016
+ "loss": 0.0811,
1017
+ "step": 144
1018
+ },
1019
+ {
1020
+ "epoch": 0.9931506849315068,
1021
+ "grad_norm": 0.5761545491887491,
1022
+ "learning_rate": 5.0537933569445585e-06,
1023
+ "loss": 0.0776,
1024
+ "step": 145
1025
+ },
1026
+ {
1027
+ "epoch": 1.0,
1028
+ "grad_norm": 0.6969449252695846,
1029
+ "learning_rate": 5e-06,
1030
+ "loss": 0.0813,
1031
+ "step": 146
1032
+ },
1033
+ {
1034
+ "epoch": 1.0068493150684932,
1035
+ "grad_norm": 0.40022211319597883,
1036
+ "learning_rate": 4.946206643055443e-06,
1037
+ "loss": 0.035,
1038
+ "step": 147
1039
+ },
1040
+ {
1041
+ "epoch": 1.0136986301369864,
1042
+ "grad_norm": 0.36996841036837175,
1043
+ "learning_rate": 4.89241951281889e-06,
1044
+ "loss": 0.0242,
1045
+ "step": 148
1046
+ },
1047
+ {
1048
+ "epoch": 1.0205479452054795,
1049
+ "grad_norm": 0.5889368440525016,
1050
+ "learning_rate": 4.838644835277585e-06,
1051
+ "loss": 0.0393,
1052
+ "step": 149
1053
+ },
1054
+ {
1055
+ "epoch": 1.0273972602739727,
1056
+ "grad_norm": 0.3820739637144992,
1057
+ "learning_rate": 4.784888834977347e-06,
1058
+ "loss": 0.0299,
1059
+ "step": 150
1060
+ },
1061
+ {
1062
+ "epoch": 1.0342465753424657,
1063
+ "grad_norm": 0.5379931997949438,
1064
+ "learning_rate": 4.731157734302063e-06,
1065
+ "loss": 0.0408,
1066
+ "step": 151
1067
+ },
1068
+ {
1069
+ "epoch": 1.0410958904109588,
1070
+ "grad_norm": 0.3693305843552891,
1071
+ "learning_rate": 4.6774577527534195e-06,
1072
+ "loss": 0.0294,
1073
+ "step": 152
1074
+ },
1075
+ {
1076
+ "epoch": 1.047945205479452,
1077
+ "grad_norm": 0.4888264501648255,
1078
+ "learning_rate": 4.623795106231001e-06,
1079
+ "loss": 0.0402,
1080
+ "step": 153
1081
+ },
1082
+ {
1083
+ "epoch": 1.0547945205479452,
1084
+ "grad_norm": 0.4134341070423952,
1085
+ "learning_rate": 4.570176006312769e-06,
1086
+ "loss": 0.0306,
1087
+ "step": 154
1088
+ },
1089
+ {
1090
+ "epoch": 1.0616438356164384,
1091
+ "grad_norm": 0.4186096948787815,
1092
+ "learning_rate": 4.516606659536063e-06,
1093
+ "loss": 0.0325,
1094
+ "step": 155
1095
+ },
1096
+ {
1097
+ "epoch": 1.0684931506849316,
1098
+ "grad_norm": 0.5562678365427425,
1099
+ "learning_rate": 4.463093266679185e-06,
1100
+ "loss": 0.041,
1101
+ "step": 156
1102
+ },
1103
+ {
1104
+ "epoch": 1.0753424657534247,
1105
+ "grad_norm": 0.41023005785548794,
1106
+ "learning_rate": 4.40964202204363e-06,
1107
+ "loss": 0.0278,
1108
+ "step": 157
1109
+ },
1110
+ {
1111
+ "epoch": 1.0821917808219177,
1112
+ "grad_norm": 0.4409264685189105,
1113
+ "learning_rate": 4.356259112737096e-06,
1114
+ "loss": 0.032,
1115
+ "step": 158
1116
+ },
1117
+ {
1118
+ "epoch": 1.0890410958904109,
1119
+ "grad_norm": 0.41264943175896446,
1120
+ "learning_rate": 4.302950717957304e-06,
1121
+ "loss": 0.0281,
1122
+ "step": 159
1123
+ },
1124
+ {
1125
+ "epoch": 1.095890410958904,
1126
+ "grad_norm": 0.42074167821091196,
1127
+ "learning_rate": 4.249723008276737e-06,
1128
+ "loss": 0.0263,
1129
+ "step": 160
1130
+ },
1131
+ {
1132
+ "epoch": 1.1027397260273972,
1133
+ "grad_norm": 0.6080366405585456,
1134
+ "learning_rate": 4.196582144928398e-06,
1135
+ "loss": 0.0326,
1136
+ "step": 161
1137
+ },
1138
+ {
1139
+ "epoch": 1.1095890410958904,
1140
+ "grad_norm": 0.4999009139678011,
1141
+ "learning_rate": 4.143534279092613e-06,
1142
+ "loss": 0.0292,
1143
+ "step": 162
1144
+ },
1145
+ {
1146
+ "epoch": 1.1164383561643836,
1147
+ "grad_norm": 0.6964542936679394,
1148
+ "learning_rate": 4.090585551185031e-06,
1149
+ "loss": 0.0408,
1150
+ "step": 163
1151
+ },
1152
+ {
1153
+ "epoch": 1.1232876712328768,
1154
+ "grad_norm": 0.48487172127635747,
1155
+ "learning_rate": 4.037742090145851e-06,
1156
+ "loss": 0.0261,
1157
+ "step": 164
1158
+ },
1159
+ {
1160
+ "epoch": 1.13013698630137,
1161
+ "grad_norm": 0.410607594047809,
1162
+ "learning_rate": 3.985010012730382e-06,
1163
+ "loss": 0.023,
1164
+ "step": 165
1165
+ },
1166
+ {
1167
+ "epoch": 1.1369863013698631,
1168
+ "grad_norm": 0.488422520515759,
1169
+ "learning_rate": 3.93239542280102e-06,
1170
+ "loss": 0.0289,
1171
+ "step": 166
1172
+ },
1173
+ {
1174
+ "epoch": 1.143835616438356,
1175
+ "grad_norm": 0.47037371049545285,
1176
+ "learning_rate": 3.879904410620703e-06,
1177
+ "loss": 0.0254,
1178
+ "step": 167
1179
+ },
1180
+ {
1181
+ "epoch": 1.1506849315068493,
1182
+ "grad_norm": 0.6996438632030281,
1183
+ "learning_rate": 3.827543052147952e-06,
1184
+ "loss": 0.0401,
1185
+ "step": 168
1186
+ },
1187
+ {
1188
+ "epoch": 1.1575342465753424,
1189
+ "grad_norm": 0.6482935966804099,
1190
+ "learning_rate": 3.775317408333571e-06,
1191
+ "loss": 0.041,
1192
+ "step": 169
1193
+ },
1194
+ {
1195
+ "epoch": 1.1643835616438356,
1196
+ "grad_norm": 0.5348791028923409,
1197
+ "learning_rate": 3.7232335244190656e-06,
1198
+ "loss": 0.0304,
1199
+ "step": 170
1200
+ },
1201
+ {
1202
+ "epoch": 1.1712328767123288,
1203
+ "grad_norm": 0.6824645649985049,
1204
+ "learning_rate": 3.6712974292369035e-06,
1205
+ "loss": 0.026,
1206
+ "step": 171
1207
+ },
1208
+ {
1209
+ "epoch": 1.178082191780822,
1210
+ "grad_norm": 0.5222355032203745,
1211
+ "learning_rate": 3.6195151345126556e-06,
1212
+ "loss": 0.0248,
1213
+ "step": 172
1214
+ },
1215
+ {
1216
+ "epoch": 1.1849315068493151,
1217
+ "grad_norm": 0.6436945077378039,
1218
+ "learning_rate": 3.5678926341691283e-06,
1219
+ "loss": 0.0327,
1220
+ "step": 173
1221
+ },
1222
+ {
1223
+ "epoch": 1.191780821917808,
1224
+ "grad_norm": 0.4815469429272036,
1225
+ "learning_rate": 3.5164359036325483e-06,
1226
+ "loss": 0.0255,
1227
+ "step": 174
1228
+ },
1229
+ {
1230
+ "epoch": 1.1986301369863013,
1231
+ "grad_norm": 0.6105258583091369,
1232
+ "learning_rate": 3.4651508991409016e-06,
1233
+ "loss": 0.0273,
1234
+ "step": 175
1235
+ },
1236
+ {
1237
+ "epoch": 1.2054794520547945,
1238
+ "grad_norm": 0.7436330215160468,
1239
+ "learning_rate": 3.4140435570544708e-06,
1240
+ "loss": 0.0334,
1241
+ "step": 176
1242
+ },
1243
+ {
1244
+ "epoch": 1.2123287671232876,
1245
+ "grad_norm": 0.5039349276354249,
1246
+ "learning_rate": 3.363119793168704e-06,
1247
+ "loss": 0.0258,
1248
+ "step": 177
1249
+ },
1250
+ {
1251
+ "epoch": 1.2191780821917808,
1252
+ "grad_norm": 0.7800878927795044,
1253
+ "learning_rate": 3.3123855020294344e-06,
1254
+ "loss": 0.0336,
1255
+ "step": 178
1256
+ },
1257
+ {
1258
+ "epoch": 1.226027397260274,
1259
+ "grad_norm": 0.5432005292406065,
1260
+ "learning_rate": 3.26184655625058e-06,
1261
+ "loss": 0.0241,
1262
+ "step": 179
1263
+ },
1264
+ {
1265
+ "epoch": 1.2328767123287672,
1266
+ "grad_norm": 0.4788982798162723,
1267
+ "learning_rate": 3.2115088058343725e-06,
1268
+ "loss": 0.0198,
1269
+ "step": 180
1270
+ },
1271
+ {
1272
+ "epoch": 1.2397260273972603,
1273
+ "grad_norm": 0.5332736570342096,
1274
+ "learning_rate": 3.161378077494205e-06,
1275
+ "loss": 0.0203,
1276
+ "step": 181
1277
+ },
1278
+ {
1279
+ "epoch": 1.2465753424657535,
1280
+ "grad_norm": 0.6018244302290219,
1281
+ "learning_rate": 3.111460173980175e-06,
1282
+ "loss": 0.0247,
1283
+ "step": 182
1284
+ },
1285
+ {
1286
+ "epoch": 1.2534246575342465,
1287
+ "grad_norm": 0.4714750435429761,
1288
+ "learning_rate": 3.06176087340741e-06,
1289
+ "loss": 0.0212,
1290
+ "step": 183
1291
+ },
1292
+ {
1293
+ "epoch": 1.2602739726027397,
1294
+ "grad_norm": 0.5173165747302255,
1295
+ "learning_rate": 3.0122859285872214e-06,
1296
+ "loss": 0.028,
1297
+ "step": 184
1298
+ },
1299
+ {
1300
+ "epoch": 1.2671232876712328,
1301
+ "grad_norm": 0.5340467637611864,
1302
+ "learning_rate": 2.9630410663612226e-06,
1303
+ "loss": 0.0272,
1304
+ "step": 185
1305
+ },
1306
+ {
1307
+ "epoch": 1.273972602739726,
1308
+ "grad_norm": 0.6775945861048723,
1309
+ "learning_rate": 2.914031986938417e-06,
1310
+ "loss": 0.0301,
1311
+ "step": 186
1312
+ },
1313
+ {
1314
+ "epoch": 1.2808219178082192,
1315
+ "grad_norm": 0.5833141967005896,
1316
+ "learning_rate": 2.865264363235396e-06,
1317
+ "loss": 0.028,
1318
+ "step": 187
1319
+ },
1320
+ {
1321
+ "epoch": 1.2876712328767124,
1322
+ "grad_norm": 0.655533441118794,
1323
+ "learning_rate": 2.816743840219681e-06,
1324
+ "loss": 0.0287,
1325
+ "step": 188
1326
+ },
1327
+ {
1328
+ "epoch": 1.2945205479452055,
1329
+ "grad_norm": 0.5511262540225013,
1330
+ "learning_rate": 2.7684760342563045e-06,
1331
+ "loss": 0.0266,
1332
+ "step": 189
1333
+ },
1334
+ {
1335
+ "epoch": 1.3013698630136985,
1336
+ "grad_norm": 0.5252392637950674,
1337
+ "learning_rate": 2.720466532457707e-06,
1338
+ "loss": 0.0221,
1339
+ "step": 190
1340
+ },
1341
+ {
1342
+ "epoch": 1.308219178082192,
1343
+ "grad_norm": 0.5688735522785462,
1344
+ "learning_rate": 2.6727208920370063e-06,
1345
+ "loss": 0.0284,
1346
+ "step": 191
1347
+ },
1348
+ {
1349
+ "epoch": 1.3150684931506849,
1350
+ "grad_norm": 0.6107186649801015,
1351
+ "learning_rate": 2.6252446396647503e-06,
1352
+ "loss": 0.026,
1353
+ "step": 192
1354
+ },
1355
+ {
1356
+ "epoch": 1.321917808219178,
1357
+ "grad_norm": 0.5508581227820072,
1358
+ "learning_rate": 2.578043270829178e-06,
1359
+ "loss": 0.0246,
1360
+ "step": 193
1361
+ },
1362
+ {
1363
+ "epoch": 1.3287671232876712,
1364
+ "grad_norm": 0.6823030158545308,
1365
+ "learning_rate": 2.531122249200114e-06,
1366
+ "loss": 0.03,
1367
+ "step": 194
1368
+ },
1369
+ {
1370
+ "epoch": 1.3356164383561644,
1371
+ "grad_norm": 0.5426691220715485,
1372
+ "learning_rate": 2.4844870059965337e-06,
1373
+ "loss": 0.0262,
1374
+ "step": 195
1375
+ },
1376
+ {
1377
+ "epoch": 1.3424657534246576,
1378
+ "grad_norm": 0.6755955651401638,
1379
+ "learning_rate": 2.438142939357882e-06,
1380
+ "loss": 0.0307,
1381
+ "step": 196
1382
+ },
1383
+ {
1384
+ "epoch": 1.3493150684931507,
1385
+ "grad_norm": 0.6100256813331606,
1386
+ "learning_rate": 2.392095413719231e-06,
1387
+ "loss": 0.0316,
1388
+ "step": 197
1389
+ },
1390
+ {
1391
+ "epoch": 1.356164383561644,
1392
+ "grad_norm": 0.6463879601128194,
1393
+ "learning_rate": 2.346349759190332e-06,
1394
+ "loss": 0.0326,
1395
+ "step": 198
1396
+ },
1397
+ {
1398
+ "epoch": 1.3630136986301369,
1399
+ "grad_norm": 0.5778357649758314,
1400
+ "learning_rate": 2.3009112709386454e-06,
1401
+ "loss": 0.0287,
1402
+ "step": 199
1403
+ },
1404
+ {
1405
+ "epoch": 1.36986301369863,
1406
+ "grad_norm": 0.5411990616960265,
1407
+ "learning_rate": 2.2557852085764053e-06,
1408
+ "loss": 0.0233,
1409
+ "step": 200
1410
+ },
1411
+ {
1412
+ "epoch": 1.36986301369863,
1413
+ "eval_loss": 0.12782636284828186,
1414
+ "eval_runtime": 3.606,
1415
+ "eval_samples_per_second": 6.656,
1416
+ "eval_steps_per_second": 1.664,
1417
+ "step": 200
1418
+ },
1419
+ {
1420
+ "epoch": 1.3767123287671232,
1421
+ "grad_norm": 0.4606747783778257,
1422
+ "learning_rate": 2.2109767955518135e-06,
1423
+ "loss": 0.0205,
1424
+ "step": 201
1425
+ },
1426
+ {
1427
+ "epoch": 1.3835616438356164,
1428
+ "grad_norm": 0.5644346220855605,
1429
+ "learning_rate": 2.1664912185444127e-06,
1430
+ "loss": 0.0234,
1431
+ "step": 202
1432
+ },
1433
+ {
1434
+ "epoch": 1.3904109589041096,
1435
+ "grad_norm": 0.48298285290916276,
1436
+ "learning_rate": 2.1223336268647154e-06,
1437
+ "loss": 0.0249,
1438
+ "step": 203
1439
+ },
1440
+ {
1441
+ "epoch": 1.3972602739726028,
1442
+ "grad_norm": 0.5415449063607428,
1443
+ "learning_rate": 2.0785091318581577e-06,
1444
+ "loss": 0.0224,
1445
+ "step": 204
1446
+ },
1447
+ {
1448
+ "epoch": 1.404109589041096,
1449
+ "grad_norm": 0.6133699712229087,
1450
+ "learning_rate": 2.035022806313449e-06,
1451
+ "loss": 0.0302,
1452
+ "step": 205
1453
+ },
1454
+ {
1455
+ "epoch": 1.410958904109589,
1456
+ "grad_norm": 0.6514447134449943,
1457
+ "learning_rate": 1.991879683875386e-06,
1458
+ "loss": 0.0273,
1459
+ "step": 206
1460
+ },
1461
+ {
1462
+ "epoch": 1.4178082191780823,
1463
+ "grad_norm": 0.5197921872912981,
1464
+ "learning_rate": 1.9490847584621993e-06,
1465
+ "loss": 0.022,
1466
+ "step": 207
1467
+ },
1468
+ {
1469
+ "epoch": 1.4246575342465753,
1470
+ "grad_norm": 0.48059243417518727,
1471
+ "learning_rate": 1.9066429836874844e-06,
1472
+ "loss": 0.0216,
1473
+ "step": 208
1474
+ },
1475
+ {
1476
+ "epoch": 1.4315068493150684,
1477
+ "grad_norm": 0.6832678607875455,
1478
+ "learning_rate": 1.8645592722868223e-06,
1479
+ "loss": 0.032,
1480
+ "step": 209
1481
+ },
1482
+ {
1483
+ "epoch": 1.4383561643835616,
1484
+ "grad_norm": 0.573234705352323,
1485
+ "learning_rate": 1.8228384955491136e-06,
1486
+ "loss": 0.0301,
1487
+ "step": 210
1488
+ },
1489
+ {
1490
+ "epoch": 1.4452054794520548,
1491
+ "grad_norm": 0.5953906384483378,
1492
+ "learning_rate": 1.7814854827527144e-06,
1493
+ "loss": 0.0236,
1494
+ "step": 211
1495
+ },
1496
+ {
1497
+ "epoch": 1.452054794520548,
1498
+ "grad_norm": 0.6950223099104754,
1499
+ "learning_rate": 1.7405050206064372e-06,
1500
+ "loss": 0.0309,
1501
+ "step": 212
1502
+ },
1503
+ {
1504
+ "epoch": 1.4589041095890412,
1505
+ "grad_norm": 0.5081896383994274,
1506
+ "learning_rate": 1.6999018526954775e-06,
1507
+ "loss": 0.0187,
1508
+ "step": 213
1509
+ },
1510
+ {
1511
+ "epoch": 1.4657534246575343,
1512
+ "grad_norm": 0.5249858681016251,
1513
+ "learning_rate": 1.6596806789323317e-06,
1514
+ "loss": 0.0241,
1515
+ "step": 214
1516
+ },
1517
+ {
1518
+ "epoch": 1.4726027397260273,
1519
+ "grad_norm": 0.5572741587536835,
1520
+ "learning_rate": 1.6198461550127758e-06,
1521
+ "loss": 0.0239,
1522
+ "step": 215
1523
+ },
1524
+ {
1525
+ "epoch": 1.4794520547945205,
1526
+ "grad_norm": 0.529648002049523,
1527
+ "learning_rate": 1.5804028918769488e-06,
1528
+ "loss": 0.0231,
1529
+ "step": 216
1530
+ },
1531
+ {
1532
+ "epoch": 1.4863013698630136,
1533
+ "grad_norm": 0.5699037866710787,
1534
+ "learning_rate": 1.5413554551756321e-06,
1535
+ "loss": 0.0282,
1536
+ "step": 217
1537
+ },
1538
+ {
1539
+ "epoch": 1.4931506849315068,
1540
+ "grad_norm": 0.540327055337863,
1541
+ "learning_rate": 1.5027083647417657e-06,
1542
+ "loss": 0.0292,
1543
+ "step": 218
1544
+ },
1545
+ {
1546
+ "epoch": 1.5,
1547
+ "grad_norm": 0.5581385603660535,
1548
+ "learning_rate": 1.4644660940672628e-06,
1549
+ "loss": 0.0277,
1550
+ "step": 219
1551
+ },
1552
+ {
1553
+ "epoch": 1.5068493150684932,
1554
+ "grad_norm": 0.544646773379327,
1555
+ "learning_rate": 1.4266330697851955e-06,
1556
+ "loss": 0.0232,
1557
+ "step": 220
1558
+ },
1559
+ {
1560
+ "epoch": 1.5136986301369864,
1561
+ "grad_norm": 0.4803602047141029,
1562
+ "learning_rate": 1.3892136711573983e-06,
1563
+ "loss": 0.0187,
1564
+ "step": 221
1565
+ },
1566
+ {
1567
+ "epoch": 1.5205479452054793,
1568
+ "grad_norm": 0.5153130049065987,
1569
+ "learning_rate": 1.3522122295675616e-06,
1570
+ "loss": 0.0228,
1571
+ "step": 222
1572
+ },
1573
+ {
1574
+ "epoch": 1.5273972602739727,
1575
+ "grad_norm": 0.6096781687815058,
1576
+ "learning_rate": 1.3156330280198637e-06,
1577
+ "loss": 0.0344,
1578
+ "step": 223
1579
+ },
1580
+ {
1581
+ "epoch": 1.5342465753424657,
1582
+ "grad_norm": 0.6798958042444144,
1583
+ "learning_rate": 1.2794803006431984e-06,
1584
+ "loss": 0.0258,
1585
+ "step": 224
1586
+ },
1587
+ {
1588
+ "epoch": 1.541095890410959,
1589
+ "grad_norm": 0.574437025110474,
1590
+ "learning_rate": 1.2437582322010672e-06,
1591
+ "loss": 0.0229,
1592
+ "step": 225
1593
+ },
1594
+ {
1595
+ "epoch": 1.547945205479452,
1596
+ "grad_norm": 0.581621714813575,
1597
+ "learning_rate": 1.2084709576071885e-06,
1598
+ "loss": 0.0306,
1599
+ "step": 226
1600
+ },
1601
+ {
1602
+ "epoch": 1.5547945205479452,
1603
+ "grad_norm": 0.48322576235961967,
1604
+ "learning_rate": 1.1736225614468627e-06,
1605
+ "loss": 0.0195,
1606
+ "step": 227
1607
+ },
1608
+ {
1609
+ "epoch": 1.5616438356164384,
1610
+ "grad_norm": 0.6228610203390602,
1611
+ "learning_rate": 1.1392170775041788e-06,
1612
+ "loss": 0.0356,
1613
+ "step": 228
1614
+ },
1615
+ {
1616
+ "epoch": 1.5684931506849316,
1617
+ "grad_norm": 0.5106649557751511,
1618
+ "learning_rate": 1.1052584882950896e-06,
1619
+ "loss": 0.0265,
1620
+ "step": 229
1621
+ },
1622
+ {
1623
+ "epoch": 1.5753424657534247,
1624
+ "grad_norm": 0.5699134903899079,
1625
+ "learning_rate": 1.0717507246064273e-06,
1626
+ "loss": 0.0264,
1627
+ "step": 230
1628
+ },
1629
+ {
1630
+ "epoch": 1.5821917808219177,
1631
+ "grad_norm": 0.5082138048285709,
1632
+ "learning_rate": 1.0386976650409102e-06,
1633
+ "loss": 0.0216,
1634
+ "step": 231
1635
+ },
1636
+ {
1637
+ "epoch": 1.589041095890411,
1638
+ "grad_norm": 0.5660787920681069,
1639
+ "learning_rate": 1.0061031355681766e-06,
1640
+ "loss": 0.0243,
1641
+ "step": 232
1642
+ },
1643
+ {
1644
+ "epoch": 1.595890410958904,
1645
+ "grad_norm": 0.564593525018267,
1646
+ "learning_rate": 9.739709090819254e-07,
1647
+ "loss": 0.0267,
1648
+ "step": 233
1649
+ },
1650
+ {
1651
+ "epoch": 1.6027397260273972,
1652
+ "grad_norm": 0.4505995387354749,
1653
+ "learning_rate": 9.423047049631956e-07,
1654
+ "loss": 0.0216,
1655
+ "step": 234
1656
+ },
1657
+ {
1658
+ "epoch": 1.6095890410958904,
1659
+ "grad_norm": 0.49897796704328956,
1660
+ "learning_rate": 9.111081886498374e-07,
1661
+ "loss": 0.0224,
1662
+ "step": 235
1663
+ },
1664
+ {
1665
+ "epoch": 1.6164383561643836,
1666
+ "grad_norm": 0.48246709394966497,
1667
+ "learning_rate": 8.803849712122292e-07,
1668
+ "loss": 0.0189,
1669
+ "step": 236
1670
+ },
1671
+ {
1672
+ "epoch": 1.6232876712328768,
1673
+ "grad_norm": 0.5048987533509625,
1674
+ "learning_rate": 8.501386089352858e-07,
1675
+ "loss": 0.025,
1676
+ "step": 237
1677
+ },
1678
+ {
1679
+ "epoch": 1.6301369863013697,
1680
+ "grad_norm": 0.5303344493323716,
1681
+ "learning_rate": 8.203726029068149e-07,
1682
+ "loss": 0.0276,
1683
+ "step": 238
1684
+ },
1685
+ {
1686
+ "epoch": 1.6369863013698631,
1687
+ "grad_norm": 0.45632405891796,
1688
+ "learning_rate": 7.910903986122537e-07,
1689
+ "loss": 0.0219,
1690
+ "step": 239
1691
+ },
1692
+ {
1693
+ "epoch": 1.643835616438356,
1694
+ "grad_norm": 0.5807701211688502,
1695
+ "learning_rate": 7.622953855358456e-07,
1696
+ "loss": 0.0246,
1697
+ "step": 240
1698
+ },
1699
+ {
1700
+ "epoch": 1.6506849315068495,
1701
+ "grad_norm": 0.5598140370861285,
1702
+ "learning_rate": 7.339908967683007e-07,
1703
+ "loss": 0.0266,
1704
+ "step": 241
1705
+ },
1706
+ {
1707
+ "epoch": 1.6575342465753424,
1708
+ "grad_norm": 0.533124504980895,
1709
+ "learning_rate": 7.061802086209857e-07,
1710
+ "loss": 0.0231,
1711
+ "step": 242
1712
+ },
1713
+ {
1714
+ "epoch": 1.6643835616438356,
1715
+ "grad_norm": 0.487914295898038,
1716
+ "learning_rate": 6.788665402466782e-07,
1717
+ "loss": 0.0234,
1718
+ "step": 243
1719
+ },
1720
+ {
1721
+ "epoch": 1.6712328767123288,
1722
+ "grad_norm": 0.5310859286103036,
1723
+ "learning_rate": 6.52053053266945e-07,
1724
+ "loss": 0.0228,
1725
+ "step": 244
1726
+ },
1727
+ {
1728
+ "epoch": 1.678082191780822,
1729
+ "grad_norm": 0.3943206473013647,
1730
+ "learning_rate": 6.257428514061764e-07,
1731
+ "loss": 0.0157,
1732
+ "step": 245
1733
+ },
1734
+ {
1735
+ "epoch": 1.6849315068493151,
1736
+ "grad_norm": 0.5082623094622303,
1737
+ "learning_rate": 5.999389801323219e-07,
1738
+ "loss": 0.0223,
1739
+ "step": 246
1740
+ },
1741
+ {
1742
+ "epoch": 1.691780821917808,
1743
+ "grad_norm": 0.5520449974096024,
1744
+ "learning_rate": 5.746444263043715e-07,
1745
+ "loss": 0.0265,
1746
+ "step": 247
1747
+ },
1748
+ {
1749
+ "epoch": 1.6986301369863015,
1750
+ "grad_norm": 0.4739165044710361,
1751
+ "learning_rate": 5.498621178266167e-07,
1752
+ "loss": 0.0213,
1753
+ "step": 248
1754
+ },
1755
+ {
1756
+ "epoch": 1.7054794520547945,
1757
+ "grad_norm": 0.543019095966297,
1758
+ "learning_rate": 5.255949233097451e-07,
1759
+ "loss": 0.0238,
1760
+ "step": 249
1761
+ },
1762
+ {
1763
+ "epoch": 1.7123287671232876,
1764
+ "grad_norm": 0.5610967838400929,
1765
+ "learning_rate": 5.018456517387837e-07,
1766
+ "loss": 0.0254,
1767
+ "step": 250
1768
+ },
1769
+ {
1770
+ "epoch": 1.7191780821917808,
1771
+ "grad_norm": 0.6020409159639404,
1772
+ "learning_rate": 4.786170521479588e-07,
1773
+ "loss": 0.0269,
1774
+ "step": 251
1775
+ },
1776
+ {
1777
+ "epoch": 1.726027397260274,
1778
+ "grad_norm": 0.6720413988173921,
1779
+ "learning_rate": 4.5591181330248534e-07,
1780
+ "loss": 0.0337,
1781
+ "step": 252
1782
+ },
1783
+ {
1784
+ "epoch": 1.7328767123287672,
1785
+ "grad_norm": 0.4658420917547858,
1786
+ "learning_rate": 4.3373256338733847e-07,
1787
+ "loss": 0.0184,
1788
+ "step": 253
1789
+ },
1790
+ {
1791
+ "epoch": 1.7397260273972601,
1792
+ "grad_norm": 0.6139558415380756,
1793
+ "learning_rate": 4.1208186970303097e-07,
1794
+ "loss": 0.0274,
1795
+ "step": 254
1796
+ },
1797
+ {
1798
+ "epoch": 1.7465753424657535,
1799
+ "grad_norm": 0.6603667053997885,
1800
+ "learning_rate": 3.90962238368448e-07,
1801
+ "loss": 0.0281,
1802
+ "step": 255
1803
+ },
1804
+ {
1805
+ "epoch": 1.7534246575342465,
1806
+ "grad_norm": 0.533881147566802,
1807
+ "learning_rate": 3.70376114030751e-07,
1808
+ "loss": 0.02,
1809
+ "step": 256
1810
+ },
1811
+ {
1812
+ "epoch": 1.7602739726027399,
1813
+ "grad_norm": 0.5016506115967129,
1814
+ "learning_rate": 3.503258795824105e-07,
1815
+ "loss": 0.0207,
1816
+ "step": 257
1817
+ },
1818
+ {
1819
+ "epoch": 1.7671232876712328,
1820
+ "grad_norm": 0.5933464109254851,
1821
+ "learning_rate": 3.308138558853746e-07,
1822
+ "loss": 0.0249,
1823
+ "step": 258
1824
+ },
1825
+ {
1826
+ "epoch": 1.773972602739726,
1827
+ "grad_norm": 0.5429151978483335,
1828
+ "learning_rate": 3.1184230150243025e-07,
1829
+ "loss": 0.0221,
1830
+ "step": 259
1831
+ },
1832
+ {
1833
+ "epoch": 1.7808219178082192,
1834
+ "grad_norm": 0.5263783415786707,
1835
+ "learning_rate": 2.934134124357646e-07,
1836
+ "loss": 0.0231,
1837
+ "step": 260
1838
+ },
1839
+ {
1840
+ "epoch": 1.7876712328767124,
1841
+ "grad_norm": 0.547290301145945,
1842
+ "learning_rate": 2.755293218727739e-07,
1843
+ "loss": 0.0247,
1844
+ "step": 261
1845
+ },
1846
+ {
1847
+ "epoch": 1.7945205479452055,
1848
+ "grad_norm": 0.5879041613883544,
1849
+ "learning_rate": 2.5819209993914185e-07,
1850
+ "loss": 0.0254,
1851
+ "step": 262
1852
+ },
1853
+ {
1854
+ "epoch": 1.8013698630136985,
1855
+ "grad_norm": 0.4761793429556001,
1856
+ "learning_rate": 2.4140375345921895e-07,
1857
+ "loss": 0.0182,
1858
+ "step": 263
1859
+ },
1860
+ {
1861
+ "epoch": 1.808219178082192,
1862
+ "grad_norm": 0.4464490137111368,
1863
+ "learning_rate": 2.2516622572372416e-07,
1864
+ "loss": 0.0213,
1865
+ "step": 264
1866
+ },
1867
+ {
1868
+ "epoch": 1.8150684931506849,
1869
+ "grad_norm": 0.5484527193426002,
1870
+ "learning_rate": 2.094813962648101e-07,
1871
+ "loss": 0.0286,
1872
+ "step": 265
1873
+ },
1874
+ {
1875
+ "epoch": 1.821917808219178,
1876
+ "grad_norm": 0.4957780798831515,
1877
+ "learning_rate": 1.9435108063849684e-07,
1878
+ "loss": 0.0199,
1879
+ "step": 266
1880
+ },
1881
+ {
1882
+ "epoch": 1.8287671232876712,
1883
+ "grad_norm": 0.5267703907525366,
1884
+ "learning_rate": 1.7977703021452185e-07,
1885
+ "loss": 0.0282,
1886
+ "step": 267
1887
+ },
1888
+ {
1889
+ "epoch": 1.8356164383561644,
1890
+ "grad_norm": 0.46606674643151164,
1891
+ "learning_rate": 1.6576093197361253e-07,
1892
+ "loss": 0.0243,
1893
+ "step": 268
1894
+ },
1895
+ {
1896
+ "epoch": 1.8424657534246576,
1897
+ "grad_norm": 0.5978206977692369,
1898
+ "learning_rate": 1.523044083122138e-07,
1899
+ "loss": 0.0244,
1900
+ "step": 269
1901
+ },
1902
+ {
1903
+ "epoch": 1.8493150684931505,
1904
+ "grad_norm": 0.6813332575839723,
1905
+ "learning_rate": 1.39409016854693e-07,
1906
+ "loss": 0.028,
1907
+ "step": 270
1908
+ },
1909
+ {
1910
+ "epoch": 1.856164383561644,
1911
+ "grad_norm": 0.6027615619066997,
1912
+ "learning_rate": 1.2707625027304104e-07,
1913
+ "loss": 0.0252,
1914
+ "step": 271
1915
+ },
1916
+ {
1917
+ "epoch": 1.8630136986301369,
1918
+ "grad_norm": 0.5524351842468936,
1919
+ "learning_rate": 1.1530753611409151e-07,
1920
+ "loss": 0.0268,
1921
+ "step": 272
1922
+ },
1923
+ {
1924
+ "epoch": 1.8698630136986303,
1925
+ "grad_norm": 0.5640622999698867,
1926
+ "learning_rate": 1.041042366342787e-07,
1927
+ "loss": 0.0256,
1928
+ "step": 273
1929
+ },
1930
+ {
1931
+ "epoch": 1.8767123287671232,
1932
+ "grad_norm": 0.536202910263522,
1933
+ "learning_rate": 9.346764864195335e-08,
1934
+ "loss": 0.0256,
1935
+ "step": 274
1936
+ },
1937
+ {
1938
+ "epoch": 1.8835616438356164,
1939
+ "grad_norm": 0.5919425450976535,
1940
+ "learning_rate": 8.339900334727536e-08,
1941
+ "loss": 0.0315,
1942
+ "step": 275
1943
+ },
1944
+ {
1945
+ "epoch": 1.8904109589041096,
1946
+ "grad_norm": 0.5574691459244832,
1947
+ "learning_rate": 7.389946621969679e-08,
1948
+ "loss": 0.0225,
1949
+ "step": 276
1950
+ },
1951
+ {
1952
+ "epoch": 1.8972602739726028,
1953
+ "grad_norm": 0.5505059019532733,
1954
+ "learning_rate": 6.497013685305586e-08,
1955
+ "loss": 0.0205,
1956
+ "step": 277
1957
+ },
1958
+ {
1959
+ "epoch": 1.904109589041096,
1960
+ "grad_norm": 0.4886781912164835,
1961
+ "learning_rate": 5.661204883829763e-08,
1962
+ "loss": 0.0219,
1963
+ "step": 278
1964
+ },
1965
+ {
1966
+ "epoch": 1.910958904109589,
1967
+ "grad_norm": 0.5635253689144598,
1968
+ "learning_rate": 4.8826169643832464e-08,
1969
+ "loss": 0.0269,
1970
+ "step": 279
1971
+ },
1972
+ {
1973
+ "epoch": 1.9178082191780823,
1974
+ "grad_norm": 0.5462586475286777,
1975
+ "learning_rate": 4.1613400503550114e-08,
1976
+ "loss": 0.0267,
1977
+ "step": 280
1978
+ },
1979
+ {
1980
+ "epoch": 1.9246575342465753,
1981
+ "grad_norm": 0.5451325842479025,
1982
+ "learning_rate": 3.4974576312497564e-08,
1983
+ "loss": 0.0272,
1984
+ "step": 281
1985
+ },
1986
+ {
1987
+ "epoch": 1.9315068493150684,
1988
+ "grad_norm": 0.42215818334666344,
1989
+ "learning_rate": 2.8910465530240793e-08,
1990
+ "loss": 0.021,
1991
+ "step": 282
1992
+ },
1993
+ {
1994
+ "epoch": 1.9383561643835616,
1995
+ "grad_norm": 0.6580299707133164,
1996
+ "learning_rate": 2.3421770091912044e-08,
1997
+ "loss": 0.0312,
1998
+ "step": 283
1999
+ },
2000
+ {
2001
+ "epoch": 1.9452054794520548,
2002
+ "grad_norm": 0.6439576519711626,
2003
+ "learning_rate": 1.850912532696092e-08,
2004
+ "loss": 0.0268,
2005
+ "step": 284
2006
+ },
2007
+ {
2008
+ "epoch": 1.952054794520548,
2009
+ "grad_norm": 0.5683181922618227,
2010
+ "learning_rate": 1.4173099885610997e-08,
2011
+ "loss": 0.0244,
2012
+ "step": 285
2013
+ },
2014
+ {
2015
+ "epoch": 1.958904109589041,
2016
+ "grad_norm": 0.5240614569580544,
2017
+ "learning_rate": 1.041419567303914e-08,
2018
+ "loss": 0.0258,
2019
+ "step": 286
2020
+ },
2021
+ {
2022
+ "epoch": 1.9657534246575343,
2023
+ "grad_norm": 0.514575631763928,
2024
+ "learning_rate": 7.2328477912769756e-09,
2025
+ "loss": 0.0228,
2026
+ "step": 287
2027
+ },
2028
+ {
2029
+ "epoch": 1.9726027397260273,
2030
+ "grad_norm": 0.5015672539098924,
2031
+ "learning_rate": 4.629424488850065e-09,
2032
+ "loss": 0.0213,
2033
+ "step": 288
2034
+ },
2035
+ {
2036
+ "epoch": 1.9794520547945207,
2037
+ "grad_norm": 0.5696234496920316,
2038
+ "learning_rate": 2.604227118148117e-09,
2039
+ "loss": 0.0232,
2040
+ "step": 289
2041
+ },
2042
+ {
2043
+ "epoch": 1.9863013698630136,
2044
+ "grad_norm": 0.5030448428911224,
2045
+ "learning_rate": 1.1574901005456662e-09,
2046
+ "loss": 0.0224,
2047
+ "step": 290
2048
+ },
2049
+ {
2050
+ "epoch": 1.9931506849315068,
2051
+ "grad_norm": 0.5733211189336573,
2052
+ "learning_rate": 2.89380899267111e-10,
2053
+ "loss": 0.0254,
2054
+ "step": 291
2055
+ },
2056
+ {
2057
+ "epoch": 2.0,
2058
+ "grad_norm": 0.4940211340324302,
2059
+ "learning_rate": 0.0,
2060
+ "loss": 0.0212,
2061
+ "step": 292
2062
+ },
2063
+ {
2064
+ "epoch": 2.0,
2065
+ "step": 292,
2066
+ "total_flos": 8378343751680.0,
2067
+ "train_loss": 0.06280981206449948,
2068
+ "train_runtime": 833.6474,
2069
+ "train_samples_per_second": 5.587,
2070
+ "train_steps_per_second": 0.35
2071
+ }
2072
+ ],
2073
+ "logging_steps": 1,
2074
+ "max_steps": 292,
2075
+ "num_input_tokens_seen": 0,
2076
+ "num_train_epochs": 2,
2077
+ "save_steps": 5000,
2078
+ "stateful_callbacks": {
2079
+ "TrainerControl": {
2080
+ "args": {
2081
+ "should_epoch_stop": false,
2082
+ "should_evaluate": false,
2083
+ "should_log": false,
2084
+ "should_save": true,
2085
+ "should_training_stop": true
2086
+ },
2087
+ "attributes": {}
2088
+ }
2089
+ },
2090
+ "total_flos": 8378343751680.0,
2091
+ "train_batch_size": 4,
2092
+ "trial_name": null,
2093
+ "trial_params": null
2094
+ }
training_args.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:fe65efe7fe2a5a9adcdaf00960ef52a6b87150f1eccbad0f148b8a50a8a9d964
3
+ size 7288
training_eval_loss.png ADDED
training_loss.png ADDED
vocab.json ADDED
The diff for this file is too large to render.