jeiku commited on
Commit
53ceff1
·
verified ·
1 Parent(s): da41aa1

Delete checkpoint-224

Browse files
checkpoint-224/config.json DELETED
@@ -1,36 +0,0 @@
1
- {
2
- "_name_or_path": "jeiku/instructered4B",
3
- "architectures": [
4
- "LlamaForCausalLM"
5
- ],
6
- "attention_bias": false,
7
- "attention_dropout": 0.0,
8
- "bos_token_id": 128000,
9
- "eos_token_id": 128019,
10
- "head_dim": 128,
11
- "hidden_act": "silu",
12
- "hidden_size": 3072,
13
- "initializer_range": 0.02,
14
- "intermediate_size": 9216,
15
- "max_position_embeddings": 131072,
16
- "mlp_bias": false,
17
- "model_type": "llama",
18
- "num_attention_heads": 32,
19
- "num_hidden_layers": 32,
20
- "num_key_value_heads": 8,
21
- "pretraining_tp": 1,
22
- "rms_norm_eps": 1e-05,
23
- "rope_scaling": {
24
- "factor": 8.0,
25
- "high_freq_factor": 4.0,
26
- "low_freq_factor": 1.0,
27
- "original_max_position_embeddings": 8192,
28
- "rope_type": "llama3"
29
- },
30
- "rope_theta": 500000.0,
31
- "tie_word_embeddings": false,
32
- "torch_dtype": "bfloat16",
33
- "transformers_version": "4.45.1",
34
- "use_cache": false,
35
- "vocab_size": 128256
36
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
checkpoint-224/generation_config.json DELETED
@@ -1,7 +0,0 @@
1
- {
2
- "_from_model_config": true,
3
- "bos_token_id": 128000,
4
- "do_sample": true,
5
- "eos_token_id": 128001,
6
- "transformers_version": "4.45.1"
7
- }
 
 
 
 
 
 
 
 
checkpoint-224/global_step224/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:7c7fcfe685606e0b04223f51bd19add9979abf2cd6354f59ea3c446aba2f3e05
3
- size 13609806979
 
 
 
 
checkpoint-224/global_step224/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:413607e220cdce0b038f2ae026dd636994698eaab908a8559da38d0707ecf122
3
- size 13609806979
 
 
 
 
checkpoint-224/global_step224/zero_pp_rank_0_mp_rank_00_model_states.pt DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:856a6ec44ecedf5e27a5f77fe6ed05c9d95cd68ae9bcbb23c8445c4ed6ad5969
3
- size 152293
 
 
 
 
checkpoint-224/global_step224/zero_pp_rank_1_mp_rank_00_model_states.pt DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:2ebe8e1787d3db2771380d322af7186942eacd8641e2d636c8c83cff795517b9
3
- size 152293
 
 
 
 
checkpoint-224/latest DELETED
@@ -1 +0,0 @@
1
- global_step224
 
 
checkpoint-224/model-00001-of-00002.safetensors DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:c384928732ff1a2a0127bd30b464e66102692847ded7c98f09c5fa0f3f51ff04
3
- size 4978354640
 
 
 
 
checkpoint-224/model-00002-of-00002.safetensors DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:a498da3a1dc2da409a1e82f94ed4028df397166fc457d7be1b7e07d1d7d2fe1e
3
- size 4047172128
 
 
 
 
checkpoint-224/model.safetensors.index.json DELETED
@@ -1,298 +0,0 @@
1
- {
2
- "metadata": {
3
- "total_size": 9025492992
4
- },
5
- "weight_map": {
6
- "lm_head.weight": "model-00002-of-00002.safetensors",
7
- "model.embed_tokens.weight": "model-00001-of-00002.safetensors",
8
- "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
9
- "model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
10
- "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
11
- "model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
12
- "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
13
- "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
14
- "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
15
- "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
16
- "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
17
- "model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
18
- "model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
19
- "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
20
- "model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
21
- "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
22
- "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
23
- "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
24
- "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
25
- "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
26
- "model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
27
- "model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
28
- "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
29
- "model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
30
- "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
31
- "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
32
- "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
33
- "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
34
- "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
35
- "model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
36
- "model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
37
- "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
38
- "model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
39
- "model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
40
- "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
41
- "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
42
- "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
43
- "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
44
- "model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
45
- "model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
46
- "model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
47
- "model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
48
- "model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
49
- "model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
50
- "model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
51
- "model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
52
- "model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
53
- "model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
54
- "model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
55
- "model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
56
- "model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
57
- "model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
58
- "model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
59
- "model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
60
- "model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
61
- "model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
62
- "model.layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors",
63
- "model.layers.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
64
- "model.layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
65
- "model.layers.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
66
- "model.layers.14.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
67
- "model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
68
- "model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
69
- "model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
70
- "model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
71
- "model.layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors",
72
- "model.layers.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
73
- "model.layers.15.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
74
- "model.layers.15.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
75
- "model.layers.15.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
76
- "model.layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
77
- "model.layers.15.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
78
- "model.layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
79
- "model.layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
80
- "model.layers.16.input_layernorm.weight": "model-00001-of-00002.safetensors",
81
- "model.layers.16.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
82
- "model.layers.16.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
83
- "model.layers.16.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
84
- "model.layers.16.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
85
- "model.layers.16.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
86
- "model.layers.16.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
87
- "model.layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
88
- "model.layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
89
- "model.layers.17.input_layernorm.weight": "model-00001-of-00002.safetensors",
90
- "model.layers.17.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
91
- "model.layers.17.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
92
- "model.layers.17.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
93
- "model.layers.17.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
94
- "model.layers.17.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
95
- "model.layers.17.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
96
- "model.layers.17.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
97
- "model.layers.17.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
98
- "model.layers.18.input_layernorm.weight": "model-00002-of-00002.safetensors",
99
- "model.layers.18.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
100
- "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
101
- "model.layers.18.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
102
- "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
103
- "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
104
- "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
105
- "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
106
- "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
107
- "model.layers.19.input_layernorm.weight": "model-00002-of-00002.safetensors",
108
- "model.layers.19.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
109
- "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
110
- "model.layers.19.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
111
- "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
112
- "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
113
- "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
114
- "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
115
- "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
116
- "model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
117
- "model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
118
- "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
119
- "model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
120
- "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
121
- "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
122
- "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
123
- "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
124
- "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
125
- "model.layers.20.input_layernorm.weight": "model-00002-of-00002.safetensors",
126
- "model.layers.20.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
127
- "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
128
- "model.layers.20.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
129
- "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
130
- "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
131
- "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
132
- "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
133
- "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
134
- "model.layers.21.input_layernorm.weight": "model-00002-of-00002.safetensors",
135
- "model.layers.21.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
136
- "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
137
- "model.layers.21.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
138
- "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
139
- "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
140
- "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
141
- "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
142
- "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
143
- "model.layers.22.input_layernorm.weight": "model-00002-of-00002.safetensors",
144
- "model.layers.22.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
145
- "model.layers.22.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
146
- "model.layers.22.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
147
- "model.layers.22.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
148
- "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
149
- "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
150
- "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
151
- "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
152
- "model.layers.23.input_layernorm.weight": "model-00002-of-00002.safetensors",
153
- "model.layers.23.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
154
- "model.layers.23.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
155
- "model.layers.23.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
156
- "model.layers.23.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
157
- "model.layers.23.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
158
- "model.layers.23.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
159
- "model.layers.23.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
160
- "model.layers.23.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
161
- "model.layers.24.input_layernorm.weight": "model-00002-of-00002.safetensors",
162
- "model.layers.24.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
163
- "model.layers.24.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
164
- "model.layers.24.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
165
- "model.layers.24.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
166
- "model.layers.24.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
167
- "model.layers.24.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
168
- "model.layers.24.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
169
- "model.layers.24.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
170
- "model.layers.25.input_layernorm.weight": "model-00002-of-00002.safetensors",
171
- "model.layers.25.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
172
- "model.layers.25.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
173
- "model.layers.25.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
174
- "model.layers.25.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
175
- "model.layers.25.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
176
- "model.layers.25.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
177
- "model.layers.25.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
178
- "model.layers.25.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
179
- "model.layers.26.input_layernorm.weight": "model-00002-of-00002.safetensors",
180
- "model.layers.26.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
181
- "model.layers.26.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
182
- "model.layers.26.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
183
- "model.layers.26.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
184
- "model.layers.26.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
185
- "model.layers.26.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
186
- "model.layers.26.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
187
- "model.layers.26.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
188
- "model.layers.27.input_layernorm.weight": "model-00002-of-00002.safetensors",
189
- "model.layers.27.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
190
- "model.layers.27.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
191
- "model.layers.27.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
192
- "model.layers.27.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
193
- "model.layers.27.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
194
- "model.layers.27.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
195
- "model.layers.27.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
196
- "model.layers.27.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
197
- "model.layers.28.input_layernorm.weight": "model-00002-of-00002.safetensors",
198
- "model.layers.28.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
199
- "model.layers.28.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
200
- "model.layers.28.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
201
- "model.layers.28.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
202
- "model.layers.28.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
203
- "model.layers.28.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
204
- "model.layers.28.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
205
- "model.layers.28.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
206
- "model.layers.29.input_layernorm.weight": "model-00002-of-00002.safetensors",
207
- "model.layers.29.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
208
- "model.layers.29.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
209
- "model.layers.29.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
210
- "model.layers.29.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
211
- "model.layers.29.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
212
- "model.layers.29.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
213
- "model.layers.29.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
214
- "model.layers.29.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
215
- "model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
216
- "model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
217
- "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
218
- "model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
219
- "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
220
- "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
221
- "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
222
- "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
223
- "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
224
- "model.layers.30.input_layernorm.weight": "model-00002-of-00002.safetensors",
225
- "model.layers.30.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
226
- "model.layers.30.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
227
- "model.layers.30.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
228
- "model.layers.30.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
229
- "model.layers.30.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
230
- "model.layers.30.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
231
- "model.layers.30.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
232
- "model.layers.30.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
233
- "model.layers.31.input_layernorm.weight": "model-00002-of-00002.safetensors",
234
- "model.layers.31.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
235
- "model.layers.31.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
236
- "model.layers.31.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
237
- "model.layers.31.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
238
- "model.layers.31.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
239
- "model.layers.31.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
240
- "model.layers.31.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
241
- "model.layers.31.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
242
- "model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
243
- "model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
244
- "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
245
- "model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
246
- "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
247
- "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
248
- "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
249
- "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
250
- "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
251
- "model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
252
- "model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
253
- "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
254
- "model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
255
- "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
256
- "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
257
- "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
258
- "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
259
- "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
260
- "model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
261
- "model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
262
- "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
263
- "model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
264
- "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
265
- "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
266
- "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
267
- "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
268
- "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
269
- "model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
270
- "model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
271
- "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
272
- "model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
273
- "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
274
- "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
275
- "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
276
- "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
277
- "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
278
- "model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
279
- "model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
280
- "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
281
- "model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
282
- "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
283
- "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
284
- "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
285
- "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
286
- "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
287
- "model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
288
- "model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
289
- "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
290
- "model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
291
- "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
292
- "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
293
- "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
294
- "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
295
- "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
296
- "model.norm.weight": "model-00002-of-00002.safetensors"
297
- }
298
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
checkpoint-224/rng_state_0.pth DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:d5aeb0c54903210b6bb77aabf8f4802e4126d4bae40ff815b9d0b63767286cff
3
- size 14512
 
 
 
 
checkpoint-224/rng_state_1.pth DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:2087fa1159897fc8e7870700fdb75275c4b88dbf7d3cd02c5397018e197c58f1
3
- size 14512
 
 
 
 
checkpoint-224/scheduler.pt DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:1d90e1acb0addf826b67ce817354274df45a547fc1fc2d61aa91b5e34e71e956
3
- size 1064
 
 
 
 
checkpoint-224/special_tokens_map.json DELETED
@@ -1,23 +0,0 @@
1
- {
2
- "bos_token": {
3
- "content": "<|begin_of_text|>",
4
- "lstrip": false,
5
- "normalized": false,
6
- "rstrip": false,
7
- "single_word": false
8
- },
9
- "eos_token": {
10
- "content": "<|im_end|>",
11
- "lstrip": false,
12
- "normalized": false,
13
- "rstrip": false,
14
- "single_word": false
15
- },
16
- "pad_token": {
17
- "content": "<|finetune_right_pad_id|>",
18
- "lstrip": false,
19
- "normalized": false,
20
- "rstrip": false,
21
- "single_word": false
22
- }
23
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
checkpoint-224/tokenizer.json DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:907a7b3b13afcc9d481433f17277a6dd7cf852c6185262597f1a849d2ebeaa45
3
- size 17209884
 
 
 
 
checkpoint-224/tokenizer_config.json DELETED
@@ -1,2063 +0,0 @@
1
- {
2
- "added_tokens_decoder": {
3
- "128000": {
4
- "content": "<|begin_of_text|>",
5
- "lstrip": false,
6
- "normalized": false,
7
- "rstrip": false,
8
- "single_word": false,
9
- "special": true
10
- },
11
- "128001": {
12
- "content": "<|end_of_text|>",
13
- "lstrip": false,
14
- "normalized": false,
15
- "rstrip": false,
16
- "single_word": false,
17
- "special": true
18
- },
19
- "128002": {
20
- "content": "<|reserved_special_token_0|>",
21
- "lstrip": false,
22
- "normalized": false,
23
- "rstrip": false,
24
- "single_word": false,
25
- "special": true
26
- },
27
- "128003": {
28
- "content": "<|reserved_special_token_1|>",
29
- "lstrip": false,
30
- "normalized": false,
31
- "rstrip": false,
32
- "single_word": false,
33
- "special": true
34
- },
35
- "128004": {
36
- "content": "<|finetune_right_pad_id|>",
37
- "lstrip": false,
38
- "normalized": false,
39
- "rstrip": false,
40
- "single_word": false,
41
- "special": true
42
- },
43
- "128005": {
44
- "content": "<|reserved_special_token_2|>",
45
- "lstrip": false,
46
- "normalized": false,
47
- "rstrip": false,
48
- "single_word": false,
49
- "special": true
50
- },
51
- "128006": {
52
- "content": "<|start_header_id|>",
53
- "lstrip": false,
54
- "normalized": false,
55
- "rstrip": false,
56
- "single_word": false,
57
- "special": true
58
- },
59
- "128007": {
60
- "content": "<|end_header_id|>",
61
- "lstrip": false,
62
- "normalized": false,
63
- "rstrip": false,
64
- "single_word": false,
65
- "special": true
66
- },
67
- "128008": {
68
- "content": "<|eom_id|>",
69
- "lstrip": false,
70
- "normalized": false,
71
- "rstrip": false,
72
- "single_word": false,
73
- "special": true
74
- },
75
- "128009": {
76
- "content": "<|eot_id|>",
77
- "lstrip": false,
78
- "normalized": false,
79
- "rstrip": false,
80
- "single_word": false,
81
- "special": true
82
- },
83
- "128010": {
84
- "content": "<|python_tag|>",
85
- "lstrip": false,
86
- "normalized": false,
87
- "rstrip": false,
88
- "single_word": false,
89
- "special": true
90
- },
91
- "128011": {
92
- "content": "<|reserved_special_token_3|>",
93
- "lstrip": false,
94
- "normalized": false,
95
- "rstrip": false,
96
- "single_word": false,
97
- "special": true
98
- },
99
- "128012": {
100
- "content": "<|reserved_special_token_4|>",
101
- "lstrip": false,
102
- "normalized": false,
103
- "rstrip": false,
104
- "single_word": false,
105
- "special": true
106
- },
107
- "128013": {
108
- "content": "<|reserved_special_token_5|>",
109
- "lstrip": false,
110
- "normalized": false,
111
- "rstrip": false,
112
- "single_word": false,
113
- "special": true
114
- },
115
- "128014": {
116
- "content": "<|reserved_special_token_6|>",
117
- "lstrip": false,
118
- "normalized": false,
119
- "rstrip": false,
120
- "single_word": false,
121
- "special": true
122
- },
123
- "128015": {
124
- "content": "<|reserved_special_token_7|>",
125
- "lstrip": false,
126
- "normalized": false,
127
- "rstrip": false,
128
- "single_word": false,
129
- "special": true
130
- },
131
- "128016": {
132
- "content": "<|reserved_special_token_8|>",
133
- "lstrip": false,
134
- "normalized": false,
135
- "rstrip": false,
136
- "single_word": false,
137
- "special": true
138
- },
139
- "128017": {
140
- "content": "<|reserved_special_token_9|>",
141
- "lstrip": false,
142
- "normalized": false,
143
- "rstrip": false,
144
- "single_word": false,
145
- "special": true
146
- },
147
- "128018": {
148
- "content": "<|im_start|>",
149
- "lstrip": false,
150
- "normalized": false,
151
- "rstrip": false,
152
- "single_word": false,
153
- "special": true
154
- },
155
- "128019": {
156
- "content": "<|im_end|>",
157
- "lstrip": false,
158
- "normalized": false,
159
- "rstrip": false,
160
- "single_word": false,
161
- "special": true
162
- },
163
- "128020": {
164
- "content": "<|reserved_special_token_12|>",
165
- "lstrip": false,
166
- "normalized": false,
167
- "rstrip": false,
168
- "single_word": false,
169
- "special": true
170
- },
171
- "128021": {
172
- "content": "<|reserved_special_token_13|>",
173
- "lstrip": false,
174
- "normalized": false,
175
- "rstrip": false,
176
- "single_word": false,
177
- "special": true
178
- },
179
- "128022": {
180
- "content": "<|reserved_special_token_14|>",
181
- "lstrip": false,
182
- "normalized": false,
183
- "rstrip": false,
184
- "single_word": false,
185
- "special": true
186
- },
187
- "128023": {
188
- "content": "<|reserved_special_token_15|>",
189
- "lstrip": false,
190
- "normalized": false,
191
- "rstrip": false,
192
- "single_word": false,
193
- "special": true
194
- },
195
- "128024": {
196
- "content": "<|reserved_special_token_16|>",
197
- "lstrip": false,
198
- "normalized": false,
199
- "rstrip": false,
200
- "single_word": false,
201
- "special": true
202
- },
203
- "128025": {
204
- "content": "<|reserved_special_token_17|>",
205
- "lstrip": false,
206
- "normalized": false,
207
- "rstrip": false,
208
- "single_word": false,
209
- "special": true
210
- },
211
- "128026": {
212
- "content": "<|reserved_special_token_18|>",
213
- "lstrip": false,
214
- "normalized": false,
215
- "rstrip": false,
216
- "single_word": false,
217
- "special": true
218
- },
219
- "128027": {
220
- "content": "<|reserved_special_token_19|>",
221
- "lstrip": false,
222
- "normalized": false,
223
- "rstrip": false,
224
- "single_word": false,
225
- "special": true
226
- },
227
- "128028": {
228
- "content": "<|reserved_special_token_20|>",
229
- "lstrip": false,
230
- "normalized": false,
231
- "rstrip": false,
232
- "single_word": false,
233
- "special": true
234
- },
235
- "128029": {
236
- "content": "<|reserved_special_token_21|>",
237
- "lstrip": false,
238
- "normalized": false,
239
- "rstrip": false,
240
- "single_word": false,
241
- "special": true
242
- },
243
- "128030": {
244
- "content": "<|reserved_special_token_22|>",
245
- "lstrip": false,
246
- "normalized": false,
247
- "rstrip": false,
248
- "single_word": false,
249
- "special": true
250
- },
251
- "128031": {
252
- "content": "<|reserved_special_token_23|>",
253
- "lstrip": false,
254
- "normalized": false,
255
- "rstrip": false,
256
- "single_word": false,
257
- "special": true
258
- },
259
- "128032": {
260
- "content": "<|reserved_special_token_24|>",
261
- "lstrip": false,
262
- "normalized": false,
263
- "rstrip": false,
264
- "single_word": false,
265
- "special": true
266
- },
267
- "128033": {
268
- "content": "<|reserved_special_token_25|>",
269
- "lstrip": false,
270
- "normalized": false,
271
- "rstrip": false,
272
- "single_word": false,
273
- "special": true
274
- },
275
- "128034": {
276
- "content": "<|reserved_special_token_26|>",
277
- "lstrip": false,
278
- "normalized": false,
279
- "rstrip": false,
280
- "single_word": false,
281
- "special": true
282
- },
283
- "128035": {
284
- "content": "<|reserved_special_token_27|>",
285
- "lstrip": false,
286
- "normalized": false,
287
- "rstrip": false,
288
- "single_word": false,
289
- "special": true
290
- },
291
- "128036": {
292
- "content": "<|reserved_special_token_28|>",
293
- "lstrip": false,
294
- "normalized": false,
295
- "rstrip": false,
296
- "single_word": false,
297
- "special": true
298
- },
299
- "128037": {
300
- "content": "<|reserved_special_token_29|>",
301
- "lstrip": false,
302
- "normalized": false,
303
- "rstrip": false,
304
- "single_word": false,
305
- "special": true
306
- },
307
- "128038": {
308
- "content": "<|reserved_special_token_30|>",
309
- "lstrip": false,
310
- "normalized": false,
311
- "rstrip": false,
312
- "single_word": false,
313
- "special": true
314
- },
315
- "128039": {
316
- "content": "<|reserved_special_token_31|>",
317
- "lstrip": false,
318
- "normalized": false,
319
- "rstrip": false,
320
- "single_word": false,
321
- "special": true
322
- },
323
- "128040": {
324
- "content": "<|reserved_special_token_32|>",
325
- "lstrip": false,
326
- "normalized": false,
327
- "rstrip": false,
328
- "single_word": false,
329
- "special": true
330
- },
331
- "128041": {
332
- "content": "<|reserved_special_token_33|>",
333
- "lstrip": false,
334
- "normalized": false,
335
- "rstrip": false,
336
- "single_word": false,
337
- "special": true
338
- },
339
- "128042": {
340
- "content": "<|reserved_special_token_34|>",
341
- "lstrip": false,
342
- "normalized": false,
343
- "rstrip": false,
344
- "single_word": false,
345
- "special": true
346
- },
347
- "128043": {
348
- "content": "<|reserved_special_token_35|>",
349
- "lstrip": false,
350
- "normalized": false,
351
- "rstrip": false,
352
- "single_word": false,
353
- "special": true
354
- },
355
- "128044": {
356
- "content": "<|reserved_special_token_36|>",
357
- "lstrip": false,
358
- "normalized": false,
359
- "rstrip": false,
360
- "single_word": false,
361
- "special": true
362
- },
363
- "128045": {
364
- "content": "<|reserved_special_token_37|>",
365
- "lstrip": false,
366
- "normalized": false,
367
- "rstrip": false,
368
- "single_word": false,
369
- "special": true
370
- },
371
- "128046": {
372
- "content": "<|reserved_special_token_38|>",
373
- "lstrip": false,
374
- "normalized": false,
375
- "rstrip": false,
376
- "single_word": false,
377
- "special": true
378
- },
379
- "128047": {
380
- "content": "<|reserved_special_token_39|>",
381
- "lstrip": false,
382
- "normalized": false,
383
- "rstrip": false,
384
- "single_word": false,
385
- "special": true
386
- },
387
- "128048": {
388
- "content": "<|reserved_special_token_40|>",
389
- "lstrip": false,
390
- "normalized": false,
391
- "rstrip": false,
392
- "single_word": false,
393
- "special": true
394
- },
395
- "128049": {
396
- "content": "<|reserved_special_token_41|>",
397
- "lstrip": false,
398
- "normalized": false,
399
- "rstrip": false,
400
- "single_word": false,
401
- "special": true
402
- },
403
- "128050": {
404
- "content": "<|reserved_special_token_42|>",
405
- "lstrip": false,
406
- "normalized": false,
407
- "rstrip": false,
408
- "single_word": false,
409
- "special": true
410
- },
411
- "128051": {
412
- "content": "<|reserved_special_token_43|>",
413
- "lstrip": false,
414
- "normalized": false,
415
- "rstrip": false,
416
- "single_word": false,
417
- "special": true
418
- },
419
- "128052": {
420
- "content": "<|reserved_special_token_44|>",
421
- "lstrip": false,
422
- "normalized": false,
423
- "rstrip": false,
424
- "single_word": false,
425
- "special": true
426
- },
427
- "128053": {
428
- "content": "<|reserved_special_token_45|>",
429
- "lstrip": false,
430
- "normalized": false,
431
- "rstrip": false,
432
- "single_word": false,
433
- "special": true
434
- },
435
- "128054": {
436
- "content": "<|reserved_special_token_46|>",
437
- "lstrip": false,
438
- "normalized": false,
439
- "rstrip": false,
440
- "single_word": false,
441
- "special": true
442
- },
443
- "128055": {
444
- "content": "<|reserved_special_token_47|>",
445
- "lstrip": false,
446
- "normalized": false,
447
- "rstrip": false,
448
- "single_word": false,
449
- "special": true
450
- },
451
- "128056": {
452
- "content": "<|reserved_special_token_48|>",
453
- "lstrip": false,
454
- "normalized": false,
455
- "rstrip": false,
456
- "single_word": false,
457
- "special": true
458
- },
459
- "128057": {
460
- "content": "<|reserved_special_token_49|>",
461
- "lstrip": false,
462
- "normalized": false,
463
- "rstrip": false,
464
- "single_word": false,
465
- "special": true
466
- },
467
- "128058": {
468
- "content": "<|reserved_special_token_50|>",
469
- "lstrip": false,
470
- "normalized": false,
471
- "rstrip": false,
472
- "single_word": false,
473
- "special": true
474
- },
475
- "128059": {
476
- "content": "<|reserved_special_token_51|>",
477
- "lstrip": false,
478
- "normalized": false,
479
- "rstrip": false,
480
- "single_word": false,
481
- "special": true
482
- },
483
- "128060": {
484
- "content": "<|reserved_special_token_52|>",
485
- "lstrip": false,
486
- "normalized": false,
487
- "rstrip": false,
488
- "single_word": false,
489
- "special": true
490
- },
491
- "128061": {
492
- "content": "<|reserved_special_token_53|>",
493
- "lstrip": false,
494
- "normalized": false,
495
- "rstrip": false,
496
- "single_word": false,
497
- "special": true
498
- },
499
- "128062": {
500
- "content": "<|reserved_special_token_54|>",
501
- "lstrip": false,
502
- "normalized": false,
503
- "rstrip": false,
504
- "single_word": false,
505
- "special": true
506
- },
507
- "128063": {
508
- "content": "<|reserved_special_token_55|>",
509
- "lstrip": false,
510
- "normalized": false,
511
- "rstrip": false,
512
- "single_word": false,
513
- "special": true
514
- },
515
- "128064": {
516
- "content": "<|reserved_special_token_56|>",
517
- "lstrip": false,
518
- "normalized": false,
519
- "rstrip": false,
520
- "single_word": false,
521
- "special": true
522
- },
523
- "128065": {
524
- "content": "<|reserved_special_token_57|>",
525
- "lstrip": false,
526
- "normalized": false,
527
- "rstrip": false,
528
- "single_word": false,
529
- "special": true
530
- },
531
- "128066": {
532
- "content": "<|reserved_special_token_58|>",
533
- "lstrip": false,
534
- "normalized": false,
535
- "rstrip": false,
536
- "single_word": false,
537
- "special": true
538
- },
539
- "128067": {
540
- "content": "<|reserved_special_token_59|>",
541
- "lstrip": false,
542
- "normalized": false,
543
- "rstrip": false,
544
- "single_word": false,
545
- "special": true
546
- },
547
- "128068": {
548
- "content": "<|reserved_special_token_60|>",
549
- "lstrip": false,
550
- "normalized": false,
551
- "rstrip": false,
552
- "single_word": false,
553
- "special": true
554
- },
555
- "128069": {
556
- "content": "<|reserved_special_token_61|>",
557
- "lstrip": false,
558
- "normalized": false,
559
- "rstrip": false,
560
- "single_word": false,
561
- "special": true
562
- },
563
- "128070": {
564
- "content": "<|reserved_special_token_62|>",
565
- "lstrip": false,
566
- "normalized": false,
567
- "rstrip": false,
568
- "single_word": false,
569
- "special": true
570
- },
571
- "128071": {
572
- "content": "<|reserved_special_token_63|>",
573
- "lstrip": false,
574
- "normalized": false,
575
- "rstrip": false,
576
- "single_word": false,
577
- "special": true
578
- },
579
- "128072": {
580
- "content": "<|reserved_special_token_64|>",
581
- "lstrip": false,
582
- "normalized": false,
583
- "rstrip": false,
584
- "single_word": false,
585
- "special": true
586
- },
587
- "128073": {
588
- "content": "<|reserved_special_token_65|>",
589
- "lstrip": false,
590
- "normalized": false,
591
- "rstrip": false,
592
- "single_word": false,
593
- "special": true
594
- },
595
- "128074": {
596
- "content": "<|reserved_special_token_66|>",
597
- "lstrip": false,
598
- "normalized": false,
599
- "rstrip": false,
600
- "single_word": false,
601
- "special": true
602
- },
603
- "128075": {
604
- "content": "<|reserved_special_token_67|>",
605
- "lstrip": false,
606
- "normalized": false,
607
- "rstrip": false,
608
- "single_word": false,
609
- "special": true
610
- },
611
- "128076": {
612
- "content": "<|reserved_special_token_68|>",
613
- "lstrip": false,
614
- "normalized": false,
615
- "rstrip": false,
616
- "single_word": false,
617
- "special": true
618
- },
619
- "128077": {
620
- "content": "<|reserved_special_token_69|>",
621
- "lstrip": false,
622
- "normalized": false,
623
- "rstrip": false,
624
- "single_word": false,
625
- "special": true
626
- },
627
- "128078": {
628
- "content": "<|reserved_special_token_70|>",
629
- "lstrip": false,
630
- "normalized": false,
631
- "rstrip": false,
632
- "single_word": false,
633
- "special": true
634
- },
635
- "128079": {
636
- "content": "<|reserved_special_token_71|>",
637
- "lstrip": false,
638
- "normalized": false,
639
- "rstrip": false,
640
- "single_word": false,
641
- "special": true
642
- },
643
- "128080": {
644
- "content": "<|reserved_special_token_72|>",
645
- "lstrip": false,
646
- "normalized": false,
647
- "rstrip": false,
648
- "single_word": false,
649
- "special": true
650
- },
651
- "128081": {
652
- "content": "<|reserved_special_token_73|>",
653
- "lstrip": false,
654
- "normalized": false,
655
- "rstrip": false,
656
- "single_word": false,
657
- "special": true
658
- },
659
- "128082": {
660
- "content": "<|reserved_special_token_74|>",
661
- "lstrip": false,
662
- "normalized": false,
663
- "rstrip": false,
664
- "single_word": false,
665
- "special": true
666
- },
667
- "128083": {
668
- "content": "<|reserved_special_token_75|>",
669
- "lstrip": false,
670
- "normalized": false,
671
- "rstrip": false,
672
- "single_word": false,
673
- "special": true
674
- },
675
- "128084": {
676
- "content": "<|reserved_special_token_76|>",
677
- "lstrip": false,
678
- "normalized": false,
679
- "rstrip": false,
680
- "single_word": false,
681
- "special": true
682
- },
683
- "128085": {
684
- "content": "<|reserved_special_token_77|>",
685
- "lstrip": false,
686
- "normalized": false,
687
- "rstrip": false,
688
- "single_word": false,
689
- "special": true
690
- },
691
- "128086": {
692
- "content": "<|reserved_special_token_78|>",
693
- "lstrip": false,
694
- "normalized": false,
695
- "rstrip": false,
696
- "single_word": false,
697
- "special": true
698
- },
699
- "128087": {
700
- "content": "<|reserved_special_token_79|>",
701
- "lstrip": false,
702
- "normalized": false,
703
- "rstrip": false,
704
- "single_word": false,
705
- "special": true
706
- },
707
- "128088": {
708
- "content": "<|reserved_special_token_80|>",
709
- "lstrip": false,
710
- "normalized": false,
711
- "rstrip": false,
712
- "single_word": false,
713
- "special": true
714
- },
715
- "128089": {
716
- "content": "<|reserved_special_token_81|>",
717
- "lstrip": false,
718
- "normalized": false,
719
- "rstrip": false,
720
- "single_word": false,
721
- "special": true
722
- },
723
- "128090": {
724
- "content": "<|reserved_special_token_82|>",
725
- "lstrip": false,
726
- "normalized": false,
727
- "rstrip": false,
728
- "single_word": false,
729
- "special": true
730
- },
731
- "128091": {
732
- "content": "<|reserved_special_token_83|>",
733
- "lstrip": false,
734
- "normalized": false,
735
- "rstrip": false,
736
- "single_word": false,
737
- "special": true
738
- },
739
- "128092": {
740
- "content": "<|reserved_special_token_84|>",
741
- "lstrip": false,
742
- "normalized": false,
743
- "rstrip": false,
744
- "single_word": false,
745
- "special": true
746
- },
747
- "128093": {
748
- "content": "<|reserved_special_token_85|>",
749
- "lstrip": false,
750
- "normalized": false,
751
- "rstrip": false,
752
- "single_word": false,
753
- "special": true
754
- },
755
- "128094": {
756
- "content": "<|reserved_special_token_86|>",
757
- "lstrip": false,
758
- "normalized": false,
759
- "rstrip": false,
760
- "single_word": false,
761
- "special": true
762
- },
763
- "128095": {
764
- "content": "<|reserved_special_token_87|>",
765
- "lstrip": false,
766
- "normalized": false,
767
- "rstrip": false,
768
- "single_word": false,
769
- "special": true
770
- },
771
- "128096": {
772
- "content": "<|reserved_special_token_88|>",
773
- "lstrip": false,
774
- "normalized": false,
775
- "rstrip": false,
776
- "single_word": false,
777
- "special": true
778
- },
779
- "128097": {
780
- "content": "<|reserved_special_token_89|>",
781
- "lstrip": false,
782
- "normalized": false,
783
- "rstrip": false,
784
- "single_word": false,
785
- "special": true
786
- },
787
- "128098": {
788
- "content": "<|reserved_special_token_90|>",
789
- "lstrip": false,
790
- "normalized": false,
791
- "rstrip": false,
792
- "single_word": false,
793
- "special": true
794
- },
795
- "128099": {
796
- "content": "<|reserved_special_token_91|>",
797
- "lstrip": false,
798
- "normalized": false,
799
- "rstrip": false,
800
- "single_word": false,
801
- "special": true
802
- },
803
- "128100": {
804
- "content": "<|reserved_special_token_92|>",
805
- "lstrip": false,
806
- "normalized": false,
807
- "rstrip": false,
808
- "single_word": false,
809
- "special": true
810
- },
811
- "128101": {
812
- "content": "<|reserved_special_token_93|>",
813
- "lstrip": false,
814
- "normalized": false,
815
- "rstrip": false,
816
- "single_word": false,
817
- "special": true
818
- },
819
- "128102": {
820
- "content": "<|reserved_special_token_94|>",
821
- "lstrip": false,
822
- "normalized": false,
823
- "rstrip": false,
824
- "single_word": false,
825
- "special": true
826
- },
827
- "128103": {
828
- "content": "<|reserved_special_token_95|>",
829
- "lstrip": false,
830
- "normalized": false,
831
- "rstrip": false,
832
- "single_word": false,
833
- "special": true
834
- },
835
- "128104": {
836
- "content": "<|reserved_special_token_96|>",
837
- "lstrip": false,
838
- "normalized": false,
839
- "rstrip": false,
840
- "single_word": false,
841
- "special": true
842
- },
843
- "128105": {
844
- "content": "<|reserved_special_token_97|>",
845
- "lstrip": false,
846
- "normalized": false,
847
- "rstrip": false,
848
- "single_word": false,
849
- "special": true
850
- },
851
- "128106": {
852
- "content": "<|reserved_special_token_98|>",
853
- "lstrip": false,
854
- "normalized": false,
855
- "rstrip": false,
856
- "single_word": false,
857
- "special": true
858
- },
859
- "128107": {
860
- "content": "<|reserved_special_token_99|>",
861
- "lstrip": false,
862
- "normalized": false,
863
- "rstrip": false,
864
- "single_word": false,
865
- "special": true
866
- },
867
- "128108": {
868
- "content": "<|reserved_special_token_100|>",
869
- "lstrip": false,
870
- "normalized": false,
871
- "rstrip": false,
872
- "single_word": false,
873
- "special": true
874
- },
875
- "128109": {
876
- "content": "<|reserved_special_token_101|>",
877
- "lstrip": false,
878
- "normalized": false,
879
- "rstrip": false,
880
- "single_word": false,
881
- "special": true
882
- },
883
- "128110": {
884
- "content": "<|reserved_special_token_102|>",
885
- "lstrip": false,
886
- "normalized": false,
887
- "rstrip": false,
888
- "single_word": false,
889
- "special": true
890
- },
891
- "128111": {
892
- "content": "<|reserved_special_token_103|>",
893
- "lstrip": false,
894
- "normalized": false,
895
- "rstrip": false,
896
- "single_word": false,
897
- "special": true
898
- },
899
- "128112": {
900
- "content": "<|reserved_special_token_104|>",
901
- "lstrip": false,
902
- "normalized": false,
903
- "rstrip": false,
904
- "single_word": false,
905
- "special": true
906
- },
907
- "128113": {
908
- "content": "<|reserved_special_token_105|>",
909
- "lstrip": false,
910
- "normalized": false,
911
- "rstrip": false,
912
- "single_word": false,
913
- "special": true
914
- },
915
- "128114": {
916
- "content": "<|reserved_special_token_106|>",
917
- "lstrip": false,
918
- "normalized": false,
919
- "rstrip": false,
920
- "single_word": false,
921
- "special": true
922
- },
923
- "128115": {
924
- "content": "<|reserved_special_token_107|>",
925
- "lstrip": false,
926
- "normalized": false,
927
- "rstrip": false,
928
- "single_word": false,
929
- "special": true
930
- },
931
- "128116": {
932
- "content": "<|reserved_special_token_108|>",
933
- "lstrip": false,
934
- "normalized": false,
935
- "rstrip": false,
936
- "single_word": false,
937
- "special": true
938
- },
939
- "128117": {
940
- "content": "<|reserved_special_token_109|>",
941
- "lstrip": false,
942
- "normalized": false,
943
- "rstrip": false,
944
- "single_word": false,
945
- "special": true
946
- },
947
- "128118": {
948
- "content": "<|reserved_special_token_110|>",
949
- "lstrip": false,
950
- "normalized": false,
951
- "rstrip": false,
952
- "single_word": false,
953
- "special": true
954
- },
955
- "128119": {
956
- "content": "<|reserved_special_token_111|>",
957
- "lstrip": false,
958
- "normalized": false,
959
- "rstrip": false,
960
- "single_word": false,
961
- "special": true
962
- },
963
- "128120": {
964
- "content": "<|reserved_special_token_112|>",
965
- "lstrip": false,
966
- "normalized": false,
967
- "rstrip": false,
968
- "single_word": false,
969
- "special": true
970
- },
971
- "128121": {
972
- "content": "<|reserved_special_token_113|>",
973
- "lstrip": false,
974
- "normalized": false,
975
- "rstrip": false,
976
- "single_word": false,
977
- "special": true
978
- },
979
- "128122": {
980
- "content": "<|reserved_special_token_114|>",
981
- "lstrip": false,
982
- "normalized": false,
983
- "rstrip": false,
984
- "single_word": false,
985
- "special": true
986
- },
987
- "128123": {
988
- "content": "<|reserved_special_token_115|>",
989
- "lstrip": false,
990
- "normalized": false,
991
- "rstrip": false,
992
- "single_word": false,
993
- "special": true
994
- },
995
- "128124": {
996
- "content": "<|reserved_special_token_116|>",
997
- "lstrip": false,
998
- "normalized": false,
999
- "rstrip": false,
1000
- "single_word": false,
1001
- "special": true
1002
- },
1003
- "128125": {
1004
- "content": "<|reserved_special_token_117|>",
1005
- "lstrip": false,
1006
- "normalized": false,
1007
- "rstrip": false,
1008
- "single_word": false,
1009
- "special": true
1010
- },
1011
- "128126": {
1012
- "content": "<|reserved_special_token_118|>",
1013
- "lstrip": false,
1014
- "normalized": false,
1015
- "rstrip": false,
1016
- "single_word": false,
1017
- "special": true
1018
- },
1019
- "128127": {
1020
- "content": "<|reserved_special_token_119|>",
1021
- "lstrip": false,
1022
- "normalized": false,
1023
- "rstrip": false,
1024
- "single_word": false,
1025
- "special": true
1026
- },
1027
- "128128": {
1028
- "content": "<|reserved_special_token_120|>",
1029
- "lstrip": false,
1030
- "normalized": false,
1031
- "rstrip": false,
1032
- "single_word": false,
1033
- "special": true
1034
- },
1035
- "128129": {
1036
- "content": "<|reserved_special_token_121|>",
1037
- "lstrip": false,
1038
- "normalized": false,
1039
- "rstrip": false,
1040
- "single_word": false,
1041
- "special": true
1042
- },
1043
- "128130": {
1044
- "content": "<|reserved_special_token_122|>",
1045
- "lstrip": false,
1046
- "normalized": false,
1047
- "rstrip": false,
1048
- "single_word": false,
1049
- "special": true
1050
- },
1051
- "128131": {
1052
- "content": "<|reserved_special_token_123|>",
1053
- "lstrip": false,
1054
- "normalized": false,
1055
- "rstrip": false,
1056
- "single_word": false,
1057
- "special": true
1058
- },
1059
- "128132": {
1060
- "content": "<|reserved_special_token_124|>",
1061
- "lstrip": false,
1062
- "normalized": false,
1063
- "rstrip": false,
1064
- "single_word": false,
1065
- "special": true
1066
- },
1067
- "128133": {
1068
- "content": "<|reserved_special_token_125|>",
1069
- "lstrip": false,
1070
- "normalized": false,
1071
- "rstrip": false,
1072
- "single_word": false,
1073
- "special": true
1074
- },
1075
- "128134": {
1076
- "content": "<|reserved_special_token_126|>",
1077
- "lstrip": false,
1078
- "normalized": false,
1079
- "rstrip": false,
1080
- "single_word": false,
1081
- "special": true
1082
- },
1083
- "128135": {
1084
- "content": "<|reserved_special_token_127|>",
1085
- "lstrip": false,
1086
- "normalized": false,
1087
- "rstrip": false,
1088
- "single_word": false,
1089
- "special": true
1090
- },
1091
- "128136": {
1092
- "content": "<|reserved_special_token_128|>",
1093
- "lstrip": false,
1094
- "normalized": false,
1095
- "rstrip": false,
1096
- "single_word": false,
1097
- "special": true
1098
- },
1099
- "128137": {
1100
- "content": "<|reserved_special_token_129|>",
1101
- "lstrip": false,
1102
- "normalized": false,
1103
- "rstrip": false,
1104
- "single_word": false,
1105
- "special": true
1106
- },
1107
- "128138": {
1108
- "content": "<|reserved_special_token_130|>",
1109
- "lstrip": false,
1110
- "normalized": false,
1111
- "rstrip": false,
1112
- "single_word": false,
1113
- "special": true
1114
- },
1115
- "128139": {
1116
- "content": "<|reserved_special_token_131|>",
1117
- "lstrip": false,
1118
- "normalized": false,
1119
- "rstrip": false,
1120
- "single_word": false,
1121
- "special": true
1122
- },
1123
- "128140": {
1124
- "content": "<|reserved_special_token_132|>",
1125
- "lstrip": false,
1126
- "normalized": false,
1127
- "rstrip": false,
1128
- "single_word": false,
1129
- "special": true
1130
- },
1131
- "128141": {
1132
- "content": "<|reserved_special_token_133|>",
1133
- "lstrip": false,
1134
- "normalized": false,
1135
- "rstrip": false,
1136
- "single_word": false,
1137
- "special": true
1138
- },
1139
- "128142": {
1140
- "content": "<|reserved_special_token_134|>",
1141
- "lstrip": false,
1142
- "normalized": false,
1143
- "rstrip": false,
1144
- "single_word": false,
1145
- "special": true
1146
- },
1147
- "128143": {
1148
- "content": "<|reserved_special_token_135|>",
1149
- "lstrip": false,
1150
- "normalized": false,
1151
- "rstrip": false,
1152
- "single_word": false,
1153
- "special": true
1154
- },
1155
- "128144": {
1156
- "content": "<|reserved_special_token_136|>",
1157
- "lstrip": false,
1158
- "normalized": false,
1159
- "rstrip": false,
1160
- "single_word": false,
1161
- "special": true
1162
- },
1163
- "128145": {
1164
- "content": "<|reserved_special_token_137|>",
1165
- "lstrip": false,
1166
- "normalized": false,
1167
- "rstrip": false,
1168
- "single_word": false,
1169
- "special": true
1170
- },
1171
- "128146": {
1172
- "content": "<|reserved_special_token_138|>",
1173
- "lstrip": false,
1174
- "normalized": false,
1175
- "rstrip": false,
1176
- "single_word": false,
1177
- "special": true
1178
- },
1179
- "128147": {
1180
- "content": "<|reserved_special_token_139|>",
1181
- "lstrip": false,
1182
- "normalized": false,
1183
- "rstrip": false,
1184
- "single_word": false,
1185
- "special": true
1186
- },
1187
- "128148": {
1188
- "content": "<|reserved_special_token_140|>",
1189
- "lstrip": false,
1190
- "normalized": false,
1191
- "rstrip": false,
1192
- "single_word": false,
1193
- "special": true
1194
- },
1195
- "128149": {
1196
- "content": "<|reserved_special_token_141|>",
1197
- "lstrip": false,
1198
- "normalized": false,
1199
- "rstrip": false,
1200
- "single_word": false,
1201
- "special": true
1202
- },
1203
- "128150": {
1204
- "content": "<|reserved_special_token_142|>",
1205
- "lstrip": false,
1206
- "normalized": false,
1207
- "rstrip": false,
1208
- "single_word": false,
1209
- "special": true
1210
- },
1211
- "128151": {
1212
- "content": "<|reserved_special_token_143|>",
1213
- "lstrip": false,
1214
- "normalized": false,
1215
- "rstrip": false,
1216
- "single_word": false,
1217
- "special": true
1218
- },
1219
- "128152": {
1220
- "content": "<|reserved_special_token_144|>",
1221
- "lstrip": false,
1222
- "normalized": false,
1223
- "rstrip": false,
1224
- "single_word": false,
1225
- "special": true
1226
- },
1227
- "128153": {
1228
- "content": "<|reserved_special_token_145|>",
1229
- "lstrip": false,
1230
- "normalized": false,
1231
- "rstrip": false,
1232
- "single_word": false,
1233
- "special": true
1234
- },
1235
- "128154": {
1236
- "content": "<|reserved_special_token_146|>",
1237
- "lstrip": false,
1238
- "normalized": false,
1239
- "rstrip": false,
1240
- "single_word": false,
1241
- "special": true
1242
- },
1243
- "128155": {
1244
- "content": "<|reserved_special_token_147|>",
1245
- "lstrip": false,
1246
- "normalized": false,
1247
- "rstrip": false,
1248
- "single_word": false,
1249
- "special": true
1250
- },
1251
- "128156": {
1252
- "content": "<|reserved_special_token_148|>",
1253
- "lstrip": false,
1254
- "normalized": false,
1255
- "rstrip": false,
1256
- "single_word": false,
1257
- "special": true
1258
- },
1259
- "128157": {
1260
- "content": "<|reserved_special_token_149|>",
1261
- "lstrip": false,
1262
- "normalized": false,
1263
- "rstrip": false,
1264
- "single_word": false,
1265
- "special": true
1266
- },
1267
- "128158": {
1268
- "content": "<|reserved_special_token_150|>",
1269
- "lstrip": false,
1270
- "normalized": false,
1271
- "rstrip": false,
1272
- "single_word": false,
1273
- "special": true
1274
- },
1275
- "128159": {
1276
- "content": "<|reserved_special_token_151|>",
1277
- "lstrip": false,
1278
- "normalized": false,
1279
- "rstrip": false,
1280
- "single_word": false,
1281
- "special": true
1282
- },
1283
- "128160": {
1284
- "content": "<|reserved_special_token_152|>",
1285
- "lstrip": false,
1286
- "normalized": false,
1287
- "rstrip": false,
1288
- "single_word": false,
1289
- "special": true
1290
- },
1291
- "128161": {
1292
- "content": "<|reserved_special_token_153|>",
1293
- "lstrip": false,
1294
- "normalized": false,
1295
- "rstrip": false,
1296
- "single_word": false,
1297
- "special": true
1298
- },
1299
- "128162": {
1300
- "content": "<|reserved_special_token_154|>",
1301
- "lstrip": false,
1302
- "normalized": false,
1303
- "rstrip": false,
1304
- "single_word": false,
1305
- "special": true
1306
- },
1307
- "128163": {
1308
- "content": "<|reserved_special_token_155|>",
1309
- "lstrip": false,
1310
- "normalized": false,
1311
- "rstrip": false,
1312
- "single_word": false,
1313
- "special": true
1314
- },
1315
- "128164": {
1316
- "content": "<|reserved_special_token_156|>",
1317
- "lstrip": false,
1318
- "normalized": false,
1319
- "rstrip": false,
1320
- "single_word": false,
1321
- "special": true
1322
- },
1323
- "128165": {
1324
- "content": "<|reserved_special_token_157|>",
1325
- "lstrip": false,
1326
- "normalized": false,
1327
- "rstrip": false,
1328
- "single_word": false,
1329
- "special": true
1330
- },
1331
- "128166": {
1332
- "content": "<|reserved_special_token_158|>",
1333
- "lstrip": false,
1334
- "normalized": false,
1335
- "rstrip": false,
1336
- "single_word": false,
1337
- "special": true
1338
- },
1339
- "128167": {
1340
- "content": "<|reserved_special_token_159|>",
1341
- "lstrip": false,
1342
- "normalized": false,
1343
- "rstrip": false,
1344
- "single_word": false,
1345
- "special": true
1346
- },
1347
- "128168": {
1348
- "content": "<|reserved_special_token_160|>",
1349
- "lstrip": false,
1350
- "normalized": false,
1351
- "rstrip": false,
1352
- "single_word": false,
1353
- "special": true
1354
- },
1355
- "128169": {
1356
- "content": "<|reserved_special_token_161|>",
1357
- "lstrip": false,
1358
- "normalized": false,
1359
- "rstrip": false,
1360
- "single_word": false,
1361
- "special": true
1362
- },
1363
- "128170": {
1364
- "content": "<|reserved_special_token_162|>",
1365
- "lstrip": false,
1366
- "normalized": false,
1367
- "rstrip": false,
1368
- "single_word": false,
1369
- "special": true
1370
- },
1371
- "128171": {
1372
- "content": "<|reserved_special_token_163|>",
1373
- "lstrip": false,
1374
- "normalized": false,
1375
- "rstrip": false,
1376
- "single_word": false,
1377
- "special": true
1378
- },
1379
- "128172": {
1380
- "content": "<|reserved_special_token_164|>",
1381
- "lstrip": false,
1382
- "normalized": false,
1383
- "rstrip": false,
1384
- "single_word": false,
1385
- "special": true
1386
- },
1387
- "128173": {
1388
- "content": "<|reserved_special_token_165|>",
1389
- "lstrip": false,
1390
- "normalized": false,
1391
- "rstrip": false,
1392
- "single_word": false,
1393
- "special": true
1394
- },
1395
- "128174": {
1396
- "content": "<|reserved_special_token_166|>",
1397
- "lstrip": false,
1398
- "normalized": false,
1399
- "rstrip": false,
1400
- "single_word": false,
1401
- "special": true
1402
- },
1403
- "128175": {
1404
- "content": "<|reserved_special_token_167|>",
1405
- "lstrip": false,
1406
- "normalized": false,
1407
- "rstrip": false,
1408
- "single_word": false,
1409
- "special": true
1410
- },
1411
- "128176": {
1412
- "content": "<|reserved_special_token_168|>",
1413
- "lstrip": false,
1414
- "normalized": false,
1415
- "rstrip": false,
1416
- "single_word": false,
1417
- "special": true
1418
- },
1419
- "128177": {
1420
- "content": "<|reserved_special_token_169|>",
1421
- "lstrip": false,
1422
- "normalized": false,
1423
- "rstrip": false,
1424
- "single_word": false,
1425
- "special": true
1426
- },
1427
- "128178": {
1428
- "content": "<|reserved_special_token_170|>",
1429
- "lstrip": false,
1430
- "normalized": false,
1431
- "rstrip": false,
1432
- "single_word": false,
1433
- "special": true
1434
- },
1435
- "128179": {
1436
- "content": "<|reserved_special_token_171|>",
1437
- "lstrip": false,
1438
- "normalized": false,
1439
- "rstrip": false,
1440
- "single_word": false,
1441
- "special": true
1442
- },
1443
- "128180": {
1444
- "content": "<|reserved_special_token_172|>",
1445
- "lstrip": false,
1446
- "normalized": false,
1447
- "rstrip": false,
1448
- "single_word": false,
1449
- "special": true
1450
- },
1451
- "128181": {
1452
- "content": "<|reserved_special_token_173|>",
1453
- "lstrip": false,
1454
- "normalized": false,
1455
- "rstrip": false,
1456
- "single_word": false,
1457
- "special": true
1458
- },
1459
- "128182": {
1460
- "content": "<|reserved_special_token_174|>",
1461
- "lstrip": false,
1462
- "normalized": false,
1463
- "rstrip": false,
1464
- "single_word": false,
1465
- "special": true
1466
- },
1467
- "128183": {
1468
- "content": "<|reserved_special_token_175|>",
1469
- "lstrip": false,
1470
- "normalized": false,
1471
- "rstrip": false,
1472
- "single_word": false,
1473
- "special": true
1474
- },
1475
- "128184": {
1476
- "content": "<|reserved_special_token_176|>",
1477
- "lstrip": false,
1478
- "normalized": false,
1479
- "rstrip": false,
1480
- "single_word": false,
1481
- "special": true
1482
- },
1483
- "128185": {
1484
- "content": "<|reserved_special_token_177|>",
1485
- "lstrip": false,
1486
- "normalized": false,
1487
- "rstrip": false,
1488
- "single_word": false,
1489
- "special": true
1490
- },
1491
- "128186": {
1492
- "content": "<|reserved_special_token_178|>",
1493
- "lstrip": false,
1494
- "normalized": false,
1495
- "rstrip": false,
1496
- "single_word": false,
1497
- "special": true
1498
- },
1499
- "128187": {
1500
- "content": "<|reserved_special_token_179|>",
1501
- "lstrip": false,
1502
- "normalized": false,
1503
- "rstrip": false,
1504
- "single_word": false,
1505
- "special": true
1506
- },
1507
- "128188": {
1508
- "content": "<|reserved_special_token_180|>",
1509
- "lstrip": false,
1510
- "normalized": false,
1511
- "rstrip": false,
1512
- "single_word": false,
1513
- "special": true
1514
- },
1515
- "128189": {
1516
- "content": "<|reserved_special_token_181|>",
1517
- "lstrip": false,
1518
- "normalized": false,
1519
- "rstrip": false,
1520
- "single_word": false,
1521
- "special": true
1522
- },
1523
- "128190": {
1524
- "content": "<|reserved_special_token_182|>",
1525
- "lstrip": false,
1526
- "normalized": false,
1527
- "rstrip": false,
1528
- "single_word": false,
1529
- "special": true
1530
- },
1531
- "128191": {
1532
- "content": "<|reserved_special_token_183|>",
1533
- "lstrip": false,
1534
- "normalized": false,
1535
- "rstrip": false,
1536
- "single_word": false,
1537
- "special": true
1538
- },
1539
- "128192": {
1540
- "content": "<|reserved_special_token_184|>",
1541
- "lstrip": false,
1542
- "normalized": false,
1543
- "rstrip": false,
1544
- "single_word": false,
1545
- "special": true
1546
- },
1547
- "128193": {
1548
- "content": "<|reserved_special_token_185|>",
1549
- "lstrip": false,
1550
- "normalized": false,
1551
- "rstrip": false,
1552
- "single_word": false,
1553
- "special": true
1554
- },
1555
- "128194": {
1556
- "content": "<|reserved_special_token_186|>",
1557
- "lstrip": false,
1558
- "normalized": false,
1559
- "rstrip": false,
1560
- "single_word": false,
1561
- "special": true
1562
- },
1563
- "128195": {
1564
- "content": "<|reserved_special_token_187|>",
1565
- "lstrip": false,
1566
- "normalized": false,
1567
- "rstrip": false,
1568
- "single_word": false,
1569
- "special": true
1570
- },
1571
- "128196": {
1572
- "content": "<|reserved_special_token_188|>",
1573
- "lstrip": false,
1574
- "normalized": false,
1575
- "rstrip": false,
1576
- "single_word": false,
1577
- "special": true
1578
- },
1579
- "128197": {
1580
- "content": "<|reserved_special_token_189|>",
1581
- "lstrip": false,
1582
- "normalized": false,
1583
- "rstrip": false,
1584
- "single_word": false,
1585
- "special": true
1586
- },
1587
- "128198": {
1588
- "content": "<|reserved_special_token_190|>",
1589
- "lstrip": false,
1590
- "normalized": false,
1591
- "rstrip": false,
1592
- "single_word": false,
1593
- "special": true
1594
- },
1595
- "128199": {
1596
- "content": "<|reserved_special_token_191|>",
1597
- "lstrip": false,
1598
- "normalized": false,
1599
- "rstrip": false,
1600
- "single_word": false,
1601
- "special": true
1602
- },
1603
- "128200": {
1604
- "content": "<|reserved_special_token_192|>",
1605
- "lstrip": false,
1606
- "normalized": false,
1607
- "rstrip": false,
1608
- "single_word": false,
1609
- "special": true
1610
- },
1611
- "128201": {
1612
- "content": "<|reserved_special_token_193|>",
1613
- "lstrip": false,
1614
- "normalized": false,
1615
- "rstrip": false,
1616
- "single_word": false,
1617
- "special": true
1618
- },
1619
- "128202": {
1620
- "content": "<|reserved_special_token_194|>",
1621
- "lstrip": false,
1622
- "normalized": false,
1623
- "rstrip": false,
1624
- "single_word": false,
1625
- "special": true
1626
- },
1627
- "128203": {
1628
- "content": "<|reserved_special_token_195|>",
1629
- "lstrip": false,
1630
- "normalized": false,
1631
- "rstrip": false,
1632
- "single_word": false,
1633
- "special": true
1634
- },
1635
- "128204": {
1636
- "content": "<|reserved_special_token_196|>",
1637
- "lstrip": false,
1638
- "normalized": false,
1639
- "rstrip": false,
1640
- "single_word": false,
1641
- "special": true
1642
- },
1643
- "128205": {
1644
- "content": "<|reserved_special_token_197|>",
1645
- "lstrip": false,
1646
- "normalized": false,
1647
- "rstrip": false,
1648
- "single_word": false,
1649
- "special": true
1650
- },
1651
- "128206": {
1652
- "content": "<|reserved_special_token_198|>",
1653
- "lstrip": false,
1654
- "normalized": false,
1655
- "rstrip": false,
1656
- "single_word": false,
1657
- "special": true
1658
- },
1659
- "128207": {
1660
- "content": "<|reserved_special_token_199|>",
1661
- "lstrip": false,
1662
- "normalized": false,
1663
- "rstrip": false,
1664
- "single_word": false,
1665
- "special": true
1666
- },
1667
- "128208": {
1668
- "content": "<|reserved_special_token_200|>",
1669
- "lstrip": false,
1670
- "normalized": false,
1671
- "rstrip": false,
1672
- "single_word": false,
1673
- "special": true
1674
- },
1675
- "128209": {
1676
- "content": "<|reserved_special_token_201|>",
1677
- "lstrip": false,
1678
- "normalized": false,
1679
- "rstrip": false,
1680
- "single_word": false,
1681
- "special": true
1682
- },
1683
- "128210": {
1684
- "content": "<|reserved_special_token_202|>",
1685
- "lstrip": false,
1686
- "normalized": false,
1687
- "rstrip": false,
1688
- "single_word": false,
1689
- "special": true
1690
- },
1691
- "128211": {
1692
- "content": "<|reserved_special_token_203|>",
1693
- "lstrip": false,
1694
- "normalized": false,
1695
- "rstrip": false,
1696
- "single_word": false,
1697
- "special": true
1698
- },
1699
- "128212": {
1700
- "content": "<|reserved_special_token_204|>",
1701
- "lstrip": false,
1702
- "normalized": false,
1703
- "rstrip": false,
1704
- "single_word": false,
1705
- "special": true
1706
- },
1707
- "128213": {
1708
- "content": "<|reserved_special_token_205|>",
1709
- "lstrip": false,
1710
- "normalized": false,
1711
- "rstrip": false,
1712
- "single_word": false,
1713
- "special": true
1714
- },
1715
- "128214": {
1716
- "content": "<|reserved_special_token_206|>",
1717
- "lstrip": false,
1718
- "normalized": false,
1719
- "rstrip": false,
1720
- "single_word": false,
1721
- "special": true
1722
- },
1723
- "128215": {
1724
- "content": "<|reserved_special_token_207|>",
1725
- "lstrip": false,
1726
- "normalized": false,
1727
- "rstrip": false,
1728
- "single_word": false,
1729
- "special": true
1730
- },
1731
- "128216": {
1732
- "content": "<|reserved_special_token_208|>",
1733
- "lstrip": false,
1734
- "normalized": false,
1735
- "rstrip": false,
1736
- "single_word": false,
1737
- "special": true
1738
- },
1739
- "128217": {
1740
- "content": "<|reserved_special_token_209|>",
1741
- "lstrip": false,
1742
- "normalized": false,
1743
- "rstrip": false,
1744
- "single_word": false,
1745
- "special": true
1746
- },
1747
- "128218": {
1748
- "content": "<|reserved_special_token_210|>",
1749
- "lstrip": false,
1750
- "normalized": false,
1751
- "rstrip": false,
1752
- "single_word": false,
1753
- "special": true
1754
- },
1755
- "128219": {
1756
- "content": "<|reserved_special_token_211|>",
1757
- "lstrip": false,
1758
- "normalized": false,
1759
- "rstrip": false,
1760
- "single_word": false,
1761
- "special": true
1762
- },
1763
- "128220": {
1764
- "content": "<|reserved_special_token_212|>",
1765
- "lstrip": false,
1766
- "normalized": false,
1767
- "rstrip": false,
1768
- "single_word": false,
1769
- "special": true
1770
- },
1771
- "128221": {
1772
- "content": "<|reserved_special_token_213|>",
1773
- "lstrip": false,
1774
- "normalized": false,
1775
- "rstrip": false,
1776
- "single_word": false,
1777
- "special": true
1778
- },
1779
- "128222": {
1780
- "content": "<|reserved_special_token_214|>",
1781
- "lstrip": false,
1782
- "normalized": false,
1783
- "rstrip": false,
1784
- "single_word": false,
1785
- "special": true
1786
- },
1787
- "128223": {
1788
- "content": "<|reserved_special_token_215|>",
1789
- "lstrip": false,
1790
- "normalized": false,
1791
- "rstrip": false,
1792
- "single_word": false,
1793
- "special": true
1794
- },
1795
- "128224": {
1796
- "content": "<|reserved_special_token_216|>",
1797
- "lstrip": false,
1798
- "normalized": false,
1799
- "rstrip": false,
1800
- "single_word": false,
1801
- "special": true
1802
- },
1803
- "128225": {
1804
- "content": "<|reserved_special_token_217|>",
1805
- "lstrip": false,
1806
- "normalized": false,
1807
- "rstrip": false,
1808
- "single_word": false,
1809
- "special": true
1810
- },
1811
- "128226": {
1812
- "content": "<|reserved_special_token_218|>",
1813
- "lstrip": false,
1814
- "normalized": false,
1815
- "rstrip": false,
1816
- "single_word": false,
1817
- "special": true
1818
- },
1819
- "128227": {
1820
- "content": "<|reserved_special_token_219|>",
1821
- "lstrip": false,
1822
- "normalized": false,
1823
- "rstrip": false,
1824
- "single_word": false,
1825
- "special": true
1826
- },
1827
- "128228": {
1828
- "content": "<|reserved_special_token_220|>",
1829
- "lstrip": false,
1830
- "normalized": false,
1831
- "rstrip": false,
1832
- "single_word": false,
1833
- "special": true
1834
- },
1835
- "128229": {
1836
- "content": "<|reserved_special_token_221|>",
1837
- "lstrip": false,
1838
- "normalized": false,
1839
- "rstrip": false,
1840
- "single_word": false,
1841
- "special": true
1842
- },
1843
- "128230": {
1844
- "content": "<|reserved_special_token_222|>",
1845
- "lstrip": false,
1846
- "normalized": false,
1847
- "rstrip": false,
1848
- "single_word": false,
1849
- "special": true
1850
- },
1851
- "128231": {
1852
- "content": "<|reserved_special_token_223|>",
1853
- "lstrip": false,
1854
- "normalized": false,
1855
- "rstrip": false,
1856
- "single_word": false,
1857
- "special": true
1858
- },
1859
- "128232": {
1860
- "content": "<|reserved_special_token_224|>",
1861
- "lstrip": false,
1862
- "normalized": false,
1863
- "rstrip": false,
1864
- "single_word": false,
1865
- "special": true
1866
- },
1867
- "128233": {
1868
- "content": "<|reserved_special_token_225|>",
1869
- "lstrip": false,
1870
- "normalized": false,
1871
- "rstrip": false,
1872
- "single_word": false,
1873
- "special": true
1874
- },
1875
- "128234": {
1876
- "content": "<|reserved_special_token_226|>",
1877
- "lstrip": false,
1878
- "normalized": false,
1879
- "rstrip": false,
1880
- "single_word": false,
1881
- "special": true
1882
- },
1883
- "128235": {
1884
- "content": "<|reserved_special_token_227|>",
1885
- "lstrip": false,
1886
- "normalized": false,
1887
- "rstrip": false,
1888
- "single_word": false,
1889
- "special": true
1890
- },
1891
- "128236": {
1892
- "content": "<|reserved_special_token_228|>",
1893
- "lstrip": false,
1894
- "normalized": false,
1895
- "rstrip": false,
1896
- "single_word": false,
1897
- "special": true
1898
- },
1899
- "128237": {
1900
- "content": "<|reserved_special_token_229|>",
1901
- "lstrip": false,
1902
- "normalized": false,
1903
- "rstrip": false,
1904
- "single_word": false,
1905
- "special": true
1906
- },
1907
- "128238": {
1908
- "content": "<|reserved_special_token_230|>",
1909
- "lstrip": false,
1910
- "normalized": false,
1911
- "rstrip": false,
1912
- "single_word": false,
1913
- "special": true
1914
- },
1915
- "128239": {
1916
- "content": "<|reserved_special_token_231|>",
1917
- "lstrip": false,
1918
- "normalized": false,
1919
- "rstrip": false,
1920
- "single_word": false,
1921
- "special": true
1922
- },
1923
- "128240": {
1924
- "content": "<|reserved_special_token_232|>",
1925
- "lstrip": false,
1926
- "normalized": false,
1927
- "rstrip": false,
1928
- "single_word": false,
1929
- "special": true
1930
- },
1931
- "128241": {
1932
- "content": "<|reserved_special_token_233|>",
1933
- "lstrip": false,
1934
- "normalized": false,
1935
- "rstrip": false,
1936
- "single_word": false,
1937
- "special": true
1938
- },
1939
- "128242": {
1940
- "content": "<|reserved_special_token_234|>",
1941
- "lstrip": false,
1942
- "normalized": false,
1943
- "rstrip": false,
1944
- "single_word": false,
1945
- "special": true
1946
- },
1947
- "128243": {
1948
- "content": "<|reserved_special_token_235|>",
1949
- "lstrip": false,
1950
- "normalized": false,
1951
- "rstrip": false,
1952
- "single_word": false,
1953
- "special": true
1954
- },
1955
- "128244": {
1956
- "content": "<|reserved_special_token_236|>",
1957
- "lstrip": false,
1958
- "normalized": false,
1959
- "rstrip": false,
1960
- "single_word": false,
1961
- "special": true
1962
- },
1963
- "128245": {
1964
- "content": "<|reserved_special_token_237|>",
1965
- "lstrip": false,
1966
- "normalized": false,
1967
- "rstrip": false,
1968
- "single_word": false,
1969
- "special": true
1970
- },
1971
- "128246": {
1972
- "content": "<|reserved_special_token_238|>",
1973
- "lstrip": false,
1974
- "normalized": false,
1975
- "rstrip": false,
1976
- "single_word": false,
1977
- "special": true
1978
- },
1979
- "128247": {
1980
- "content": "<|reserved_special_token_239|>",
1981
- "lstrip": false,
1982
- "normalized": false,
1983
- "rstrip": false,
1984
- "single_word": false,
1985
- "special": true
1986
- },
1987
- "128248": {
1988
- "content": "<|reserved_special_token_240|>",
1989
- "lstrip": false,
1990
- "normalized": false,
1991
- "rstrip": false,
1992
- "single_word": false,
1993
- "special": true
1994
- },
1995
- "128249": {
1996
- "content": "<|reserved_special_token_241|>",
1997
- "lstrip": false,
1998
- "normalized": false,
1999
- "rstrip": false,
2000
- "single_word": false,
2001
- "special": true
2002
- },
2003
- "128250": {
2004
- "content": "<|reserved_special_token_242|>",
2005
- "lstrip": false,
2006
- "normalized": false,
2007
- "rstrip": false,
2008
- "single_word": false,
2009
- "special": true
2010
- },
2011
- "128251": {
2012
- "content": "<|reserved_special_token_243|>",
2013
- "lstrip": false,
2014
- "normalized": false,
2015
- "rstrip": false,
2016
- "single_word": false,
2017
- "special": true
2018
- },
2019
- "128252": {
2020
- "content": "<|reserved_special_token_244|>",
2021
- "lstrip": false,
2022
- "normalized": false,
2023
- "rstrip": false,
2024
- "single_word": false,
2025
- "special": true
2026
- },
2027
- "128253": {
2028
- "content": "<|reserved_special_token_245|>",
2029
- "lstrip": false,
2030
- "normalized": false,
2031
- "rstrip": false,
2032
- "single_word": false,
2033
- "special": true
2034
- },
2035
- "128254": {
2036
- "content": "<|reserved_special_token_246|>",
2037
- "lstrip": false,
2038
- "normalized": false,
2039
- "rstrip": false,
2040
- "single_word": false,
2041
- "special": true
2042
- },
2043
- "128255": {
2044
- "content": "<|reserved_special_token_247|>",
2045
- "lstrip": false,
2046
- "normalized": false,
2047
- "rstrip": false,
2048
- "single_word": false,
2049
- "special": true
2050
- }
2051
- },
2052
- "bos_token": "<|begin_of_text|>",
2053
- "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
2054
- "clean_up_tokenization_spaces": true,
2055
- "eos_token": "<|im_end|>",
2056
- "model_input_names": [
2057
- "input_ids",
2058
- "attention_mask"
2059
- ],
2060
- "model_max_length": 131072,
2061
- "pad_token": "<|finetune_right_pad_id|>",
2062
- "tokenizer_class": "PreTrainedTokenizerFast"
2063
- }
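
Note (not part of the original commit): the deleted tokenizer_config.json above carries a ChatML-style chat_template ("<|im_start|>role ... <|im_end|>"). Below is a minimal, hypothetical sketch of rendering that exact template string with jinja2; the example messages are invented and nothing here assumes the deleted checkpoint is still downloadable.

# Minimal sketch: render the ChatML-style chat_template from the deleted
# tokenizer_config.json with jinja2. The messages below are invented examples.
from jinja2 import Template

chat_template = (
    "{% if not add_generation_prompt is defined %}"
    "{% set add_generation_prompt = false %}{% endif %}"
    "{% for message in messages %}"
    "{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}"
    "{% endfor %}"
    "{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}"
)

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]

# Prints:
# <|im_start|>system
# You are a helpful assistant.<|im_end|>
# <|im_start|>user
# Hello!<|im_end|>
# <|im_start|>assistant
print(Template(chat_template).render(messages=messages, add_generation_prompt=True))

With the tokenizer files present, transformers' tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True) should produce the same string.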
 
checkpoint-224/trainer_state.json DELETED
@@ -1,1673 +0,0 @@
1
- {
2
- "best_metric": null,
3
- "best_model_checkpoint": null,
4
- "epoch": 1.995539033457249,
5
- "eval_steps": 28,
6
- "global_step": 224,
7
- "is_hyper_param_search": false,
8
- "is_local_process_zero": true,
9
- "is_world_process_zero": true,
10
- "log_history": [
11
- {
12
- "epoch": 0.008895478131949592,
13
- "grad_norm": 2.962439156483445,
14
- "learning_rate": 4.5454545454545457e-07,
15
- "loss": 1.8805,
16
- "step": 1
17
- },
18
- {
19
- "epoch": 0.008895478131949592,
20
- "eval_loss": 2.7425005435943604,
21
- "eval_runtime": 5.0857,
22
- "eval_samples_per_second": 14.354,
23
- "eval_steps_per_second": 2.556,
24
- "step": 1
25
- },
26
- {
27
- "epoch": 0.017790956263899184,
28
- "grad_norm": 2.7396320345650156,
29
- "learning_rate": 9.090909090909091e-07,
30
- "loss": 1.8922,
31
- "step": 2
32
- },
33
- {
34
- "epoch": 0.026686434395848776,
35
- "grad_norm": 2.8238110399477696,
36
- "learning_rate": 1.3636363636363636e-06,
37
- "loss": 1.8672,
38
- "step": 3
39
- },
40
- {
41
- "epoch": 0.03558191252779837,
42
- "grad_norm": 2.825892310577778,
43
- "learning_rate": 1.8181818181818183e-06,
44
- "loss": 1.9474,
45
- "step": 4
46
- },
47
- {
48
- "epoch": 0.04447739065974796,
49
- "grad_norm": 2.9182980055251515,
50
- "learning_rate": 2.2727272727272728e-06,
51
- "loss": 1.8873,
52
- "step": 5
53
- },
54
- {
55
- "epoch": 0.05337286879169755,
56
- "grad_norm": 1.825111219165535,
57
- "learning_rate": 2.7272727272727272e-06,
58
- "loss": 1.8892,
59
- "step": 6
60
- },
61
- {
62
- "epoch": 0.06226834692364715,
63
- "grad_norm": 1.603886963757723,
64
- "learning_rate": 3.181818181818182e-06,
65
- "loss": 1.8928,
66
- "step": 7
67
- },
68
- {
69
- "epoch": 0.07116382505559674,
70
- "grad_norm": 1.9982114208239075,
71
- "learning_rate": 3.6363636363636366e-06,
72
- "loss": 1.8816,
73
- "step": 8
74
- },
75
- {
76
- "epoch": 0.08005930318754632,
77
- "grad_norm": 2.2210111384360443,
78
- "learning_rate": 4.0909090909090915e-06,
79
- "loss": 1.814,
80
- "step": 9
81
- },
82
- {
83
- "epoch": 0.08895478131949593,
84
- "grad_norm": 1.8833804876805869,
85
- "learning_rate": 4.5454545454545455e-06,
86
- "loss": 1.8789,
87
- "step": 10
88
- },
89
- {
90
- "epoch": 0.09785025945144551,
91
- "grad_norm": 2.1696360746306986,
92
- "learning_rate": 5e-06,
93
- "loss": 1.8948,
94
- "step": 11
95
- },
96
- {
97
- "epoch": 0.1067457375833951,
98
- "grad_norm": 1.9189154524725964,
99
- "learning_rate": 5.4545454545454545e-06,
100
- "loss": 1.8899,
101
- "step": 12
102
- },
103
- {
104
- "epoch": 0.1156412157153447,
105
- "grad_norm": 1.5982373309558937,
106
- "learning_rate": 5.90909090909091e-06,
107
- "loss": 1.935,
108
- "step": 13
109
- },
110
- {
111
- "epoch": 0.1245366938472943,
112
- "grad_norm": 1.2867259496282597,
113
- "learning_rate": 6.363636363636364e-06,
114
- "loss": 1.8012,
115
- "step": 14
116
- },
117
- {
118
- "epoch": 0.1334321719792439,
119
- "grad_norm": 1.3487164419710964,
120
- "learning_rate": 6.818181818181818e-06,
121
- "loss": 1.8439,
122
- "step": 15
123
- },
124
- {
125
- "epoch": 0.14232765011119347,
126
- "grad_norm": 1.4367061847474465,
127
- "learning_rate": 7.272727272727273e-06,
128
- "loss": 1.8463,
129
- "step": 16
130
- },
131
- {
132
- "epoch": 0.15122312824314307,
133
- "grad_norm": 1.3999547484681398,
134
- "learning_rate": 7.727272727272727e-06,
135
- "loss": 1.8302,
136
- "step": 17
137
- },
138
- {
139
- "epoch": 0.16011860637509265,
140
- "grad_norm": 1.0580202838750912,
141
- "learning_rate": 8.181818181818183e-06,
142
- "loss": 1.8602,
143
- "step": 18
144
- },
145
- {
146
- "epoch": 0.16901408450704225,
147
- "grad_norm": 1.0918338120114914,
148
- "learning_rate": 8.636363636363637e-06,
149
- "loss": 1.7858,
150
- "step": 19
151
- },
152
- {
153
- "epoch": 0.17790956263899185,
154
- "grad_norm": 1.0275858730350056,
155
- "learning_rate": 9.090909090909091e-06,
156
- "loss": 1.8628,
157
- "step": 20
158
- },
159
- {
160
- "epoch": 0.18680504077094143,
161
- "grad_norm": 1.061639393901565,
162
- "learning_rate": 9.545454545454547e-06,
163
- "loss": 1.8227,
164
- "step": 21
165
- },
166
- {
167
- "epoch": 0.19570051890289103,
168
- "grad_norm": 0.9240081464912561,
169
- "learning_rate": 1e-05,
170
- "loss": 1.7978,
171
- "step": 22
172
- },
173
- {
174
- "epoch": 0.20459599703484063,
175
- "grad_norm": 0.9652630279266269,
176
- "learning_rate": 9.999395316300748e-06,
177
- "loss": 1.7734,
178
- "step": 23
179
- },
180
- {
181
- "epoch": 0.2134914751667902,
182
- "grad_norm": 0.9039339352165564,
183
- "learning_rate": 9.99758141145994e-06,
184
- "loss": 1.849,
185
- "step": 24
186
- },
187
- {
188
- "epoch": 0.2223869532987398,
189
- "grad_norm": 0.9643956642662512,
190
- "learning_rate": 9.994558724213056e-06,
191
- "loss": 1.825,
192
- "step": 25
193
- },
194
- {
195
- "epoch": 0.2312824314306894,
196
- "grad_norm": 0.9167678356949895,
197
- "learning_rate": 9.990327985667972e-06,
198
- "loss": 1.7981,
199
- "step": 26
200
- },
201
- {
202
- "epoch": 0.24017790956263899,
203
- "grad_norm": 0.8874816186232574,
204
- "learning_rate": 9.984890219128148e-06,
205
- "loss": 1.7821,
206
- "step": 27
207
- },
208
- {
209
- "epoch": 0.2490733876945886,
210
- "grad_norm": 0.8787539356844203,
211
- "learning_rate": 9.978246739845095e-06,
212
- "loss": 1.7985,
213
- "step": 28
214
- },
215
- {
216
- "epoch": 0.2490733876945886,
217
- "eval_loss": 2.290811777114868,
218
- "eval_runtime": 4.938,
219
- "eval_samples_per_second": 14.783,
220
- "eval_steps_per_second": 2.633,
221
- "step": 28
222
- },
223
- {
224
- "epoch": 0.25796886582653816,
225
- "grad_norm": 0.838340542158105,
226
- "learning_rate": 9.970399154700264e-06,
227
- "loss": 1.7882,
228
- "step": 29
229
- },
230
- {
231
- "epoch": 0.2668643439584878,
232
- "grad_norm": 0.8285328505624969,
233
- "learning_rate": 9.961349361816384e-06,
234
- "loss": 1.8555,
235
- "step": 30
236
- },
237
- {
238
- "epoch": 0.27575982209043737,
239
- "grad_norm": 0.9331640421116788,
240
- "learning_rate": 9.951099550098349e-06,
241
- "loss": 1.7626,
242
- "step": 31
243
- },
244
- {
245
- "epoch": 0.28465530022238694,
246
- "grad_norm": 0.7822775533301098,
247
- "learning_rate": 9.939652198703785e-06,
248
- "loss": 1.7815,
249
- "step": 32
250
- },
251
- {
252
- "epoch": 0.2935507783543366,
253
- "grad_norm": 0.8091655666026496,
254
- "learning_rate": 9.927010076443408e-06,
255
- "loss": 1.7973,
256
- "step": 33
257
- },
258
- {
259
- "epoch": 0.30244625648628615,
260
- "grad_norm": 0.8067559509453439,
261
- "learning_rate": 9.91317624111132e-06,
262
- "loss": 1.7843,
263
- "step": 34
264
- },
265
- {
266
- "epoch": 0.3113417346182357,
267
- "grad_norm": 0.791639209925201,
268
- "learning_rate": 9.898154038745408e-06,
269
- "loss": 1.8203,
270
- "step": 35
271
- },
272
- {
273
- "epoch": 0.3202372127501853,
274
- "grad_norm": 0.8397439084797836,
275
- "learning_rate": 9.881947102818036e-06,
276
- "loss": 1.7982,
277
- "step": 36
278
- },
279
- {
280
- "epoch": 0.3291326908821349,
281
- "grad_norm": 0.8069438258809916,
282
- "learning_rate": 9.864559353357189e-06,
283
- "loss": 1.7751,
284
- "step": 37
285
- },
286
- {
287
- "epoch": 0.3380281690140845,
288
- "grad_norm": 0.8901339589432326,
289
- "learning_rate": 9.845994995998332e-06,
290
- "loss": 1.8393,
291
- "step": 38
292
- },
293
- {
294
- "epoch": 0.3469236471460341,
295
- "grad_norm": 0.7778163619401842,
296
- "learning_rate": 9.826258520967178e-06,
297
- "loss": 1.6691,
298
- "step": 39
299
- },
300
- {
301
- "epoch": 0.3558191252779837,
302
- "grad_norm": 0.8503632973674836,
303
- "learning_rate": 9.805354701993624e-06,
304
- "loss": 1.8385,
305
- "step": 40
306
- },
307
- {
308
- "epoch": 0.3647146034099333,
309
- "grad_norm": 0.8510317916033313,
310
- "learning_rate": 9.7832885951571e-06,
311
- "loss": 1.7921,
312
- "step": 41
313
- },
314
- {
315
- "epoch": 0.37361008154188285,
316
- "grad_norm": 0.8242453476305008,
317
- "learning_rate": 9.76006553766365e-06,
318
- "loss": 1.7588,
319
- "step": 42
320
- },
321
- {
322
- "epoch": 0.3825055596738325,
323
- "grad_norm": 0.8490334887805663,
324
- "learning_rate": 9.735691146555002e-06,
325
- "loss": 1.7596,
326
- "step": 43
327
- },
328
- {
329
- "epoch": 0.39140103780578206,
330
- "grad_norm": 0.940027149340783,
331
- "learning_rate": 9.710171317349946e-06,
332
- "loss": 1.7265,
333
- "step": 44
334
- },
335
- {
336
- "epoch": 0.40029651593773163,
337
- "grad_norm": 0.7800160596846283,
338
- "learning_rate": 9.683512222618376e-06,
339
- "loss": 1.8351,
340
- "step": 45
341
- },
342
- {
343
- "epoch": 0.40919199406968126,
344
- "grad_norm": 0.8750191358059531,
345
- "learning_rate": 9.655720310488298e-06,
346
- "loss": 1.7463,
347
- "step": 46
348
- },
349
- {
350
- "epoch": 0.41808747220163084,
351
- "grad_norm": 0.7890987028235364,
352
- "learning_rate": 9.62680230308621e-06,
353
- "loss": 1.7175,
354
- "step": 47
355
- },
356
- {
357
- "epoch": 0.4269829503335804,
358
- "grad_norm": 0.9776075848478134,
359
- "learning_rate": 9.596765194911182e-06,
360
- "loss": 1.7658,
361
- "step": 48
362
- },
363
- {
364
- "epoch": 0.43587842846553004,
365
- "grad_norm": 0.8025568758744139,
366
- "learning_rate": 9.565616251143094e-06,
367
- "loss": 1.7425,
368
- "step": 49
369
- },
370
- {
371
- "epoch": 0.4447739065974796,
372
- "grad_norm": 0.8932281966392491,
373
- "learning_rate": 9.533363005885362e-06,
374
- "loss": 1.7751,
375
- "step": 50
376
- },
377
- {
378
- "epoch": 0.4536693847294292,
379
- "grad_norm": 0.8459501071139652,
380
- "learning_rate": 9.50001326034265e-06,
381
- "loss": 1.7298,
382
- "step": 51
383
- },
384
- {
385
- "epoch": 0.4625648628613788,
386
- "grad_norm": 0.7921546527189047,
387
- "learning_rate": 9.465575080933959e-06,
388
- "loss": 1.7313,
389
- "step": 52
390
- },
391
- {
392
- "epoch": 0.4714603409933284,
393
- "grad_norm": 0.8367967581956659,
394
- "learning_rate": 9.430056797341574e-06,
395
- "loss": 1.7492,
396
- "step": 53
397
- },
398
- {
399
- "epoch": 0.48035581912527797,
400
- "grad_norm": 0.7931936215221228,
401
- "learning_rate": 9.393467000496345e-06,
402
- "loss": 1.7865,
403
- "step": 54
404
- },
405
- {
406
- "epoch": 0.4892512972572276,
407
- "grad_norm": 0.984751704749124,
408
- "learning_rate": 9.355814540499753e-06,
409
- "loss": 1.7336,
410
- "step": 55
411
- },
412
- {
413
- "epoch": 0.4981467753891772,
414
- "grad_norm": 0.8227951034631744,
415
- "learning_rate": 9.317108524483319e-06,
416
- "loss": 1.727,
417
- "step": 56
418
- },
419
- {
420
- "epoch": 0.4981467753891772,
421
- "eval_loss": 2.1942620277404785,
422
- "eval_runtime": 4.9502,
423
- "eval_samples_per_second": 14.747,
424
- "eval_steps_per_second": 2.626,
425
- "step": 56
426
- },
427
- {
428
- "epoch": 0.5070422535211268,
429
- "grad_norm": 0.8411282879097969,
430
- "learning_rate": 9.27735831440582e-06,
431
- "loss": 1.7821,
432
- "step": 57
433
- },
434
- {
435
- "epoch": 0.5159377316530763,
436
- "grad_norm": 0.7781618648296661,
437
- "learning_rate": 9.236573524788888e-06,
438
- "loss": 1.7347,
439
- "step": 58
440
- },
441
- {
442
- "epoch": 0.5248332097850259,
443
- "grad_norm": 0.9088753275683533,
444
- "learning_rate": 9.194764020391507e-06,
445
- "loss": 1.7248,
446
- "step": 59
447
- },
448
- {
449
- "epoch": 0.5337286879169756,
450
- "grad_norm": 0.8554987544215692,
451
- "learning_rate": 9.151939913823988e-06,
452
- "loss": 1.7127,
453
- "step": 60
454
- },
455
- {
456
- "epoch": 0.5426241660489252,
457
- "grad_norm": 0.8266548000477859,
458
- "learning_rate": 9.108111563102005e-06,
459
- "loss": 1.7336,
460
- "step": 61
461
- },
462
- {
463
- "epoch": 0.5515196441808747,
464
- "grad_norm": 0.8317314054903447,
465
- "learning_rate": 9.063289569141251e-06,
466
- "loss": 1.6577,
467
- "step": 62
468
- },
469
- {
470
- "epoch": 0.5604151223128243,
471
- "grad_norm": 0.7907803326669282,
472
- "learning_rate": 9.01748477319338e-06,
473
- "loss": 1.7629,
474
- "step": 63
475
- },
476
- {
477
- "epoch": 0.5693106004447739,
478
- "grad_norm": 0.9329359503827674,
479
- "learning_rate": 8.970708254223768e-06,
480
- "loss": 1.7681,
481
- "step": 64
482
- },
483
- {
484
- "epoch": 0.5782060785767235,
485
- "grad_norm": 0.7480056903847685,
486
- "learning_rate": 8.92297132623183e-06,
487
- "loss": 1.7051,
488
- "step": 65
489
- },
490
- {
491
- "epoch": 0.5871015567086731,
492
- "grad_norm": 0.9077672572079196,
493
- "learning_rate": 8.87428553551445e-06,
494
- "loss": 1.7093,
495
- "step": 66
496
- },
497
- {
498
- "epoch": 0.5959970348406227,
499
- "grad_norm": 0.880358315923339,
500
- "learning_rate": 8.82466265787324e-06,
501
- "loss": 1.6933,
502
- "step": 67
503
- },
504
- {
505
- "epoch": 0.6048925129725723,
506
- "grad_norm": 0.7941154116764113,
507
- "learning_rate": 8.774114695766286e-06,
508
- "loss": 1.7358,
509
- "step": 68
510
- },
511
- {
512
- "epoch": 0.6137879911045219,
513
- "grad_norm": 0.8525197282856878,
514
- "learning_rate": 8.722653875405077e-06,
515
- "loss": 1.781,
516
- "step": 69
517
- },
518
- {
519
- "epoch": 0.6226834692364714,
520
- "grad_norm": 0.8265406397838687,
521
- "learning_rate": 8.670292643797302e-06,
522
- "loss": 1.7098,
523
- "step": 70
524
- },
525
- {
526
- "epoch": 0.631578947368421,
527
- "grad_norm": 0.7680796533145033,
528
- "learning_rate": 8.61704366573625e-06,
529
- "loss": 1.6796,
530
- "step": 71
531
- },
532
- {
533
- "epoch": 0.6404744255003706,
534
- "grad_norm": 0.8042854169648783,
535
- "learning_rate": 8.562919820737537e-06,
536
- "loss": 1.6849,
537
- "step": 72
538
- },
539
- {
540
- "epoch": 0.6493699036323203,
541
- "grad_norm": 0.821753341080785,
542
- "learning_rate": 8.507934199923884e-06,
543
- "loss": 1.6899,
544
- "step": 73
545
- },
546
- {
547
- "epoch": 0.6582653817642699,
548
- "grad_norm": 0.8111947542056863,
549
- "learning_rate": 8.452100102858734e-06,
550
- "loss": 1.7003,
551
- "step": 74
552
- },
553
- {
554
- "epoch": 0.6671608598962194,
555
- "grad_norm": 0.7818461027480149,
556
- "learning_rate": 8.395431034329431e-06,
557
- "loss": 1.7192,
558
- "step": 75
559
- },
560
- {
561
- "epoch": 0.676056338028169,
562
- "grad_norm": 0.7685968932142347,
563
- "learning_rate": 8.33794070108077e-06,
564
- "loss": 1.7977,
565
- "step": 76
566
- },
567
- {
568
- "epoch": 0.6849518161601186,
569
- "grad_norm": 0.8139919630919948,
570
- "learning_rate": 8.2796430084997e-06,
571
- "loss": 1.7318,
572
- "step": 77
573
- },
574
- {
575
- "epoch": 0.6938472942920682,
576
- "grad_norm": 0.7794723242422334,
577
- "learning_rate": 8.22055205725199e-06,
578
- "loss": 1.7151,
579
- "step": 78
580
- },
581
- {
582
- "epoch": 0.7027427724240178,
583
- "grad_norm": 0.728460212640379,
584
- "learning_rate": 8.160682139871634e-06,
585
- "loss": 1.7313,
586
- "step": 79
587
- },
588
- {
589
- "epoch": 0.7116382505559674,
590
- "grad_norm": 0.7956691993406653,
591
- "learning_rate": 8.100047737303877e-06,
592
- "loss": 1.7096,
593
- "step": 80
594
- },
595
- {
596
- "epoch": 0.720533728687917,
597
- "grad_norm": 0.711208685534124,
598
- "learning_rate": 8.038663515402659e-06,
599
- "loss": 1.7281,
600
- "step": 81
601
- },
602
- {
603
- "epoch": 0.7294292068198666,
604
- "grad_norm": 0.775501063890523,
605
- "learning_rate": 7.97654432138333e-06,
606
- "loss": 1.6985,
607
- "step": 82
608
- },
609
- {
610
- "epoch": 0.7383246849518161,
611
- "grad_norm": 0.7926863015669294,
612
- "learning_rate": 7.913705180231505e-06,
613
- "loss": 1.6493,
614
- "step": 83
615
- },
616
- {
617
- "epoch": 0.7472201630837657,
618
- "grad_norm": 0.7646163799523064,
619
- "learning_rate": 7.850161291068915e-06,
620
- "loss": 1.7429,
621
- "step": 84
622
- },
623
- {
624
- "epoch": 0.7472201630837657,
625
- "eval_loss": 2.166506052017212,
626
- "eval_runtime": 5.0566,
627
- "eval_samples_per_second": 14.436,
628
- "eval_steps_per_second": 2.571,
629
- "step": 84
630
- },
631
- {
632
- "epoch": 0.7561156412157154,
633
- "grad_norm": 0.7863441465872285,
634
- "learning_rate": 7.785928023477142e-06,
635
- "loss": 1.6836,
636
- "step": 85
637
- },
638
- {
639
- "epoch": 0.765011119347665,
640
- "grad_norm": 0.8191984365366046,
641
- "learning_rate": 7.721020913780137e-06,
642
- "loss": 1.6842,
643
- "step": 86
644
- },
645
- {
646
- "epoch": 0.7739065974796145,
647
- "grad_norm": 0.8006782198319303,
648
- "learning_rate": 7.655455661286376e-06,
649
- "loss": 1.6494,
650
- "step": 87
651
- },
652
- {
653
- "epoch": 0.7828020756115641,
654
- "grad_norm": 0.7654197589814793,
655
- "learning_rate": 7.589248124491627e-06,
656
- "loss": 1.7664,
657
- "step": 88
658
- },
659
- {
660
- "epoch": 0.7916975537435137,
661
- "grad_norm": 0.8599660274656371,
662
- "learning_rate": 7.5224143172432e-06,
663
- "loss": 1.7169,
664
- "step": 89
665
- },
666
- {
667
- "epoch": 0.8005930318754633,
668
- "grad_norm": 0.7576298501441915,
669
- "learning_rate": 7.454970404866612e-06,
670
- "loss": 1.6947,
671
- "step": 90
672
- },
673
- {
674
- "epoch": 0.809488510007413,
675
- "grad_norm": 0.8174817690479476,
676
- "learning_rate": 7.386932700255635e-06,
677
- "loss": 1.6878,
678
- "step": 91
679
- },
680
- {
681
- "epoch": 0.8183839881393625,
682
- "grad_norm": 0.8235881096708558,
683
- "learning_rate": 7.318317659926637e-06,
684
- "loss": 1.7236,
685
- "step": 92
686
- },
687
- {
688
- "epoch": 0.8272794662713121,
689
- "grad_norm": 0.8312635054795164,
690
- "learning_rate": 7.249141880038181e-06,
691
- "loss": 1.7094,
692
- "step": 93
693
- },
694
- {
695
- "epoch": 0.8361749444032617,
696
- "grad_norm": 0.8883212124220771,
697
- "learning_rate": 7.179422092376856e-06,
698
- "loss": 1.6923,
699
- "step": 94
700
- },
701
- {
702
- "epoch": 0.8450704225352113,
703
- "grad_norm": 0.8180549283263809,
704
- "learning_rate": 7.109175160310312e-06,
705
- "loss": 1.712,
706
- "step": 95
707
- },
708
- {
709
- "epoch": 0.8539659006671608,
710
- "grad_norm": 0.7568720711350465,
711
- "learning_rate": 7.038418074708444e-06,
712
- "loss": 1.6887,
713
- "step": 96
714
- },
715
- {
716
- "epoch": 0.8628613787991104,
717
- "grad_norm": 0.8561076366911579,
718
- "learning_rate": 6.967167949833763e-06,
719
- "loss": 1.7062,
720
- "step": 97
721
- },
722
- {
723
- "epoch": 0.8717568569310601,
724
- "grad_norm": 0.8079541369639384,
725
- "learning_rate": 6.895442019201898e-06,
726
- "loss": 1.731,
727
- "step": 98
728
- },
729
- {
730
- "epoch": 0.8806523350630097,
731
- "grad_norm": 0.7614430270741105,
732
- "learning_rate": 6.8232576314132755e-06,
733
- "loss": 1.7106,
734
- "step": 99
735
- },
736
- {
737
- "epoch": 0.8895478131949592,
738
- "grad_norm": 0.7604432010057705,
739
- "learning_rate": 6.750632245956954e-06,
740
- "loss": 1.7534,
741
- "step": 100
742
- },
743
- {
744
- "epoch": 0.8984432913269088,
745
- "grad_norm": 0.7782042216719071,
746
- "learning_rate": 6.677583428987625e-06,
747
- "loss": 1.6793,
748
- "step": 101
749
- },
750
- {
751
- "epoch": 0.9073387694588584,
752
- "grad_norm": 0.7457350333389989,
753
- "learning_rate": 6.6041288490768385e-06,
754
- "loss": 1.6527,
755
- "step": 102
756
- },
757
- {
758
- "epoch": 0.916234247590808,
759
- "grad_norm": 0.7714830457964388,
760
- "learning_rate": 6.530286272939438e-06,
761
- "loss": 1.7262,
762
- "step": 103
763
- },
764
- {
765
- "epoch": 0.9251297257227576,
766
- "grad_norm": 0.7598538699042868,
767
- "learning_rate": 6.456073561136261e-06,
768
- "loss": 1.6938,
769
- "step": 104
770
- },
771
- {
772
- "epoch": 0.9340252038547072,
773
- "grad_norm": 0.743820000982299,
774
- "learning_rate": 6.381508663754152e-06,
775
- "loss": 1.654,
776
- "step": 105
777
- },
778
- {
779
- "epoch": 0.9429206819866568,
780
- "grad_norm": 0.7344076336222899,
781
- "learning_rate": 6.306609616064304e-06,
782
- "loss": 1.7266,
783
- "step": 106
784
- },
785
- {
786
- "epoch": 0.9518161601186064,
787
- "grad_norm": 0.7846174979334468,
788
- "learning_rate": 6.231394534160008e-06,
789
- "loss": 1.7026,
790
- "step": 107
791
- },
792
- {
793
- "epoch": 0.9607116382505559,
794
- "grad_norm": 0.7558875505010646,
795
- "learning_rate": 6.15588161057485e-06,
796
- "loss": 1.7292,
797
- "step": 108
798
- },
799
- {
800
- "epoch": 0.9696071163825055,
801
- "grad_norm": 0.8255512223024366,
802
- "learning_rate": 6.080089109882419e-06,
803
- "loss": 1.6389,
804
- "step": 109
805
- },
806
- {
807
- "epoch": 0.9785025945144552,
808
- "grad_norm": 0.7171315882558364,
809
- "learning_rate": 6.004035364278593e-06,
810
- "loss": 1.6367,
811
- "step": 110
812
- },
813
- {
814
- "epoch": 0.9873980726464048,
815
- "grad_norm": 0.7524610234390072,
816
- "learning_rate": 5.927738769147467e-06,
817
- "loss": 1.6586,
818
- "step": 111
819
- },
820
- {
821
- "epoch": 0.9962935507783544,
822
- "grad_norm": 0.776276543241556,
823
- "learning_rate": 5.851217778611994e-06,
824
- "loss": 1.6867,
825
- "step": 112
826
- },
827
- {
828
- "epoch": 0.9962935507783544,
829
- "eval_loss": 2.1309382915496826,
830
- "eval_runtime": 4.9289,
831
- "eval_samples_per_second": 14.811,
832
- "eval_steps_per_second": 2.638,
833
- "step": 112
834
- },
835
- {
836
- "epoch": 1.0052044609665427,
837
- "grad_norm": 0.7949226363525317,
838
- "learning_rate": 5.774490901070424e-06,
839
- "loss": 1.6816,
840
- "step": 113
841
- },
842
- {
843
- "epoch": 1.0141263940520446,
844
- "grad_norm": 0.8427527864276817,
845
- "learning_rate": 5.697576694719616e-06,
846
- "loss": 1.6729,
847
- "step": 114
848
- },
849
- {
850
- "epoch": 1.0230483271375466,
851
- "grad_norm": 0.8391300041321513,
852
- "learning_rate": 5.6204937630662974e-06,
853
- "loss": 1.606,
854
- "step": 115
855
- },
856
- {
857
- "epoch": 1.0319702602230483,
858
- "grad_norm": 0.860066136054114,
859
- "learning_rate": 5.543260750427373e-06,
860
- "loss": 1.6388,
861
- "step": 116
862
- },
863
- {
864
- "epoch": 1.0408921933085502,
865
- "grad_norm": 0.7969614266629734,
866
- "learning_rate": 5.465896337420359e-06,
867
- "loss": 1.6474,
868
- "step": 117
869
- },
870
- {
871
- "epoch": 1.0498141263940521,
872
- "grad_norm": 0.9025361781915455,
873
- "learning_rate": 5.388419236445033e-06,
874
- "loss": 1.5205,
875
- "step": 118
876
- },
877
- {
878
- "epoch": 1.0587360594795538,
879
- "grad_norm": 0.8330236878373249,
880
- "learning_rate": 5.310848187157404e-06,
881
- "loss": 1.5675,
882
- "step": 119
883
- },
884
- {
885
- "epoch": 1.0676579925650558,
886
- "grad_norm": 0.7616700634371533,
887
- "learning_rate": 5.233201951937088e-06,
888
- "loss": 1.6432,
889
- "step": 120
890
- },
891
- {
892
- "epoch": 1.0765799256505577,
893
- "grad_norm": 0.8546223421581763,
894
- "learning_rate": 5.155499311349185e-06,
895
- "loss": 1.5784,
896
- "step": 121
897
- },
898
- {
899
- "epoch": 1.0855018587360594,
900
- "grad_norm": 0.8166653392476256,
901
- "learning_rate": 5.077759059601756e-06,
902
- "loss": 1.5782,
903
- "step": 122
904
- },
905
- {
906
- "epoch": 1.0944237918215614,
907
- "grad_norm": 0.8111532279560614,
908
- "learning_rate": 5e-06,
909
- "loss": 1.5873,
910
- "step": 123
911
- },
912
- {
913
- "epoch": 1.1033457249070633,
914
- "grad_norm": 0.8158634269230521,
915
- "learning_rate": 4.922240940398246e-06,
916
- "loss": 1.5588,
917
- "step": 124
918
- },
919
- {
920
- "epoch": 1.112267657992565,
921
- "grad_norm": 0.8210896404843728,
922
- "learning_rate": 4.844500688650817e-06,
923
- "loss": 1.6126,
924
- "step": 125
925
- },
926
- {
927
- "epoch": 1.121189591078067,
928
- "grad_norm": 0.8000322873727128,
929
- "learning_rate": 4.766798048062913e-06,
930
- "loss": 1.5483,
931
- "step": 126
932
- },
933
- {
934
- "epoch": 1.1301115241635689,
935
- "grad_norm": 0.7519469533082767,
936
- "learning_rate": 4.689151812842598e-06,
937
- "loss": 1.5701,
938
- "step": 127
939
- },
940
- {
941
- "epoch": 1.1390334572490706,
942
- "grad_norm": 0.7800251310433457,
943
- "learning_rate": 4.611580763554969e-06,
944
- "loss": 1.5731,
945
- "step": 128
946
- },
947
- {
948
- "epoch": 1.1479553903345725,
949
- "grad_norm": 0.8406348404872362,
950
- "learning_rate": 4.534103662579643e-06,
951
- "loss": 1.5307,
952
- "step": 129
953
- },
954
- {
955
- "epoch": 1.1568773234200744,
956
- "grad_norm": 0.7654155312184125,
957
- "learning_rate": 4.456739249572628e-06,
958
- "loss": 1.547,
959
- "step": 130
960
- },
961
- {
962
- "epoch": 1.1657992565055761,
963
- "grad_norm": 0.7463028643340174,
964
- "learning_rate": 4.379506236933703e-06,
965
- "loss": 1.5446,
966
- "step": 131
967
- },
968
- {
969
- "epoch": 1.174721189591078,
970
- "grad_norm": 0.8062671750420662,
971
- "learning_rate": 4.3024233052803855e-06,
972
- "loss": 1.6141,
973
- "step": 132
974
- },
975
- {
976
- "epoch": 1.18364312267658,
977
- "grad_norm": 0.815723178829378,
978
- "learning_rate": 4.2255090989295765e-06,
979
- "loss": 1.616,
980
- "step": 133
981
- },
982
- {
983
- "epoch": 1.1925650557620817,
984
- "grad_norm": 0.7863478983348662,
985
- "learning_rate": 4.148782221388007e-06,
986
- "loss": 1.5959,
987
- "step": 134
988
- },
989
- {
990
- "epoch": 1.2014869888475836,
991
- "grad_norm": 0.7864870983392745,
992
- "learning_rate": 4.072261230852534e-06,
993
- "loss": 1.6498,
994
- "step": 135
995
- },
996
- {
997
- "epoch": 1.2104089219330856,
998
- "grad_norm": 0.7547272629847489,
999
- "learning_rate": 3.995964635721409e-06,
1000
- "loss": 1.5999,
1001
- "step": 136
1002
- },
1003
- {
1004
- "epoch": 1.2193308550185873,
1005
- "grad_norm": 0.7673639108820794,
1006
- "learning_rate": 3.919910890117584e-06,
1007
- "loss": 1.5933,
1008
- "step": 137
1009
- },
1010
- {
1011
- "epoch": 1.2282527881040892,
1012
- "grad_norm": 0.7766205741224386,
1013
- "learning_rate": 3.844118389425154e-06,
1014
- "loss": 1.61,
1015
- "step": 138
1016
- },
1017
- {
1018
- "epoch": 1.2371747211895912,
1019
- "grad_norm": 0.7362112046628648,
1020
- "learning_rate": 3.768605465839994e-06,
1021
- "loss": 1.5712,
1022
- "step": 139
1023
- },
1024
- {
1025
- "epoch": 1.2460966542750929,
1026
- "grad_norm": 0.740953819543322,
1027
- "learning_rate": 3.6933903839356983e-06,
1028
- "loss": 1.6463,
1029
- "step": 140
1030
- },
1031
- {
1032
- "epoch": 1.2460966542750929,
1033
- "eval_loss": 2.126718044281006,
1034
- "eval_runtime": 5.0405,
1035
- "eval_samples_per_second": 14.483,
1036
- "eval_steps_per_second": 2.579,
1037
- "step": 140
1038
- },
1039
- {
1040
- "epoch": 1.2550185873605948,
1041
- "grad_norm": 0.7455906988345508,
1042
- "learning_rate": 3.6184913362458497e-06,
1043
- "loss": 1.5954,
1044
- "step": 141
1045
- },
1046
- {
1047
- "epoch": 1.2639405204460967,
1048
- "grad_norm": 0.7390172101050686,
1049
- "learning_rate": 3.5439264388637407e-06,
1050
- "loss": 1.5978,
1051
- "step": 142
1052
- },
1053
- {
1054
- "epoch": 1.2728624535315984,
1055
- "grad_norm": 0.7812295476595575,
1056
- "learning_rate": 3.469713727060564e-06,
1057
- "loss": 1.6303,
1058
- "step": 143
1059
- },
1060
- {
1061
- "epoch": 1.2817843866171004,
1062
- "grad_norm": 0.7556406155289581,
1063
- "learning_rate": 3.3958711509231627e-06,
1064
- "loss": 1.5518,
1065
- "step": 144
1066
- },
1067
- {
1068
- "epoch": 1.2907063197026023,
1069
- "grad_norm": 0.7468728606931369,
1070
- "learning_rate": 3.322416571012376e-06,
1071
- "loss": 1.6044,
1072
- "step": 145
1073
- },
1074
- {
1075
- "epoch": 1.299628252788104,
1076
- "grad_norm": 0.7191959471643807,
1077
- "learning_rate": 3.249367754043047e-06,
1078
- "loss": 1.6269,
1079
- "step": 146
1080
- },
1081
- {
1082
- "epoch": 1.308550185873606,
1083
- "grad_norm": 0.7664174797908729,
1084
- "learning_rate": 3.176742368586725e-06,
1085
- "loss": 1.5637,
1086
- "step": 147
1087
- },
1088
- {
1089
- "epoch": 1.3174721189591079,
1090
- "grad_norm": 0.7468906342438638,
1091
- "learning_rate": 3.104557980798104e-06,
1092
- "loss": 1.5446,
1093
- "step": 148
1094
- },
1095
- {
1096
- "epoch": 1.3263940520446096,
1097
- "grad_norm": 0.7451846473452862,
1098
- "learning_rate": 3.032832050166239e-06,
1099
- "loss": 1.5449,
1100
- "step": 149
1101
- },
1102
- {
1103
- "epoch": 1.3353159851301115,
1104
- "grad_norm": 0.7616087364880716,
1105
- "learning_rate": 2.961581925291557e-06,
1106
- "loss": 1.6044,
1107
- "step": 150
1108
- },
1109
- {
1110
- "epoch": 1.3442379182156134,
1111
- "grad_norm": 0.7445080222060676,
1112
- "learning_rate": 2.8908248396896893e-06,
1113
- "loss": 1.5702,
1114
- "step": 151
1115
- },
1116
- {
1117
- "epoch": 1.3531598513011152,
1118
- "grad_norm": 0.7210878474745228,
1119
- "learning_rate": 2.820577907623145e-06,
1120
- "loss": 1.5921,
1121
- "step": 152
1122
- },
1123
- {
1124
- "epoch": 1.362081784386617,
1125
- "grad_norm": 0.7554243231660402,
1126
- "learning_rate": 2.750858119961821e-06,
1127
- "loss": 1.5874,
1128
- "step": 153
1129
- },
1130
- {
1131
- "epoch": 1.371003717472119,
1132
- "grad_norm": 0.7671684721265511,
1133
- "learning_rate": 2.6816823400733628e-06,
1134
- "loss": 1.6214,
1135
- "step": 154
1136
- },
1137
- {
1138
- "epoch": 1.3799256505576207,
1139
- "grad_norm": 0.7107001153097623,
1140
- "learning_rate": 2.613067299744364e-06,
1141
- "loss": 1.6306,
1142
- "step": 155
1143
- },
1144
- {
1145
- "epoch": 1.3888475836431227,
1146
- "grad_norm": 0.7766434748408712,
1147
- "learning_rate": 2.5450295951333896e-06,
1148
- "loss": 1.6016,
1149
- "step": 156
1150
- },
1151
- {
1152
- "epoch": 1.3977695167286246,
1153
- "grad_norm": 0.7253668897149933,
1154
- "learning_rate": 2.4775856827568016e-06,
1155
- "loss": 1.5755,
1156
- "step": 157
1157
- },
1158
- {
1159
- "epoch": 1.4066914498141263,
1160
- "grad_norm": 0.7445577446601732,
1161
- "learning_rate": 2.410751875508373e-06,
1162
- "loss": 1.6057,
1163
- "step": 158
1164
- },
1165
- {
1166
- "epoch": 1.4156133828996282,
1167
- "grad_norm": 0.7388798176698108,
1168
- "learning_rate": 2.3445443387136247e-06,
1169
- "loss": 1.6085,
1170
- "step": 159
1171
- },
1172
- {
1173
- "epoch": 1.4245353159851302,
1174
- "grad_norm": 0.7528881371618414,
1175
- "learning_rate": 2.278979086219863e-06,
1176
- "loss": 1.5843,
1177
- "step": 160
1178
- },
1179
- {
1180
- "epoch": 1.4334572490706319,
1181
- "grad_norm": 0.7368231349073808,
1182
- "learning_rate": 2.2140719765228587e-06,
1183
- "loss": 1.5469,
1184
- "step": 161
1185
- },
1186
- {
1187
- "epoch": 1.4423791821561338,
1188
- "grad_norm": 0.7123568741875721,
1189
- "learning_rate": 2.149838708931087e-06,
1190
- "loss": 1.5725,
1191
- "step": 162
1192
- },
1193
- {
1194
- "epoch": 1.4513011152416357,
1195
- "grad_norm": 0.7410245476145694,
1196
- "learning_rate": 2.086294819768496e-06,
1197
- "loss": 1.5768,
1198
- "step": 163
1199
- },
1200
- {
1201
- "epoch": 1.4602230483271375,
1202
- "grad_norm": 0.7791011096821625,
1203
- "learning_rate": 2.0234556786166715e-06,
1204
- "loss": 1.5203,
1205
- "step": 164
1206
- },
1207
- {
1208
- "epoch": 1.4691449814126394,
1209
- "grad_norm": 0.756774717840047,
1210
- "learning_rate": 1.9613364845973433e-06,
1211
- "loss": 1.6617,
1212
- "step": 165
1213
- },
1214
- {
1215
- "epoch": 1.4780669144981413,
1216
- "grad_norm": 0.7162094806926552,
1217
- "learning_rate": 1.8999522626961254e-06,
1218
- "loss": 1.6116,
1219
- "step": 166
1220
- },
1221
- {
1222
- "epoch": 1.486988847583643,
1223
- "grad_norm": 0.7132743980586249,
1224
- "learning_rate": 1.8393178601283684e-06,
1225
- "loss": 1.5591,
1226
- "step": 167
1227
- },
1228
- {
1229
- "epoch": 1.495910780669145,
1230
- "grad_norm": 0.7683594146532297,
1231
- "learning_rate": 1.7794479427480115e-06,
1232
- "loss": 1.593,
1233
- "step": 168
1234
- },
1235
- {
1236
- "epoch": 1.495910780669145,
1237
- "eval_loss": 2.114826202392578,
1238
- "eval_runtime": 4.9368,
1239
- "eval_samples_per_second": 14.787,
1240
- "eval_steps_per_second": 2.633,
1241
- "step": 168
1242
- },
1243
- {
1244
- "epoch": 1.504832713754647,
1245
- "grad_norm": 0.7109602921298782,
1246
- "learning_rate": 1.7203569915003005e-06,
1247
- "loss": 1.5261,
1248
- "step": 169
1249
- },
1250
- {
1251
- "epoch": 1.5137546468401486,
1252
- "grad_norm": 0.7346220135576979,
1253
- "learning_rate": 1.6620592989192318e-06,
1254
- "loss": 1.594,
1255
- "step": 170
1256
- },
1257
- {
1258
- "epoch": 1.5226765799256505,
1259
- "grad_norm": 0.6734164312637153,
1260
- "learning_rate": 1.6045689656705715e-06,
1261
- "loss": 1.5471,
1262
- "step": 171
1263
- },
1264
- {
1265
- "epoch": 1.5315985130111525,
1266
- "grad_norm": 0.7409502781596998,
1267
- "learning_rate": 1.5478998971412669e-06,
1268
- "loss": 1.536,
1269
- "step": 172
1270
- },
1271
- {
1272
- "epoch": 1.5405204460966542,
1273
- "grad_norm": 0.7056300593485433,
1274
- "learning_rate": 1.4920658000761172e-06,
1275
- "loss": 1.5677,
1276
- "step": 173
1277
- },
1278
- {
1279
- "epoch": 1.549442379182156,
1280
- "grad_norm": 0.7032068699867184,
1281
- "learning_rate": 1.4370801792624656e-06,
1282
- "loss": 1.6057,
1283
- "step": 174
1284
- },
1285
- {
1286
- "epoch": 1.558364312267658,
1287
- "grad_norm": 0.7354693345583995,
1288
- "learning_rate": 1.3829563342637514e-06,
1289
- "loss": 1.6657,
1290
- "step": 175
1291
- },
1292
- {
1293
- "epoch": 1.5672862453531597,
1294
- "grad_norm": 0.7292297554209234,
1295
- "learning_rate": 1.3297073562026992e-06,
1296
- "loss": 1.6144,
1297
- "step": 176
1298
- },
1299
- {
1300
- "epoch": 1.5762081784386617,
1301
- "grad_norm": 0.689272475476711,
1302
- "learning_rate": 1.2773461245949249e-06,
1303
- "loss": 1.5528,
1304
- "step": 177
1305
- },
1306
- {
1307
- "epoch": 1.5851301115241636,
1308
- "grad_norm": 0.7354887342865115,
1309
- "learning_rate": 1.225885304233716e-06,
1310
- "loss": 1.6064,
1311
- "step": 178
1312
- },
1313
- {
1314
- "epoch": 1.5940520446096653,
1315
- "grad_norm": 0.7064194581763904,
1316
- "learning_rate": 1.1753373421267622e-06,
1317
- "loss": 1.6049,
1318
- "step": 179
1319
- },
1320
- {
1321
- "epoch": 1.6029739776951673,
1322
- "grad_norm": 0.7566228137505194,
1323
- "learning_rate": 1.125714464485551e-06,
1324
- "loss": 1.5472,
1325
- "step": 180
1326
- },
1327
- {
1328
- "epoch": 1.6118959107806692,
1329
- "grad_norm": 0.7068920541521109,
1330
- "learning_rate": 1.0770286737681701e-06,
1331
- "loss": 1.5112,
1332
- "step": 181
1333
- },
1334
- {
1335
- "epoch": 1.620817843866171,
1336
- "grad_norm": 0.6927990179758211,
1337
- "learning_rate": 1.0292917457762325e-06,
1338
- "loss": 1.614,
1339
- "step": 182
1340
- },
1341
- {
1342
- "epoch": 1.6297397769516728,
1343
- "grad_norm": 0.7004195144075062,
1344
- "learning_rate": 9.825152268066213e-07,
1345
- "loss": 1.6333,
1346
- "step": 183
1347
- },
1348
- {
1349
- "epoch": 1.6386617100371748,
1350
- "grad_norm": 0.7095189872060162,
1351
- "learning_rate": 9.367104308587493e-07,
1352
- "loss": 1.5678,
1353
- "step": 184
1354
- },
1355
- {
1356
- "epoch": 1.6475836431226765,
1357
- "grad_norm": 0.71255932263421,
1358
- "learning_rate": 8.918884368979969e-07,
1359
- "loss": 1.5812,
1360
- "step": 185
1361
- },
1362
- {
1363
- "epoch": 1.6565055762081784,
1364
- "grad_norm": 0.7111899621451707,
1365
- "learning_rate": 8.480600861760124e-07,
1366
- "loss": 1.5792,
1367
- "step": 186
1368
- },
1369
- {
1370
- "epoch": 1.6654275092936803,
1371
- "grad_norm": 0.6775823161513022,
1372
- "learning_rate": 8.052359796084952e-07,
1373
- "loss": 1.641,
1374
- "step": 187
1375
- },
1376
- {
1377
- "epoch": 1.674349442379182,
1378
- "grad_norm": 0.7246805658391949,
1379
- "learning_rate": 7.634264752111131e-07,
1380
- "loss": 1.5788,
1381
- "step": 188
1382
- },
1383
- {
1384
- "epoch": 1.683271375464684,
1385
- "grad_norm": 0.6857756197716095,
1386
- "learning_rate": 7.226416855941814e-07,
1387
- "loss": 1.5896,
1388
- "step": 189
1389
- },
1390
- {
1391
- "epoch": 1.692193308550186,
1392
- "grad_norm": 0.7126493923907314,
1393
- "learning_rate": 6.828914755166826e-07,
1394
- "loss": 1.5808,
1395
- "step": 190
1396
- },
1397
- {
1398
- "epoch": 1.7011152416356876,
1399
- "grad_norm": 0.6885954964525883,
1400
- "learning_rate": 6.441854595002478e-07,
1401
- "loss": 1.5641,
1402
- "step": 191
1403
- },
1404
- {
1405
- "epoch": 1.7100371747211895,
1406
- "grad_norm": 0.7047319405532964,
1407
- "learning_rate": 6.065329995036573e-07,
1408
- "loss": 1.5676,
1409
- "step": 192
1410
- },
1411
- {
1412
- "epoch": 1.7189591078066915,
1413
- "grad_norm": 0.6718035026523713,
1414
- "learning_rate": 5.699432026584267e-07,
1415
- "loss": 1.6242,
1416
- "step": 193
1417
- },
1418
- {
1419
- "epoch": 1.7278810408921932,
1420
- "grad_norm": 0.6981766400413105,
1421
- "learning_rate": 5.344249190660427e-07,
1422
- "loss": 1.482,
1423
- "step": 194
1424
- },
1425
- {
1426
- "epoch": 1.7368029739776951,
1427
- "grad_norm": 0.6893168999107806,
1428
- "learning_rate": 4.999867396573499e-07,
1429
- "loss": 1.6211,
1430
- "step": 195
1431
- },
1432
- {
1433
- "epoch": 1.745724907063197,
1434
- "grad_norm": 0.7107176396602818,
1435
- "learning_rate": 4.666369941146376e-07,
1436
- "loss": 1.604,
1437
- "step": 196
1438
- },
1439
- {
1440
- "epoch": 1.745724907063197,
1441
- "eval_loss": 2.112863779067993,
1442
- "eval_runtime": 5.0454,
1443
- "eval_samples_per_second": 14.469,
1444
- "eval_steps_per_second": 2.577,
1445
- "step": 196
1446
- },
1447
- {
1448
- "epoch": 1.7546468401486988,
1449
- "grad_norm": 0.6976263641639928,
1450
- "learning_rate": 4.343837488569058e-07,
1451
- "loss": 1.5392,
1452
- "step": 197
1453
- },
1454
- {
1455
- "epoch": 1.7635687732342007,
1456
- "grad_norm": 0.7174067662211165,
1457
- "learning_rate": 4.03234805088818e-07,
1458
- "loss": 1.5687,
1459
- "step": 198
1460
- },
1461
- {
1462
- "epoch": 1.7724907063197026,
1463
- "grad_norm": 0.6870219887534516,
1464
- "learning_rate": 3.7319769691379295e-07,
1465
- "loss": 1.551,
1466
- "step": 199
1467
- },
1468
- {
1469
- "epoch": 1.7814126394052043,
1470
- "grad_norm": 0.6983449018701221,
1471
- "learning_rate": 3.4427968951170287e-07,
1472
- "loss": 1.5828,
1473
- "step": 200
1474
- },
1475
- {
1476
- "epoch": 1.7903345724907063,
1477
- "grad_norm": 0.7183216870017771,
1478
- "learning_rate": 3.1648777738162496e-07,
1479
- "loss": 1.5435,
1480
- "step": 201
1481
- },
1482
- {
1483
- "epoch": 1.7992565055762082,
1484
- "grad_norm": 0.7185654549153709,
1485
- "learning_rate": 2.8982868265005457e-07,
1486
- "loss": 1.5457,
1487
- "step": 202
1488
- },
1489
- {
1490
- "epoch": 1.80817843866171,
1491
- "grad_norm": 0.7352379562150491,
1492
- "learning_rate": 2.6430885344499944e-07,
1493
- "loss": 1.5641,
1494
- "step": 203
1495
- },
1496
- {
1497
- "epoch": 1.817100371747212,
1498
- "grad_norm": 0.6976729455703554,
1499
- "learning_rate": 2.399344623363503e-07,
1500
- "loss": 1.5592,
1501
- "step": 204
1502
- },
1503
- {
1504
- "epoch": 1.8260223048327138,
1505
- "grad_norm": 0.7057792233242116,
1506
- "learning_rate": 2.1671140484290144e-07,
1507
- "loss": 1.598,
1508
- "step": 205
1509
- },
1510
- {
1511
- "epoch": 1.8349442379182155,
1512
- "grad_norm": 0.6884424303550053,
1513
- "learning_rate": 1.9464529800637731e-07,
1514
- "loss": 1.5715,
1515
- "step": 206
1516
- },
1517
- {
1518
- "epoch": 1.8438661710037176,
1519
- "grad_norm": 0.6928948603154073,
1520
- "learning_rate": 1.737414790328218e-07,
1521
- "loss": 1.6124,
1522
- "step": 207
1523
- },
1524
- {
1525
- "epoch": 1.8527881040892193,
1526
- "grad_norm": 0.6795496892846022,
1527
- "learning_rate": 1.540050040016694e-07,
1528
- "loss": 1.5935,
1529
- "step": 208
1530
- },
1531
- {
1532
- "epoch": 1.861710037174721,
1533
- "grad_norm": 0.6636706068104933,
1534
- "learning_rate": 1.3544064664281266e-07,
1535
- "loss": 1.6063,
1536
- "step": 209
1537
- },
1538
- {
1539
- "epoch": 1.8706319702602232,
1540
- "grad_norm": 0.7070367652211981,
1541
- "learning_rate": 1.1805289718196499e-07,
1542
- "loss": 1.5869,
1543
- "step": 210
1544
- },
1545
- {
1546
- "epoch": 1.879553903345725,
1547
- "grad_norm": 0.6856354556452025,
1548
- "learning_rate": 1.0184596125459134e-07,
1549
- "loss": 1.5154,
1550
- "step": 211
1551
- },
1552
- {
1553
- "epoch": 1.8884758364312266,
1554
- "grad_norm": 0.68093775252014,
1555
- "learning_rate": 8.682375888868167e-08,
1556
- "loss": 1.5594,
1557
- "step": 212
1558
- },
1559
- {
1560
- "epoch": 1.8973977695167288,
1561
- "grad_norm": 0.6485244056932311,
1562
- "learning_rate": 7.29899235565934e-08,
1563
- "loss": 1.5559,
1564
- "step": 213
1565
- },
1566
- {
1567
- "epoch": 1.9063197026022305,
1568
- "grad_norm": 0.6675593036944839,
1569
- "learning_rate": 6.034780129621664e-08,
1570
- "loss": 1.5156,
1571
- "step": 214
1572
- },
1573
- {
1574
- "epoch": 1.9152416356877322,
1575
- "grad_norm": 0.6872424052686473,
1576
- "learning_rate": 4.8900449901653214e-08,
1577
- "loss": 1.5674,
1578
- "step": 215
1579
- },
1580
- {
1581
- "epoch": 1.9241635687732344,
1582
- "grad_norm": 0.6924079214009966,
1583
- "learning_rate": 3.8650638183617695e-08,
1584
- "loss": 1.5574,
1585
- "step": 216
1586
- },
1587
- {
1588
- "epoch": 1.933085501858736,
1589
- "grad_norm": 0.7070912793422548,
1590
- "learning_rate": 2.960084529973706e-08,
1591
- "loss": 1.5807,
1592
- "step": 217
1593
- },
1594
- {
1595
- "epoch": 1.9420074349442378,
1596
- "grad_norm": 0.7050185875893417,
1597
- "learning_rate": 2.1753260154906973e-08,
1598
- "loss": 1.5965,
1599
- "step": 218
1600
- },
1601
- {
1602
- "epoch": 1.95092936802974,
1603
- "grad_norm": 0.6885302564214322,
1604
- "learning_rate": 1.5109780871853663e-08,
1605
- "loss": 1.6065,
1606
- "step": 219
1607
- },
1608
- {
1609
- "epoch": 1.9598513011152416,
1610
- "grad_norm": 0.6970811020575952,
1611
- "learning_rate": 9.672014332028357e-09,
1612
- "loss": 1.5348,
1613
- "step": 220
1614
- },
1615
- {
1616
- "epoch": 1.9687732342007433,
1617
- "grad_norm": 0.6899769194702832,
1618
- "learning_rate": 5.4412757869459765e-09,
1619
- "loss": 1.5395,
1620
- "step": 221
1621
- },
1622
- {
1623
- "epoch": 1.9776951672862455,
1624
- "grad_norm": 0.6839594718217513,
1625
- "learning_rate": 2.4185885400596076e-09,
1626
- "loss": 1.5979,
1627
- "step": 222
1628
- },
1629
- {
1630
- "epoch": 1.9866171003717472,
1631
- "grad_norm": 0.6685813594094444,
1632
- "learning_rate": 6.04683699252373e-10,
1633
- "loss": 1.613,
1634
- "step": 223
1635
- },
1636
- {
1637
- "epoch": 1.995539033457249,
1638
- "grad_norm": 0.6899032355777277,
1639
- "learning_rate": 0.0,
1640
- "loss": 1.6085,
1641
- "step": 224
1642
- },
1643
- {
1644
- "epoch": 1.995539033457249,
1645
- "eval_loss": 2.1147751808166504,
1646
- "eval_runtime": 4.9367,
1647
- "eval_samples_per_second": 14.787,
1648
- "eval_steps_per_second": 2.633,
1649
- "step": 224
1650
- }
1651
- ],
1652
- "logging_steps": 1,
1653
- "max_steps": 224,
1654
- "num_input_tokens_seen": 0,
1655
- "num_train_epochs": 2,
1656
- "save_steps": 56,
1657
- "stateful_callbacks": {
1658
- "TrainerControl": {
1659
- "args": {
1660
- "should_epoch_stop": false,
1661
- "should_evaluate": false,
1662
- "should_log": false,
1663
- "should_save": true,
1664
- "should_training_stop": true
1665
- },
1666
- "attributes": {}
1667
- }
1668
- },
1669
- "total_flos": 2.8644591172950426e+17,
1670
- "train_batch_size": 3,
1671
- "trial_name": null,
1672
- "trial_params": null
1673
- }
 
checkpoint-224/training_args.bin DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:9a73ea13cdc4aa1f89e967406cd928d6a343ca4b05825deb19cc3ea1ef4d6f2e
3
- size 8376
 
checkpoint-224/zero_to_fp32.py DELETED
@@ -1,604 +0,0 @@
1
- #!/usr/bin/env python
2
-
3
- # Copyright (c) Microsoft Corporation.
4
- # SPDX-License-Identifier: Apache-2.0
5
-
6
- # DeepSpeed Team
7
-
8
- # This script extracts fp32 consolidated weights from ZeRO 1, 2 and 3 DeepSpeed checkpoints. It gets
9
- # copied into the top level checkpoint dir, so the user can easily do the conversion at any point in
10
- # the future. Once extracted, the weights don't require DeepSpeed and can be used in any
11
- # application.
12
- #
13
- # example: python zero_to_fp32.py . pytorch_model.bin
14
-
15
- import argparse
16
- import torch
17
- import glob
18
- import math
19
- import os
20
- import re
21
- from collections import OrderedDict
22
- from dataclasses import dataclass
23
-
24
- # while this script doesn't use deepspeed to recover data, since the checkpoints are pickled with
25
- # DeepSpeed data structures it has to be available in the current python environment.
26
- from deepspeed.utils import logger
27
- from deepspeed.checkpoint.constants import (DS_VERSION, OPTIMIZER_STATE_DICT, SINGLE_PARTITION_OF_FP32_GROUPS,
28
- FP32_FLAT_GROUPS, ZERO_STAGE, PARTITION_COUNT, PARAM_SHAPES, BUFFER_NAMES,
29
- FROZEN_PARAM_SHAPES, FROZEN_PARAM_FRAGMENTS)
30
-
31
-
32
- @dataclass
33
- class zero_model_state:
34
- buffers: dict()
35
- param_shapes: dict()
36
- shared_params: list
37
- ds_version: int
38
- frozen_param_shapes: dict()
39
- frozen_param_fragments: dict()
40
-
41
-
42
- debug = 0
43
-
44
- # load to cpu
45
- device = torch.device('cpu')
46
-
47
-
48
- def atoi(text):
49
- return int(text) if text.isdigit() else text
50
-
51
-
52
- def natural_keys(text):
53
- '''
54
- alist.sort(key=natural_keys) sorts in human order
55
- http://nedbatchelder.com/blog/200712/human_sorting.html
56
- (See Toothy's implementation in the comments)
57
- '''
58
- return [atoi(c) for c in re.split(r'(\d+)', text)]
59
-
60
-
61
- def get_model_state_file(checkpoint_dir, zero_stage):
62
- if not os.path.isdir(checkpoint_dir):
63
- raise FileNotFoundError(f"Directory '{checkpoint_dir}' doesn't exist")
64
-
65
- # there should be only one file
66
- if zero_stage <= 2:
67
- file = os.path.join(checkpoint_dir, "mp_rank_00_model_states.pt")
68
- elif zero_stage == 3:
69
- file = os.path.join(checkpoint_dir, "zero_pp_rank_0_mp_rank_00_model_states.pt")
70
-
71
- if not os.path.exists(file):
72
- raise FileNotFoundError(f"can't find model states file at '{file}'")
73
-
74
- return file
75
-
76
-
77
- def get_checkpoint_files(checkpoint_dir, glob_pattern):
78
- # XXX: need to test that this simple glob rule works for multi-node setup too
79
- ckpt_files = sorted(glob.glob(os.path.join(checkpoint_dir, glob_pattern)), key=natural_keys)
80
-
81
- if len(ckpt_files) == 0:
82
- raise FileNotFoundError(f"can't find {glob_pattern} files in directory '{checkpoint_dir}'")
83
-
84
- return ckpt_files
85
-
86
-
87
- def get_optim_files(checkpoint_dir):
88
- return get_checkpoint_files(checkpoint_dir, "*_optim_states.pt")
89
-
90
-
91
- def get_model_state_files(checkpoint_dir):
92
- return get_checkpoint_files(checkpoint_dir, "*_model_states.pt")
93
-
94
-
95
- def parse_model_states(files):
96
- zero_model_states = []
97
- for file in files:
98
- state_dict = torch.load(file, map_location=device)
99
-
100
- if BUFFER_NAMES not in state_dict:
101
- raise ValueError(f"{file} is not a model state checkpoint")
102
- buffer_names = state_dict[BUFFER_NAMES]
103
- if debug:
104
- print("Found buffers:", buffer_names)
105
-
106
- # recover just the buffers while restoring them to fp32 if they were saved in fp16
107
- buffers = {k: v.float() for k, v in state_dict["module"].items() if k in buffer_names}
108
- param_shapes = state_dict[PARAM_SHAPES]
109
-
110
- # collect parameters that are included in param_shapes
111
- param_names = []
112
- for s in param_shapes:
113
- for name in s.keys():
114
- param_names.append(name)
115
-
116
- # update with frozen parameters
117
- frozen_param_shapes = state_dict.get(FROZEN_PARAM_SHAPES, None)
118
- if frozen_param_shapes is not None:
119
- if debug:
120
- print(f"Found frozen_param_shapes: {frozen_param_shapes}")
121
- param_names += list(frozen_param_shapes.keys())
122
-
123
- # handle shared params
124
- shared_params = [[k, v] for k, v in state_dict["shared_params"].items()]
125
-
126
- ds_version = state_dict.get(DS_VERSION, None)
127
-
128
- frozen_param_fragments = state_dict.get(FROZEN_PARAM_FRAGMENTS, None)
129
-
130
- z_model_state = zero_model_state(buffers=buffers,
131
- param_shapes=param_shapes,
132
- shared_params=shared_params,
133
- ds_version=ds_version,
134
- frozen_param_shapes=frozen_param_shapes,
135
- frozen_param_fragments=frozen_param_fragments)
136
- zero_model_states.append(z_model_state)
137
-
138
- return zero_model_states
139
-
140
-
141
- def parse_optim_states(files, ds_checkpoint_dir):
142
-
143
- total_files = len(files)
144
- state_dicts = []
145
- for f in files:
146
- state_dict = torch.load(f, map_location=device)
147
- # immediately discard the potentially huge 2 optimizer states as we only care for fp32 master weights
148
- # and also handle the case where it was already removed by another helper script
149
- state_dict["optimizer_state_dict"].pop("optimizer_state_dict", None)
150
- state_dicts.append(state_dict)
151
-
152
- if not ZERO_STAGE in state_dicts[0][OPTIMIZER_STATE_DICT]:
153
- raise ValueError(f"{files[0]} is not a zero checkpoint")
154
- zero_stage = state_dicts[0][OPTIMIZER_STATE_DICT][ZERO_STAGE]
155
- world_size = state_dicts[0][OPTIMIZER_STATE_DICT][PARTITION_COUNT]
156
-
157
- # For ZeRO-2 each param group can have different partition_count as data parallelism for expert
158
- # parameters can be different from data parallelism for non-expert parameters. So we can just
159
- # use the max of the partition_count to get the dp world_size.
160
-
161
- if type(world_size) is list:
162
- world_size = max(world_size)
163
-
164
- if world_size != total_files:
165
- raise ValueError(
166
- f"Expected {world_size} of '*_optim_states.pt' under '{ds_checkpoint_dir}' but found {total_files} files. "
167
- "Possibly due to an overwrite of an old checkpoint, or a checkpoint didn't get saved by one or more processes."
168
- )
169
-
170
- # the groups are named differently in each stage
171
- if zero_stage <= 2:
172
- fp32_groups_key = SINGLE_PARTITION_OF_FP32_GROUPS
173
- elif zero_stage == 3:
174
- fp32_groups_key = FP32_FLAT_GROUPS
175
- else:
176
- raise ValueError(f"unknown zero stage {zero_stage}")
177
-
178
- if zero_stage <= 2:
179
- fp32_flat_groups = [state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key] for i in range(len(state_dicts))]
180
- elif zero_stage == 3:
181
- # if there is more than one param group, there will be multiple flattened tensors - one
182
- # flattened tensor per group - for simplicity merge them into a single tensor
183
- #
184
- # XXX: could make the script more memory efficient for when there are multiple groups - it
185
- # will require matching the sub-lists of param_shapes for each param group flattened tensor
186
-
187
- fp32_flat_groups = [
188
- torch.cat(state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key], 0) for i in range(len(state_dicts))
189
- ]
190
-
191
- return zero_stage, world_size, fp32_flat_groups
192
-
193
-
194
- def _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters):
195
- """
196
- Returns fp32 state_dict reconstructed from ds checkpoint
197
-
198
- Args:
199
- - ``ds_checkpoint_dir``: path to the deepspeed checkpoint folder (where the optimizer files are)
200
-
201
- """
202
- print(f"Processing zero checkpoint '{ds_checkpoint_dir}'")
203
-
204
- optim_files = get_optim_files(ds_checkpoint_dir)
205
- zero_stage, world_size, fp32_flat_groups = parse_optim_states(optim_files, ds_checkpoint_dir)
206
- print(f"Detected checkpoint of type zero stage {zero_stage}, world_size: {world_size}")
207
-
208
- model_files = get_model_state_files(ds_checkpoint_dir)
209
-
210
- zero_model_states = parse_model_states(model_files)
211
- print(f'Parsing checkpoint created by deepspeed=={zero_model_states[0].ds_version}')
212
-
213
- if zero_stage <= 2:
214
- return _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
215
- exclude_frozen_parameters)
216
- elif zero_stage == 3:
217
- return _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
218
- exclude_frozen_parameters)
219
-
220
-
221
- def _zero2_merge_frozen_params(state_dict, zero_model_states):
222
- if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
223
- return
224
-
225
- frozen_param_shapes = zero_model_states[0].frozen_param_shapes
226
- frozen_param_fragments = zero_model_states[0].frozen_param_fragments
227
-
228
- if debug:
229
- num_elem = sum(s.numel() for s in frozen_param_shapes.values())
230
- print(f'rank 0: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
231
-
232
- wanted_params = len(frozen_param_shapes)
233
- wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
234
- avail_numel = sum([p.numel() for p in frozen_param_fragments.values()])
235
- print(f'Frozen params: Have {avail_numel} numels to process.')
236
- print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
237
-
238
- total_params = 0
239
- total_numel = 0
240
- for name, shape in frozen_param_shapes.items():
241
- total_params += 1
242
- unpartitioned_numel = shape.numel()
243
- total_numel += unpartitioned_numel
244
-
245
- state_dict[name] = frozen_param_fragments[name]
246
-
247
- if debug:
248
- print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
249
-
250
- print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
251
-
252
-
253
- def _has_callable(obj, fn):
254
- attr = getattr(obj, fn, None)
255
- return callable(attr)
256
-
257
-
258
- def _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
259
- param_shapes = zero_model_states[0].param_shapes
260
-
261
- # Reconstruction protocol:
262
- #
263
- # XXX: document this
264
-
265
- if debug:
266
- for i in range(world_size):
267
- for j in range(len(fp32_flat_groups[0])):
268
- print(f"{FP32_FLAT_GROUPS}[{i}][{j}].shape={fp32_flat_groups[i][j].shape}")
269
-
270
- # XXX: memory usage doubles here (zero2)
271
- num_param_groups = len(fp32_flat_groups[0])
272
- merged_single_partition_of_fp32_groups = []
273
- for i in range(num_param_groups):
274
- merged_partitions = [sd[i] for sd in fp32_flat_groups]
275
- full_single_fp32_vector = torch.cat(merged_partitions, 0)
276
- merged_single_partition_of_fp32_groups.append(full_single_fp32_vector)
277
- avail_numel = sum(
278
- [full_single_fp32_vector.numel() for full_single_fp32_vector in merged_single_partition_of_fp32_groups])
279
-
280
- if debug:
281
- wanted_params = sum([len(shapes) for shapes in param_shapes])
282
- wanted_numel = sum([sum(shape.numel() for shape in shapes.values()) for shapes in param_shapes])
283
- # not asserting if there is a mismatch due to possible padding
284
- print(f"Have {avail_numel} numels to process.")
285
- print(f"Need {wanted_numel} numels in {wanted_params} params.")
286
-
287
- # params
288
- # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
289
- # out-of-core computing solution
290
- total_numel = 0
291
- total_params = 0
292
- for shapes, full_single_fp32_vector in zip(param_shapes, merged_single_partition_of_fp32_groups):
293
- offset = 0
294
- avail_numel = full_single_fp32_vector.numel()
295
- for name, shape in shapes.items():
296
-
297
- unpartitioned_numel = shape.numel() if _has_callable(shape, 'numel') else math.prod(shape)
298
- total_numel += unpartitioned_numel
299
- total_params += 1
300
-
301
- if debug:
302
- print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
303
- state_dict[name] = full_single_fp32_vector.narrow(0, offset, unpartitioned_numel).view(shape)
304
- offset += unpartitioned_numel
305
-
306
- # Z2 started to align to 2*world_size to improve nccl performance. Therefore both offset and
307
- # avail_numel can differ by anywhere between 0..2*world_size. Due to two unrelated complex
308
- # paddings performed in the code it's almost impossible to predict the exact numbers w/o the
309
- # live optimizer object, so we are checking that the numbers are within the right range
310
- align_to = 2 * world_size
311
-
312
- def zero2_align(x):
313
- return align_to * math.ceil(x / align_to)
314
-
315
- if debug:
316
- print(f"original offset={offset}, avail_numel={avail_numel}")
317
-
318
- offset = zero2_align(offset)
319
- avail_numel = zero2_align(avail_numel)
320
-
321
- if debug:
322
- print(f"aligned offset={offset}, avail_numel={avail_numel}")
323
-
324
- # Sanity check
325
- if offset != avail_numel:
326
- raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
327
-
328
- print(f"Reconstructed fp32 state dict with {total_params} params {total_numel} elements")
329
-
330
-
331
- def _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
332
- exclude_frozen_parameters):
333
- state_dict = OrderedDict()
334
-
335
- # buffers
336
- buffers = zero_model_states[0].buffers
337
- state_dict.update(buffers)
338
- if debug:
339
- print(f"added {len(buffers)} buffers")
340
-
341
- if not exclude_frozen_parameters:
342
- _zero2_merge_frozen_params(state_dict, zero_model_states)
343
-
344
- _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
345
-
346
- # recover shared parameters
347
- for pair in zero_model_states[0].shared_params:
348
- if pair[1] in state_dict:
349
- state_dict[pair[0]] = state_dict[pair[1]]
350
-
351
- return state_dict
352
-
353
-
354
- def zero3_partitioned_param_info(unpartitioned_numel, world_size):
355
- remainder = unpartitioned_numel % world_size
356
- padding_numel = (world_size - remainder) if remainder else 0
357
- partitioned_numel = math.ceil(unpartitioned_numel / world_size)
358
- return partitioned_numel, padding_numel
359
-
360
-
361
- def _zero3_merge_frozen_params(state_dict, world_size, zero_model_states):
362
- if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
363
- return
364
-
365
- if debug:
366
- for i in range(world_size):
367
- num_elem = sum(s.numel() for s in zero_model_states[i].frozen_param_fragments.values())
368
- print(f'rank {i}: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
369
-
370
- frozen_param_shapes = zero_model_states[0].frozen_param_shapes
371
- wanted_params = len(frozen_param_shapes)
372
- wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
373
- avail_numel = sum([p.numel() for p in zero_model_states[0].frozen_param_fragments.values()]) * world_size
374
- print(f'Frozen params: Have {avail_numel} numels to process.')
375
- print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
376
-
377
- total_params = 0
378
- total_numel = 0
379
- for name, shape in zero_model_states[0].frozen_param_shapes.items():
380
- total_params += 1
381
- unpartitioned_numel = shape.numel()
382
- total_numel += unpartitioned_numel
383
-
384
- param_frags = tuple(model_state.frozen_param_fragments[name] for model_state in zero_model_states)
385
- state_dict[name] = torch.cat(param_frags, 0).narrow(0, 0, unpartitioned_numel).view(shape)
386
-
387
- partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
388
-
389
- if debug:
390
- print(
391
- f"Frozen params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
392
- )
393
-
394
- print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
395
-
396
-
397
- def _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
398
- param_shapes = zero_model_states[0].param_shapes
399
- avail_numel = fp32_flat_groups[0].numel() * world_size
400
- # Reconstruction protocol: For zero3 we need to zip the partitions together at boundary of each
401
- # param, re-consolidating each param, while dealing with padding if any
402
-
403
- # merge list of dicts, preserving order
404
- param_shapes = {k: v for d in param_shapes for k, v in d.items()}
405
-
406
- if debug:
407
- for i in range(world_size):
408
- print(f"{FP32_FLAT_GROUPS}[{i}].shape={fp32_flat_groups[i].shape}")
409
-
410
- wanted_params = len(param_shapes)
411
- wanted_numel = sum(shape.numel() for shape in param_shapes.values())
412
- # not asserting if there is a mismatch due to possible padding
413
- avail_numel = fp32_flat_groups[0].numel() * world_size
414
- print(f"Trainable params: Have {avail_numel} numels to process.")
415
- print(f"Trainable params: Need {wanted_numel} numels in {wanted_params} params.")
416
-
417
- # params
418
- # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
419
- # out-of-core computing solution
420
- offset = 0
421
- total_numel = 0
422
- total_params = 0
423
- for name, shape in param_shapes.items():
424
-
425
- unpartitioned_numel = shape.numel()
426
- total_numel += unpartitioned_numel
427
- total_params += 1
428
-
429
- partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
430
-
431
- if debug:
432
- print(
433
- f"Trainable params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
434
- )
435
-
436
- # XXX: memory usage doubles here
437
- state_dict[name] = torch.cat(
438
- tuple(fp32_flat_groups[i].narrow(0, offset, partitioned_numel) for i in range(world_size)),
439
- 0).narrow(0, 0, unpartitioned_numel).view(shape)
440
- offset += partitioned_numel
441
-
442
- offset *= world_size
443
-
444
- # Sanity check
445
- if offset != avail_numel:
446
- raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
447
-
448
- print(f"Reconstructed Trainable fp32 state dict with {total_params} params {total_numel} elements")
449
-
450
-
451
- def _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
452
- exclude_frozen_parameters):
453
- state_dict = OrderedDict()
454
-
455
- # buffers
456
- buffers = zero_model_states[0].buffers
457
- state_dict.update(buffers)
458
- if debug:
459
- print(f"added {len(buffers)} buffers")
460
-
461
- if not exclude_frozen_parameters:
462
- _zero3_merge_frozen_params(state_dict, world_size, zero_model_states)
463
-
464
- _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
465
-
466
- # recover shared parameters
467
- for pair in zero_model_states[0].shared_params:
468
- if pair[1] in state_dict:
469
- state_dict[pair[0]] = state_dict[pair[1]]
470
-
471
- return state_dict
472
-
473
-
474
- def get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag=None, exclude_frozen_parameters=False):
475
- """
476
- Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated state_dict that can be loaded with
477
- ``load_state_dict()`` and used for training without DeepSpeed or shared with others, for example
478
- via a model hub.
479
-
480
- Args:
481
- - ``checkpoint_dir``: path to the desired checkpoint folder
482
- - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in 'latest' file. e.g., ``global_step14``
483
- - ``exclude_frozen_parameters``: exclude frozen parameters
484
-
485
- Returns:
486
- - pytorch ``state_dict``
487
-
488
- Note: this approach may not work if your application doesn't have sufficient free CPU memory and
489
- you may need to use the offline approach using the ``zero_to_fp32.py`` script that is saved with
490
- the checkpoint.
491
-
492
- A typical usage might be ::
493
-
494
- from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint
495
- # do the training and checkpoint saving
496
- state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir) # already on cpu
497
- model = model.cpu() # move to cpu
498
- model.load_state_dict(state_dict)
499
- # submit to model hub or save the model to share with others
500
-
501
- In this example the ``model`` will no longer be usable in the deepspeed context of the same
502
- application. i.e. you will need to re-initialize the deepspeed engine, since
503
- ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
504
-
505
- If you want it all done for you, use ``load_state_dict_from_zero_checkpoint`` instead.
506
-
507
- """
508
- if tag is None:
509
- latest_path = os.path.join(checkpoint_dir, 'latest')
510
- if os.path.isfile(latest_path):
511
- with open(latest_path, 'r') as fd:
512
- tag = fd.read().strip()
513
- else:
514
- raise ValueError(f"Unable to find 'latest' file at {latest_path}")
515
-
516
- ds_checkpoint_dir = os.path.join(checkpoint_dir, tag)
517
-
518
- if not os.path.isdir(ds_checkpoint_dir):
519
- raise FileNotFoundError(f"Directory '{ds_checkpoint_dir}' doesn't exist")
520
-
521
- return _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters)
522
-
523
-
524
- def convert_zero_checkpoint_to_fp32_state_dict(checkpoint_dir, output_file, tag=None, exclude_frozen_parameters=False):
525
- """
526
- Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict`` file that can be
527
- loaded with ``torch.load(file)`` + ``load_state_dict()`` and used for training without DeepSpeed.
528
-
529
- Args:
530
- - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
531
- - ``output_file``: path to the pytorch fp32 state_dict output file (e.g. path/pytorch_model.bin)
532
- - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
533
- - ``exclude_frozen_parameters``: exclude frozen parameters
534
- """
535
-
536
- state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag, exclude_frozen_parameters)
537
- print(f"Saving fp32 state dict to {output_file}")
538
- torch.save(state_dict, output_file)
539
-
540
-
541
- def load_state_dict_from_zero_checkpoint(model, checkpoint_dir, tag=None):
542
- """
543
- 1. Put the provided model to cpu
544
- 2. Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict``
545
- 3. Load it into the provided model
546
-
547
- Args:
548
- - ``model``: the model object to update
549
- - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
550
- - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
551
-
552
- Returns:
553
- ``model``: modified model
554
-
555
- Make sure you have plenty of CPU memory available before you call this function. If you don't
556
- have enough use the ``zero_to_fp32.py`` utility to do the conversion. You will find it
557
- conveniently placed for you in the checkpoint folder.
558
-
559
- A typical usage might be ::
560
-
561
- from deepspeed.utils.zero_to_fp32 import load_state_dict_from_zero_checkpoint
562
- model = load_state_dict_from_zero_checkpoint(trainer.model, checkpoint_dir)
563
- # submit to model hub or save the model to share with others
564
-
565
- Note that once this was run, the ``model`` will no longer be usable in the deepspeed context
566
- of the same application. i.e. you will need to re-initialize the deepspeed engine, since
567
- ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
568
-
569
- """
570
- logger.info(f"Extracting fp32 weights")
571
- state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag)
572
-
573
- logger.info(f"Overwriting model with fp32 weights")
574
- model = model.cpu()
575
- model.load_state_dict(state_dict, strict=False)
576
-
577
- return model
578
-
579
-
580
- if __name__ == "__main__":
581
-
582
- parser = argparse.ArgumentParser()
583
- parser.add_argument("checkpoint_dir",
584
- type=str,
585
- help="path to the desired checkpoint folder, e.g., path/checkpoint-12")
586
- parser.add_argument(
587
- "output_file",
588
- type=str,
589
- help="path to the pytorch fp32 state_dict output file (e.g. path/checkpoint-12/pytorch_model.bin)")
590
- parser.add_argument("-t",
591
- "--tag",
592
- type=str,
593
- default=None,
594
- help="checkpoint tag used as a unique identifier for checkpoint. e.g., global_step1")
595
- parser.add_argument("--exclude_frozen_parameters", action='store_true', help="exclude frozen parameters")
596
- parser.add_argument("-d", "--debug", action='store_true', help="enable debug")
597
- args = parser.parse_args()
598
-
599
- debug = args.debug
600
-
601
- convert_zero_checkpoint_to_fp32_state_dict(args.checkpoint_dir,
602
- args.output_file,
603
- tag=args.tag,
604
- exclude_frozen_parameters=args.exclude_frozen_parameters)
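
For reference, the deleted zero_to_fp32.py utility documents its own usage in the header comment and docstrings above: point it at the checkpoint folder (here checkpoint-224, which holds the latest file and the global_step224 shards) and it consolidates the ZeRO-partitioned states into a single fp32 state_dict that no longer needs DeepSpeed. A minimal sketch, assuming a local copy of checkpoint-224 and an installed deepspeed package; the trainer object is hypothetical and stands in for whatever produced the checkpoint:

    # Offline conversion, following the example in the script header:
    #   python zero_to_fp32.py checkpoint-224 checkpoint-224/pytorch_model.bin

    # In-process variant, mirroring the docstring of load_state_dict_from_zero_checkpoint:
    from deepspeed.utils.zero_to_fp32 import load_state_dict_from_zero_checkpoint

    # Moves the model to CPU, rebuilds the fp32 state_dict from the ZeRO shards,
    # and loads it; afterwards the model is detached from the DeepSpeed engine.
    model = load_state_dict_from_zero_checkpoint(trainer.model, "checkpoint-224")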