danielhanchen committed
Commit 2773fd6
Parent: 52b9ce9

Upload MllamaForConditionalGeneration

config.json CHANGED
@@ -1,42 +1,220 @@
  {
    "architectures": [
-     "MllamaForCausalLM"
    ],
-   "bos_token_id": 128000,
-   "cross_attention_layers": [
-     3,
-     8,
-     13,
-     18,
-     23,
-     28,
-     33,
-     38
-   ],
-   "dropout": 0,
-   "eos_token_id": 128001,
-   "hidden_act": "silu",
-   "hidden_size": 4096,
-   "initializer_range": 0.02,
-   "intermediate_size": 14336,
-   "max_position_embeddings": 131072,
-   "model_type": "mllama_text_model",
-   "num_attention_heads": 32,
-   "num_hidden_layers": 40,
-   "num_key_value_heads": 8,
-   "pad_token_id": 128004,
-   "rms_norm_eps": 1e-05,
-   "rope_scaling": {
-     "factor": 8.0,
-     "high_freq_factor": 4.0,
-     "low_freq_factor": 1.0,
-     "original_max_position_embeddings": 8192,
-     "rope_type": "llama3"
    },
-   "rope_theta": 500000.0,
-   "tie_word_embeddings": false,
    "torch_dtype": "bfloat16",
-   "transformers_version": "4.46.0.dev0",
-   "use_cache": true,
-   "vocab_size": 128256
  }
  {
+   "_name_or_path": "meta-llama/Llama-3.2-11B-Vision",
    "architectures": [
+     "MllamaForConditionalGeneration"
    ],
+   "image_token_index": 128256,
+   "model_type": "mllama",
+   "text_config": {
+     "_name_or_path": "",
+     "add_cross_attention": false,
+     "architectures": null,
+     "bad_words_ids": null,
+     "begin_suppress_tokens": null,
+     "bos_token_id": 128000,
+     "chunk_size_feed_forward": 0,
+     "cross_attention_hidden_size": null,
+     "cross_attention_layers": [
+       3,
+       8,
+       13,
+       18,
+       23,
+       28,
+       33,
+       38
+     ],
+     "decoder_start_token_id": null,
+     "diversity_penalty": 0.0,
+     "do_sample": false,
+     "dropout": 0,
+     "early_stopping": false,
+     "encoder_no_repeat_ngram_size": 0,
+     "eos_token_id": 128001,
+     "exponential_decay_length_penalty": null,
+     "finetuning_task": null,
+     "forced_bos_token_id": null,
+     "forced_eos_token_id": null,
+     "hidden_act": "silu",
+     "hidden_size": 4096,
+     "id2label": {
+       "0": "LABEL_0",
+       "1": "LABEL_1"
+     },
+     "initializer_range": 0.02,
+     "intermediate_size": 14336,
+     "is_decoder": false,
+     "is_encoder_decoder": false,
+     "label2id": {
+       "LABEL_0": 0,
+       "LABEL_1": 1
+     },
+     "length_penalty": 1.0,
+     "max_length": 20,
+     "max_position_embeddings": 131072,
+     "min_length": 0,
+     "model_type": "mllama_text_model",
+     "no_repeat_ngram_size": 0,
+     "num_attention_heads": 32,
+     "num_beam_groups": 1,
+     "num_beams": 1,
+     "num_hidden_layers": 40,
+     "num_key_value_heads": 8,
+     "num_return_sequences": 1,
+     "output_attentions": false,
+     "output_hidden_states": false,
+     "output_scores": false,
+     "pad_token_id": 128004,
+     "prefix": null,
+     "problem_type": null,
+     "pruned_heads": {},
+     "remove_invalid_values": false,
+     "repetition_penalty": 1.0,
+     "return_dict": true,
+     "return_dict_in_generate": false,
+     "rms_norm_eps": 1e-05,
+     "rope_scaling": {
+       "factor": 8.0,
+       "high_freq_factor": 4.0,
+       "low_freq_factor": 1.0,
+       "original_max_position_embeddings": 8192,
+       "rope_type": "llama3"
+     },
+     "rope_theta": 500000.0,
+     "sep_token_id": null,
+     "suppress_tokens": null,
+     "task_specific_params": null,
+     "temperature": 1.0,
+     "tf_legacy_loss": false,
+     "tie_encoder_decoder": false,
+     "tie_word_embeddings": false,
+     "tokenizer_class": null,
+     "top_k": 50,
+     "top_p": 1.0,
+     "torch_dtype": "bfloat16",
+     "torchscript": false,
+     "typical_p": 1.0,
+     "use_bfloat16": false,
+     "use_cache": true,
+     "vocab_size": 128256
    },
    "torch_dtype": "bfloat16",
+   "transformers_version": "4.45.0",
+   "vision_config": {
+     "_name_or_path": "",
+     "add_cross_attention": false,
+     "architectures": null,
+     "attention_heads": 16,
+     "bad_words_ids": null,
+     "begin_suppress_tokens": null,
+     "bos_token_id": null,
+     "chunk_size_feed_forward": 0,
+     "cross_attention_hidden_size": null,
+     "decoder_start_token_id": null,
+     "diversity_penalty": 0.0,
+     "do_sample": false,
+     "early_stopping": false,
+     "encoder_no_repeat_ngram_size": 0,
+     "eos_token_id": null,
+     "exponential_decay_length_penalty": null,
+     "finetuning_task": null,
+     "forced_bos_token_id": null,
+     "forced_eos_token_id": null,
+     "hidden_act": "gelu",
+     "hidden_size": 1280,
+     "id2label": {
+       "0": "LABEL_0",
+       "1": "LABEL_1"
+     },
+     "image_size": 448,
+     "initializer_range": 0.02,
+     "intermediate_layers_indices": [
+       3,
+       7,
+       15,
+       23,
+       30
+     ],
+     "intermediate_size": 5120,
+     "is_decoder": false,
+     "is_encoder_decoder": false,
+     "label2id": {
+       "LABEL_0": 0,
+       "LABEL_1": 1
+     },
+     "length_penalty": 1.0,
+     "max_length": 20,
+     "max_num_tiles": 4,
+     "min_length": 0,
+     "model_type": "mllama_vision_model",
+     "no_repeat_ngram_size": 0,
+     "norm_eps": 1e-05,
+     "num_beam_groups": 1,
+     "num_beams": 1,
+     "num_channels": 3,
+     "num_global_layers": 8,
+     "num_hidden_layers": 32,
+     "num_return_sequences": 1,
+     "output_attentions": false,
+     "output_hidden_states": false,
+     "output_scores": false,
+     "pad_token_id": null,
+     "patch_size": 14,
+     "prefix": null,
+     "problem_type": null,
+     "pruned_heads": {},
+     "remove_invalid_values": false,
+     "repetition_penalty": 1.0,
+     "return_dict": true,
+     "return_dict_in_generate": false,
+     "sep_token_id": null,
+     "supported_aspect_ratios": [
+       [1, 1],
+       [1, 2],
+       [1, 3],
+       [1, 4],
+       [2, 1],
+       [2, 2],
+       [3, 1],
+       [4, 1]
+     ],
+     "suppress_tokens": null,
+     "task_specific_params": null,
+     "temperature": 1.0,
+     "tf_legacy_loss": false,
+     "tie_encoder_decoder": false,
+     "tie_word_embeddings": true,
+     "tokenizer_class": null,
+     "top_k": 50,
+     "top_p": 1.0,
+     "torch_dtype": "bfloat16",
+     "torchscript": false,
+     "typical_p": 1.0,
+     "use_bfloat16": false,
+     "vision_output_dim": 7680
+   }
  }
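The substantive change: the checkpoint is re-saved as the full vision+text `MllamaForConditionalGeneration` model, with the old top-level text settings moved into a nested `text_config` and a new `vision_config` added alongside. A minimal loading sketch, assuming the repo id taken from the config's `_name_or_path` field and a transformers release that ships the Mllama classes (the config pins 4.45.0):

```python
import torch
from transformers import AutoProcessor, MllamaForConditionalGeneration

# Repo id assumed from "_name_or_path" in config.json above.
model_id = "meta-llama/Llama-3.2-11B-Vision"

model = MllamaForConditionalGeneration.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype" in the config
    device_map="auto",           # spread the shards across available devices
)
processor = AutoProcessor.from_pretrained(model_id)
```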
generation_config.json CHANGED
@@ -3,5 +3,5 @@
    "bos_token_id": 128000,
    "eos_token_id": 128001,
    "pad_token_id": 128004,
-   "transformers_version": "4.46.0.dev0"
+   "transformers_version": "4.45.0"
  }
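The only change here is the pinned transformers version (a dev build swapped for the 4.45.0 release); the special-token ids carry over unchanged. A quick sanity check of those defaults, assuming the same repo id as above:

```python
from transformers import GenerationConfig

# Load the generation defaults shipped with the checkpoint.
gen_cfg = GenerationConfig.from_pretrained("meta-llama/Llama-3.2-11B-Vision")
print(gen_cfg.bos_token_id, gen_cfg.eos_token_id, gen_cfg.pad_token_id)
# expected: 128000 128001 128004
```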
model-00001-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:20f9ea8006cc17642463a94a916c5b9ad7907aa02ce4891d42d2095c146de2fa
+ size 4988395666
model-00002-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:700ba215225f5e2b16e9d451f2cc9703cfc124ea2a9de8fa8886f7a8911413da
+ size 4915919664
model-00003-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b06cdc33032dfe5f7a9cf61d9dbe388e2af72e952bda75bf2aedac0bbaadb142
+ size 4915919704
model-00004-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:806d7a1d87d0a2d45b2f6c42dddc8b8f2b77bcaf45ac085181d0af74f7492909
+ size 4999823980
model-00005-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c5fefff872f2d7f82ed0498e0b5e7dc10fdd01b051e8be6e41ee3ae4f4fa192d
+ size 1465943128
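Each block above is a Git LFS pointer, not the weights themselves: the repo records only a SHA-256 digest and a byte size, while the roughly 21 GB of shard data lives in LFS storage. A sketch of verifying a downloaded shard against its pointer, using the oid and size from the first entry:

```python
import hashlib
import os

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file through SHA-256 so large shards never sit in memory."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        while chunk := f.read(chunk_size):
            h.update(chunk)
    return h.hexdigest()

shard = "model-00001-of-00005.safetensors"
assert os.path.getsize(shard) == 4988395666  # "size" line from the pointer
assert sha256_of(shard) == (                 # "oid sha256:..." from the pointer
    "20f9ea8006cc17642463a94a916c5b9ad7907aa02ce4891d42d2095c146de2fa"
)
```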
model.safetensors.index.json CHANGED
The diff for this file is too large to render. See raw diff
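That index is what ties the five shards together: it is the standard transformers sharded-checkpoint map, so a too-large-to-render diff simply reflects every tensor's shard assignment changing. A sketch of inspecting it directly, assuming the usual "metadata"/"weight_map" layout (the tensor name in the comment is illustrative, not taken from this repo):

```python
import json

with open("model.safetensors.index.json") as f:
    index = json.load(f)

# Total bytes across all shards (standard "metadata" block).
print(index["metadata"]["total_size"])

# "weight_map" maps each tensor name to the shard file holding it, e.g.
# "language_model.model.embed_tokens.weight" -> "model-00001-of-00005.safetensors".
name, shard = next(iter(index["weight_map"].items()))
print(name, "->", shard)
```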