Transformers · PyTorch · English · bridgetower · Inference Endpoints
anahita-b committed · Commit 8884f5d · 1 Parent(s): b972ada

Update config.json

Files changed (1)
  1. config.json +4 -15
config.json CHANGED
@@ -1,9 +1,5 @@
 {
 "drop_rate":0.1,
- "freeze_RoBERTa":false,
- "freeze_ViT":false,
- "freeze_layer_count_roberta":false,
- "freeze_layer_count_vit":false,
 "head_hidden_scale":2,
 "hidden_act":"gelu",
 "hidden_size":768,
@@ -21,26 +17,19 @@
 "resolution_before":224,
 "stop_gradient":false,
 "tie_word_embeddings":false,
- "tokenizer":"roberta-base",
- "unfreeze_RoBERTa_attention":false,
- "unfreeze_RoBERTa_embeddings":false,
- "unfreeze_RoBERTa_encoder":false,
- "unfreeze_RoBERTa_layernorm":false,
- "unfreeze_ViT_attention":false,
- "unfreeze_ViT_layernorm":false,
 "vit_embed_dim":512,
- "vit_layers":12,
+ "vit_num_hidden_layers":12,
 "vit_layernorm_init_from_vit":false,
 "vit_layernorm_shared":true,
 "vit_patch_size":16,
 "vit_remove_last":false,
- "vit_transformer_width":512,
- "vit_width":768,
- "classifier_dropout": null,
+ "vit_intermediate_size":512,
+ "vit_hidden_size":768,
 "vocab_size":50265,
 "text_config_dict": null,
 "text_config":{
 "architectures": ["BridgeTowerTextModel"],
+ "classifier_dropout": null,
 "vocab_size": 50265,
 "hidden_size": 768,
 "num_hidden_layers": 12,