Files changed (1)
  1. config.json +7 -24
config.json CHANGED
@@ -1,8 +1,6 @@
 {
   "_name_or_path": "/tmp/iopath_cache/manifold_cache/tree/users/shenx/finetune/09281004-cambrian_llama3_2_t576_ov",
-  "architectures": [
-    "CambrianLlamaForCausalLM"
-  ],
+  "architectures": ["CambrianLlamaForCausalLM"],
   "attention_bias": false,
   "attention_dropout": 0.0,
   "bos_token_id": 128000,
@@ -11,11 +9,7 @@
   "connector_only": true,
   "dino_threshold": 0.83,
   "drop_threshold": 0.8,
-  "eos_token_id": [
-    128001,
-    128008,
-    128009
-  ],
+  "eos_token_id": [128001, 128008, 128009],
   "frame_pos": false,
   "freeze_mm_mlp_adapter": false,
   "hidden_act": "silu",
@@ -44,10 +38,7 @@
     "siglip/CLIP-ViT-SO400M-14-384",
     "facebook/dinov2-giant-res378"
   ],
-  "mm_vision_tower_aux_token_len_list": [
-    576,
-    576
-  ],
+  "mm_vision_tower_aux_token_len_list": [576, 576],
   "mm_vision_tower_lr": null,
   "model_type": "cambrian_llama",
   "num_attention_heads": 24,
@@ -56,16 +47,11 @@
   "num_of_vision_sampler_layers": 10,
   "num_query_group": 1,
   "pretraining_tp": 1,
-  "query_num_list": [
-    144
-  ],
-  "rms_norm_eps": 1e-05,
+  "query_num_list": [144],
+  "rms_norm_eps": 1e-5,
   "rope_scaling": {
     "factor": 32.0,
-    "high_freq_factor": 4.0,
-    "low_freq_factor": 1.0,
-    "original_max_position_embeddings": 8192,
-    "rope_type": "llama3"
+    "type": "llama3"
   },
   "rope_theta": 500000.0,
   "spmd_debug": null,
@@ -83,9 +69,6 @@
   "use_cache": false,
   "use_mm_proj": true,
   "vision_hidden_size": 1024,
-  "vision_tower_aux_token_len_list": [
-    576,
-    576
-  ],
+  "vision_tower_aux_token_len_list": [576, 576],
   "vocab_size": 128256
 }
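The edit compacts the multi-line arrays onto single lines, shortens 1e-05 to the equivalent 1e-5, and rewrites rope_scaling from the llama3-style schema (high_freq_factor, low_freq_factor, original_max_position_embeddings, rope_type) down to the older two-key {"factor", "type"} shape. A minimal sanity-check sketch for the edited file, assuming it is saved locally as config.json (that path is an assumption, not part of this change):

    import json

    # Round-trip the edited file to confirm it is still valid JSON.
    # (Local path is assumed; point it at wherever config.json lives.)
    with open("config.json") as f:
        cfg = json.load(f)

    # JSON parses 1e-5 and 1e-05 to the same float, so trimming the
    # leading zero in the exponent does not change the loaded value.
    assert json.loads("1e-5") == json.loads("1e-05") == 1e-05

    # Spot-check the keys touched by this diff.
    print(cfg["architectures"])   # ['CambrianLlamaForCausalLM']
    print(cfg["eos_token_id"])    # [128001, 128008, 128009]
    print(cfg["rope_scaling"])    # {'factor': 32.0, 'type': 'llama3'}

Of the seven added lines, only the rope_scaling rewrite changes the parsed content; the array compaction and the exponent spelling are cosmetic and load to identical values.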