{
  "additive_modeling": true,
  "auto_mapping": null,
  "base_model_name_or_path": "microsoft/phi-2",
  "detached_training": false,
  "encoder_hidden_size": 2560,
  "inference_mode": true,
  "num_attention_heads": 32,
  "num_layers": 32,
  "num_prefix_set": 8,
  "num_transformer_submodules": 1,
  "num_virtual_tokens": 30,
  "ot_diversified_prefix": true,
  "peft_type": "M_PREFIX",
  "prefix_projection": false,
  "task_type": "CAUSAL_LM",
  "token_dim": 2560
}
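
A minimal sketch of how this configuration might be inspected programmatically. The local file name (adapter_config.json) is an assumption, and note that the peft_type "M_PREFIX" is not a type registered in upstream PEFT, so actually loading the adapter would require the custom PEFT variant that defines it; the snippet below only reads the JSON with Python's standard library.

import json

# Assumed local path to this config file
with open("adapter_config.json") as f:
    cfg = json.load(f)

# Print the key fields recorded in the config
print(cfg["base_model_name_or_path"])  # microsoft/phi-2
print(cfg["peft_type"], cfg["task_type"])  # M_PREFIX, CAUSAL_LM
print(cfg["num_virtual_tokens"], cfg["num_prefix_set"])  # 30, 8
print(cfg["token_dim"], cfg["num_layers"], cfg["num_attention_heads"])  # 2560, 32, 32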