zhangchen1991 committed
Commit 5d2c4d7 · 1 Parent(s): 219a93e

Upload config.json with huggingface_hub

Browse files
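The commit message describes a config.json upload made through the huggingface_hub client. As a rough sketch only (the repo id and file paths below are placeholders, not values taken from this commit), such an upload typically looks like:

# Illustrative sketch of an upload via huggingface_hub; repo_id is a placeholder.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="config.json",                # local file to push
    path_in_repo="config.json",                   # destination path inside the repo
    repo_id="zhangchen1991/roberta_full_large",   # placeholder, not the actual repo id
    commit_message="Upload config.json with huggingface_hub",
)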
Files changed (1)
  1. config.json +0 -45
config.json CHANGED
@@ -1,49 +1,4 @@
 {
-  "_name_or_path": "roberta_full_large",
-  "adapters": {
-    "adapters": {
-      "convai2": "b1017368d7a97b11",
-      "dailydialog": "b1017368d7a97b11",
-      "empathetic": "b1017368d7a97b11",
-      "reddit": "b1017368d7a97b11",
-      "topical": "b1017368d7a97b11"
-    },
-    "config_map": {
-      "b1017368d7a97b11": {
-        "adapter_residual_before_ln": false,
-        "cross_adapter": false,
-        "factorized_phm_W": true,
-        "factorized_phm_rule": false,
-        "hypercomplex_nonlinearity": "glorot-uniform",
-        "init_weights": "bert",
-        "inv_adapter": null,
-        "inv_adapter_reduction_factor": null,
-        "is_parallel": false,
-        "learn_phm": true,
-        "leave_out": [],
-        "ln_after": false,
-        "ln_before": false,
-        "mh_adapter": true,
-        "non_linearity": "swish",
-        "original_ln_after": true,
-        "original_ln_before": false,
-        "output_adapter": true,
-        "phm_bias": true,
-        "phm_c_init": "normal",
-        "phm_dim": 4,
-        "phm_init_range": 0.0001,
-        "phm_layer": false,
-        "phm_rank": 1,
-        "reduction_factor": 16,
-        "residual_before_ln": true,
-        "scaling": 1.0,
-        "shared_W_phm": false,
-        "shared_phm_rule": true
-      }
-    },
-    "fusion_config_map": {},
-    "fusions": {}
-  },
   "architectures": [
     "RobertaForPoEEvaluation"
   ],
 
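The block removed by this commit is the serialized adapter setup that the adapter-transformers fork of transformers writes into config.json (the nested "adapters", "config_map", "fusion_config_map", and "fusions" keys). As a loose sketch only, assuming adapter-transformers is installed (this is not code from the repository), the deleted config_map entry corresponds roughly to a bottleneck adapter configuration like:

# Loose sketch, assuming the adapter-transformers fork; values mirror the deleted config_map entry.
from transformers.adapters import AdapterConfig

bottleneck = AdapterConfig(
    mh_adapter=True,          # adapter after the self-attention sub-layer
    output_adapter=True,      # adapter after the feed-forward sub-layer
    reduction_factor=16,      # bottleneck down-projection factor
    non_linearity="swish",    # activation inside the bottleneck
)
# model.add_adapter("convai2", config=bottleneck)  # hypothetical usage on a loaded model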