guymorganb committed
Commit c6d90f3 · 1 Parent(s): 9e948ba

messing around with differing settings.

Files changed (1)
config.json: +3 -4
config.json CHANGED

@@ -16,13 +16,11 @@
     "AutoModelForSequenceClassification": "modeling_lsg_bert.LSGBertForSequenceClassification",
     "AutoModelForTokenClassification": "modeling_lsg_bert.LSGBertForTokenClassification"
   },
+  "base_model_prefix": "lsg",
   "block_size": 128,
+  "pool_with_global": true,
   "sparse_block_size": 128,
   "sparsity_factor": 2,
-  "base_model_prefix": "lsg",
-  "sparsity_type": "norm",
-  "is_decoder": false,
-  "pool_with_global": true,
   "num_global_tokens": 1,
   "classifier_dropout": null,
   "hidden_act": "gelu",
@@ -39,6 +37,7 @@
   "num_hidden_layers": 24,
   "pad_token_id": 0,
   "position_embedding_type": "absolute",
+  "sparsity_type": "norm",
   "torch_dtype": "float32",
   "transformers_version": "4.30.2",
   "type_vocab_size": 2,