lewisnjue committed on
Commit 55facfd · verified
1 Parent(s): 7a527a9

Delete config.json


deleted configuration file

Files changed (1)
  1. config.json +0 -25
config.json DELETED
@@ -1,25 +0,0 @@
-{
-  "_name_or_path": "path/to/save/model",
-  "activation": "gelu",
-  "architectures": [
-    "DistilBertForSequenceClassification"
-  ],
-  "attention_dropout": 0.1,
-  "dim": 768,
-  "dropout": 0.1,
-  "hidden_dim": 3072,
-  "initializer_range": 0.02,
-  "max_position_embeddings": 512,
-  "model_type": "distilbert",
-  "n_heads": 12,
-  "n_layers": 6,
-  "pad_token_id": 0,
-  "problem_type": "single_label_classification",
-  "qa_dropout": 0.1,
-  "seq_classif_dropout": 0.2,
-  "sinusoidal_pos_embds": false,
-  "tie_weights_": true,
-  "torch_dtype": "float32",
-  "transformers_version": "4.48.3",
-  "vocab_size": 30522
-}
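
Note: a config.json like the one deleted above is normally produced by the transformers library when a model is saved, not written by hand. The sketch below shows one way such a file could be regenerated; the base checkpoint name "distilbert-base-uncased", the label count, and the output path are assumptions, not taken from this commit.

    # Minimal sketch (assumptions noted above); requires `pip install transformers torch`
    from transformers import DistilBertConfig, DistilBertForSequenceClassification

    # Saving a sequence-classification model writes config.json next to the weights.
    model = DistilBertForSequenceClassification.from_pretrained(
        "distilbert-base-uncased",                      # assumed base checkpoint
        num_labels=2,                                   # assumption; the deleted config lists no labels
        problem_type="single_label_classification",     # matches the deleted config
    )
    model.save_pretrained("path/to/save/model")         # placeholder path, as in _name_or_path

    # The file can also be written on its own from the configuration class;
    # DistilBertConfig defaults match the deleted values (dim=768, n_layers=6, n_heads=12, ...).
    config = DistilBertConfig()
    config.save_pretrained("path/to/save/model")        # writes only config.json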