Vadim Borisov committed
Commit 554c621 • 1 Parent(s): 219e567

Upload DistilBertForSequenceClassification

Files changed (3):
  1. README.md +5 -8
  2. config.json +14 -16
  3. model.safetensors +2 -2
README.md CHANGED
@@ -1,21 +1,20 @@
 ---
+base_model: google-bert/bert-base-uncased
 language: en
+license: apache-2.0
+pipeline_tag: text-classification
 tags:
 - text-classification
 - sentiment-analysis
 - sentiment
-- 'synthetic data'
-- text-classification
+- synthetic data
 - multi-class
 - social-media-analysis
 - customer-feedback
 - product-reviews
 - brand-monitoring
-license: apache-2.0
 widget:
-- text: >-
-    I absolutely loved this movie! The acting was superb and the plot was
-    engaging.
+- text: I absolutely loved this movie! The acting was superb and the plot was engaging.
   example_title: Very Positive Review
 - text: The service at this restaurant was terrible. I'll never go back.
   example_title: Very Negative Review
@@ -28,8 +27,6 @@ widget:
 inference:
   parameters:
     temperature: 1
-pipeline_tag: text-classification
-base_model: google-bert/bert-base-uncased
 ---
 # 🚀 BERT-based Sentiment Classification Model: Unleashing the Power of Synthetic Data
 
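With pipeline_tag: text-classification declared in the updated card metadata, the checkpoint can be driven by the standard transformers pipeline. A minimal usage sketch follows; it is not part of the commit, the repository id is a hypothetical placeholder (the full model id is not shown on this page), and outputs use the generic LABEL_0 through LABEL_4 names from config.json.

```python
from transformers import pipeline

# Hypothetical placeholder -- the commit page does not show the full repo id.
MODEL_ID = "<namespace>/<this-sentiment-model>"

classifier = pipeline("text-classification", model=MODEL_ID)

# The two widget examples from the model card, reused as test inputs.
examples = [
    "I absolutely loved this movie! The acting was superb and the plot was engaging.",
    "The service at this restaurant was terrible. I'll never go back.",
]

for text, result in zip(examples, classifier(examples)):
    # config.json maps predictions to LABEL_0 ... LABEL_4, so scores
    # come back under those generic label names.
    print(f"{result['label']} ({result['score']:.3f})  {text}")
```
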
config.json CHANGED
@@ -1,14 +1,13 @@
 {
-  "_name_or_path": "bert-base-uncased",
+  "_name_or_path": "distilbert/distilbert-base-uncased",
+  "activation": "gelu",
   "architectures": [
-    "BertForSequenceClassification"
+    "DistilBertForSequenceClassification"
   ],
-  "attention_probs_dropout_prob": 0.1,
-  "classifier_dropout": null,
-  "gradient_checkpointing": false,
-  "hidden_act": "gelu",
-  "hidden_dropout_prob": 0.1,
-  "hidden_size": 768,
+  "attention_dropout": 0.1,
+  "dim": 768,
+  "dropout": 0.1,
+  "hidden_dim": 3072,
   "id2label": {
     "0": "LABEL_0",
     "1": "LABEL_1",
@@ -17,7 +16,6 @@
     "4": "LABEL_4"
   },
   "initializer_range": 0.02,
-  "intermediate_size": 3072,
   "label2id": {
     "LABEL_0": 0,
     "LABEL_1": 1,
@@ -25,16 +23,16 @@
     "LABEL_3": 3,
     "LABEL_4": 4
   },
-  "layer_norm_eps": 1e-12,
   "max_position_embeddings": 512,
-  "model_type": "bert",
-  "num_attention_heads": 12,
-  "num_hidden_layers": 12,
+  "model_type": "distilbert",
+  "n_heads": 12,
+  "n_layers": 6,
   "pad_token_id": 0,
-  "position_embedding_type": "absolute",
+  "qa_dropout": 0.1,
+  "seq_classif_dropout": 0.2,
+  "sinusoidal_pos_embds": false,
+  "tie_weights_": true,
   "torch_dtype": "float32",
   "transformers_version": "4.37.0",
-  "type_vocab_size": 2,
-  "use_cache": true,
   "vocab_size": 30522
 }
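
The new config.json describes a stock distilbert-base-uncased encoder (6 layers, 12 heads, hidden size 768, feed-forward size 3072) with a five-label classification head, replacing the previous BERT-base fields. A small sanity-check sketch, assuming the same hypothetical placeholder repository id as above, could confirm the swapped architecture after download:

```python
from transformers import AutoConfig, AutoModelForSequenceClassification

MODEL_ID = "<namespace>/<this-sentiment-model>"  # hypothetical placeholder

config = AutoConfig.from_pretrained(MODEL_ID)

# These values mirror the config.json added in this commit.
assert config.model_type == "distilbert"
assert config.n_layers == 6 and config.n_heads == 12 and config.dim == 768
assert len(config.id2label) == 5  # LABEL_0 ... LABEL_4

model = AutoModelForSequenceClassification.from_pretrained(MODEL_ID)
print(type(model).__name__)                        # DistilBertForSequenceClassification
print(sum(p.numel() for p in model.parameters()))  # roughly 67M parameters
```
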
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:89fce6235d4dfe775a089b75a34d96c0c06af03db7fb704d50e0f730f60612fa
-size 437967876
+oid sha256:102b8d817794e88fc4a01bb431a734b902d3556e6323dd7075c6cf51cde32c78
+size 267841796
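
The LFS pointer shrinks from 437,967,876 to 267,841,796 bytes, consistent with swapping a float32 BERT-base checkpoint (about 110M parameters) for a float32 DistilBERT one (about 67M). A back-of-the-envelope check, using only the sizes recorded in the pointers above and ignoring the small safetensors header:

```python
# Sizes copied from the LFS pointers in this commit.
old_size = 437_967_876  # BertForSequenceClassification, float32
new_size = 267_841_796  # DistilBertForSequenceClassification, float32

bytes_per_param = 4  # torch_dtype is float32 in both configs

# Approximate parameter counts implied by the file sizes (header ignored).
print(f"old: ~{old_size / bytes_per_param / 1e6:.1f}M parameters")  # ~109.5M, BERT-base scale
print(f"new: ~{new_size / bytes_per_param / 1e6:.1f}M parameters")  # ~67.0M, DistilBERT scale
```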