yijisuk committed on
Commit
0ceed92
·
1 Parent(s): 865bc54

End of training

Browse files
README.md ADDED
@@ -0,0 +1,115 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ license: other
3
+ base_model: nvidia/mit-b1
4
+ tags:
5
+ - vision
6
+ - image-segmentation
7
+ - generated_from_trainer
8
+ model-index:
9
+ - name: segformer-b1-finetuned-segments-ic-chip-sample
10
+ results: []
11
+ ---
12
+
13
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
14
+ should probably proofread and complete it, then remove this comment. -->
15
+
16
+ # segformer-b1-finetuned-segments-ic-chip-sample
17
+
18
+ This model is a fine-tuned version of [nvidia/mit-b1](https://huggingface.co/nvidia/mit-b1) on the yijisuk/ic-chip-sample dataset.
19
+ It achieves the following results on the evaluation set:
20
+ - Loss: 0.1227
21
+ - Mean Iou: 0.4744
22
+ - Mean Accuracy: 0.9489
23
+ - Overall Accuracy: 0.9489
24
+ - Accuracy Unlabeled: nan
25
+ - Accuracy Circuit: 0.9489
26
+ - Iou Unlabeled: 0.0
27
+ - Iou Circuit: 0.9489
28
+
29
+ ## Model description
30
+
31
+ More information needed
32
+
33
+ ## Intended uses & limitations
34
+
35
+ More information needed
36
+
37
+ ## Training and evaluation data
38
+
39
+ More information needed
40
+
41
+ ## Training procedure
42
+
43
+ ### Training hyperparameters
44
+
45
+ The following hyperparameters were used during training:
46
+ - learning_rate: 6e-05
47
+ - train_batch_size: 2
48
+ - eval_batch_size: 2
49
+ - seed: 42
50
+ - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
51
+ - lr_scheduler_type: linear
52
+ - num_epochs: 50
53
+
54
+ ### Training results
55
+
56
+ | Training Loss | Epoch | Step | Validation Loss | Mean Iou | Mean Accuracy | Overall Accuracy | Accuracy Unlabeled | Accuracy Circuit | Iou Unlabeled | Iou Circuit |
57
+ |:-------------:|:-----:|:----:|:---------------:|:--------:|:-------------:|:----------------:|:------------------:|:----------------:|:-------------:|:-----------:|
58
+ | 0.4185 | 1.0 | 20 | 0.5878 | 0.3632 | 0.7265 | 0.7265 | nan | 0.7265 | 0.0 | 0.7265 |
59
+ | 0.4477 | 2.0 | 40 | 0.4288 | 0.4894 | 0.9788 | 0.9788 | nan | 0.9788 | 0.0 | 0.9788 |
60
+ | 0.9304 | 3.0 | 60 | 0.2053 | 0.4520 | 0.9041 | 0.9041 | nan | 0.9041 | 0.0 | 0.9041 |
61
+ | 0.1409 | 4.0 | 80 | 0.1817 | 0.4738 | 0.9477 | 0.9477 | nan | 0.9477 | 0.0 | 0.9477 |
62
+ | 0.392 | 5.0 | 100 | 0.1824 | 0.4900 | 0.9800 | 0.9800 | nan | 0.9800 | 0.0 | 0.9800 |
63
+ | 0.1589 | 6.0 | 120 | 0.1594 | 0.4814 | 0.9628 | 0.9628 | nan | 0.9628 | 0.0 | 0.9628 |
64
+ | 0.1848 | 7.0 | 140 | 0.1551 | 0.4625 | 0.9251 | 0.9251 | nan | 0.9251 | 0.0 | 0.9251 |
65
+ | 0.0874 | 8.0 | 160 | 0.1503 | 0.4829 | 0.9657 | 0.9657 | nan | 0.9657 | 0.0 | 0.9657 |
66
+ | 0.2172 | 9.0 | 180 | 0.1558 | 0.4591 | 0.9182 | 0.9182 | nan | 0.9182 | 0.0 | 0.9182 |
67
+ | 0.9914 | 10.0 | 200 | 0.1457 | 0.4698 | 0.9396 | 0.9396 | nan | 0.9396 | 0.0 | 0.9396 |
68
+ | 0.2387 | 11.0 | 220 | 0.1494 | 0.4709 | 0.9419 | 0.9419 | nan | 0.9419 | 0.0 | 0.9419 |
69
+ | 0.1242 | 12.0 | 240 | 0.1463 | 0.4743 | 0.9486 | 0.9486 | nan | 0.9486 | 0.0 | 0.9486 |
70
+ | 0.0819 | 13.0 | 260 | 0.1492 | 0.4757 | 0.9515 | 0.9515 | nan | 0.9515 | 0.0 | 0.9515 |
71
+ | 0.6077 | 14.0 | 280 | 0.1442 | 0.4793 | 0.9586 | 0.9586 | nan | 0.9586 | 0.0 | 0.9586 |
72
+ | 0.3156 | 15.0 | 300 | 0.1430 | 0.4813 | 0.9627 | 0.9627 | nan | 0.9627 | 0.0 | 0.9627 |
73
+ | 0.2564 | 16.0 | 320 | 0.1483 | 0.4673 | 0.9347 | 0.9347 | nan | 0.9347 | 0.0 | 0.9347 |
74
+ | 0.107 | 17.0 | 340 | 0.1467 | 0.4695 | 0.9390 | 0.9390 | nan | 0.9390 | 0.0 | 0.9390 |
75
+ | 1.1592 | 18.0 | 360 | 0.1437 | 0.4814 | 0.9628 | 0.9628 | nan | 0.9628 | 0.0 | 0.9628 |
76
+ | 0.0586 | 19.0 | 380 | 0.1396 | 0.4811 | 0.9622 | 0.9622 | nan | 0.9622 | 0.0 | 0.9622 |
77
+ | 0.9815 | 20.0 | 400 | 0.1399 | 0.4812 | 0.9624 | 0.9624 | nan | 0.9624 | 0.0 | 0.9624 |
78
+ | 0.3101 | 21.0 | 420 | 0.1411 | 0.4836 | 0.9672 | 0.9672 | nan | 0.9672 | 0.0 | 0.9672 |
79
+ | 0.2325 | 22.0 | 440 | 0.1395 | 0.4672 | 0.9344 | 0.9344 | nan | 0.9344 | 0.0 | 0.9344 |
80
+ | 0.1504 | 23.0 | 460 | 0.1420 | 0.4720 | 0.9441 | 0.9441 | nan | 0.9441 | 0.0 | 0.9441 |
81
+ | 0.2831 | 24.0 | 480 | 0.1393 | 0.4697 | 0.9395 | 0.9395 | nan | 0.9395 | 0.0 | 0.9395 |
82
+ | 0.0921 | 25.0 | 500 | 0.1418 | 0.4701 | 0.9401 | 0.9401 | nan | 0.9401 | 0.0 | 0.9401 |
83
+ | 0.141 | 26.0 | 520 | 0.1318 | 0.4648 | 0.9296 | 0.9296 | nan | 0.9296 | 0.0 | 0.9296 |
84
+ | 0.1381 | 27.0 | 540 | 0.1316 | 0.4697 | 0.9395 | 0.9395 | nan | 0.9395 | 0.0 | 0.9395 |
85
+ | 1.1864 | 28.0 | 560 | 0.1292 | 0.4774 | 0.9548 | 0.9548 | nan | 0.9548 | 0.0 | 0.9548 |
86
+ | 0.9492 | 29.0 | 580 | 0.1290 | 0.4709 | 0.9418 | 0.9418 | nan | 0.9418 | 0.0 | 0.9418 |
87
+ | 0.3061 | 30.0 | 600 | 0.1303 | 0.4536 | 0.9071 | 0.9071 | nan | 0.9071 | 0.0 | 0.9071 |
88
+ | 0.2511 | 31.0 | 620 | 0.1318 | 0.4725 | 0.9451 | 0.9451 | nan | 0.9451 | 0.0 | 0.9451 |
89
+ | 0.2706 | 32.0 | 640 | 0.1284 | 0.4790 | 0.9580 | 0.9580 | nan | 0.9580 | 0.0 | 0.9580 |
90
+ | 0.1508 | 33.0 | 660 | 0.1264 | 0.4698 | 0.9396 | 0.9396 | nan | 0.9396 | 0.0 | 0.9396 |
91
+ | 0.2802 | 34.0 | 680 | 0.1308 | 0.4733 | 0.9467 | 0.9467 | nan | 0.9467 | 0.0 | 0.9467 |
92
+ | 0.1897 | 35.0 | 700 | 0.1315 | 0.4681 | 0.9361 | 0.9361 | nan | 0.9361 | 0.0 | 0.9361 |
93
+ | 0.1981 | 36.0 | 720 | 0.1289 | 0.4766 | 0.9531 | 0.9531 | nan | 0.9531 | 0.0 | 0.9531 |
94
+ | 0.2742 | 37.0 | 740 | 0.1284 | 0.4818 | 0.9635 | 0.9635 | nan | 0.9635 | 0.0 | 0.9635 |
95
+ | 0.0418 | 38.0 | 760 | 0.1240 | 0.4762 | 0.9525 | 0.9525 | nan | 0.9525 | 0.0 | 0.9525 |
96
+ | 0.1946 | 39.0 | 780 | 0.1253 | 0.4750 | 0.9500 | 0.9500 | nan | 0.9500 | 0.0 | 0.9500 |
97
+ | 0.1692 | 40.0 | 800 | 0.1253 | 0.4836 | 0.9672 | 0.9672 | nan | 0.9672 | 0.0 | 0.9672 |
98
+ | 0.3071 | 41.0 | 820 | 0.1227 | 0.4751 | 0.9503 | 0.9503 | nan | 0.9503 | 0.0 | 0.9503 |
99
+ | 0.2003 | 42.0 | 840 | 0.1250 | 0.4762 | 0.9524 | 0.9524 | nan | 0.9524 | 0.0 | 0.9524 |
100
+ | 0.2099 | 43.0 | 860 | 0.1235 | 0.4740 | 0.9480 | 0.9480 | nan | 0.9480 | 0.0 | 0.9480 |
101
+ | 0.1218 | 44.0 | 880 | 0.1222 | 0.4743 | 0.9486 | 0.9486 | nan | 0.9486 | 0.0 | 0.9486 |
102
+ | 0.1583 | 45.0 | 900 | 0.1226 | 0.4708 | 0.9415 | 0.9415 | nan | 0.9415 | 0.0 | 0.9415 |
103
+ | 0.1506 | 46.0 | 920 | 0.1215 | 0.4686 | 0.9372 | 0.9372 | nan | 0.9372 | 0.0 | 0.9372 |
104
+ | 0.0643 | 47.0 | 940 | 0.1234 | 0.4779 | 0.9559 | 0.9559 | nan | 0.9559 | 0.0 | 0.9559 |
105
+ | 0.2006 | 48.0 | 960 | 0.1213 | 0.4757 | 0.9515 | 0.9515 | nan | 0.9515 | 0.0 | 0.9515 |
106
+ | 0.0783 | 49.0 | 980 | 0.1241 | 0.4726 | 0.9452 | 0.9452 | nan | 0.9452 | 0.0 | 0.9452 |
107
+ | 0.0552 | 50.0 | 1000 | 0.1227 | 0.4744 | 0.9489 | 0.9489 | nan | 0.9489 | 0.0 | 0.9489 |
108
+
109
+
110
+ ### Framework versions
111
+
112
+ - Transformers 4.36.2
113
+ - Pytorch 1.11.0+cu115
114
+ - Datasets 2.15.0
115
+ - Tokenizers 0.15.0
config.json ADDED
@@ -0,0 +1,78 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "nvidia/mit-b1",
3
+ "architectures": [
4
+ "SegformerForSemanticSegmentation"
5
+ ],
6
+ "attention_probs_dropout_prob": 0.0,
7
+ "classifier_dropout_prob": 0.1,
8
+ "decoder_hidden_size": 256,
9
+ "depths": [
10
+ 2,
11
+ 2,
12
+ 2,
13
+ 2
14
+ ],
15
+ "downsampling_rates": [
16
+ 1,
17
+ 4,
18
+ 8,
19
+ 16
20
+ ],
21
+ "drop_path_rate": 0.1,
22
+ "hidden_act": "gelu",
23
+ "hidden_dropout_prob": 0.0,
24
+ "hidden_sizes": [
25
+ 64,
26
+ 128,
27
+ 320,
28
+ 512
29
+ ],
30
+ "id2label": {
31
+ "0": "unlabeled",
32
+ "1": "circuit"
33
+ },
34
+ "image_size": 224,
35
+ "initializer_range": 0.02,
36
+ "label2id": {
37
+ "circuit": 1,
38
+ "unlabeled": 0
39
+ },
40
+ "layer_norm_eps": 1e-06,
41
+ "mlp_ratios": [
42
+ 4,
43
+ 4,
44
+ 4,
45
+ 4
46
+ ],
47
+ "model_type": "segformer",
48
+ "num_attention_heads": [
49
+ 1,
50
+ 2,
51
+ 5,
52
+ 8
53
+ ],
54
+ "num_channels": 3,
55
+ "num_encoder_blocks": 4,
56
+ "patch_sizes": [
57
+ 7,
58
+ 3,
59
+ 3,
60
+ 3
61
+ ],
62
+ "reshape_last_stage": true,
63
+ "semantic_loss_ignore_index": 255,
64
+ "sr_ratios": [
65
+ 8,
66
+ 4,
67
+ 2,
68
+ 1
69
+ ],
70
+ "strides": [
71
+ 4,
72
+ 2,
73
+ 2,
74
+ 2
75
+ ],
76
+ "torch_dtype": "float32",
77
+ "transformers_version": "4.36.2"
78
+ }
model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:972d8908d1af222d319a6a97d16395039c8cf5181bd5f47773feff8ac79da31e
3
+ size 54737376
runs/Dec23_20-58-32_Centauri/events.out.tfevents.1703332732.Centauri.49300.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2816d925e10f9721f338a9bcc098da22b0782845cd57c0ba048711d72b4839bc
3
+ size 195461
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c9a0f4c167855855f0d27be60408be5ad2f2eb9262e91214fe8fe0735e635835
3
+ size 4271