HHansi committed
Commit
ce67f0d
1 Parent(s): 220d49e
added_tokens.json ADDED
@@ -0,0 +1 @@
+ {"<e1>": 28996, "</e1>": 28997, "<e2>": 28998, "</e2>": 28999}
config.json ADDED
@@ -0,0 +1,56 @@
+ {
+   "_name_or_path": "bert-large-cased",
+   "architectures": [
+     "BertForSequenceClassification"
+   ],
+   "attention_probs_dropout_prob": 0.1,
+   "classifier_dropout": null,
+   "directionality": "bidi",
+   "gradient_checkpointing": false,
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.1,
+   "hidden_size": 1024,
+   "id2label": {
+     "0": "LABEL_0",
+     "1": "LABEL_1",
+     "2": "LABEL_2",
+     "3": "LABEL_3",
+     "4": "LABEL_4",
+     "5": "LABEL_5",
+     "6": "LABEL_6",
+     "7": "LABEL_7",
+     "8": "LABEL_8",
+     "9": "LABEL_9"
+   },
+   "initializer_range": 0.02,
+   "intermediate_size": 4096,
+   "label2id": {
+     "LABEL_0": 0,
+     "LABEL_1": 1,
+     "LABEL_2": 2,
+     "LABEL_3": 3,
+     "LABEL_4": 4,
+     "LABEL_5": 5,
+     "LABEL_6": 6,
+     "LABEL_7": 7,
+     "LABEL_8": 8,
+     "LABEL_9": 9
+   },
+   "layer_norm_eps": 1e-12,
+   "max_position_embeddings": 512,
+   "model_type": "bert",
+   "num_attention_heads": 16,
+   "num_hidden_layers": 24,
+   "pad_token_id": 0,
+   "pooler_fc_size": 768,
+   "pooler_num_attention_heads": 12,
+   "pooler_num_fc_layers": 3,
+   "pooler_size_per_head": 128,
+   "pooler_type": "first_token_transform",
+   "position_embedding_type": "absolute",
+   "torch_dtype": "float32",
+   "transformers_version": "4.16.2",
+   "type_vocab_size": 2,
+   "use_cache": true,
+   "vocab_size": 29000
+ }
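The config carries only the generic LABEL_0 … LABEL_9 ids; the readable relation names live in labels_map inside model_args.json further down. One way to attach them at load time (an illustration, not the repository's own loader; the path is a placeholder):

```python
# Sketch: wire the relation names from model_args.json (labels_map copied
# verbatim from that file) into the generic LABEL_i slots of config.json.
from transformers import BertForSequenceClassification

labels_map = {"selection": 0, "necessity": 1, "none": 2, "greater": 3,
              "part-of": 4, "equal": 5, "greater-equal": 6, "less-equal": 7,
              "not-part-of": 8, "less": 9}

model = BertForSequenceClassification.from_pretrained(
    "path/to/this/checkpoint",  # hypothetical local checkout of this repo
    id2label={v: k for k, v in labels_map.items()},
    label2id=labels_map,
)
```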
eval_results.txt ADDED
@@ -0,0 +1,22 @@
+ cls_report =      precision    recall  f1-score   support
+
+           0.0       0.9595    0.7634    0.8503        93
+           1.0       0.8021    0.9390    0.8652        82
+           2.0       0.7534    0.6875    0.7190        80
+           3.0       1.0000    1.0000    1.0000        57
+           4.0       0.8116    0.8615    0.8358        65
+           5.0       0.9620    0.9870    0.9744        77
+           6.0       0.9780    1.0000    0.9889        89
+           7.0       0.9481    1.0000    0.9733        73
+           8.0       1.0000    1.0000    1.0000        13
+           9.0       1.0000    1.0000    1.0000         9
+
+      accuracy                           0.9028       638
+     macro avg       0.9215    0.9239    0.9207       638
+  weighted avg       0.9049    0.9028    0.9012       638
+
+ eval_loss = 0.2609917640686035
+ macro_f1 = 0.9206824280993045
+ macro_p = 0.9214660895755212
+ macro_r = 0.9238516699010404
+ mcc = 0.8905145977958459
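The layout matches scikit-learn's classification_report (the 0.0-9.0 rows are the numeric label ids), and mcc is the Matthews correlation coefficient. A sketch of the standard calls that produce numbers of this shape; y_true and y_pred are hypothetical stand-ins for the real evaluation labels:

```python
# Sketch: standard sklearn calls behind a report of this form.
from sklearn.metrics import classification_report, matthews_corrcoef

y_true = [0, 1, 2, 2, 1, 0]  # hypothetical gold labels
y_pred = [0, 1, 2, 1, 1, 0]  # hypothetical predictions

print(classification_report(y_true, y_pred, digits=4))
print("mcc =", matthews_corrcoef(y_true, y_pred))
```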
model_args.json ADDED
@@ -0,0 +1 @@
+ {"adam_epsilon": 1e-08, "best_model_dir": "outputs/best_model", "cache_dir": "temp/cache_dir", "config": {}, "custom_layer_parameters": [], "custom_parameter_groups": [], "dataloader_num_workers": 1, "do_lower_case": false, "dynamic_quantize": false, "early_stopping_consider_epochs": false, "early_stopping_delta": 0, "early_stopping_metric": "eval_loss", "early_stopping_metric_minimize": true, "early_stopping_patience": 10, "encoding": null, "eval_batch_size": 64, "evaluate_during_training": true, "evaluate_during_training_silent": false, "evaluate_during_training_steps": 16, "evaluate_during_training_verbose": true, "evaluate_each_epoch": true, "fp16": false, "gradient_accumulation_steps": 1, "learning_rate": 1e-05, "local_rank": -1, "logging_steps": 16, "manual_seed": 157, "max_grad_norm": 1.0, "max_seq_length": 128, "model_name": "bert-large-cased", "model_type": "bert", "multiprocessing_chunksize": 500, "n_gpu": 1, "no_cache": false, "no_save": false, "not_saved_args": [], "num_train_epochs": 5, "output_dir": "temp/outputs", "overwrite_output_dir": true, "process_count": 70, "quantized_model": false, "reprocess_input_data": true, "save_best_model": true, "save_eval_checkpoints": false, "save_model_every_epoch": false, "save_optimizer_and_scheduler": true, "save_recent_only": true, "save_steps": 16, "silent": false, "tensorboard_dir": null, "thread_count": null, "train_batch_size": 16, "train_custom_parameters_only": false, "use_cached_eval_features": false, "use_early_stopping": true, "use_multiprocessing": true, "wandb_kwargs": {"name": "bert-large_1e-05_5_strain"}, "wandb_project": "relation-extraction", "warmup_ratio": 0.1, "warmup_steps": 180, "weight_decay": 0, "skip_special_tokens": true, "model_class": "REModel", "labels_list": ["selection", "necessity", "none", "greater", "part-of", "equal", "greater-equal", "less-equal", "not-part-of", "less"], "labels_map": {"selection": 0, "necessity": 1, "none": 2, "greater": 3, "part-of": 4, "equal": 5, "greater-equal": 6, "less-equal": 7, "not-part-of": 8, "less": 9}, "lazy_delimiter": "\t", "lazy_labels_column": 1, "lazy_loading": false, "lazy_loading_start_line": 1, "lazy_text_a_column": null, "lazy_text_b_column": null, "lazy_text_column": 0, "onnx": false, "regression": false, "sliding_window": false, "stride": 0.8, "tie_value": 1, "special_tags": ["<e1>", "<e2>"]}
optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:efdad27900c657f635bdd7febbae61989f747e194540d2d1115e28158a39eaf7
+ size 2660663621
predictions.csv ADDED
The diff for this file is too large to render. See raw diff
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f1b1787704f0281030483854d40332760441d392fd00a54024a89d6075c2c5dc
+ size 1334579957
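The .pt/.bin entries above and below are Git LFS pointer files rather than the tensors themselves (pytorch_model.bin weighs in at roughly 1.3 GB). A sketch for fetching one of them programmatically; the repo_id is a placeholder, since the page does not show the full repository name:

```python
# Sketch: pull an LFS-backed file via huggingface_hub.
# "HHansi/<repo-name>" is a hypothetical placeholder repo id.
from huggingface_hub import hf_hub_download

path = hf_hub_download(repo_id="HHansi/<repo-name>", filename="pytorch_model.bin")
print(path)  # local cache path of the downloaded weights
```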
scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:78c7876c3cf33ba6355acd8a4e1610138c157a71efc5ca0f1edba461142b9a9e
+ size 627
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
test_eval.txt ADDED
@@ -0,0 +1,22 @@
+ Default classification report:
+                precision    recall  f1-score   support
+
+         equal     0.7647    0.8667    0.8125        15
+       greater     1.0000    0.9091    0.9524        11
+ greater-equal     0.6207    0.8571    0.7200        21
+          less     1.0000    0.5000    0.6667         2
+    less-equal     0.7647    0.9286    0.8387        14
+     necessity     0.8062    0.8883    0.8453       206
+          none     0.7834    0.6150    0.6891       200
+   not-part-of     1.0000    1.0000    1.0000         2
+       part-of     0.7143    0.8025    0.7558       162
+     selection     0.8259    0.7940    0.8096       233
+
+      accuracy                         0.7829       866
+     macro avg     0.8280    0.8161    0.8090       866
+  weighted avg     0.7865    0.7829    0.7805       866
+
+ mcc = 0.7252961407864995
+ precision(macro) = 0.8279886882633785
+ recall(macro) = 0.8161281928146451
+ f1_score(macro) = 0.8090040477874731
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"do_lower_case": false, "do_basic_tokenize": true, "never_split": null, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "model_max_length": 512, "special_tokens_map_file": null, "tokenizer_file": "/home/hh2/.cache/huggingface/transformers/75be22d7750034989358861e325977feda47740e1c3f8a4dc1cb73570aad843e.2b9a196704f2f183fe3f4b48d6e662dba8203fdcb3346bfa896831378edf6f97", "name_or_path": "bert-large-cased", "tokenizer_class": "BertTokenizer"}
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a1b324359608ddeb36c07ae745145b765d655aa0878ff21ff20ef55da360ccc3
+ size 2875
training_progress_scores.csv ADDED
@@ -0,0 +1,55 @@
+ global_step,mcc,train_loss,eval_loss,macro_f1,macro_r,macro_p
+ 16,0.02120476189519079,2.2100412845611572,2.2965874671936035,0.0904909594023634,0.09649574693264419,0.11105955328889347
+ 32,0.04559116036010127,2.232901096343994,2.20410361289978,0.12180618829328564,0.12618150182538043,0.13715638389686075
+ 48,0.13398363699948374,2.023332118988037,2.0874132394790648,0.17823128472013944,0.18776398221831653,0.20062949997366064
+ 64,0.2322220566753284,2.0540969371795654,1.948941171169281,0.2584052751202262,0.26044517449399707,0.29758048467801645
+ 80,0.3570121750500104,1.919968605041504,1.797709619998932,0.3394214952699291,0.352114645732886,0.37802982731364815
+ 96,0.4155206237026726,1.607898235321045,1.6182670950889588,0.36514064900591503,0.38301124196998515,0.421363968458102
+ 112,0.5116573429827862,1.6240942478179932,1.4052219629287719,0.4478572936370206,0.4617380377139611,0.47301261989594534
+ 128,0.5417675503103258,1.3139628171920776,1.2070605039596558,0.47890228323643874,0.4845140674111995,0.5095592195407102
+ 144,0.6111607217734205,0.7470954060554504,1.0464143097400664,0.5296500021974426,0.5397918510570185,0.5492590466457714
+ 160,0.7166599065348958,0.9563337564468384,0.867076712846756,0.6579237728253122,0.6561980020587532,0.6967439212173381
+ 176,0.7565148310318511,0.8044032454490662,0.7237910747528076,0.7129280582675951,0.7244165017961612,0.7172402988169126
+ 192,0.7555659083132042,0.9068875312805176,0.639285323023796,0.7239596829446039,0.7091861748901661,0.8314335728465594
+ 208,0.8086278121066737,0.6481845378875732,0.5524878859519958,0.851568906628609,0.8619821829808437,0.8473586421588104
+ 224,0.8005911565464835,0.4953298270702362,0.5698974639177322,0.8213734544659145,0.8216843385933663,0.8793383708449986
+ 240,0.8348877074013525,0.23069928586483002,0.47031879127025605,0.8696886124489737,0.8671921836165218,0.8765028689608838
+ 256,0.8225355494604706,0.45986518263816833,0.445022839307785,0.8548889401888765,0.8554429906858185,0.8601816785710483
+ 272,0.8299820405880739,0.42743685841560364,0.44300584197044374,0.8662683204161684,0.8731962402839812,0.8704444835789611
+ 288,0.8536248687847678,0.2545669376850128,0.4230775862932205,0.8781617125170204,0.8834330835915178,0.8822807708277642
+ 304,0.8197344399081078,0.363519549369812,0.4402744174003601,0.8640566273374468,0.8610187086928786,0.8786448430397389
+ 320,0.8554058984391397,0.4504656195640564,0.39967024326324463,0.8901106120802293,0.8991443078511292,0.8877534104967768
+ 336,0.8772616170718975,0.7889290452003479,0.3636903315782547,0.910883287076588,0.910894636000614,0.911752464665675
+ 352,0.8744316436336443,0.35816603899002075,0.369956861436367,0.9057151481581528,0.9087663400898635,0.9076712478532342
+ 359,0.879251128719535,0.6407244205474854,0.37058296948671343,0.9138158433111461,0.9135424350282321,0.9157860958571554
+ 368,0.8673936640758833,0.2664627134799957,0.37372795045375823,0.90333621396767,0.9061759057513449,0.9049172128896664
+ 384,0.8739109110786013,0.16098318994045258,0.3428600266575813,0.9031844176834125,0.9088624519373629,0.8998101710192368
+ 400,0.8746912603147438,0.349138081073761,0.34930741637945173,0.9076235194278931,0.9114622538666189,0.9093713272543059
+ 416,0.8738687737714933,0.19574163854122162,0.347807040810585,0.8987136206953068,0.9078452564592776,0.8919981988402993
+ 432,0.8664561070262404,0.04722139239311218,0.3752228423953056,0.8926026825115286,0.8989858706007621,0.8957716478027846
+ 448,0.8673115685572573,0.20087212324142456,0.36219864934682844,0.8912853244098953,0.9000670066299594,0.8889896747561062
+ 464,0.8354361361335804,0.12940384447574615,0.3983158767223358,0.8751621235560225,0.8798399591599315,0.8828071296911795
+ 480,0.8814944374215199,0.05873381346464157,0.33358520567417144,0.9069170388587688,0.9149057309069111,0.9034232276162657
+ 496,0.8690226072809379,0.2113424837589264,0.3375336557626724,0.8953454749618185,0.901543901941227,0.8937344777225606
+ 512,0.8822011119188745,0.07148896157741547,0.35398565977811813,0.9042427293484698,0.9148142170575294,0.903350110800538
+ 528,0.8756394481459328,0.0486968569457531,0.33958793729543685,0.9066949515948908,0.9048098848919794,0.9103305004537774
+ 544,0.8796302627189637,0.14839854836463928,0.3467743456363678,0.902092362321621,0.9061139010654731,0.9154720151911568
+ 560,0.8663111633425733,0.05294731259346008,0.3235730841755867,0.9020467561690173,0.9065695630274305,0.9048098713972028
+ 576,0.8968742046520319,0.24249669909477234,0.2803815953433514,0.9263323257667146,0.9276367472679432,0.9254999156952326
+ 592,0.8813668795776285,0.5751103162765503,0.3024727046489716,0.9147965562839143,0.9152716854349426,0.917693624822056
+ 608,0.8739364445294886,0.0910172164440155,0.326918351650238,0.906429157640145,0.9122556875330282,0.9106852576451511
+ 624,0.8822194762632725,0.1523313820362091,0.2852836012840271,0.914807359162442,0.9127282611173249,0.9257197664428117
+ 640,0.8927059550723655,0.3128977417945862,0.2681684076786041,0.9235873467067922,0.9216705688096326,0.9322065501306007
+ 656,0.8905145977958459,0.06694497168064117,0.2609917640686035,0.9206824280993045,0.9238516699010404,0.9214660895755212
+ 672,0.8990628177619822,0.06331098079681396,0.26308665573596957,0.9254960764321449,0.9255833721746388,0.9280630266641822
+ 688,0.8781369185459599,0.6527341604232788,0.31593181043863294,0.9109128305401828,0.9070344865120223,0.9278278271538438
+ 704,0.8895419508370024,0.008200466632843018,0.3019000068306923,0.9157852963442812,0.9205255341370708,0.9221652820890647
+ 718,0.8940802633100846,0.13443365693092346,0.28330393955111505,0.9198756663225659,0.9228331507926419,0.9222515378593343
+ 720,0.8958080269931484,0.17313045263290405,0.2849727563560009,0.9212903400547049,0.9240831507926419,0.9234879711034616
+ 736,0.8846875499073846,0.016135575249791145,0.29293927997350694,0.9172577217857963,0.9162807692638637,0.9206660213197875
+ 752,0.8959864385890575,0.03149452060461044,0.2875762477517128,0.9218139072256719,0.9263175003595933,0.9234292021381976
+ 768,0.8936929152126204,0.17257337272167206,0.2858596809208393,0.9228173486361755,0.9206474601392092,0.9280320930432276
+ 784,0.8927478348204713,0.05605340376496315,0.2944803312420845,0.9194190544190917,0.9238068201381899,0.9233318686945855
+ 800,0.9029249363336409,0.0072312504053115845,0.28642565235495565,0.9280166462604154,0.9314922315423889,0.9295709577613721
+ 816,0.8986493488242808,0.00784089881926775,0.28926085531711576,0.9269950744100264,0.9267347841883019,0.9281135775888736
+ 832,0.9023083929927157,0.005200767889618874,0.28599364757537843,0.9291334029567582,0.9281748608066469,0.932004159762189
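model_args.json sets eval_loss as the early-stopping metric, and eval_loss bottoms out at global step 656 in this log; that row's mcc, macro_f1, macro_p, and macro_r match eval_results.txt above digit for digit, so the saved best model corresponds to step 656. A sketch for pulling that row out of the CSV (column names taken from the header above):

```python
# Sketch: locate the best checkpoint (minimum eval_loss) in the training log.
import pandas as pd

df = pd.read_csv("training_progress_scores.csv")
best = df.loc[df["eval_loss"].idxmin()]
print(best[["global_step", "eval_loss", "macro_f1", "mcc"]])
# -> step 656, eval_loss 0.26099..., matching eval_results.txt
```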
vocab.txt ADDED
The diff for this file is too large to render. See raw diff