{ "_name_or_path": "neuralmind/bert-base-portuguese-cased", "architectures": [ "BertForTokenClassification" ], "attention_probs_dropout_prob": 0.1, "classifier_dropout": null, "directionality": "bidi", "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 768, "id2label": { "0": "O", "1": "B-MACRO_NAME", "2": "I-MACRO_NAME", "3": "B-ROUTINE_NAME", "4": "I-ROUTINE_NAME", "5": "B-SHOWME_SPECIFIC_VITALS_MODAL", "6": "I-SHOWME_SPECIFIC_VITALS_MODAL", "7": "B-SUBMIT_CONTEXT", "8": "I-SUBMIT_CONTEXT", "9": "B-UNIT_NAME", "10": "I-UNIT_NAME", "11": "B-GO_TO_LOCATION", "12": "I-GO_TO_LOCATION", "13": "B-ORDER_ITEM", "14": "I-ORDER_ITEM", "15": "B-PATIENT_NAME", "16": "I-PATIENT_NAME", "17": "B-CLINICAL_NOTE", "18": "I-CLINICAL_NOTE", "19": "B-TEMPLATE_NAME", "20": "I-TEMPLATE_NAME", "21": "B-SHOWME_SPECIFIC_LABRESULTS_MODAL", "22": "I-SHOWME_SPECIFIC_LABRESULTS_MODAL", "23": "B-SHOWME_SPECIFIC_SCORE_MODAL", "24": "I-SHOWME_SPECIFIC_SCORE_MODAL", "25": "B-PROTOCOL_NAME", "26": "I-PROTOCOL_NAME" }, "initializer_range": 0.02, "intermediate_size": 3072, "label2id": { "B-CLINICAL_NOTE": 17, "B-GO_TO_LOCATION": 11, "B-MACRO_NAME": 1, "B-ORDER_ITEM": 13, "B-PATIENT_NAME": 15, "B-PROTOCOL_NAME": 25, "B-ROUTINE_NAME": 3, "B-SHOWME_SPECIFIC_LABRESULTS_MODAL": 21, "B-SHOWME_SPECIFIC_SCORE_MODAL": 23, "B-SHOWME_SPECIFIC_VITALS_MODAL": 5, "B-SUBMIT_CONTEXT": 7, "B-TEMPLATE_NAME": 19, "B-UNIT_NAME": 9, "I-CLINICAL_NOTE": 18, "I-GO_TO_LOCATION": 12, "I-MACRO_NAME": 2, "I-ORDER_ITEM": 14, "I-PATIENT_NAME": 16, "I-PROTOCOL_NAME": 26, "I-ROUTINE_NAME": 4, "I-SHOWME_SPECIFIC_LABRESULTS_MODAL": 22, "I-SHOWME_SPECIFIC_SCORE_MODAL": 24, "I-SHOWME_SPECIFIC_VITALS_MODAL": 6, "I-SUBMIT_CONTEXT": 8, "I-TEMPLATE_NAME": 20, "I-UNIT_NAME": 10, "O": 0 }, "layer_norm_eps": 1e-12, "max_position_embeddings": 512, "model_type": "bert", "num_attention_heads": 12, "num_hidden_layers": 12, "output_past": true, "pad_token_id": 0, "pooler_fc_size": 768, "pooler_num_attention_heads": 12, "pooler_num_fc_layers": 3, "pooler_size_per_head": 128, "pooler_type": "first_token_transform", "position_embedding_type": "absolute", "transformers_version": "4.35.0", "type_vocab_size": 2, "use_cache": true, "vocab_size": 29794 }