{ "_name_or_path": "ai-forever/ruElectra-medium", "architectures": [ "ElectraForTokenClassification" ], "attention_probs_dropout_prob": 0.1, "classifier_dropout": null, "embedding_size": 576, "generator_size": "0.25", "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 576, "id2label": { "0": "O", "1": "B-Object", "2": "B-Experiencer", "3": "B-Cause", "4": "B-Deliberative", "5": "B-Causator", "6": "B-ContrSubject", "7": "B-Benefactive", "8": "B-Addressee", "9": "I-Object", "10": "B-Destinative", "11": "I-ContrSubject", "12": "B-Instrument", "13": "I-Deliberative", "14": "B-Limitative", "15": "B-DirectiveFinal", "16": "B-Mediative", "17": "I-DirectiveFinal", "18": "B-DirectiveInitial", "19": "I-DirectiveInitial", "20": "I-Experiencer", "21": "I-Cause", "22": "I-Causator" }, "initializer_range": 0.02, "intermediate_size": 2304, "label2id": { "B-Addressee": 8, "B-Benefactive": 7, "B-Causator": 5, "B-Cause": 3, "B-ContrSubject": 6, "B-Deliberative": 4, "B-Destinative": 10, "B-DirectiveFinal": 15, "B-DirectiveInitial": 18, "B-Experiencer": 2, "B-Instrument": 12, "B-Limitative": 14, "B-Mediative": 16, "B-Object": 1, "I-Causator": 22, "I-Cause": 21, "I-ContrSubject": 11, "I-Deliberative": 13, "I-DirectiveFinal": 17, "I-DirectiveInitial": 19, "I-Experiencer": 20, "I-Object": 9, "O": 0 }, "layer_norm_eps": 1e-12, "max_position_embeddings": 512, "model_type": "electra", "num_attention_heads": 9, "num_hidden_layers": 12, "pad_token_id": 0, "position_embedding_type": "absolute", "summary_activation": "gelu", "summary_last_dropout": 0.1, "summary_type": "first", "summary_use_proj": true, "torch_dtype": "float32", "transformers_version": "4.42.4", "type_vocab_size": 2, "use_cache": true, "vocab_size": 64000 }