{
  "_name_or_path": "roberta-large",
  "architectures": [
    "RobertaForSequenceClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 1024,
  "id2label": {
    "A01002": "Self-direction: thought",
    "A01005": "Self-direction: action",
    "A01006": "Stimulation",
    "A01007": "Hedonism",
    "A01008": "Achievement",
    "A01009": "Power: dominance",
    "A01010": "Power: resources",
    "A01011": "Face",
    "A01014": "Security: personal",
    "A01015": "Security: societal",
    "A01016": "Tradition",
    "A01018": "Conformity: rules",
    "A01019": "Conformity: interpersonal",
    "A01020": "Humility",
    "A02003": "Benevolence: caring",
    "A02004": "Benevolence: dependability",
    "A02005": "Universalism: concern",
    "A02006": "Universalism: nature",
    "A02007": "Universalism: tolerance",
    "A02008": "Universalism: objectivity"
  },
  "initializer_range": 0.02,
  "intermediate_size": 4096,
  "label2id": {
    "Achievement": "A01008",
    "Benevolence: caring": "A02003",
    "Benevolence: dependability": "A02004",
    "Conformity: interpersonal": "A01019",
    "Conformity: rules": "A01018",
    "Face": "A01011",
    "Hedonism": "A01007",
    "Humility": "A01020",
    "Power: dominance": "A01009",
    "Power: resources": "A01010",
    "Security: personal": "A01014",
    "Security: societal": "A01015",
    "Self-direction: action": "A01005",
    "Self-direction: thought": "A01002",
    "Stimulation": "A01006",
    "Tradition": "A01016",
    "Universalism: concern": "A02005",
    "Universalism: nature": "A02006",
    "Universalism: objectivity": "A02008",
    "Universalism: tolerance": "A02007"
  },
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "roberta",
  "num_attention_heads": 16,
  "num_hidden_layers": 24,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "problem_type": "multi_label_classification",
  "torch_dtype": "float32",
  "transformers_version": "4.29.2",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 50265
}