{ "_name_or_path": "roberta_full_large", "adapters": { "adapters": { "convai2": "b1017368d7a97b11", "dailydialog": "b1017368d7a97b11", "empathetic": "b1017368d7a97b11", "reddit": "b1017368d7a97b11", "topical": "b1017368d7a97b11" }, "config_map": { "b1017368d7a97b11": { "adapter_residual_before_ln": false, "cross_adapter": false, "factorized_phm_W": true, "factorized_phm_rule": false, "hypercomplex_nonlinearity": "glorot-uniform", "init_weights": "bert", "inv_adapter": null, "inv_adapter_reduction_factor": null, "is_parallel": false, "learn_phm": true, "leave_out": [], "ln_after": false, "ln_before": false, "mh_adapter": true, "non_linearity": "swish", "original_ln_after": true, "original_ln_before": false, "output_adapter": true, "phm_bias": true, "phm_c_init": "normal", "phm_dim": 4, "phm_init_range": 0.0001, "phm_layer": false, "phm_rank": 1, "reduction_factor": 16, "residual_before_ln": true, "scaling": 1.0, "shared_W_phm": false, "shared_phm_rule": true } }, "fusion_config_map": {}, "fusions": {} }, "architectures": [ "RobertaForPoEEvaluation" ], "attention_probs_dropout_prob": 0.1, "bos_token_id": 0, "classifier_dropout": null, "eos_token_id": 2, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 1024, "initializer_range": 0.02, "intermediate_size": 4096, "layer_norm_eps": 1e-05, "max_position_embeddings": 514, "model_type": "roberta", "num_attention_heads": 16, "num_hidden_layers": 24, "pad_token_id": 1, "position_embedding_type": "absolute", "torch_dtype": "float32", "transformers_version": "4.17.0", "type_vocab_size": 1, "use_cache": true, "vocab_size": 50265 }