{
  "config": {
    "adapter_residual_before_ln": false,
    "cross_adapter": false,
    "factorized_phm_W": true,
    "factorized_phm_rule": false,
    "hypercomplex_nonlinearity": "glorot-uniform",
    "init_weights": "mam_adapter",
    "inv_adapter": null,
    "inv_adapter_reduction_factor": null,
    "is_parallel": true,
    "learn_phm": true,
    "leave_out": [],
    "ln_after": false,
    "ln_before": false,
    "mh_adapter": false,
    "non_linearity": "relu",
    "original_ln_after": true,
    "original_ln_before": false,
    "output_adapter": true,
    "phm_bias": true,
    "phm_c_init": "normal",
    "phm_dim": 4,
    "phm_init_range": 0.0001,
    "phm_layer": false,
    "phm_rank": 1,
    "reduction_factor": 16,
    "residual_before_ln": true,
    "scaling": 4.0,
    "shared_W_phm": false,
    "shared_phm_rule": true,
    "use_gating": false
  },
  "hidden_size": 768,
  "model_class": "RobertaAdapterModel",
  "model_name": "roberta-base",
  "model_type": "roberta",
  "name": "micro_par_bn_v_4_pretrain",
  "version": "0.1.2"
}
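
For reference, a minimal sketch of recreating this adapter architecture with the `adapters` library (formerly `adapter-transformers`). The config above corresponds to a parallel bottleneck adapter (`is_parallel: true`, `mh_adapter: false`, `output_adapter: true`) with `mam_adapter`-style initialization, which is what `ParBnConfig` provides; only `reduction_factor` differs from its defaults here, and the adapter name is taken from the `name` field above. This is a sketch under those assumptions, not a definitive loading recipe.

```python
# A minimal sketch, assuming the `adapters` library (formerly adapter-transformers).
from adapters import AutoAdapterModel, ParBnConfig

# Matches "model_name": "roberta-base" and "model_class": "RobertaAdapterModel".
model = AutoAdapterModel.from_pretrained("roberta-base")

# Recreate the architecture described by the config above: a parallel
# bottleneck adapter with reduction factor 16 and output scaling 4.0.
config = ParBnConfig(reduction_factor=16, scaling=4.0, non_linearity="relu")
model.add_adapter("micro_par_bn_v_4_pretrain", config=config)
model.set_active_adapters("micro_par_bn_v_4_pretrain")
```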