{
  "config": {
    "adapter_residual_before_ln": false,
    "cross_adapter": false,
    "dropout": 0.0,
    "factorized_phm_W": true,
    "factorized_phm_rule": false,
    "hypercomplex_nonlinearity": "glorot-uniform",
    "init_weights": "bert",
    "inv_adapter": null,
    "inv_adapter_reduction_factor": null,
    "is_parallel": false,
    "learn_phm": true,
    "leave_out": [],
    "ln_after": false,
    "ln_before": false,
    "mh_adapter": false,
    "non_linearity": "relu",
    "original_ln_after": true,
    "original_ln_before": true,
    "output_adapter": true,
    "phm_bias": true,
    "phm_c_init": "normal",
    "phm_dim": 4,
    "phm_init_range": 0.0001,
    "phm_layer": false,
    "phm_rank": 1,
    "reduction_factor": 16,
    "residual_before_ln": true,
    "scaling": 1.0,
    "shared_W_phm": false,
    "shared_phm_rule": true,
    "use_gating": false
  },
  "hidden_size": 1024,
  "model_class": "GPT2LMHeadModel",
  "model_name": "gpt2-medium",
  "model_type": "gpt2",
  "name": "fm_dom",
  "version": "adapters.1.0.0"
}
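
This appears to be an adapter_config.json saved by the Hugging Face adapters library (v1.0.0, per the "version" field): a sequential bottleneck adapter ("output_adapter": true, reduction factor 16, ReLU non-linearity) on top of gpt2-medium. A minimal sketch of restoring it follows; the checkpoint directory "path/to/fm_dom" is a placeholder, not taken from this file:

# Minimal sketch, assuming the adapter weights were saved in the same
# directory as this config. The path "path/to/fm_dom" is hypothetical.
import adapters
from transformers import GPT2LMHeadModel

# Matches "model_class" / "model_name" in the config above.
model = GPT2LMHeadModel.from_pretrained("gpt2-medium")
adapters.init(model)  # enable adapter support on the plain transformers model

# load_adapter reads adapter_config.json (reduction_factor=16,
# non_linearity="relu") and restores the bottleneck weights;
# it returns the adapter's name, here "fm_dom".
adapter_name = model.load_adapter("path/to/fm_dom")
model.set_active_adapters(adapter_name)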