{
    "base_model_id": "google/gemma-7b-it",
    "hidden_size": 3072,
    "adapters": {
        "adapter_1": "lamm-mit/x-lora-gemma-7b/adapter_1",
        "adapter_2": "lamm-mit/x-lora-gemma-7b/adapter_2",
        "adapter_3": "lamm-mit/x-lora-gemma-7b/adapter_3",
        "adapter_4": "lamm-mit/x-lora-gemma-7b/adapter_4"
    },
    "enable_softmax": true,
    "enable_softmax_topk": false,
    "layerwise_scalings": true,
    "xlora_depth": 2,
    "xlora_size": 2048,
    "enable_relu_and_dropout": true,
    "use_bias": true,
    "xlora_dropout_p": 0.2,
    "use_trainable_adapters": false,
    "softmax_temperature": 1,
    "top_k_lora": null,
    "scaling_pass_value": 0,
    "global_scaling_weight": 1
}
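
For reference, below is a minimal Python sketch that loads and inspects this configuration using only the standard library. The file path is illustrative, and the field interpretations in the comments follow the X-LoRA reference implementation's documentation of its config options; treat them as a reading of the config rather than an authoritative specification.

```python
import json
from pathlib import Path

# Illustrative path; point this at wherever the config is saved locally.
CONFIG_PATH = Path("xlora_config.json")

with CONFIG_PATH.open() as f:
    cfg = json.load(f)

# Frozen base model that the LoRA adapters are applied on top of.
print("base model:", cfg["base_model_id"])

# Hidden size of the base model; the scaling head consumes hidden
# states of this width (3072 for gemma-7b-it).
print("hidden size:", cfg["hidden_size"])

# The four adapters that the X-LoRA scaling head mixes per token.
for name, repo_path in cfg["adapters"].items():
    print(f"  {name}: {repo_path}")

# With layerwise_scalings enabled, the head emits one weight per
# adapter per transformer layer; enable_softmax normalizes those
# weights with a softmax (temperature = softmax_temperature).
print("layerwise scalings:", cfg["layerwise_scalings"])
print("softmax over adapter weights:", cfg["enable_softmax"])

# Scaling-head MLP shape: xlora_depth layers of width xlora_size,
# with ReLU and dropout (p = xlora_dropout_p) when
# enable_relu_and_dropout is true.
print(
    "scaling head: depth={}, width={}, dropout={}".format(
        cfg["xlora_depth"], cfg["xlora_size"], cfg["xlora_dropout_p"]
    )
)
```

With `top_k_lora` set to `null` and `enable_softmax_topk` false, all four adapters contribute at every token; `scaling_pass_value` is the placeholder scaling used during the initial pass that produces the hidden states fed to the scaling head, and `global_scaling_weight` is a uniform multiplier applied to all predicted scalings.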