mjbuehler committed on
Commit 0352a4e
1 Parent(s): 500864e

Update xlora_config.json

Files changed (1): xlora_config.json (+23 -1)
xlora_config.json CHANGED
@@ -1 +1,23 @@
- {"base_model_id":"google/gemma-7b-it", "hidden_size": 3072, "adapters": {"adapter_1": "lamm-mit/x-lora-gemma-7b/adapter_1", "adapter_2": "lamm-mit/x-lora-gemma-7b/adapter_2", "adapter_3": "lamm-mit/x-lora-gemma-7b/adapter_3", "adapter_4": "lamm-mit/x-lora-gemma-7b/adapter_4"}, "enable_softmax": true, "enable_softmax_topk": false, "layerwise_scalings": true, "xlora_depth": 2, "xlora_size": 2048, "enable_relu_and_dropout": true, "use_bias": true, "xlora_dropout_p": 0.2, "use_trainable_adapters": false, "softmax_temperature": 1.0, "top_k_lora": null, "scaling_pass_value": 0, "global_scaling_weight": 1.0}
+ {
+   "base_model_id": "google/gemma-7b-it",
+   "hidden_size": 3072,
+   "adapters": {
+     "adapter_1": "lamm-mit/x-lora-gemma-7b/adapter_1",
+     "adapter_2": "lamm-mit/x-lora-gemma-7b/adapter_2",
+     "adapter_3": "lamm-mit/x-lora-gemma-7b/adapter_3",
+     "adapter_4": "lamm-mit/x-lora-gemma-7b/adapter_4"
+   },
+   "enable_softmax": true,
+   "enable_softmax_topk": false,
+   "layerwise_scalings": true,
+   "xlora_depth": 2,
+   "xlora_size": 2048,
+   "enable_relu_and_dropout": true,
+   "use_bias": true,
+   "xlora_dropout_p": 0.2,
+   "use_trainable_adapters": false,
+   "softmax_temperature": 1,
+   "top_k_lora": null,
+   "scaling_pass_value": 0,
+   "global_scaling_weight": 1
+ }