mjbuehler committed
Commit 53c1bd2
1 Parent(s): 62b5bbf

Upload xlora_config.json

Files changed (1)
  1. xlora_config.json +1 -1
xlora_config.json CHANGED
@@ -1 +1 @@
- {"hidden_size": 3072, "adapters": {"adapter_1": "./adapter_1", "adapter_2": "/./adapter_2", "adapter_3": "./adapter_3", "adapter_4": "./adapter_4"}, "enable_softmax": true, "enable_softmax_topk": false, "layerwise_scalings": true, "xlora_depth": 2, "xlora_size": 2048, "enable_relu_and_dropout": true, "use_bias": true, "xlora_dropout_p": 0.2, "use_trainable_adapters": false, "softmax_temperature": 1.0, "top_k_lora": null, "scaling_pass_value": 0, "global_scaling_weight": 1.0}
+ {"hidden_size": 3072, "adapters": {"adapter_1": "lamm-mit/x-lora-gemma-7b/adapter_1", "adapter_2": "lamm-mit/x-lora-gemma-7b/adapter_2", "adapter_3": "lamm-mit/x-lora-gemma-7b/adapter_3", "adapter_4": "lamm-mit/x-lora-gemma-7b/adapter_4"}, "enable_softmax": true, "enable_softmax_topk": false, "layerwise_scalings": true, "xlora_depth": 2, "xlora_size": 2048, "enable_relu_and_dropout": true, "use_bias": true, "xlora_dropout_p": 0.2, "use_trainable_adapters": false, "softmax_temperature": 1.0, "top_k_lora": null, "scaling_pass_value": 0, "global_scaling_weight": 1.0}