binhquoc committed
Commit fb51f5b · 1 Parent(s): ff7a822

Upload model

Files changed (3)
  1. README.md +1 -1
  2. adapter_config.json +4 -3
  3. adapter_model.safetensors +2 -2
README.md CHANGED
@@ -1,6 +1,6 @@
 ---
 library_name: peft
-base_model: berkeley-nest/Starling-LM-7B-alpha
+base_model: WizardLM/WizardMath-7B-V1.0
 ---
 
 # Model Card for Model ID
adapter_config.json CHANGED
@@ -1,13 +1,14 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "berkeley-nest/Starling-LM-7B-alpha",
+  "base_model_name_or_path": "WizardLM/WizardMath-7B-V1.0",
   "bias": "none",
   "fan_in_fan_out": false,
   "inference_mode": true,
   "init_lora_weights": true,
   "layers_pattern": null,
   "layers_to_transform": null,
+  "loftq_config": {},
   "lora_alpha": 32,
   "lora_dropout": 0.05,
   "modules_to_save": null,
@@ -16,8 +17,8 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "q_proj",
-    "v_proj"
+    "v_proj",
+    "q_proj"
   ],
   "task_type": "CAUSAL_LM"
 }
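
This is a standard PEFT LoRA adapter config: the commit repoints the adapter from Starling-LM-7B-alpha to WizardMath-7B-V1.0 while keeping the LoRA settings (q_proj/v_proj targets, lora_alpha 32, dropout 0.05). A minimal sketch of loading the adapter on its new base model follows; the adapter repo id is a placeholder, since the diff does not name the repository:

```python
# Minimal sketch: attach the LoRA adapter to its new base model.
# Assumptions: "binhquoc/adapter" is a PLACEHOLDER repo id (not given in
# the diff), and transformers + peft + torch are installed.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base = AutoModelForCausalLM.from_pretrained(
    "WizardLM/WizardMath-7B-V1.0",  # matches base_model_name_or_path above
    torch_dtype=torch.float16,
)
tokenizer = AutoTokenizer.from_pretrained("WizardLM/WizardMath-7B-V1.0")

# Loads the LoRA weights (q_proj / v_proj) from adapter_model.safetensors.
model = PeftModel.from_pretrained(base, "binhquoc/adapter")  # placeholder id
model.eval()
```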
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ccec02e580df514a1def6def1d64fe8ab25e5bdf43435f1bdf5fa0a8c33093a5
-size 27280152
+oid sha256:2dd929fab5af16166322118f3dfb34293e3b2ed802d31bde6f867bed12b2eb19
+size 33571624
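
The weights file is stored via Git LFS, so the diff changes only the pointer: the blob's SHA-256 (oid) and its byte size. A minimal sketch, assuming the file has already been downloaded to the working directory, to check a local copy against the new pointer:

```python
# Minimal sketch: verify a downloaded file against the Git LFS pointer above.
# Assumes adapter_model.safetensors is in the current directory.
import hashlib
import os

EXPECTED_OID = "2dd929fab5af16166322118f3dfb34293e3b2ed802d31bde6f867bed12b2eb19"
EXPECTED_SIZE = 33571624
PATH = "adapter_model.safetensors"

h = hashlib.sha256()
with open(PATH, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)

assert os.path.getsize(PATH) == EXPECTED_SIZE, "size mismatch"
assert h.hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("pointer matches: oid and size verified")
```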