duyphu committed
Commit ea7131e · verified · 1 Parent(s): 6c23243

Training in progress, step 13

adapter_config.json CHANGED
@@ -20,15 +20,15 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "v_proj",
-    "up_proj",
-    "k_proj",
+    "shared_expert_gate",
     "q_proj",
     "down_proj",
+    "gate",
+    "k_proj",
     "o_proj",
+    "v_proj",
     "gate_proj",
-    "shared_expert_gate",
-    "gate"
+    "up_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:292b29d8f127c5a9d0879eb791fce27a6fc37739d31871b0b4f88f8da014dada
+oid sha256:613dc418f7f74ba2a5e952a581791c7e59dfeaf2b044fc763fec17368fae2186
 size 327040
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d33f67516e28755d6adf60072f81b922c0dd586d6ad5dd4e7c3e1a92ce2273a0
+oid sha256:db308e89136268b0b7904fb33e4d5d741e59a3dc3d198b678512630250a15cf4
 size 6776