Ed13210 committed
Commit 51a0046
1 Parent(s): c5c676c

Training in progress, epoch 3
adapter_config.json CHANGED
@@ -20,13 +20,13 @@
  "rank_pattern": {},
  "revision": null,
  "target_modules": [
- "up_proj",
- "q_proj",
- "down_proj",
  "v_proj",
- "o_proj",
  "gate_proj",
- "k_proj"
+ "up_proj",
+ "o_proj",
+ "k_proj",
+ "down_proj",
+ "q_proj"
  ],
  "task_type": "CAUSAL_LM",
  "use_dora": false,
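
The adapter_config.json hunk only reorders the entries of target_modules; the set of adapted projections (q/k/v/o plus gate/up/down) is the same before and after, and PEFT treats the list as a set, so the reorder should have no functional effect. A minimal sketch of the corresponding PEFT config, assuming hypothetical rank and alpha values (only target_modules, task_type, and use_dora are taken from this diff):

```python
# Sketch of the LoRA config implied by the updated adapter_config.json.
# r and lora_alpha are assumptions; they are not visible in this hunk.
from peft import LoraConfig

lora_config = LoraConfig(
    r=16,                       # assumed rank
    lora_alpha=32,              # assumed scaling factor
    target_modules=[            # order as written after this commit
        "v_proj", "gate_proj", "up_proj", "o_proj",
        "k_proj", "down_proj", "q_proj",
    ],
    task_type="CAUSAL_LM",
    use_dora=False,
)
print(sorted(lora_config.target_modules))  # PEFT stores these as a set
```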
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:a98ce63b60eb839e68980447252126a9ea97f7b98446a960f147bce7a49cc7d9
+ oid sha256:163148fb2b06f34bb72966f380ab20b7319418d0491aca23c3b4bbb4274a8c3b
  size 1803907984
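
The adapter_model.safetensors hunk only swaps the LFS pointer's oid: the adapter weights were replaced by the latest checkpoint while the file size stayed at 1,803,907,984 bytes. A minimal sketch, assuming safetensors and torch are installed and the repository is checked out locally, for listing the tensors stored in the file:

```python
# Sketch: inspect the adapter weights without loading the whole ~1.8 GB file.
# The relative path assumes the script runs from the repository root.
from safetensors import safe_open

with safe_open("adapter_model.safetensors", framework="pt") as f:
    for name in list(f.keys())[:10]:                 # first few LoRA tensors
        print(name, tuple(f.get_tensor(name).shape))
```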
runs/Aug14_23-00-46_ialabps1/events.out.tfevents.1723669251.ialabps1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a08b8fd5dcaccd6f77f433ade355eb84e0917e0568225e0679293c7980a2d426
+ size 5800
runs/Aug15_00-27-24_ialabps1/events.out.tfevents.1723674450.ialabps1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c53b9df6a1e39ac2cdf91a358bfa323821e817a380fbddda4931190c8066008c
+ size 5452
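
The two ADDED files under runs/ are TensorBoard event logs written during this training run; the pointers above record only their oid and size, while the logged metrics live in the LFS objects. A minimal sketch, assuming the tensorboard package and a local checkout, for reading the scalars from one of the runs:

```python
# Sketch: read the scalar metrics (loss, learning rate, ...) from an event file.
# The run directory is the one added in this commit, relative to the repo root.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Aug15_00-27-24_ialabps1")
acc.Reload()  # parse the events.out.tfevents.* file(s) in the directory

for tag in acc.Tags().get("scalars", []):
    points = [(e.step, e.value) for e in acc.Scalars(tag)]
    print(tag, points[:5])
```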
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:21a19b614a18a526125fc4f6782c5dfdf9bbee1419b2a4f63bd0ebba29123543
+ oid sha256:92106cee763fc503a9a31ca143112bc438ec4db14db9c114fad9a9025a237f68
  size 5240
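
Every binary artifact in this commit is stored as a Git LFS pointer, whose oid line is the SHA-256 of the real file and whose size line is its length in bytes. A minimal sketch, assuming a local download of training_args.bin, for checking that the artifact matches the oid recorded above:

```python
# Sketch: verify a downloaded artifact against the sha256 oid in its LFS pointer.
# EXPECTED_OID is the value added in this commit for training_args.bin.
import hashlib
from pathlib import Path

EXPECTED_OID = "92106cee763fc503a9a31ca143112bc438ec4db14db9c114fad9a9025a237f68"

def sha256_of(path: Path, chunk_size: int = 1 << 20) -> str:
    """Hash the file in chunks so large artifacts never sit fully in memory."""
    digest = hashlib.sha256()
    with path.open("rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

artifact = Path("training_args.bin")  # assumed path in a local checkout
print(sha256_of(artifact) == EXPECTED_OID)
```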