palicoqiqi committed
Commit ed7ca82 • 1 parent: 266934b
Training in progress, epoch 0

Files changed:
- adapter_config.json +33 -33
- adapter_model.safetensors +1 -1
- runs/Dec02_23-59-20_mmg2/events.out.tfevents.1733209165.mmg2 +3 -0
- runs/Dec03_00-18-58_mmg2/events.out.tfevents.1733210338.mmg2 +3 -0
- runs/Dec03_00-26-58_mmg2/events.out.tfevents.1733210819.mmg2 +3 -0
- runs/Dec03_08-11-49_mmg2/events.out.tfevents.1733238709.mmg2 +3 -0
- training_args.bin +2 -2
adapter_config.json CHANGED
@@ -1,34 +1,34 @@
-{
-  "alpha_pattern": {},
-  "auto_mapping": null,
-  "base_model_name_or_path": "google/paligemma-3b-ft-ocrvqa-448",
-  "bias": "none",
-  "fan_in_fan_out": false,
-  "inference_mode": true,
-  "init_lora_weights": true,
-  "layer_replication": null,
-  "layers_pattern": null,
-  "layers_to_transform": null,
-  "loftq_config": {},
-  "lora_alpha": 8,
-  "lora_dropout": 0.0,
-  "megatron_config": null,
-  "megatron_core": "megatron.core",
-  "modules_to_save": null,
-  "peft_type": "LORA",
-  "r": 8,
-  "rank_pattern": {},
-  "revision": null,
-  "target_modules": [
-    "o_proj",
-    "
-    "
-    "down_proj",
-    "k_proj",
-    "
-    "
-  ],
-  "task_type": "CAUSAL_LM",
-  "use_dora": false,
-  "use_rslora": false
+{
+  "alpha_pattern": {},
+  "auto_mapping": null,
+  "base_model_name_or_path": "google/paligemma-3b-ft-ocrvqa-448",
+  "bias": "none",
+  "fan_in_fan_out": false,
+  "inference_mode": true,
+  "init_lora_weights": true,
+  "layer_replication": null,
+  "layers_pattern": null,
+  "layers_to_transform": null,
+  "loftq_config": {},
+  "lora_alpha": 8,
+  "lora_dropout": 0.0,
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "modules_to_save": null,
+  "peft_type": "LORA",
+  "r": 8,
+  "rank_pattern": {},
+  "revision": null,
+  "target_modules": [
+    "o_proj",
+    "q_proj",
+    "up_proj",
+    "down_proj",
+    "k_proj",
+    "v_proj",
+    "gate_proj"
+  ],
+  "task_type": "CAUSAL_LM",
+  "use_dora": false,
+  "use_rslora": false
 }
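The new config describes a standard PEFT LoRA adapter over the PaliGemma OCR-VQA checkpoint: rank r=8, lora_alpha=8, no dropout, targeting all seven attention and MLP projection matrices. A config like this is typically produced by peft's LoraConfig; the snippet below is a minimal sketch reconstructing it from the values above, not the actual training script, which is not part of this commit.

# Sketch (assumed, not from this commit): recreating the adapter
# configuration recorded in adapter_config.json with the peft library.
from transformers import PaliGemmaForConditionalGeneration
from peft import LoraConfig, get_peft_model

lora_config = LoraConfig(
    r=8,                    # "r": 8
    lora_alpha=8,           # "lora_alpha": 8 (scaling alpha/r = 1.0)
    lora_dropout=0.0,       # "lora_dropout": 0.0
    bias="none",            # "bias": "none"
    task_type="CAUSAL_LM",  # "task_type": "CAUSAL_LM"
    target_modules=[
        "o_proj", "q_proj", "up_proj", "down_proj",
        "k_proj", "v_proj", "gate_proj",
    ],
)

base = PaliGemmaForConditionalGeneration.from_pretrained(
    "google/paligemma-3b-ft-ocrvqa-448"
)
model = get_peft_model(base, lora_config)
model.print_trainable_parameters()  # only the LoRA matrices are trainable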
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:5bb18727887b2bb2ef69c876cb66c0451b225578f163badf69d3c7e8a2f9665f
 size 45258384
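adapter_model.safetensors is a git-lfs pointer; its 45,258,384-byte payload holds only the LoRA weight deltas, not the full 3B base model. Assuming this repository is a standard PEFT adapter, loading it looks roughly like the sketch below (the repo id is a placeholder for this repository's actual id):

# Assumed usage sketch: attach this adapter to its base model with peft.
from transformers import PaliGemmaForConditionalGeneration
from peft import PeftModel

base = PaliGemmaForConditionalGeneration.from_pretrained(
    "google/paligemma-3b-ft-ocrvqa-448"
)
# "palicoqiqi/<repo>" is a placeholder; from_pretrained fetches
# adapter_config.json and adapter_model.safetensors from the Hub.
model = PeftModel.from_pretrained(base, "palicoqiqi/<repo>")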
runs/Dec02_23-59-20_mmg2/events.out.tfevents.1733209165.mmg2 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:425f94c49653231cf3257a907b307770efa363f5efd4a42b3363eb302ffb2cdf
+size 5883
runs/Dec03_00-18-58_mmg2/events.out.tfevents.1733210338.mmg2 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7270a409b59a6349533bc815edc33f83ea29dc3c8d85a70698edf6d479d92f21
+size 5883
runs/Dec03_00-26-58_mmg2/events.out.tfevents.1733210819.mmg2 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5bce679313380ab2320d75e556a5d1129d70954577bcdff6e151abab6f6a7059
+size 5883
runs/Dec03_08-11-49_mmg2/events.out.tfevents.1733238709.mmg2 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6435c3df670cced5d9f80753c1a63c9b2c15816f9194ed5e3a257aca0dd50b22
+size 10103
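The three 5,883-byte event files are likely short-lived launches, while the 10,103-byte file from Dec03_08-11-49 logged the most steps. After fetching the LFS payloads (git lfs pull), the logged scalars can be read with TensorBoard's EventAccumulator; the sketch below assumes only the run directory path visible in the diff, since the actual tag names are not recoverable from the pointers:

# Sketch: dump all logged scalars from the longest run's event file.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Dec03_08-11-49_mmg2")
acc.Reload()  # parse the events.out.tfevents.* file in that directory
for tag in acc.Tags()["scalars"]:
    for event in acc.Scalars(tag):
        print(tag, event.step, event.value)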
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:610951118a90a7a51c14582e09b3927b8cddc087cd2b1c4353476c303cce30e7
+size 5304
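training_args.bin is the pickled transformers.TrainingArguments object that the Trainer writes alongside checkpoints; the diff shows only that it changed, not which hyperparameters did. Locally it can be inspected with a sketch like the following (weights_only=False is required because the file is a full pickle, so only unpickle sources you trust):

# Sketch: inspect the pickled TrainingArguments saved by the Trainer.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)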