Training in progress, step 25
adapter_config.json
CHANGED

@@ -1,7 +1,7 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "
+  "base_model_name_or_path": "alexsherstinsky/Mistral-7B-v0.1-sharded",
   "bias": "none",
   "fan_in_fan_out": false,
   "inference_mode": true,
@@ -16,13 +16,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
+    "v_proj",
+    "up_proj",
     "o_proj",
     "q_proj",
-    "down_proj",
-    "v_proj",
-    "gate_proj",
     "k_proj",
-    "
+    "down_proj",
+    "gate_proj"
   ],
   "task_type": "CAUSAL_LM"
 }
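The updated adapter_config.json now targets all of Mistral's attention and MLP projection matrices (q/k/v/o plus gate/up/down) for the base checkpoint alexsherstinsky/Mistral-7B-v0.1-sharded. A minimal sketch of how these settings map onto peft's LoraConfig; the rank and alpha values below are assumptions, since they sit outside the hunks shown in this diff:

```python
# Hypothetical sketch of the adapter settings visible in this diff.
# r and lora_alpha are NOT shown in the hunks above and are placeholders.
from peft import LoraConfig

lora_config = LoraConfig(
    r=16,                      # assumption: rank not visible in this diff
    lora_alpha=32,             # assumption: alpha not visible in this diff
    bias="none",
    fan_in_fan_out=False,
    task_type="CAUSAL_LM",
    target_modules=[           # matches the new file's module list
        "v_proj",
        "up_proj",
        "o_proj",
        "q_proj",
        "k_proj",
        "down_proj",
        "gate_proj",
    ],
)
```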
adapter_model.safetensors
CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:87e72f051c9990ed66920d67ffad6a5dc9574b7d6c2caf2ebb83db07d22983eb
+size 42002136
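adapter_model.safetensors is stored as a Git LFS pointer and now resolves to the actual LoRA weights (~42 MB). A hedged sketch of applying this adapter to its base model; the adapter repo ID below is a placeholder, since the repository name is not part of this diff:

```python
# Hypothetical sketch: load the base model and attach the adapter from this repo.
# "your-username/your-adapter-repo" is a placeholder, not the actual repo ID.
import torch
from transformers import AutoModelForCausalLM
from peft import PeftModel

base = AutoModelForCausalLM.from_pretrained(
    "alexsherstinsky/Mistral-7B-v0.1-sharded",
    torch_dtype=torch.float16,
    device_map="auto",
)
model = PeftModel.from_pretrained(base, "your-username/your-adapter-repo")
```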
runs/Dec01_12-06-48_18e317ed094a/events.out.tfevents.1701432678.18e317ed094a.7554.0
ADDED

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e4df8976cd60ce330cd84860032860d838cb68d93361cb549e50b983227d2f70
+size 9696
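The new runs/ entry is a TensorBoard event log (also an LFS pointer, ~9.7 KB). A small sketch for reading its scalars locally, assuming the runs/ directory has been pulled from LFS:

```python
# Hypothetical sketch: inspect the newly added TensorBoard event file locally.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Dec01_12-06-48_18e317ed094a")
acc.Reload()
for tag in acc.Tags()["scalars"]:      # e.g. training loss, learning rate
    for event in acc.Scalars(tag):
        print(tag, event.step, event.value)
```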
training_args.bin
CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:0aca71db67798b119d2c3693b55cd6a103898a7e935aaf7759e9079b3f6bc999
+size 6264
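training_args.bin now resolves to a real LFS object (~6 KB). A hedged sketch of inspecting it, assuming it was written by the transformers Trainer as a pickled TrainingArguments object:

```python
# Hypothetical sketch: training_args.bin is assumed to be a pickled
# transformers.TrainingArguments; weights_only=False is needed on recent
# PyTorch because this is an arbitrary pickle, not a tensor checkpoint.
import torch

training_args = torch.load("training_args.bin", weights_only=False)
print(training_args.learning_rate, training_args.per_device_train_batch_size)
```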
training_config.json
CHANGED

@@ -9,7 +9,7 @@
   "force_fp16": false,
   "from_gptq": false,
   "huggingface_hub_token": null,
-  "deepspeed_stage":
+  "deepspeed_stage": 2,
   "deepspeed_config_path": null,
   "fsdp_strategy": "",
   "fsdp_offload": true,
@@ -38,7 +38,7 @@
   "tokenizer_padding_side": null,
   "collator_key": "lm",
   "max_length": 2048,
-  "model_name_or_path": "
+  "model_name_or_path": "alexsherstinsky/Mistral-7B-v0.1-sharded",
   "push_to_hub_bos_add_bos_token": false,
   "use_flash_attention_2": false,
   "trust_remote_code": false,
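training_config.json now pins the same base checkpoint and sets "deepspeed_stage": 2, i.e. ZeRO stage 2 (optimizer-state and gradient partitioning). A hedged sketch of the kind of DeepSpeed configuration that flag presumably expands to; these are illustrative keys, not the exact config generated by the training framework:

```python
# Hypothetical sketch of a minimal ZeRO stage 2 DeepSpeed configuration.
deepspeed_config = {
    "zero_optimization": {
        "stage": 2,                  # partition optimizer states and gradients
        "overlap_comm": True,        # overlap gradient reduction with backward pass
        "contiguous_gradients": True,
    },
    "train_micro_batch_size_per_gpu": "auto",
    "gradient_accumulation_steps": "auto",
    "bf16": {"enabled": "auto"},
}
```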