Delete mixtral-8x7b
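This commit removes the mixtral-8x7b adapter folder from the repository. As a point of reference only, a folder deletion like this can be issued through the Hub API; the sketch below is a hypothetical reproduction (the repo_id is a placeholder, and the original commit may just as well have been made via git or the web UI).

from huggingface_hub import HfApi

api = HfApi()
api.delete_folder(
    path_in_repo="mixtral-8x7b",      # folder removed by this commit
    repo_id="user/repo",              # placeholder: the actual repo id is not shown here
    commit_message="Delete mixtral-8x7b",
)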
mixtral-8x7b/adapter_config.json
DELETED
@@ -1,33 +0,0 @@
-{
-  "alpha_pattern": {},
-  "auto_mapping": null,
-  "base_model_name_or_path": "./curr-model",
-  "bias": "none",
-  "fan_in_fan_out": null,
-  "inference_mode": true,
-  "init_lora_weights": true,
-  "layers_pattern": null,
-  "layers_to_transform": null,
-  "loftq_config": {},
-  "lora_alpha": 16,
-  "lora_dropout": 0.01,
-  "megatron_config": null,
-  "megatron_core": "megatron.core",
-  "modules_to_save": null,
-  "peft_type": "LORA",
-  "r": 64,
-  "rank_pattern": {},
-  "revision": null,
-  "target_modules": [
-    "w3",
-    "w2",
-    "w1",
-    "gate",
-    "q_proj",
-    "o_proj",
-    "k_proj",
-    "v_proj"
-  ],
-  "task_type": "CAUSAL_LM",
-  "use_rslora": false
-}
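The deleted adapter_config.json is a standard PEFT LoRA configuration for Mixtral-8x7B: rank 64, alpha 16, 1% dropout, with LoRA applied to both the attention projections (q/k/v/o_proj) and the MoE expert and router layers (w1, w2, w3, gate). A minimal sketch of how an equivalent config could be recreated with peft follows; the peft version (recent enough to support use_rslora/loftq_config) and the save path are assumptions, not part of this diff.

from peft import LoraConfig

# Hypothetical reconstruction of the deleted config; field values copied from the diff above.
config = LoraConfig(
    base_model_name_or_path="./curr-model",
    r=64,
    lora_alpha=16,
    lora_dropout=0.01,
    bias="none",
    task_type="CAUSAL_LM",
    target_modules=["w3", "w2", "w1", "gate", "q_proj", "o_proj", "k_proj", "v_proj"],
    init_lora_weights=True,
    use_rslora=False,
    inference_mode=True,
)
config.save_pretrained("mixtral-8x7b")  # would write an adapter_config.json like the one deleted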
mixtral-8x7b/adapter_model.bin
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:9efc57848917f36627af0c935a77bfc05050250fb760503339907daa5622ec5f
-size 3876297957
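adapter_model.bin was stored through Git LFS, so the deleted file in the repository is only a pointer: the oid line is the SHA-256 of the actual weights and size is their byte length (about 3.9 GB). Below is a small sketch for checking a locally downloaded copy against that pointer; the local path is an assumption.

import hashlib
import os

EXPECTED_OID = "9efc57848917f36627af0c935a77bfc05050250fb760503339907daa5622ec5f"  # from the pointer above
EXPECTED_SIZE = 3876297957                                                          # bytes, from the pointer above

def matches_lfs_pointer(path):
    # Compare a local file's size and SHA-256 against the deleted LFS pointer.
    if os.path.getsize(path) != EXPECTED_SIZE:
        return False
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == EXPECTED_OID

# matches_lfs_pointer("mixtral-8x7b/adapter_model.bin")  # hypothetical local copy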
mixtral-8x7b/trainer_state.json
DELETED
The diff for this file is too large to render.
See raw diff
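trainer_state.json is the Hugging Face Trainer's running log (global_step, log_history with per-step loss and learning-rate entries, best-checkpoint info), which is why its diff is too large to render inline. If a local copy of the deleted file exists, it can be inspected as plain JSON; the path and keys below assume a standard Trainer run and are not shown in this diff.

import json

with open("mixtral-8x7b/trainer_state.json") as f:  # hypothetical local copy
    state = json.load(f)

print("global_step:", state.get("global_step"))
for entry in state.get("log_history", [])[-5:]:      # last few logged steps
    print(entry.get("step"), entry.get("loss"))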