Ed13210 committed on
Commit 4f762a3 • 1 Parent(s): 56f3ed4

Training in progress, epoch 1
adapter_config.json CHANGED
@@ -1,11 +1,12 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "OuteAI/Lite-Mistral-150M-v2-Instruct",
+  "base_model_name_or_path": "codellama/CodeLlama-7b-hf",
   "bias": "none",
   "fan_in_fan_out": false,
   "inference_mode": true,
   "init_lora_weights": true,
+  "layer_replication": null,
   "layers_pattern": null,
   "layers_to_transform": null,
   "loftq_config": {},
@@ -19,14 +20,15 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "v_proj",
-    "gate_proj",
     "o_proj",
-    "down_proj",
     "up_proj",
     "k_proj",
-    "q_proj"
+    "q_proj",
+    "v_proj",
+    "gate_proj",
+    "down_proj"
   ],
   "task_type": "CAUSAL_LM",
+  "use_dora": false,
   "use_rslora": false
 }
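
The new adapter config switches the base model to CodeLlama-7B and targets all seven Llama-style projection modules with LoRA. A minimal PEFT sketch of a config of this shape follows; note that `r`, `lora_alpha`, and `lora_dropout` are illustrative assumptions, since those fields are not visible in the hunks above:

```python
# Minimal sketch, assuming a recent PEFT release (>= 0.10, which has use_dora).
# r, lora_alpha, and lora_dropout are placeholders: the diff does not show them.
from peft import LoraConfig, get_peft_model
from transformers import AutoModelForCausalLM

base = AutoModelForCausalLM.from_pretrained("codellama/CodeLlama-7b-hf")

lora_config = LoraConfig(
    target_modules=[
        "o_proj", "up_proj", "k_proj",
        "q_proj", "v_proj", "gate_proj", "down_proj",
    ],
    task_type="CAUSAL_LM",
    use_dora=False,      # matches the new "use_dora": false field
    use_rslora=False,    # matches "use_rslora": false
    r=16,                # assumption: rank not shown in this diff
    lora_alpha=32,       # assumption
    lora_dropout=0.05,   # assumption
)
model = get_peft_model(base, lora_config)  # wraps the base model with adapters
```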
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:9c35df868127e63aabdeb8c9cec03a586d22f0713039943a8fde3350007c90c8
-size 202142240
+oid sha256:ecc7ce01456c382d2075719afa8f4532a9614a18e04c160ab934cf2ea2cfbaaa
+size 1803907984
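
The file tracked here is a Git LFS pointer; the adapter payload grows from ~202 MB to ~1.8 GB, consistent with moving from a 150M-parameter base to a 7B one. A hedged sketch of reloading such an adapter onto its base model (the repo id is a placeholder, not a confirmed name):

```python
# Sketch: "Ed13210/<this-repo>" is a placeholder id for illustration only.
from peft import PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer

base = AutoModelForCausalLM.from_pretrained("codellama/CodeLlama-7b-hf")
model = PeftModel.from_pretrained(base, "Ed13210/<this-repo>")   # loads adapter_model.safetensors
tokenizer = AutoTokenizer.from_pretrained("Ed13210/<this-repo>")
```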
runs/Aug08_19-51-50_ialabps1/events.out.tfevents.1723139515.ialabps1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:08c592f50f145cd769fa481848942f56408418bd85d5cf18b51bf2beb3665a7a
+size 4148
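
The added events.out.tfevents file is a TensorBoard event log written during training. One way to inspect it without launching TensorBoard, sketched with the event_accumulator utility:

```python
# Sketch: dump scalar summaries (e.g. train/loss) from the run directory.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Aug08_19-51-50_ialabps1")
acc.Reload()  # parse the event file from disk
for tag in acc.Tags()["scalars"]:
    for event in acc.Scalars(tag):
        print(tag, event.step, event.value)
```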
special_tokens_map.json CHANGED
@@ -1,14 +1,14 @@
 {
   "additional_special_tokens": [
     {
-      "content": "<|im_end|>",
+      "content": "<|im_start|>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
       "single_word": false
     },
     {
-      "content": "<|im_start|>",
+      "content": "<|im_end|>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -1,7 +1,6 @@
 {
   "add_bos_token": true,
   "add_eos_token": false,
-  "add_prefix_space": true,
   "added_tokens_decoder": {
     "0": {
       "content": "<unk>",
@@ -27,37 +26,73 @@
       "single_word": false,
       "special": true
     },
-    "32000": {
-      "content": "<|im_end|>",
+    "32007": {
+      "content": "▁<PRE>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "32008": {
+      "content": "▁<SUF>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "32009": {
+      "content": "▁<MID>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
       "single_word": false,
       "special": true
     },
-    "32001": {
+    "32010": {
+      "content": "▁<EOT>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "32016": {
       "content": "<|im_start|>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
       "single_word": false,
       "special": true
+    },
+    "32017": {
+      "content": "<|im_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
     }
   },
   "additional_special_tokens": [
-    "<|im_end|>",
-    "<|im_start|>"
+    "<|im_start|>",
+    "<|im_end|>"
   ],
   "bos_token": "<|im_start|>",
   "chat_template": "{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
   "clean_up_tokenization_spaces": false,
   "eos_token": "<|im_end|>",
-  "legacy": true,
+  "eot_token": "▁<EOT>",
+  "fill_token": "<FILL_ME>",
+  "legacy": null,
+  "middle_token": "▁<MID>",
   "model_max_length": 1000000000000000019884624838656,
   "pad_token": "<|im_end|>",
+  "prefix_token": "▁<PRE>",
   "sp_model_kwargs": {},
-  "spaces_between_special_tokens": false,
-  "tokenizer_class": "LlamaTokenizer",
+  "suffix_token": "▁<SUF>",
+  "tokenizer_class": "CodeLlamaTokenizer",
   "unk_token": "<unk>",
   "use_default_system_prompt": false
 }
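
The tokenizer now uses the CodeLlamaTokenizer class, which brings CodeLlama's fill-in-the-middle specials (▁<PRE>, ▁<SUF>, ▁<MID>, ▁<EOT>, <FILL_ME>), while <|im_start|>/<|im_end|> remain the ChatML markers and the chat_template is the standard ChatML loop. A sketch of what that template renders to (the repo id is again a placeholder):

```python
# Sketch: render the ChatML template defined in this tokenizer_config.json.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("Ed13210/<this-repo>")  # placeholder id
messages = [{"role": "user", "content": "Write a hello-world in Python."}]
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)
# Expected shape, per the template string:
# <|im_start|>user
# Write a hello-world in Python.<|im_end|>
# <|im_start|>assistant
```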
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a889a1db60b38d996929a6f616680c802df5f330103cd21e28168383218b07f7
-size 4728
+oid sha256:61b170dac20a370084a4768aa19d777a7337171f7df06d5f474dd9866814eb01
+size 5240
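
training_args.bin is the pickled TrainingArguments object the transformers Trainer saves alongside checkpoints. A sketch of inspecting it locally (recent torch versions require weights_only=False to unpickle arbitrary objects):

```python
# Sketch: peek at the hyperparameters recorded in training_args.bin.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)
```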