ZeL1k7 committed
Commit: a186d65
Parent: 104665e

Remove unnecessary files (#1)


- fix files (120e0ce8de11539c0413a66defd5d58527d83dc0)

added_tokens.json DELETED
@@ -1,24 +0,0 @@
-{
-  "</tool_call>": 151658,
-  "<tool_call>": 151657,
-  "<|box_end|>": 151649,
-  "<|box_start|>": 151648,
-  "<|endoftext|>": 151643,
-  "<|file_sep|>": 151664,
-  "<|fim_middle|>": 151660,
-  "<|fim_pad|>": 151662,
-  "<|fim_prefix|>": 151659,
-  "<|fim_suffix|>": 151661,
-  "<|im_end|>": 151645,
-  "<|im_start|>": 151644,
-  "<|image_pad|>": 151655,
-  "<|object_ref_end|>": 151647,
-  "<|object_ref_start|>": 151646,
-  "<|quad_end|>": 151651,
-  "<|quad_start|>": 151650,
-  "<|repo_name|>": 151663,
-  "<|video_pad|>": 151656,
-  "<|vision_end|>": 151653,
-  "<|vision_pad|>": 151654,
-  "<|vision_start|>": 151652
-}
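These id mappings normally duplicate the special-token entries already stored in `tokenizer.json`, so removing `added_tokens.json` should leave the vocabulary unchanged. A minimal sanity-check sketch, assuming the tokenizer still carries these entries (the repo id below is a placeholder, not taken from this commit):

```python
from transformers import AutoTokenizer

# Placeholder repo id; substitute the actual model path for this repository.
tokenizer = AutoTokenizer.from_pretrained("your-org/your-qwen2-model")

# The special tokens should still resolve to the ids listed in the deleted file,
# e.g. <|im_end|> -> 151645 and <|endoftext|> -> 151643.
for token in ("<|im_start|>", "<|im_end|>", "<|endoftext|>"):
    print(token, tokenizer.convert_tokens_to_ids(token))
```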
config.json CHANGED
@@ -1,5 +1,4 @@
 {
-  "_name_or_path": "/from_s3/model",
   "architectures": [
     "Qwen2ForCausalLM"
   ],
generation_config.json CHANGED
@@ -1,6 +1,14 @@
 {
-  "_from_model_config": true,
   "bos_token_id": 151643,
-  "eos_token_id": 151645,
+  "pad_token_id": 151643,
+  "do_sample": true,
+  "eos_token_id": [
+    151645,
+    151643
+  ],
+  "repetition_penalty": 1.05,
+  "temperature": 0.7,
+  "top_p": 0.8,
+  "top_k": 70,
   "transformers_version": "4.46.2"
-}
+}
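The new defaults (sampling with temperature 0.7, top-p 0.8, top-k 70, repetition penalty 1.05, and both `<|im_end|>` and `<|endoftext|>` as stop ids) are what `transformers` applies when `generate()` is called without explicit overrides. A rough sketch of how they can be inspected and exercised; the repo id is again a placeholder:

```python
from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig

repo = "your-org/your-qwen2-model"  # placeholder, not part of this commit
tokenizer = AutoTokenizer.from_pretrained(repo)
model = AutoModelForCausalLM.from_pretrained(repo)

# generation_config.json is loaded automatically, but it can also be read explicitly.
gen_cfg = GenerationConfig.from_pretrained(repo)
print(gen_cfg.temperature, gen_cfg.top_p, gen_cfg.top_k, gen_cfg.eos_token_id)

inputs = tokenizer("Hello", return_tensors="pt")
# Without overrides, generate() samples using the defaults committed above.
outputs = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```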
special_tokens_map.json DELETED
@@ -1,31 +0,0 @@
-{
-  "additional_special_tokens": [
-    "<|im_start|>",
-    "<|im_end|>",
-    "<|object_ref_start|>",
-    "<|object_ref_end|>",
-    "<|box_start|>",
-    "<|box_end|>",
-    "<|quad_start|>",
-    "<|quad_end|>",
-    "<|vision_start|>",
-    "<|vision_end|>",
-    "<|vision_pad|>",
-    "<|image_pad|>",
-    "<|video_pad|>"
-  ],
-  "eos_token": {
-    "content": "<|endoftext|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "pad_token": {
-    "content": "<|endoftext|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  }
-}
tokenizer_config.json CHANGED
@@ -197,7 +197,7 @@
   "bos_token": null,
   "chat_template": "{% for message in messages %}\n {{- '<|im_start|>' + message.role + '\n' + message.content + '<|im_end|>' + '\n' -}}\n{% endfor %}\n{%- if add_generation_prompt %}\n {{- '<|im_start|>assistant\n' -}}\n{%- endif %}",
   "clean_up_tokenization_spaces": false,
-  "eos_token": "<|endoftext|>",
+  "eos_token": "<|im_end|>",
   "errors": "replace",
   "model_max_length": 8192,
   "pad_token": "<|endoftext|>",
trainer_state.json DELETED
The diff for this file is too large to render. See raw diff
 
training_args.bin DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:8bac43683e08677189ad196b17c90e3c619d4d4929e3f8f6483d571c8ebca8ed
-size 7608