dx2102 committed
Commit c91847d · verified · 1 Parent(s): 940857a

Upload entire folder

config.json CHANGED
@@ -29,8 +29,8 @@
   },
   "rope_theta": 500000.0,
   "tie_word_embeddings": true,
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.46.1",
-  "use_cache": false,
+  "torch_dtype": "float32",
+  "transformers_version": "4.47.1",
+  "use_cache": true,
   "vocab_size": 128256
 }
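The substantive change here is the stored dtype (bfloat16 → float32), plus a transformers_version bump to 4.47.1 and re-enabling use_cache. A minimal sketch of how these fields surface at load time; the repo id is a placeholder, not taken from this commit:

import torch
from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "dx2102/some-model"  # hypothetical repo id, substitute the real one

# The fields changed in this commit are visible on the loaded config.
cfg = AutoConfig.from_pretrained(repo_id)
print(cfg.torch_dtype, cfg.use_cache)  # after this commit: float32, use_cache=True

# float32 weights take 4 bytes per parameter; casting back to bfloat16 at load
# time halves the in-memory footprint without touching the files on disk.
model = AutoModelForCausalLM.from_pretrained(repo_id, torch_dtype=torch.bfloat16)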
generation_config.json CHANGED
@@ -5,5 +5,5 @@
   "eos_token_id": 128001,
   "temperature": 0.6,
   "top_p": 0.9,
-  "transformers_version": "4.46.1"
+  "transformers_version": "4.47.1"
 }
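Only transformers_version changes here; the sampling defaults (eos_token_id 128001, temperature 0.6, top_p 0.9) are untouched. A sketch of how generate() picks these defaults up, again with a placeholder repo id:

from transformers import GenerationConfig

repo_id = "dx2102/some-model"  # hypothetical repo id

gen_cfg = GenerationConfig.from_pretrained(repo_id)
print(gen_cfg.eos_token_id, gen_cfg.temperature, gen_cfg.top_p)  # 128001, 0.6, 0.9

# model.generate() reads these defaults automatically; per-call arguments win,
# e.g. model.generate(**inputs, do_sample=True, temperature=0.6, top_p=0.9)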
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:94123f2e300fd3ba84a99defb4163b50255615507ddef4bd01a0bc9b397389fa
-size 2996982344
+oid sha256:5360b42ff161d858c4f92fb551861b980e44de5bb657f87c21e0641d8ed2e5be
+size 4943274328
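model.safetensors is tracked with Git LFS, so the repository stores only a pointer file (spec version, sha256 oid, byte size); this commit swaps in the pointer for the re-exported weights. A small sketch, assuming the real file has already been pulled locally, for checking that the download matches the new pointer:

import hashlib
from pathlib import Path

path = Path("model.safetensors")  # local copy fetched via git lfs or huggingface_hub
expected_oid = "5360b42ff161d858c4f92fb551861b980e44de5bb657f87c21e0641d8ed2e5be"
expected_size = 4943274328

h = hashlib.sha256()
with path.open("rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert path.stat().st_size == expected_size, "size does not match the LFS pointer"
assert h.hexdigest() == expected_oid, "sha256 does not match the LFS pointer"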
special_tokens_map.json CHANGED
@@ -1,20 +1,4 @@
 {
-  "additional_special_tokens": [
-    {
-      "content": "<|eot_id|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false
-    },
-    {
-      "content": "<|eom_id|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false
-    }
-  ],
   "bos_token": {
     "content": "<|begin_of_text|>",
     "lstrip": false,
tokenizer.json CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b
-size 17209920
+oid sha256:13d54255bb1f0d092f8b5299ce09811db0a0e88a433633d48628d8ff9a875eff
+size 17210019
tokenizer_config.json CHANGED
@@ -2049,21 +2049,15 @@
     "special": true
   }
 },
-  "additional_special_tokens": [
-    "<|eot_id|>",
-    "<|eom_id|>"
-  ],
   "bos_token": "<|begin_of_text|>",
-  "chat_template": "{{ '<|begin_of_text|>' }}{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% else %}{% set loop_messages = messages %}{% endif %}{% if system_message is defined %}{{ '<|start_header_id|>system<|end_header_id|>\n\n' + system_message + '<|eot_id|>' }}{% endif %}{% for message in loop_messages %}{% set content = message['content'] %}{% if message['role'] == 'user' %}{{ '<|start_header_id|>user<|end_header_id|>\n\n' + content + '<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n' }}{% elif message['role'] == 'assistant' %}{{ content + '<|end_of_text|>' }}{% endif %}{% endfor %}",
   "clean_up_tokenization_spaces": true,
   "eos_token": "<|end_of_text|>",
+  "extra_special_tokens": {},
   "model_input_names": [
     "input_ids",
     "attention_mask"
   ],
   "model_max_length": 131072,
   "pad_token": "<|end_of_text|>",
-  "padding_side": "right",
-  "split_special_tokens": false,
   "tokenizer_class": "PreTrainedTokenizerFast"
 }
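Besides dropping additional_special_tokens, padding_side, and split_special_tokens (which then fall back to library defaults), this commit removes the stored chat_template and adds an empty extra_special_tokens entry written by newer transformers versions. With no stored template, apply_chat_template needs one supplied per call. A sketch under those assumptions; the template below is a trimmed copy of the string removed above (system-message branch omitted), and the repo id is a placeholder:

from transformers import AutoTokenizer

repo_id = "dx2102/some-model"  # hypothetical repo id, substitute the real one

tok = AutoTokenizer.from_pretrained(repo_id)
print(tok.chat_template)  # expected after this commit: None

# Trimmed version of the chat_template deleted in this commit, passed explicitly
# because the tokenizer no longer carries one.
template = (
    "{{ '<|begin_of_text|>' }}"
    "{% for m in messages %}"
    "{% if m['role'] == 'user' %}"
    "{{ '<|start_header_id|>user<|end_header_id|>\n\n' + m['content']"
    " + '<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n' }}"
    "{% elif m['role'] == 'assistant' %}"
    "{{ m['content'] + '<|end_of_text|>' }}"
    "{% endif %}"
    "{% endfor %}"
)

messages = [{"role": "user", "content": "Hello"}]
prompt = tok.apply_chat_template(messages, chat_template=template, tokenize=False)
print(prompt)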