Safetensors
mistral
intervitens committed on
Commit
dce70fd
β€’
1 Parent(s): 526778e

Upload folder using huggingface_hub

Browse files
config.json CHANGED
@@ -4,7 +4,7 @@
4
  ],
5
  "attention_dropout": 0.0,
6
  "bos_token_id": 1,
7
- "eos_token_id": 2,
8
  "hidden_act": "silu",
9
  "hidden_size": 12288,
10
  "initializer_range": 0.02,
 
4
  ],
5
  "attention_dropout": 0.0,
6
  "bos_token_id": 1,
7
+ "eos_token_id": 4,
8
  "hidden_act": "silu",
9
  "hidden_size": 12288,
10
  "initializer_range": 0.02,
generation_config.json CHANGED
@@ -1,6 +1,6 @@
1
  {
2
  "_from_model_config": true,
3
  "bos_token_id": 1,
4
- "eos_token_id": 2,
5
  "transformers_version": "4.42.3"
6
  }
 
1
  {
2
  "_from_model_config": true,
3
  "bos_token_id": 1,
4
+ "eos_token_id": 4,
5
  "transformers_version": "4.42.3"
6
  }
tokenizer.json CHANGED
@@ -32,7 +32,7 @@
32
  },
33
  {
34
  "id": 3,
35
- "content": "[INST]",
36
  "single_word": false,
37
  "lstrip": false,
38
  "rstrip": false,
@@ -41,7 +41,7 @@
41
  },
42
  {
43
  "id": 4,
44
- "content": "[/INST]",
45
  "single_word": false,
46
  "lstrip": false,
47
  "rstrip": false,
@@ -98790,4 +98790,4 @@
98790
  "▁ ▁▁▁▁▁▁▁▁▁▁"
98791
  ]
98792
  }
98793
- }
 
32
  },
33
  {
34
  "id": 3,
35
+ "content": "<|im_start|>",
36
  "single_word": false,
37
  "lstrip": false,
38
  "rstrip": false,
 
41
  },
42
  {
43
  "id": 4,
44
+ "content": "<|im_end|>",
45
  "single_word": false,
46
  "lstrip": false,
47
  "rstrip": false,
 
98790
  "▁ ▁▁▁▁▁▁▁▁▁▁"
98791
  ]
98792
  }
98793
+ }
tokenizer.model CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:59f95e28944c062244741268596badc900df86c7f5ded05088d2da22a7379e06
3
- size 587583
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:22782e15722140ad83058a05ca45b53b0a4bf7a473a5828a48399eb71e03c610
3
+ size 587592
tokenizer_config.json CHANGED
@@ -28,7 +28,7 @@
28
  "special": true
29
  },
30
  "3": {
31
- "content": "[INST]",
32
  "lstrip": false,
33
  "normalized": false,
34
  "rstrip": false,
@@ -36,7 +36,7 @@
36
  "special": false
37
  },
38
  "4": {
39
- "content": "[/INST]",
40
  "lstrip": false,
41
  "normalized": false,
42
  "rstrip": false,
@@ -6174,7 +6174,7 @@
6174
  },
6175
  "bos_token": "<s>",
6176
  "clean_up_tokenization_spaces": false,
6177
- "eos_token": "</s>",
6178
  "legacy": true,
6179
  "model_max_length": 1000000000000000019884624838656,
6180
  "pad_token": null,
 
28
  "special": true
29
  },
30
  "3": {
31
+ "content": "<|im_start|>",
32
  "lstrip": false,
33
  "normalized": false,
34
  "rstrip": false,
 
36
  "special": false
37
  },
38
  "4": {
39
+ "content": "<|im_end|>",
40
  "lstrip": false,
41
  "normalized": false,
42
  "rstrip": false,
 
6174
  },
6175
  "bos_token": "<s>",
6176
  "clean_up_tokenization_spaces": false,
6177
+ "eos_token": "<|im_end|>",
6178
  "legacy": true,
6179
  "model_max_length": 1000000000000000019884624838656,
6180
  "pad_token": null,