i4never committed on
Commit
9c56f82
1 Parent(s): e1069fb

upload tokenizer

Browse files
Files changed (3) hide show
  1. added_tokens.json +3 -3
  2. tokenizer.json +3 -3
  3. tokenizer_config.json +4 -2
added_tokens.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "\n\n### Instruction:\n": 60513,
3
- "\n\n### Response:\n": 60514,
4
- "<pad>": 60512
5
  }
 
1
  {
2
+ "\n\n### Instruction:\n": 60512,
3
+ "\n\n### Response:\n": 60513,
4
+ "<pad>": 60514
5
  }
tokenizer.json CHANGED
@@ -32,7 +32,7 @@
32
  },
33
  {
34
  "id": 60512,
35
- "content": "<pad>",
36
  "single_word": false,
37
  "lstrip": false,
38
  "rstrip": false,
@@ -41,7 +41,7 @@
41
  },
42
  {
43
  "id": 60513,
44
- "content": "\n\n### Instruction:\n",
45
  "single_word": false,
46
  "lstrip": false,
47
  "rstrip": false,
@@ -50,7 +50,7 @@
50
  },
51
  {
52
  "id": 60514,
53
- "content": "\n\n### Response:\n",
54
  "single_word": false,
55
  "lstrip": false,
56
  "rstrip": false,
 
32
  },
33
  {
34
  "id": 60512,
35
+ "content": "\n\n### Instruction:\n",
36
  "single_word": false,
37
  "lstrip": false,
38
  "rstrip": false,
 
41
  },
42
  {
43
  "id": 60513,
44
+ "content": "\n\n### Response:\n",
45
  "single_word": false,
46
  "lstrip": false,
47
  "rstrip": false,
 
50
  },
51
  {
52
  "id": 60514,
53
+ "content": "<pad>",
54
  "single_word": false,
55
  "lstrip": false,
56
  "rstrip": false,
tokenizer_config.json CHANGED
@@ -10,6 +10,7 @@
10
  "single_word": false
11
  },
12
  "clean_up_tokenization_spaces": false,
 
13
  "eos_token": {
14
  "__type": "AddedToken",
15
  "content": "</s>",
@@ -19,7 +20,7 @@
19
  "single_word": false
20
  },
21
  "legacy": true,
22
- "model_max_length": 1000000000000000019884624838656,
23
  "pad_token": null,
24
  "sp_model_kwargs": {},
25
  "tokenizer_class": "LlamaTokenizer",
@@ -30,5 +31,6 @@
30
  "normalized": true,
31
  "rstrip": false,
32
  "single_word": false
33
- }
 
34
  }
 
10
  "single_word": false
11
  },
12
  "clean_up_tokenization_spaces": false,
13
+ "device_map": "cpu",
14
  "eos_token": {
15
  "__type": "AddedToken",
16
  "content": "</s>",
 
20
  "single_word": false
21
  },
22
  "legacy": true,
23
+ "model_max_length": 1024,
24
  "pad_token": null,
25
  "sp_model_kwargs": {},
26
  "tokenizer_class": "LlamaTokenizer",
 
31
  "normalized": true,
32
  "rstrip": false,
33
  "single_word": false
34
+ },
35
+ "use_fast": true
36
  }