pcuenq and Molbap committed
Commit 8a9d74e
1 Parent(s): 6259495

Upload processor (#4)

- Upload processor (5451064815e2cd3556354241f2f4a45c7c658cf0)


Co-authored-by: Pablo Montalvo <Molbap@users.noreply.huggingface.co>

preprocessor_config.json ADDED
@@ -0,0 +1,8 @@
+{
+  "image_processor_type": "FuyuImageProcessor",
+  "padding_mode": "constant",
+  "padding_value": 1.0,
+  "processor_class": "FuyuProcessor",
+  "target_height": 1080,
+  "target_width": 1920
+}
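
For reference, a minimal sketch of how this new file is consumed, assuming this commit belongs to the adept/fuyu-8b repository (the repo id is not stated in the diff): AutoProcessor reads preprocessor_config.json, resolves "processor_class": "FuyuProcessor", and configures the image processor with the padding and target-size values above.

```python
from transformers import AutoProcessor

# Assumption: the repo id is adept/fuyu-8b (not stated in this diff).
# AutoProcessor reads preprocessor_config.json, resolves
# "processor_class": "FuyuProcessor", and builds its FuyuImageProcessor
# with the constant-padding / 1920x1080 target values added above.
processor = AutoProcessor.from_pretrained("adept/fuyu-8b")
print(type(processor).__name__)   # FuyuProcessor
print(processor.image_processor)  # echoes padding_mode, padding_value, target size
```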
special_tokens_map.json CHANGED
@@ -1,4 +1,8 @@
 {
+  "additional_special_tokens": [
+    "<unk>",
+    "|ENDOFTEXT|"
+  ],
   "bos_token": "|ENDOFTEXT|",
   "eos_token": "|ENDOFTEXT|",
   "unk_token": "<unk>"
tokenizer.json CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:5c624be2d9dddae3e8c82cda48fe5c737ec6710225e953ef851a4023f2d4796a
-size 16464051
+oid sha256:20119416a890c3fefc86210999fb8189dde194c12f7f861dbb3bc7a922baaabb
+size 16454569
tokenizer_config.json CHANGED
@@ -1,11 +1,9 @@
 {
-  "add_bos_token": true,
-  "add_eos_token": false,
   "added_tokens_decoder": {
     "0": {
       "content": "<unk>",
       "lstrip": false,
-      "normalized": true,
+      "normalized": false,
       "rstrip": false,
       "single_word": false,
       "special": true
@@ -13,19 +11,23 @@
     "71013": {
       "content": "|ENDOFTEXT|",
       "lstrip": false,
-      "normalized": true,
+      "normalized": false,
       "rstrip": false,
       "single_word": false,
       "special": true
     }
   },
-  "additional_special_tokens": [],
+  "additional_special_tokens": [
+    "<unk>",
+    "|ENDOFTEXT|"
+  ],
   "bos_token": "|ENDOFTEXT|",
   "clean_up_tokenization_spaces": false,
   "eos_token": "|ENDOFTEXT|",
   "legacy": true,
   "model_max_length": 1000000000000000019884624838656,
   "pad_token": null,
+  "processor_class": "FuyuProcessor",
   "sp_model_kwargs": {},
   "spaces_between_special_tokens": false,
   "tokenizer_class": "LlamaTokenizer",