Croneker committed
Commit 188a249 · 1 Parent(s): 40fbe12

First model version

added_tokens.json DELETED
@@ -1,3 +0,0 @@
- {
-   "<unk>NOTUSED": 32005
- }
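
For context: added_tokens.json maps extra token strings layered on top of the base SentencePiece vocabulary to their integer IDs; here it held a single entry. A minimal sketch reading the mapping back (standard library only, assuming the deleted file is restored locally):

import json

# Read the single extra-token mapping shown in the diff above.
with open("added_tokens.json") as f:
    added_tokens = json.load(f)

print(added_tokens["<unk>NOTUSED"])  # 32005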
 
config.json DELETED
@@ -1,28 +0,0 @@
- {
-   "_name_or_path": "camembert-base",
-   "architectures": [
-     "CamembertForMaskedLM"
-   ],
-   "attention_probs_dropout_prob": 0.1,
-   "bos_token_id": 5,
-   "classifier_dropout": null,
-   "eos_token_id": 6,
-   "hidden_act": "gelu",
-   "hidden_dropout_prob": 0.1,
-   "hidden_size": 768,
-   "initializer_range": 0.02,
-   "intermediate_size": 3072,
-   "layer_norm_eps": 1e-05,
-   "max_position_embeddings": 514,
-   "model_type": "camembert",
-   "num_attention_heads": 12,
-   "num_hidden_layers": 12,
-   "output_past": true,
-   "pad_token_id": 1,
-   "position_embedding_type": "absolute",
-   "torch_dtype": "float32",
-   "transformers_version": "4.46.3",
-   "type_vocab_size": 1,
-   "use_cache": true,
-   "vocab_size": 32005
- }
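
For context, a config.json like the one removed above is what the transformers library reads to rebuild the model architecture. A minimal sketch, assuming transformers is installed and the file is restored to the working directory:

from transformers import CamembertConfig, CamembertForMaskedLM

# from_json_file reads a plain JSON config like the one shown above.
config = CamembertConfig.from_json_file("config.json")

# Building the model from the config alone gives randomly initialized
# weights; the trained parameters live in model.safetensors (next file).
model = CamembertForMaskedLM(config)
print(config.vocab_size)  # 32005

The masked-LM class matches the "architectures" entry in the config; the AutoModel variants would resolve the same class from "model_type": "camembert".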
 
model.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:7ec33e2edeca9c3e2d59609b5b074bfa7d2f8de7757b6de945ea3824c31778c0
- size 442646188
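
This is a Git LFS pointer, not the weights themselves: a spec version, the sha256 object ID, and the byte size of the real file. A minimal sketch (standard library only, assuming the actual model.safetensors has been downloaded locally) that verifies a file against the pointer above:

import hashlib
from pathlib import Path

# oid and size copied from the LFS pointer above.
EXPECTED_OID = "7ec33e2edeca9c3e2d59609b5b074bfa7d2f8de7757b6de945ea3824c31778c0"
EXPECTED_SIZE = 442646188

path = Path("model.safetensors")
assert path.stat().st_size == EXPECTED_SIZE, "size mismatch"

sha = hashlib.sha256()
with path.open("rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # stream in 1 MiB chunks
        sha.update(chunk)
assert sha.hexdigest() == EXPECTED_OID, "hash mismatch"
print("local file matches the LFS pointer")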
 
sentencepiece.bpe.model DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:988bc5a00281c6d210a5d34bd143d0363741a432fefe741bf71e61b1869d4314
- size 810912
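
This pointer tracks the SentencePiece model that backs the tokenizer. A minimal sketch, assuming the transformers and sentencepiece packages are installed and the real file is present locally:

from transformers import CamembertTokenizer

# CamembertTokenizer takes the SentencePiece model as its vocab file.
tokenizer = CamembertTokenizer("sentencepiece.bpe.model")
print(tokenizer.tokenize("Le camembert est délicieux"))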
 
special_tokens_map.json DELETED
@@ -1,20 +0,0 @@
- {
-   "additional_special_tokens": [
-     "<s>NOTUSED",
-     "</s>NOTUSED",
-     "<unk>NOTUSED"
-   ],
-   "bos_token": "<s>",
-   "cls_token": "<s>",
-   "eos_token": "</s>",
-   "mask_token": {
-     "content": "<mask>",
-     "lstrip": true,
-     "normalized": false,
-     "rstrip": false,
-     "single_word": false
-   },
-   "pad_token": "<pad>",
-   "sep_token": "</s>",
-   "unk_token": "<unk>"
- }
 
tokenizer.json DELETED
The diff for this file is too large to render.
 
tokenizer_config.json DELETED
@@ -1,83 +0,0 @@
- {
-   "added_tokens_decoder": {
-     "0": {
-       "content": "<s>NOTUSED",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "1": {
-       "content": "<pad>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "2": {
-       "content": "</s>NOTUSED",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "4": {
-       "content": "<unk>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "5": {
-       "content": "<s>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "6": {
-       "content": "</s>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "32004": {
-       "content": "<mask>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "32005": {
-       "content": "<unk>NOTUSED",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     }
-   },
-   "additional_special_tokens": [
-     "<s>NOTUSED",
-     "</s>NOTUSED",
-     "<unk>NOTUSED"
-   ],
-   "bos_token": "<s>",
-   "clean_up_tokenization_spaces": false,
-   "cls_token": "<s>",
-   "eos_token": "</s>",
-   "mask_token": "<mask>",
-   "model_max_length": 512,
-   "pad_token": "<pad>",
-   "sep_token": "</s>",
-   "tokenizer_class": "CamembertTokenizer",
-   "unk_token": "<unk>"
- }
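
Together with special_tokens_map.json and tokenizer.json above, this file is enough for transformers to reconstruct the full tokenizer, including the token-to-ID table in added_tokens_decoder (note <mask> at ID 32004 with "lstrip": true, so it absorbs a preceding space). A minimal sketch, assuming all the deleted files are restored into the current directory:

from transformers import AutoTokenizer

# AutoTokenizer assembles the tokenizer from tokenizer_config.json,
# tokenizer.json and special_tokens_map.json in the directory.
tokenizer = AutoTokenizer.from_pretrained(".")

ids = tokenizer("Le <mask> est délicieux")["input_ids"]
print(ids)                                   # starts with <s> (5), ends with </s> (6)
print(tokenizer.convert_ids_to_tokens(ids))  # <mask> sits at ID 32004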