cecchiara committed
Commit 8ef65c5
1 Parent(s): 2893ee5

Delete bert-finetuned-squad-accelerate

bert-finetuned-squad-accelerate/config.json DELETED
@@ -1,28 +0,0 @@
- {
-   "_name_or_path": "nlpaueb/legal-bert-base-uncased",
-   "architectures": [
-     "BertForQuestionAnswering"
-   ],
-   "attention_probs_dropout_prob": 0.1,
-   "bos_token_id": 0,
-   "classifier_dropout": null,
-   "eos_token_ids": 0,
-   "hidden_act": "gelu",
-   "hidden_dropout_prob": 0.1,
-   "hidden_size": 768,
-   "initializer_range": 0.02,
-   "intermediate_size": 3072,
-   "layer_norm_eps": 1e-12,
-   "max_position_embeddings": 512,
-   "model_type": "bert",
-   "num_attention_heads": 12,
-   "num_hidden_layers": 12,
-   "output_past": true,
-   "pad_token_id": 0,
-   "position_embedding_type": "absolute",
-   "torch_dtype": "float32",
-   "transformers_version": "4.27.3",
-   "type_vocab_size": 2,
-   "use_cache": true,
-   "vocab_size": 30522
- }
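The deleted config.json records that the checkpoint was a BertForQuestionAnswering model derived from nlpaueb/legal-bert-base-uncased. As an illustration only (not part of this commit), this is roughly how such a config is consumed with the transformers Auto classes; note that from_config builds the architecture with freshly initialized weights, not the fine-tuned ones:

```python
from transformers import AutoConfig, AutoModelForQuestionAnswering

# Base model name taken from "_name_or_path" in the config above.
config = AutoConfig.from_pretrained("nlpaueb/legal-bert-base-uncased")

# Same architecture as the deleted checkpoint, but with a randomly
# initialized question-answering head (no fine-tuned weights loaded).
model = AutoModelForQuestionAnswering.from_config(config)
```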
bert-finetuned-squad-accelerate/pytorch_model.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:c9a9d2e0568865a31dc402cbf32af5c7dbb1ee45bf1dd69a16156f575e31511e
- size 435642093
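pytorch_model.bin is tracked with Git LFS, so only this small pointer file lives in git history; the oid is the SHA-256 of the actual ~436 MB weights file. A minimal sketch of checking a locally downloaded copy against the pointer (the local file path is hypothetical; oid and size come from the pointer above):

```python
import hashlib

# Values copied from the LFS pointer above.
EXPECTED_OID = "c9a9d2e0568865a31dc402cbf32af5c7dbb1ee45bf1dd69a16156f575e31511e"
EXPECTED_SIZE = 435642093

sha, size = hashlib.sha256(), 0
with open("pytorch_model.bin", "rb") as f:  # hypothetical local path
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha.update(chunk)
        size += len(chunk)

assert size == EXPECTED_SIZE, "size differs from the LFS pointer"
assert sha.hexdigest() == EXPECTED_OID, "sha256 differs from the LFS pointer"
```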
bert-finetuned-squad-accelerate/special_tokens_map.json DELETED
@@ -1,7 +0,0 @@
- {
-   "cls_token": "[CLS]",
-   "mask_token": "[MASK]",
-   "pad_token": "[PAD]",
-   "sep_token": "[SEP]",
-   "unk_token": "[UNK]"
- }
bert-finetuned-squad-accelerate/tokenizer.json DELETED
The diff for this file is too large to render.
 
bert-finetuned-squad-accelerate/tokenizer_config.json DELETED
@@ -1,15 +0,0 @@
- {
-   "cls_token": "[CLS]",
-   "do_basic_tokenize": true,
-   "do_lower_case": true,
-   "mask_token": "[MASK]",
-   "model_max_length": 512,
-   "never_split": null,
-   "pad_token": "[PAD]",
-   "sep_token": "[SEP]",
-   "special_tokens_map_file": "C:\\Users\\chiar/.cache\\huggingface\\hub\\models--nlpaueb--legal-bert-base-uncased\\snapshots\\15b570cbf88259610b082a167dacc190124f60f6\\special_tokens_map.json",
-   "strip_accents": null,
-   "tokenize_chinese_chars": true,
-   "tokenizer_class": "BertTokenizer",
-   "unk_token": "[UNK]"
- }
bert-finetuned-squad-accelerate/vocab.txt DELETED
The diff for this file is too large to render.
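The four tokenizer artifacts removed here (tokenizer_config.json, special_tokens_map.json, tokenizer.json, vocab.txt) are what AutoTokenizer normally reads back from a checkpoint folder. A minimal sketch of loading a local copy of the deleted directory, assuming one still exists on disk (the path and example strings are hypothetical):

```python
from transformers import AutoTokenizer

# Hypothetical local path: a surviving copy of the deleted folder.
tokenizer = AutoTokenizer.from_pretrained("./bert-finetuned-squad-accelerate")

# QA-style (question, context) pair, matching the BertTokenizer settings
# above (do_lower_case=True, model_max_length=512).
enc = tokenizer("What law governs this contract?",
                "This Agreement shall be governed by the laws of Italy.")
print(tokenizer.convert_ids_to_tokens(enc["input_ids"]))
```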