techthiyanes committed on
Commit 88ab085 · verified · 1 Parent(s): 1f6a0cf

Upload folder using huggingface_hub

Files changed (4)
  1. config.json +4 -8
  2. model.onnx +2 -2
  3. tokenizer.json +2 -16
  4. tokenizer_config.json +1 -8
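
The commit message indicates the files were pushed with huggingface_hub's `upload_folder`. A minimal sketch of how such an upload is typically done (the local path and repo id below are illustrative; the commit does not name the target repo):

```python
from huggingface_hub import HfApi

api = HfApi()
# Assumption: "./model_dir" holds config.json, model.onnx, tokenizer.json,
# and tokenizer_config.json; "user/repo" stands in for the actual repo id.
api.upload_folder(
    folder_path="./model_dir",
    repo_id="user/repo",
    commit_message="Upload folder using huggingface_hub",
)
```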
config.json CHANGED
@@ -1,21 +1,17 @@
 {
-  "_name_or_path": "sentence-transformers/all-MiniLM-L6-v2",
-  "architectures": [
-    "BertModel"
-  ],
+  "_name_or_path": "deepset/sentence_bert",
   "attention_probs_dropout_prob": 0.1,
   "classifier_dropout": null,
-  "gradient_checkpointing": false,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
-  "hidden_size": 384,
+  "hidden_size": 768,
   "initializer_range": 0.02,
-  "intermediate_size": 1536,
+  "intermediate_size": 3072,
   "layer_norm_eps": 1e-12,
   "max_position_embeddings": 512,
   "model_type": "bert",
   "num_attention_heads": 12,
-  "num_hidden_layers": 6,
+  "num_hidden_layers": 12,
   "pad_token_id": 0,
   "position_embedding_type": "absolute",
   "transformers_version": "4.37.2",
model.onnx CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f263f01807eae07c66d69f3ff349e61b11533b59bbe14a813d35da52e33dc2bc
-size 90431458
+oid sha256:d4c99a75433ec3aec852994b4b3c63ecf2ac2f422cc0089ecc89e2da69e06f97
+size 435885596
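
The LFS pointer shows the ONNX file growing from roughly 90 MB to roughly 436 MB, consistent with the switch to bert-base-sized weights. A minimal sketch of loading the new file with onnxruntime; the input names and token ids below are the usual BERT-export conventions, not confirmed by the commit:

```python
import numpy as np
import onnxruntime as ort

session = ort.InferenceSession("model.onnx")
names = [i.name for i in session.get_inputs()]
print(names)  # inspect the actual export's input names

# Assumption: illustrative bert-base-uncased ids for "[CLS] hello [SEP]".
batch = {
    "input_ids": np.array([[101, 7592, 102]], dtype=np.int64),
    "attention_mask": np.array([[1, 1, 1]], dtype=np.int64),
    "token_type_ids": np.array([[0, 0, 0]], dtype=np.int64),
}
# Feed only the inputs this export actually declares.
feed = {k: v for k, v in batch.items() if k in names}
outputs = session.run(None, feed)
print(outputs[0].shape)  # expected hidden size in the last dim: 768
```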
tokenizer.json CHANGED
@@ -1,21 +1,7 @@
 {
   "version": "1.0",
-  "truncation": {
-    "direction": "Right",
-    "max_length": 128,
-    "strategy": "LongestFirst",
-    "stride": 0
-  },
-  "padding": {
-    "strategy": {
-      "Fixed": 128
-    },
-    "direction": "Right",
-    "pad_to_multiple_of": null,
-    "pad_id": 0,
-    "pad_type_id": 0,
-    "pad_token": "[PAD]"
-  },
+  "truncation": null,
+  "padding": null,
   "added_tokens": [
     {
       "id": 0,
tokenizer_config.json CHANGED
@@ -46,19 +46,12 @@
   "do_basic_tokenize": true,
   "do_lower_case": true,
   "mask_token": "[MASK]",
-  "max_length": 128,
-  "model_max_length": 512,
+  "model_max_length": 1000000000000000019884624838656,
   "never_split": null,
-  "pad_to_multiple_of": null,
   "pad_token": "[PAD]",
-  "pad_token_type_id": 0,
-  "padding_side": "right",
   "sep_token": "[SEP]",
-  "stride": 0,
   "strip_accents": null,
   "tokenize_chinese_chars": true,
   "tokenizer_class": "BertTokenizer",
-  "truncation_side": "right",
-  "truncation_strategy": "longest_first",
   "unk_token": "[UNK]"
 }
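
The huge `model_max_length` value is transformers' `int(1e30)` sentinel, meaning no maximum length is recorded, so an explicit limit must be passed at call time. A minimal sketch, assuming it is run from the directory holding the uploaded files; the 512 limit comes from `max_position_embeddings` in config.json:

```python
from transformers import AutoTokenizer

# Assumption: "." is the repo root containing the uploaded tokenizer files.
tokenizer = AutoTokenizer.from_pretrained(".")

# model_max_length is the int(1e30) "unset" sentinel, so pass an
# explicit limit instead of relying on the config.
batch = tokenizer(
    ["hello world"],
    truncation=True,
    max_length=512,       # BERT's positional limit from config.json
    padding="max_length",
    return_tensors="np",
)
print(batch["input_ids"].shape)  # (1, 512)
```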