Den4ikAI committed
Commit 169eec3
1 Parent(s): 3d707e9

Upload 6 files

nn/nn_stress_usage_predictor/config.json CHANGED
@@ -1,32 +1,36 @@
 {
-  "_name_or_path": "5k/",
-  "activation": "gelu",
+  "_name_or_path": "tiny/checkpoint-59441/",
   "architectures": [
-    "DistilBertForTokenClassification"
+    "BertForTokenClassification"
   ],
-  "attention_dropout": 0.1,
-  "dim": 264,
-  "dropout": 0.1,
-  "hidden_dim": 792,
+  "attention_probs_dropout_prob": 0.1,
+  "classifier_dropout": null,
+  "emb_size": 312,
+  "gradient_checkpointing": false,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 312,
   "id2label": {
     "0": "NO_STRESS",
     "1": "PUNCT",
     "2": "STRESS"
   },
   "initializer_range": 0.02,
+  "intermediate_size": 600,
   "label2id": {
     "NO_STRESS": 0,
     "PUNCT": 1,
     "STRESS": 2
   },
-  "max_position_embeddings": 512,
-  "model_type": "distilbert",
-  "n_heads": 12,
-  "n_layers": 3,
+  "layer_norm_eps": 1e-12,
+  "max_position_embeddings": 2048,
+  "model_type": "bert",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 3,
   "pad_token_id": 0,
-  "qa_dropout": 0.1,
-  "seq_classif_dropout": 0.2,
-  "sinusoidal_pos_embds": false,
-  "transformers_version": "4.29.2",
-  "vocab_size": 5031
+  "position_embedding_type": "absolute",
+  "transformers_version": "4.28.1",
+  "type_vocab_size": 2,
+  "use_cache": true,
+  "vocab_size": 83828
 }
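
The new config above swaps the small DistilBERT checkpoint for a 3-layer BERT (hidden size 312, vocabulary 83828) while keeping the same NO_STRESS / PUNCT / STRESS token labels. A minimal usage sketch follows; it is not part of this commit, and the local path, the input names exposed by the exported ONNX graph, and the assumption that the first graph output is per-token logits are all guesses for illustration.

```python
# Minimal sketch (assumptions noted in the lead-in): run the exported
# token-classification model with onnxruntime and decode its label ids.
import numpy as np
import onnxruntime as ort
from transformers import BertTokenizerFast

MODEL_DIR = "nn/nn_stress_usage_predictor"            # repo-relative path (assumed)
ID2LABEL = {0: "NO_STRESS", 1: "PUNCT", 2: "STRESS"}  # from config.json above

tokenizer = BertTokenizerFast.from_pretrained(MODEL_DIR)
session = ort.InferenceSession(f"{MODEL_DIR}/model.onnx")

enc = tokenizer("пример текста", return_tensors="np")

# Feed only the inputs the exported graph actually declares.
feed = {inp.name: enc[inp.name].astype(np.int64)
        for inp in session.get_inputs() if inp.name in enc}
logits = session.run(None, feed)[0]                   # assumed shape (1, seq_len, 3)

tokens = tokenizer.convert_ids_to_tokens(enc["input_ids"][0].tolist())
tags = [ID2LABEL[int(i)] for i in logits[0].argmax(-1)]
print(list(zip(tokens, tags)))
```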
nn/nn_stress_usage_predictor/model.onnx CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:1d77b4aa95e2589ee4470e3b9e08e2ddaf6d4a7e90e4e28194dd1731b93cf549
-size 14332169
+oid sha256:3d547500637b4ddfec8880ed6d1405fd50ee9d3f0131ef8a2a69dcf961dbefeb
+size 116473561
nn/nn_stress_usage_predictor/tokenizer.json CHANGED
The diff for this file is too large to render.
 
nn/nn_stress_usage_predictor/tokenizer_config.json CHANGED
@@ -4,12 +4,12 @@
   "do_basic_tokenize": true,
   "do_lower_case": false,
   "mask_token": "[MASK]",
-  "model_max_length": 1000000000000000019884624838656,
+  "model_max_length": 2048,
   "never_split": null,
   "pad_token": "[PAD]",
   "sep_token": "[SEP]",
   "strip_accents": null,
   "tokenize_chinese_chars": true,
-  "tokenizer_class": "DistilBertTokenizer",
+  "tokenizer_class": "BertTokenizer",
   "unk_token": "[UNK]"
 }
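
The tokenizer_config change above replaces the effectively unbounded default model_max_length with 2048, matching the new max_position_embeddings. A small sketch of the practical effect on truncation, under the assumption that the files live at the repo-relative path used above:

```python
from transformers import BertTokenizerFast

# Assumed local path to the files changed in this commit.
tok = BertTokenizerFast.from_pretrained("nn/nn_stress_usage_predictor")
print(tok.model_max_length)  # expected to read 2048 from tokenizer_config.json

# With truncation=True and no explicit max_length, inputs are now cut at
# model_max_length instead of the old astronomically large default.
enc = tok("очень длинный текст " * 2000, truncation=True)
print(len(enc["input_ids"]))  # <= 2048
```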
nn/nn_stress_usage_predictor/vocab.txt CHANGED
Binary files a/nn/nn_stress_usage_predictor/vocab.txt and b/nn/nn_stress_usage_predictor/vocab.txt differ