Titouan committed on
Commit
e64e7a8
1 Parent(s): bb61b5e

Create config.json

Browse files
Files changed (1) hide show
  1. config.json +2 -67
config.json CHANGED
@@ -1,68 +1,3 @@
1
  {
2
- "activation_dropout": 0.1,
3
- "apply_spec_augment": true,
4
- "architectures": [
5
- "Wav2Vec2Model"
6
- ],
7
- "attention_dropout": 0.1,
8
- "bos_token_id": 1,
9
- "conv_bias": true,
10
- "conv_dim": [
11
- 512,
12
- 512,
13
- 512,
14
- 512,
15
- 512,
16
- 512,
17
- 512
18
- ],
19
- "conv_kernel": [
20
- 10,
21
- 3,
22
- 3,
23
- 3,
24
- 3,
25
- 2,
26
- 2
27
- ],
28
- "conv_stride": [
29
- 5,
30
- 2,
31
- 2,
32
- 2,
33
- 2,
34
- 2,
35
- 2
36
- ],
37
- "ctc_loss_reduction": "sum",
38
- "ctc_zero_infinity": false,
39
- "do_stable_layer_norm": true,
40
- "eos_token_id": 2,
41
- "feat_extract_activation": "gelu",
42
- "feat_extract_dropout": 0.0,
43
- "feat_extract_norm": "layer",
44
- "feat_proj_dropout": 0.1,
45
- "final_dropout": 0.1,
46
- "gradient_checkpointing": false,
47
- "hidden_act": "gelu",
48
- "hidden_dropout": 0.1,
49
- "hidden_dropout_prob": 0.1,
50
- "hidden_size": 1024,
51
- "initializer_range": 0.02,
52
- "intermediate_size": 4096,
53
- "layer_norm_eps": 1e-05,
54
- "layerdrop": 0.1,
55
- "mask_feature_length": 10,
56
- "mask_feature_prob": 0.0,
57
- "mask_time_length": 10,
58
- "mask_time_prob": 0.05,
59
- "model_type": "wav2vec2",
60
- "num_attention_heads": 16,
61
- "num_conv_pos_embedding_groups": 16,
62
- "num_conv_pos_embeddings": 128,
63
- "num_feat_extract_layers": 7,
64
- "num_hidden_layers": 24,
65
- "pad_token_id": 0,
66
- "transformers_version": "4.4.0.dev0",
67
- "vocab_size": 32
68
- }
 
1
  {
2
+ "speechbrain_interface": "EncoderDecoderASR"
3
+ }