mikr Viewegger committed on
Commit
fe73e87
1 Parent(s): c330b2d

Upload FlaxWhisperForConditionalGeneration (#1)


- Upload FlaxWhisperForConditionalGeneration (0fc2d6f4d732b3eb369911b3ffb8223e722413aa)


Co-authored-by: Martin Viewegger <Viewegger@users.noreply.huggingface.co>
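
A minimal usage sketch (not part of the commit): with flax_model.msgpack in place, the checkpoint can be loaded through the Flax class named in the commit title. The repository id is not shown on this page, so the value below is a placeholder.

from transformers import FlaxWhisperForConditionalGeneration

# Placeholder: substitute the actual repository id of this model.
repo_id = "<namespace>/<model-name>"

# Loads the Flax weights (flax_model.msgpack) added by this commit.
model = FlaxWhisperForConditionalGeneration.from_pretrained(repo_id)
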

Files changed (3)
  1. config.json +10 -1
  2. flax_model.msgpack +3 -0
  3. generation_config.json +14 -0
config.json CHANGED
@@ -2,6 +2,7 @@
  "_name_or_path": "openai/whisper-large-v2",
  "activation_dropout": 0.0,
  "activation_function": "gelu",
+ "apply_spec_augment": false,
  "architectures": [
    "WhisperForConditionalGeneration"
  ],
@@ -11,6 +12,7 @@
    50257
  ],
  "bos_token_id": 50257,
+ "classifier_proj_size": 256,
  "d_model": 1280,
  "decoder_attention_heads": 20,
  "decoder_ffn_dim": 5120,
@@ -26,6 +28,12 @@
  "forced_decoder_ids": null,
  "init_std": 0.02,
  "is_encoder_decoder": true,
+ "mask_feature_length": 10,
+ "mask_feature_min_masks": 0,
+ "mask_feature_prob": 0.0,
+ "mask_time_length": 10,
+ "mask_time_min_masks": 2,
+ "mask_time_prob": 0.05,
  "max_length": 448,
  "max_source_positions": 1500,
  "max_target_positions": 448,
@@ -35,7 +43,8 @@
  "pad_token_id": 50257,
  "scale_embedding": false,
  "torch_dtype": "float16",
- "transformers_version": "4.26.0.dev0",
+ "transformers_version": "4.30.2",
  "use_cache": false,
+ "use_weighted_layer_sum": false,
  "vocab_size": 51865
 }
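
The added fields are the SpecAugment-style masking options that newer transformers releases expect on WhisperConfig. A hedged sketch of how they can be read back and overridden for fine-tuning (values other than those in the diff are illustrative assumptions):

from transformers import WhisperConfig

# "openai/whisper-large-v2" matches the _name_or_path in this config.
config = WhisperConfig.from_pretrained("openai/whisper-large-v2")

config.apply_spec_augment = True   # diff default is false; True enables masking during training
config.mask_time_prob = 0.05       # probability of masking a span along the time axis
config.mask_feature_prob = 0.0     # feature-axis (frequency) masking left disabled
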
flax_model.msgpack ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6d9de2c2efbb09611b16622b877019848ea164ad3ebfb1e01db4e6a12cce7866
+ size 3086655003
generation_config.json ADDED
@@ -0,0 +1,14 @@
+ {
+ "_from_model_config": true,
+ "begin_suppress_tokens": [
+ 220,
+ 50257
+ ],
+ "bos_token_id": 50257,
+ "decoder_start_token_id": 50258,
+ "eos_token_id": 50257,
+ "max_length": 448,
+ "pad_token_id": 50257,
+ "transformers_version": "4.30.2",
+ "use_cache": false
+ }
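
These are the generation defaults that generate() should pick up once generation_config.json is present in the repository. For reference, a hedged sketch that builds the same object directly in Python from the values shown above:

from transformers import GenerationConfig

# Mirrors the generation_config.json added by this commit.
gen_config = GenerationConfig(
    begin_suppress_tokens=[220, 50257],
    bos_token_id=50257,
    decoder_start_token_id=50258,
    eos_token_id=50257,
    max_length=448,
    pad_token_id=50257,
    use_cache=False,
)
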