hadiqa123 committed
Commit b713c8e
1 Parent(s): 48e1b20

Training in progress, step 1000

config.json CHANGED
@@ -1,6 +1,7 @@
  {
- "_name_or_path": "facebook/wav2vec2-xls-r-300m",
+ "_name_or_path": "facebook/wav2vec2-large-xlsr-53",
  "activation_dropout": 0.0,
+ "adapter_attn_dim": null,
  "adapter_kernel_size": 3,
  "adapter_stride": 2,
  "add_adapter": false,
@@ -60,12 +61,20 @@
  "intermediate_size": 4096,
  "layer_norm_eps": 1e-05,
  "layerdrop": 0.1,
+ "mask_channel_length": 10,
+ "mask_channel_min_space": 1,
+ "mask_channel_other": 0.0,
+ "mask_channel_prob": 0.0,
+ "mask_channel_selection": "static",
  "mask_feature_length": 10,
  "mask_feature_min_masks": 0,
  "mask_feature_prob": 0.0,
  "mask_time_length": 10,
  "mask_time_min_masks": 2,
+ "mask_time_min_space": 1,
+ "mask_time_other": 0.0,
  "mask_time_prob": 0.075,
+ "mask_time_selection": "static",
  "model_type": "wav2vec2",
  "num_adapter_layers": 3,
  "num_attention_heads": 16,
@@ -77,7 +86,7 @@
  "num_hidden_layers": 24,
  "num_negatives": 100,
  "output_hidden_size": 1024,
- "pad_token_id": 44,
+ "pad_token_id": 45,
  "proj_codevector_dim": 768,
  "tdnn_dilation": [
  1,
@@ -101,8 +110,8 @@
  1
  ],
  "torch_dtype": "float32",
- "transformers_version": "4.21.0",
+ "transformers_version": "4.31.0",
  "use_weighted_layer_sum": false,
- "vocab_size": 45,
+ "vocab_size": 48,
  "xvector_output_dim": 512
  }
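
For reference, a minimal sketch (not part of this commit) of how the updated config.json is typically consumed with the transformers library; the local path "./checkpoint" is a stand-in for a clone of this repo.

# Minimal sketch, assuming a local clone of this repo at ./checkpoint
# (the path is illustrative, not part of the commit).
from transformers import Wav2Vec2Config, Wav2Vec2ForCTC

config = Wav2Vec2Config.from_pretrained("./checkpoint")  # reads config.json
print(config.vocab_size)     # 48 after this commit (was 45)
print(config.pad_token_id)   # 45 after this commit (was 44)

# The fine-tuned CTC checkpoint picks these values up automatically.
model = Wav2Vec2ForCTC.from_pretrained("./checkpoint")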
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:92d5db3c27b7e12c0645e3931c985995f7e152c47f35ec694eba064921d72f42
- size 1262083185
+ oid sha256:7c6c082be8a71ec946f554f73834e66abd4ba22ab8cb00d4a78ac544cb4de9f3
+ size 1262095537
runs/Jul29_13-26-00_86dc0f5d0624/events.out.tfevents.1690637660.86dc0f5d0624.9740.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1f22789e1a9acd5a883e15a5c2f19a01bafa1321bf05000f7d7b2caa5c851300
+ size 6180
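
The added file is a TensorBoard event log for this training run. As a rough sketch (assumed usage, not part of the commit), its scalars can be read back with TensorBoard's event reader once the run directory is available locally.

# Sketch only: inspect the logged scalars in the added tfevents file.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

ea = EventAccumulator("runs/Jul29_13-26-00_86dc0f5d0624")
ea.Reload()                     # parse event files in the run directory
print(ea.Tags()["scalars"])     # available scalar tags (e.g. loss, learning rate)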
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:fda8557ee56354f224bb3470e53cc7f6f2b65b6232a4d4524610b2f6241b249e
- size 3311
+ oid sha256:a96524b72edacd34a509e99deecbc7ed9cfe70f779741eb2900ef2396135ffc6
+ size 3887
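
training_args.bin is the TrainingArguments object the Trainer saves alongside the checkpoint. A minimal sketch (assumed usage, not part of the commit) for inspecting it locally:

# Sketch only: training_args.bin is a pickled transformers.TrainingArguments.
# Unpickling requires transformers to be installed; recent torch versions need
# weights_only=False because this is an arbitrary Python object, not a tensor file.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.per_device_train_batch_size)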