AntonioMS committed on
Commit 4fae61f
1 Parent(s): 8cc5ac8

Model update

Files changed (2)
  1. sabela.pth +2 -2
  2. sabela_config.json +42 -26
sabela.pth CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:1c23af423e8f25be4a7a6e889257ccf32a090d82c7f26cc9d6243f75edc39bbf
- size 997748420
+ oid sha256:24714543c0735c1a2c4e7c79462ba3ec834f8d4afc28cbaa23bea3ee8f67964c
+ size 997811318
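
Only the Git LFS pointer for sabela.pth changes in this commit: the new oid is the SHA-256 digest of the updated checkpoint and size is its length in bytes, so the actual weights are fetched by Git LFS rather than stored in the diff. A minimal sketch (assuming the binary has already been pulled and sits next to this file) for checking a local copy against the new pointer:

import hashlib

# Values copied from the new LFS pointer in this commit.
EXPECTED_OID = "24714543c0735c1a2c4e7c79462ba3ec834f8d4afc28cbaa23bea3ee8f67964c"
EXPECTED_SIZE = 997811318

sha256 = hashlib.sha256()
size = 0
with open("sabela.pth", "rb") as f:  # path assumes the repo root
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha256.update(chunk)
        size += len(chunk)

assert size == EXPECTED_SIZE, f"unexpected size: {size}"
assert sha256.hexdigest() == EXPECTED_OID, "checksum mismatch"
print("sabela.pth matches the LFS pointer in this commit")
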
sabela_config.json CHANGED
@@ -1,7 +1,7 @@
  {
- "output_path": "/home/twbgmy/play/TTS-play/TTS/recipes/galotron",
+ "output_path": "/mnt/netapp2/Store_uni/home/usc/ci/ams/experimentos/sabela/normal",
  "logger_uri": null,
- "run_name": "vits_galotron",
+ "run_name": "sabela_48_104",
  "project_name": null,
  "run_description": "\ud83d\udc38Coqui trainer run.",
  "print_step": 25,
@@ -9,12 +9,13 @@
  "model_param_stats": false,
  "wandb_entity": null,
  "dashboard_logger": "tensorboard",
+ "save_on_interrupt": true,
  "log_model_step": null,
  "save_step": 10000,
- "save_n_checkpoints": 5,
+ "save_n_checkpoints": 10,
  "save_checkpoints": true,
  "save_all_best": false,
- "save_best_after": 200,
+ "save_best_after": 400,
  "target_loss": null,
  "print_eval": true,
  "test_delay_epochs": -1,
@@ -23,15 +24,16 @@
  "distributed_backend": "nccl",
  "distributed_url": "tcp://localhost:54321",
  "mixed_precision": true,
- "epochs": 1000,
- "batch_size": 26,
+ "precision": "fp16",
+ "epochs": 1300,
+ "batch_size": 48,
  "eval_batch_size": 16,
  "grad_clip": [
  1000,
  1000
  ],
  "scheduler_after_epoch": true,
- "lr": 0.001,
+ "lr": 0.0001,
  "optimizer": "AdamW",
  "optimizer_params": {
  "betas": [
@@ -41,9 +43,10 @@
  "eps": 1e-09,
  "weight_decay": 0.01
  },
- "lr_scheduler": "",
+ "lr_scheduler": null,
  "lr_scheduler_params": {},
  "use_grad_scaler": false,
+ "allow_tf32": false,
  "cudnn_enable": true,
  "cudnn_deterministic": false,
  "cudnn_benchmark": false,
@@ -63,22 +66,22 @@
  },
  "use_phonemes": false,
  "phonemizer": null,
- "phoneme_language": "en-us",
+ "phoneme_language": null,
  "compute_input_seq_cache": true,
- "text_cleaner": "multilingual_cleaners",
+ "text_cleaner": null,
  "enable_eos_bos_chars": false,
  "test_sentences_file": "",
- "phoneme_cache_path": "/home/twbgmy/play/TTS-play/TTS/recipes/galotron/phoneme_cache",
+ "phoneme_cache_path": "/mnt/netapp2/Store_uni/home/usc/ci/ams/experimentos/sabela/normal/phoneme_cache",
  "characters": {
  "characters_class": "TTS.tts.models.vits.VitsCharacters",
  "vocab_dict": null,
- "pad": "_",
- "eos": "*",
- "bos": "^",
- "blank": null,
- "characters": "ABCDEFGHIJKLMNOPQRSTUVXYZabcdefghijklmnopqrstuvwxyz\u00c1\u00c9\u00cd\u00d3\u00da\u00e1\u00e9\u00ed\u00f1\u00f3\u00fa\u00fc",
- "punctuations": "!\"(),-.:;?\u00a1\u00bf ",
- "phonemes": "",
+ "pad": "<PAD>",
+ "eos": "<EOS>",
+ "bos": "<BOS>",
+ "blank": "<BLNK>",
+ "characters": "ABCDEFGHIJKLMNOPQRSTUVWXYZ\u00c7\u00c1\u00c9\u00cd\u00d3\u00da\u00dcabcdefghijklmnopqrstuvwxyz\u00f1\u00e1\u00e9\u00ed\u00f3\u00fa\u00fc",
+ "punctuations": "!\u00a1'(),-.:;\u00bf? \"\n",
+ "phonemes": null,
  "is_unique": true,
  "is_sorted": true
  },
@@ -90,26 +93,39 @@
  "min_text_len": 1,
  "max_text_len": Infinity,
  "compute_f0": false,
+ "compute_energy": false,
  "compute_linear_spec": true,
  "precompute_num_workers": 0,
  "start_by_longest": false,
+ "shuffle": false,
+ "drop_last": false,
  "datasets": [
  {
- "name": "custom_turkish",
- "path": "/home/twbgmy/play/TTS-play/TTS/recipes/galotron",
- "meta_file_train": "sabela.csv",
+ "formatter": "nos_fonemas",
+ "dataset_name": "",
+ "path": "../..",
+ "meta_file_train": "sabela/normal/sabela_train.csv",
  "ignored_speakers": null,
  "language": "",
+ "phonemizer": "",
  "meta_file_val": "",
  "meta_file_attn_mask": ""
  }
  ],
  "test_sentences": [
- "O recurso solicitado non foi atopado.",
- "O galego \u00e9 unha lingua indoeuropea que pertence \u00e1 p\u00f3la de linguas rom\u00e1nicas."
+ "k\u00f3mo B\u00e1N eSekut\u00e1r t\u00f3Das \u00e9stas fuNTj\u00f3Ns ke Zes est\u00e1N a peD\u00edr?",
+ "\u00c9 Dif\u00edTil, n\u00f3N?",
+ "a eskas\u00e9Ta De BaT\u00ednas eN eur\u00d3pa poDer\u00eda komplik\u00e1r a aDministraTj\u00f3n da seG\u00fanda D\u00d3se.",
+ "ter\u00e1s \u00e1lGo ke kont\u00e1r...",
+ "k\u00e9 B\u00c9N \u00d3le!",
+ "d\u00d3nas e kaBal\u00e9jros: b\u00f3wZes amos\u00e1lo \u00d3me m\u00e1js f\u00f3rte Do m\u00fando.",
+ "a kur\u00faSa! -berr\u00f3w o B\u00c9Zo.",
+ "as awtoestr\u00e1Das Da S\u00fanta (b\u00edGo Baj\u00f3na e a kor\u00faJa karB\u00e1Zo) estar\u00e1N rremat\u00e1Das no \u00e1no m\u00edl noBeT\u00e9ntos noB\u00e9nta e s\u00e9te.",
+ "\u00e1j!, e SoG\u00e1ndo O tr\u00f3mpo tam\u00c9N aB\u00eda S\u00e9nte ke o koZ\u00eda a \u00faJa, n\u00f3N s\u00e1Bes?, e B\u00c9lo koZ\u00e9r a \u00faJa...",
+ "os ostal\u00e9jros m\u00edraN as T\u00edfras kom preokupaTj\u00f3N."
  ],
  "eval_split_max_size": null,
- "eval_split_size": 0.025,
+ "eval_split_size": 0.05,
  "use_speaker_weighted_sampler": false,
  "speaker_weighted_sampler_alpha": 1.0,
  "use_language_weighted_sampler": false,
@@ -117,7 +133,7 @@
  "use_length_weighted_sampler": false,
  "length_weighted_sampler_alpha": 1.0,
  "model_args": {
- "num_chars": 78,
+ "num_chars": 83,
  "out_channels": 513,
  "spec_segment_size": 32,
  "hidden_channels": 192,
@@ -244,5 +260,5 @@
  "use_d_vector_file": false,
  "d_vector_file": null,
  "d_vector_dim": 0,
- "github_branch": "* dev"
+ "github_branch": "inside_docker"
  }
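
The updated config still uses TTS.tts.models.vits.VitsCharacters, so the new checkpoint and config can in principle be loaded together with Coqui TTS's standard Synthesizer. A minimal inference sketch, assuming a compatible Coqui TTS install and that input text is given in the same phonemic notation as the nos_fonemas transcripts shown in test_sentences; the output file name is illustrative only:

from TTS.utils.synthesizer import Synthesizer

# Load the updated checkpoint together with the updated config from this commit.
synth = Synthesizer(
    tts_checkpoint="sabela.pth",
    tts_config_path="sabela_config.json",
    use_cuda=False,  # set True on a GPU machine
)

# One of the test_sentences from the config, already in the expected phonemic notation.
wav = synth.tts("\u00c9 Dif\u00edTil, n\u00f3N?")
synth.save_wav(wav, "sabela_sample.wav")  # illustrative output path
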