jadechoghari commited on
Commit
5017efb
1 Parent(s): 1d1f6ff

add space new version

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +3 -0
  2. .gitignore +165 -0
  3. .gitmodules +3 -0
  4. BigVGAN/.gitignore +146 -0
  5. BigVGAN/LICENSE +21 -0
  6. BigVGAN/README.md +266 -0
  7. BigVGAN/activations.py +126 -0
  8. BigVGAN/alias_free_activation/cuda/__init__.py +0 -0
  9. BigVGAN/alias_free_activation/cuda/activation1d.py +77 -0
  10. BigVGAN/alias_free_activation/cuda/anti_alias_activation.cpp +23 -0
  11. BigVGAN/alias_free_activation/cuda/anti_alias_activation_cuda.cu +246 -0
  12. BigVGAN/alias_free_activation/cuda/compat.h +29 -0
  13. BigVGAN/alias_free_activation/cuda/load.py +86 -0
  14. BigVGAN/alias_free_activation/cuda/type_shim.h +92 -0
  15. BigVGAN/alias_free_activation/torch/__init__.py +6 -0
  16. BigVGAN/alias_free_activation/torch/act.py +30 -0
  17. BigVGAN/alias_free_activation/torch/filter.py +101 -0
  18. BigVGAN/alias_free_activation/torch/resample.py +58 -0
  19. BigVGAN/bigvgan.py +494 -0
  20. BigVGAN/configs/bigvgan_22khz_80band.json +45 -0
  21. BigVGAN/configs/bigvgan_24khz_100band.json +45 -0
  22. BigVGAN/configs/bigvgan_base_22khz_80band.json +45 -0
  23. BigVGAN/configs/bigvgan_base_24khz_100band.json +45 -0
  24. BigVGAN/configs/bigvgan_v2_22khz_80band_256x.json +61 -0
  25. BigVGAN/configs/bigvgan_v2_22khz_80band_fmax8k_256x.json +61 -0
  26. BigVGAN/configs/bigvgan_v2_24khz_100band_256x.json +61 -0
  27. BigVGAN/configs/bigvgan_v2_44khz_128band_256x.json +61 -0
  28. BigVGAN/configs/bigvgan_v2_44khz_128band_512x.json +61 -0
  29. BigVGAN/demo/__init__.py +0 -0
  30. BigVGAN/demo/app.py +441 -0
  31. BigVGAN/demo/examples/dance_24k.wav +0 -0
  32. BigVGAN/demo/examples/hifitts_44k.wav +0 -0
  33. BigVGAN/demo/examples/jensen_24k.wav +0 -0
  34. BigVGAN/demo/examples/libritts_24k.wav +0 -0
  35. BigVGAN/demo/examples/megalovania_24k.wav +3 -0
  36. BigVGAN/demo/examples/musdbhq_44k.wav +0 -0
  37. BigVGAN/demo/examples/musiccaps1_44k.wav +0 -0
  38. BigVGAN/demo/examples/musiccaps2_44k.wav +0 -0
  39. BigVGAN/demo/examples/queen_24k.wav +0 -0
  40. BigVGAN/demo/requirements.txt +15 -0
  41. BigVGAN/discriminators.py +651 -0
  42. BigVGAN/env.py +18 -0
  43. BigVGAN/filelists/LibriTTS/dev-clean.txt +115 -0
  44. BigVGAN/filelists/LibriTTS/dev-other.txt +93 -0
  45. BigVGAN/filelists/LibriTTS/parse_libritts.py +77 -0
  46. BigVGAN/filelists/LibriTTS/test-clean.txt +97 -0
  47. BigVGAN/filelists/LibriTTS/test-other.txt +103 -0
  48. BigVGAN/filelists/LibriTTS/train-full.txt +3 -0
  49. BigVGAN/filelists/LibriTTS/val-full.txt +119 -0
  50. BigVGAN/incl_licenses/LICENSE_1 +21 -0
.gitattributes CHANGED
@@ -33,3 +33,6 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ BigVGAN/demo/examples/megalovania_24k.wav filter=lfs diff=lfs merge=lfs -text
37
+ BigVGAN/filelists/LibriTTS/train-full.txt filter=lfs diff=lfs merge=lfs -text
38
+ long_form_sample.ogg filter=lfs diff=lfs merge=lfs -text
.gitignore ADDED
@@ -0,0 +1,165 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ *.pt
2
+ test_output.wav
3
+
4
+ # Byte-compiled / optimized / DLL files
5
+ __pycache__/
6
+ *.py[cod]
7
+ *$py.class
8
+
9
+ # C extensions
10
+ *.so
11
+
12
+ # Distribution / packaging
13
+ .Python
14
+ build/
15
+ develop-eggs/
16
+ dist/
17
+ downloads/
18
+ eggs/
19
+ .eggs/
20
+ lib/
21
+ lib64/
22
+ parts/
23
+ sdist/
24
+ var/
25
+ wheels/
26
+ share/python-wheels/
27
+ *.egg-info/
28
+ .installed.cfg
29
+ *.egg
30
+ MANIFEST
31
+
32
+ # PyInstaller
33
+ # Usually these files are written by a python script from a template
34
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
35
+ *.manifest
36
+ *.spec
37
+
38
+ # Installer logs
39
+ pip-log.txt
40
+ pip-delete-this-directory.txt
41
+
42
+ # Unit test / coverage reports
43
+ htmlcov/
44
+ .tox/
45
+ .nox/
46
+ .coverage
47
+ .coverage.*
48
+ .cache
49
+ nosetests.xml
50
+ coverage.xml
51
+ *.cover
52
+ *.py,cover
53
+ .hypothesis/
54
+ .pytest_cache/
55
+ cover/
56
+
57
+ # Translations
58
+ *.mo
59
+ *.pot
60
+
61
+ # Django stuff:
62
+ *.log
63
+ local_settings.py
64
+ db.sqlite3
65
+ db.sqlite3-journal
66
+
67
+ # Flask stuff:
68
+ instance/
69
+ .webassets-cache
70
+
71
+ # Scrapy stuff:
72
+ .scrapy
73
+
74
+ # Sphinx documentation
75
+ docs/_build/
76
+
77
+ # PyBuilder
78
+ .pybuilder/
79
+ target/
80
+
81
+ # Jupyter Notebook
82
+ .ipynb_checkpoints
83
+
84
+ # IPython
85
+ profile_default/
86
+ ipython_config.py
87
+
88
+ # pyenv
89
+ # For a library or package, you might want to ignore these files since the code is
90
+ # intended to run in multiple environments; otherwise, check them in:
91
+ # .python-version
92
+
93
+ # pipenv
94
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
95
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
96
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
97
+ # install all needed dependencies.
98
+ #Pipfile.lock
99
+
100
+ # poetry
101
+ # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
102
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
103
+ # commonly ignored for libraries.
104
+ # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
105
+ #poetry.lock
106
+
107
+ # pdm
108
+ # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
109
+ #pdm.lock
110
+ # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
111
+ # in version control.
112
+ # https://pdm.fming.dev/latest/usage/project/#working-with-version-control
113
+ .pdm.toml
114
+ .pdm-python
115
+ .pdm-build/
116
+
117
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
118
+ __pypackages__/
119
+
120
+ # Celery stuff
121
+ celerybeat-schedule
122
+ celerybeat.pid
123
+
124
+ # SageMath parsed files
125
+ *.sage.py
126
+
127
+ # Environments
128
+ .env
129
+ .venv
130
+ env/
131
+ venv/
132
+ ENV/
133
+ env.bak/
134
+ venv.bak/
135
+
136
+ # Spyder project settings
137
+ .spyderproject
138
+ .spyproject
139
+
140
+ # Rope project settings
141
+ .ropeproject
142
+
143
+ # mkdocs documentation
144
+ /site
145
+
146
+ # mypy
147
+ .mypy_cache/
148
+ .dmypy.json
149
+ dmypy.json
150
+
151
+ # Pyre type checker
152
+ .pyre/
153
+
154
+ # pytype static type analyzer
155
+ .pytype/
156
+
157
+ # Cython debug symbols
158
+ cython_debug/
159
+
160
+ # PyCharm
161
+ # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
162
+ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
163
+ # and can be added to the global gitignore or merged into this file. For a more nuclear
164
+ # option (not recommended) you can uncomment the following to ignore the entire idea folder.
165
+ #.idea/
.gitmodules ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ [submodule "BigVGAN"]
2
+ path = BigVGAN
3
+ url = https://github.com/NVIDIA/BigVGAN
BigVGAN/.gitignore ADDED
@@ -0,0 +1,146 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # BigVGAN
2
+ alias_free_activation/cuda/build/
3
+ exp/
4
+ tmp/
5
+
6
+ # Symlinks for bundled LibriTTS filelists
7
+ filelists/LibriTTS/train-clean-100
8
+ filelists/LibriTTS/train-clean-360
9
+ filelists/LibriTTS/train-other-500
10
+ filelists/LibriTTS/dev-clean
11
+ filelists/LibriTTS/dev-other
12
+ filelists/LibriTTS/test-clean
13
+ filelists/LibriTTS/test-other
14
+
15
+ # VSCode configs
16
+ .vscode/
17
+
18
+ # Byte-compiled / optimized / DLL files
19
+ __pycache__/
20
+ *.py[cod]
21
+ *$py.class
22
+
23
+ # C extensions
24
+ *.so
25
+
26
+ # Distribution / packaging
27
+ .Python
28
+ build/
29
+ develop-eggs/
30
+ dist/
31
+ downloads/
32
+ eggs/
33
+ .eggs/
34
+ lib/
35
+ lib64/
36
+ parts/
37
+ sdist/
38
+ var/
39
+ wheels/
40
+ share/python-wheels/
41
+ *.egg-info/
42
+ .installed.cfg
43
+ *.egg
44
+ MANIFEST
45
+
46
+ # PyInstaller
47
+ # Usually these files are written by a python script from a template
48
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
49
+ *.manifest
50
+ *.spec
51
+
52
+ # Installer logs
53
+ pip-log.txt
54
+ pip-delete-this-directory.txt
55
+
56
+ # Unit test / coverage reports
57
+ htmlcov/
58
+ .tox/
59
+ .nox/
60
+ .coverage
61
+ .coverage.*
62
+ .cache
63
+ nosetests.xml
64
+ coverage.xml
65
+ *.cover
66
+ *.py,cover
67
+ .hypothesis/
68
+ .pytest_cache/
69
+ cover/
70
+
71
+ # Translations
72
+ *.mo
73
+ *.pot
74
+
75
+ # Django stuff:
76
+ *.log
77
+ local_settings.py
78
+ db.sqlite3
79
+ db.sqlite3-journal
80
+
81
+ # Flask stuff:
82
+ instance/
83
+ .webassets-cache
84
+
85
+ # Scrapy stuff:
86
+ .scrapy
87
+
88
+ # Sphinx documentation
89
+ docs/_build/
90
+
91
+ # PyBuilder
92
+ .pybuilder/
93
+ target/
94
+
95
+ # Jupyter Notebook
96
+ .ipynb_checkpoints
97
+
98
+ # IPython
99
+ profile_default/
100
+ ipython_config.py
101
+
102
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
103
+ __pypackages__/
104
+
105
+ # Celery stuff
106
+ celerybeat-schedule
107
+ celerybeat.pid
108
+
109
+ # SageMath parsed files
110
+ *.sage.py
111
+
112
+ # Environments
113
+ .env
114
+ .venv
115
+ env/
116
+ venv/
117
+ ENV/
118
+ env.bak/
119
+ venv.bak/
120
+
121
+ # Spyder project settings
122
+ .spyderproject
123
+ .spyproject
124
+
125
+ # Rope project settings
126
+ .ropeproject
127
+
128
+ # mkdocs documentation
129
+ /site
130
+
131
+ # mypy
132
+ .mypy_cache/
133
+ .dmypy.json
134
+ dmypy.json
135
+
136
+ # Pyre type checker
137
+ .pyre/
138
+
139
+ # pytype static type analyzer
140
+ .pytype/
141
+
142
+ # Cython debug symbols
143
+ cython_debug/
144
+
145
+ # PyCharm
146
+ .idea/
BigVGAN/LICENSE ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ MIT License
2
+
3
+ Copyright (c) 2024 NVIDIA CORPORATION.
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
BigVGAN/README.md ADDED
@@ -0,0 +1,266 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ## BigVGAN: A Universal Neural Vocoder with Large-Scale Training
2
+
3
+ #### Sang-gil Lee, Wei Ping, Boris Ginsburg, Bryan Catanzaro, Sungroh Yoon
4
+
5
+ [[Paper]](https://arxiv.org/abs/2206.04658) - [[Code]](https://github.com/NVIDIA/BigVGAN) - [[Showcase]](https://bigvgan-demo.github.io/) - [[Project Page]](https://research.nvidia.com/labs/adlr/projects/bigvgan/) - [[Weights]](https://huggingface.co/collections/nvidia/bigvgan-66959df3d97fd7d98d97dc9a) - [[Demo]](https://huggingface.co/spaces/nvidia/BigVGAN)
6
+
7
+ [![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/bigvgan-a-universal-neural-vocoder-with-large/speech-synthesis-on-libritts)](https://paperswithcode.com/sota/speech-synthesis-on-libritts?p=bigvgan-a-universal-neural-vocoder-with-large)
8
+
9
+ <center><img src="https://user-images.githubusercontent.com/15963413/218609148-881e39df-33af-4af9-ab95-1427c4ebf062.png" width="800"></center>
10
+
11
+ ## News
12
+ - **Sep 2024 (v2.4):**
13
+ - We have updated the pretrained checkpoints trained for 5M steps. This is the final release of the BigVGAN-v2 checkpoints.
14
+
15
+ - **Jul 2024 (v2.3):**
16
+ - General refactor and code improvements for improved readability.
17
+ - Fully fused CUDA kernel of anti-alised activation (upsampling + activation + downsampling) with inference speed benchmark.
18
+
19
+ - **Jul 2024 (v2.2):** The repository now includes an interactive local demo using gradio.
20
+
21
+ - **Jul 2024 (v2.1):** BigVGAN is now integrated with 🤗 Hugging Face Hub with easy access to inference using pretrained checkpoints. We also provide an interactive demo on Hugging Face Spaces.
22
+
23
+ - **Jul 2024 (v2):** We release BigVGAN-v2 along with pretrained checkpoints. Below are the highlights:
24
+ - Custom CUDA kernel for inference: we provide a fused upsampling + activation kernel written in CUDA for accelerated inference speed. Our test shows 1.5 - 3x faster speed on a single A100 GPU.
25
+ - Improved discriminator and loss: BigVGAN-v2 is trained using a [multi-scale sub-band CQT discriminator](https://arxiv.org/abs/2311.14957) and a [multi-scale mel spectrogram loss](https://arxiv.org/abs/2306.06546).
26
+ - Larger training data: BigVGAN-v2 is trained using datasets containing diverse audio types, including speech in multiple languages, environmental sounds, and instruments.
27
+ - We provide pretrained checkpoints of BigVGAN-v2 using diverse audio configurations, supporting up to 44 kHz sampling rate and 512x upsampling ratio.
28
+
29
+ ## Installation
30
+
31
+ The codebase has been tested on Python `3.10` and PyTorch `2.3.1` conda packages with either `pytorch-cuda=12.1` or `pytorch-cuda=11.8`. Below is an example command to create the conda environment:
32
+
33
+ ```shell
34
+ conda create -n bigvgan python=3.10 pytorch torchvision torchaudio pytorch-cuda=12.1 -c pytorch -c nvidia
35
+ conda activate bigvgan
36
+ ```
37
+
38
+ Clone the repository and install dependencies:
39
+
40
+ ```shell
41
+ git clone https://github.com/NVIDIA/BigVGAN
42
+ cd BigVGAN
43
+ pip install -r requirements.txt
44
+ ```
45
+
46
+ ## Inference Quickstart using 🤗 Hugging Face Hub
47
+
48
+ The example below describes how you can use BigVGAN: load the pretrained BigVGAN generator from Hugging Face Hub, compute the mel spectrogram from an input waveform, and generate a synthesized waveform using the mel spectrogram as the model's input.
49
+
50
+ ```python
51
+ device = 'cuda'
52
+
53
+ import torch
54
+ import bigvgan
55
+ import librosa
56
+ from meldataset import get_mel_spectrogram
57
+
58
+ # instantiate the model. You can optionally set use_cuda_kernel=True for faster inference.
59
+ model = bigvgan.BigVGAN.from_pretrained('nvidia/bigvgan_v2_24khz_100band_256x', use_cuda_kernel=False)
60
+
61
+ # remove weight norm in the model and set to eval mode
62
+ model.remove_weight_norm()
63
+ model = model.eval().to(device)
64
+
65
+ # load wav file and compute mel spectrogram
66
+ wav_path = '/path/to/your/audio.wav'
67
+ wav, sr = librosa.load(wav_path, sr=model.h.sampling_rate, mono=True) # wav is np.ndarray with shape [T_time] and values in [-1, 1]
68
+ wav = torch.FloatTensor(wav).unsqueeze(0) # wav is FloatTensor with shape [B(1), T_time]
69
+
70
+ # compute mel spectrogram from the ground truth audio
71
+ mel = get_mel_spectrogram(wav, model.h).to(device) # mel is FloatTensor with shape [B(1), C_mel, T_frame]
72
+
73
+ # generate waveform from mel
74
+ with torch.inference_mode():
75
+ wav_gen = model(mel) # wav_gen is FloatTensor with shape [B(1), 1, T_time] and values in [-1, 1]
76
+ wav_gen_float = wav_gen.squeeze(0).cpu() # wav_gen is FloatTensor with shape [1, T_time]
77
+
78
+ # you can convert the generated waveform to 16 bit linear PCM
79
+ wav_gen_int16 = (wav_gen_float * 32767.0).numpy().astype('int16') # wav_gen is now np.ndarray with shape [1, T_time] and int16 dtype
80
+ ```
81
+
82
+ ## Local gradio demo <a href='https://github.com/gradio-app/gradio'><img src='https://img.shields.io/github/stars/gradio-app/gradio'></a>
83
+
84
+ You can run a local gradio demo using the command below:
85
+
86
+ ```shell
87
+ pip install -r demo/requirements.txt
88
+ python demo/app.py
89
+ ```
90
+
91
+ ## Training
92
+
93
+ Create symbolic link to the root of the dataset. The codebase uses filelist with the relative path from the dataset. Below are the example commands for LibriTTS dataset:
94
+
95
+ ```shell
96
+ cd filelists/LibriTTS && \
97
+ ln -s /path/to/your/LibriTTS/train-clean-100 train-clean-100 && \
98
+ ln -s /path/to/your/LibriTTS/train-clean-360 train-clean-360 && \
99
+ ln -s /path/to/your/LibriTTS/train-other-500 train-other-500 && \
100
+ ln -s /path/to/your/LibriTTS/dev-clean dev-clean && \
101
+ ln -s /path/to/your/LibriTTS/dev-other dev-other && \
102
+ ln -s /path/to/your/LibriTTS/test-clean test-clean && \
103
+ ln -s /path/to/your/LibriTTS/test-other test-other && \
104
+ cd ../..
105
+ ```
106
+
107
+ Train BigVGAN model. Below is an example command for training BigVGAN-v2 using LibriTTS dataset at 24kHz with a full 100-band mel spectrogram as input:
108
+
109
+ ```shell
110
+ python train.py \
111
+ --config configs/bigvgan_v2_24khz_100band_256x.json \
112
+ --input_wavs_dir filelists/LibriTTS \
113
+ --input_training_file filelists/LibriTTS/train-full.txt \
114
+ --input_validation_file filelists/LibriTTS/val-full.txt \
115
+ --list_input_unseen_wavs_dir filelists/LibriTTS filelists/LibriTTS \
116
+ --list_input_unseen_validation_file filelists/LibriTTS/dev-clean.txt filelists/LibriTTS/dev-other.txt \
117
+ --checkpoint_path exp/bigvgan_v2_24khz_100band_256x
118
+ ```
119
+
120
+ ## Synthesis
121
+
122
+ Synthesize from BigVGAN model. Below is an example command for generating audio from the model.
123
+ It computes mel spectrograms using wav files from `--input_wavs_dir` and saves the generated audio to `--output_dir`.
124
+
125
+ ```shell
126
+ python inference.py \
127
+ --checkpoint_file /path/to/your/bigvgan_v2_24khz_100band_256x/bigvgan_generator.pt \
128
+ --input_wavs_dir /path/to/your/input_wav \
129
+ --output_dir /path/to/your/output_wav
130
+ ```
131
+
132
+ `inference_e2e.py` supports synthesis directly from the mel spectrogram saved in `.npy` format, with shapes `[1, channel, frame]` or `[channel, frame]`.
133
+ It loads mel spectrograms from `--input_mels_dir` and saves the generated audio to `--output_dir`.
134
+
135
+ Make sure that the STFT hyperparameters for mel spectrogram are the same as the model, which are defined in `config.json` of the corresponding model.
136
+
137
+ ```shell
138
+ python inference_e2e.py \
139
+ --checkpoint_file /path/to/your/bigvgan_v2_24khz_100band_256x/bigvgan_generator.pt \
140
+ --input_mels_dir /path/to/your/input_mel \
141
+ --output_dir /path/to/your/output_wav
142
+ ```
143
+
144
+ ## Using Custom CUDA Kernel for Synthesis
145
+
146
+ You can apply the fast CUDA inference kernel by using a parameter `use_cuda_kernel` when instantiating BigVGAN:
147
+
148
+ ```python
149
+ generator = BigVGAN(h, use_cuda_kernel=True)
150
+ ```
151
+
152
+ You can also pass `--use_cuda_kernel` to `inference.py` and `inference_e2e.py` to enable this feature.
153
+
154
+ When applied for the first time, it builds the kernel using `nvcc` and `ninja`. If the build succeeds, the kernel is saved to `alias_free_activation/cuda/build` and the model automatically loads the kernel. The codebase has been tested using CUDA `12.1`.
155
+
156
+ Please make sure that both are installed in your system and `nvcc` installed in your system matches the version your PyTorch build is using.
157
+
158
+ We recommend running `test_cuda_vs_torch_model.py` first to build and check the correctness of the CUDA kernel. See below example command and its output, where it returns `[Success] test CUDA fused vs. plain torch BigVGAN inference`:
159
+
160
+ ```shell
161
+ python tests/test_cuda_vs_torch_model.py \
162
+ --checkpoint_file /path/to/your/bigvgan_generator.pt
163
+ ```
164
+
165
+ ```shell
166
+ loading plain Pytorch BigVGAN
167
+ ...
168
+ loading CUDA kernel BigVGAN with auto-build
169
+ Detected CUDA files, patching ldflags
170
+ Emitting ninja build file /path/to/your/BigVGAN/alias_free_activation/cuda/build/build.ninja..
171
+ Building extension module anti_alias_activation_cuda...
172
+ ...
173
+ Loading extension module anti_alias_activation_cuda...
174
+ ...
175
+ Loading '/path/to/your/bigvgan_generator.pt'
176
+ ...
177
+ [Success] test CUDA fused vs. plain torch BigVGAN inference
178
+ > mean_difference=0.0007238413265440613
179
+ ...
180
+ ```
181
+
182
+ If you see `[Fail] test CUDA fused vs. plain torch BigVGAN inference`, it means that the CUDA kernel inference is incorrect. Please check if `nvcc` installed in your system is compatible with your PyTorch version.
183
+
184
+ ## Pretrained Models
185
+
186
+ We provide the [pretrained models on Hugging Face Collections](https://huggingface.co/collections/nvidia/bigvgan-66959df3d97fd7d98d97dc9a).
187
+ One can download the checkpoints of the generator weight (named `bigvgan_generator.pt`) and its discriminator/optimizer states (named `bigvgan_discriminator_optimizer.pt`) within the listed model repositories.
188
+
189
+ | Model Name | Sampling Rate | Mel band | fmax | Upsampling Ratio | Params | Dataset | Steps | Fine-Tuned |
190
+ |:--------------------------------------------------------------------------------------------------------:|:-------------:|:--------:|:-----:|:----------------:|:------:|:--------------------------:|:-----:|:----------:|
191
+ | [bigvgan_v2_44khz_128band_512x](https://huggingface.co/nvidia/bigvgan_v2_44khz_128band_512x) | 44 kHz | 128 | 22050 | 512 | 122M | Large-scale Compilation | 5M | No |
192
+ | [bigvgan_v2_44khz_128band_256x](https://huggingface.co/nvidia/bigvgan_v2_44khz_128band_256x) | 44 kHz | 128 | 22050 | 256 | 112M | Large-scale Compilation | 5M | No |
193
+ | [bigvgan_v2_24khz_100band_256x](https://huggingface.co/nvidia/bigvgan_v2_24khz_100band_256x) | 24 kHz | 100 | 12000 | 256 | 112M | Large-scale Compilation | 5M | No |
194
+ | [bigvgan_v2_22khz_80band_256x](https://huggingface.co/nvidia/bigvgan_v2_22khz_80band_256x) | 22 kHz | 80 | 11025 | 256 | 112M | Large-scale Compilation | 5M | No |
195
+ | [bigvgan_v2_22khz_80band_fmax8k_256x](https://huggingface.co/nvidia/bigvgan_v2_22khz_80band_fmax8k_256x) | 22 kHz | 80 | 8000 | 256 | 112M | Large-scale Compilation | 5M | No |
196
+ | [bigvgan_24khz_100band](https://huggingface.co/nvidia/bigvgan_24khz_100band) | 24 kHz | 100 | 12000 | 256 | 112M | LibriTTS | 5M | No |
197
+ | [bigvgan_base_24khz_100band](https://huggingface.co/nvidia/bigvgan_base_24khz_100band) | 24 kHz | 100 | 12000 | 256 | 14M | LibriTTS | 5M | No |
198
+ | [bigvgan_22khz_80band](https://huggingface.co/nvidia/bigvgan_22khz_80band) | 22 kHz | 80 | 8000 | 256 | 112M | LibriTTS + VCTK + LJSpeech | 5M | No |
199
+ | [bigvgan_base_22khz_80band](https://huggingface.co/nvidia/bigvgan_base_22khz_80band) | 22 kHz | 80 | 8000 | 256 | 14M | LibriTTS + VCTK + LJSpeech | 5M | No |
200
+
201
+ The paper results are based on the original 24kHz BigVGAN models (`bigvgan_24khz_100band` and `bigvgan_base_24khz_100band`) trained on LibriTTS dataset.
202
+ We also provide 22kHz BigVGAN models with band-limited setup (i.e., fmax=8000) for TTS applications.
203
+ Note that the checkpoints use `snakebeta` activation with log scale parameterization, which have the best overall quality.
204
+
205
+ You can fine-tune the models by:
206
+
207
+ 1. downloading the checkpoints (both the generator weight and its discriminator/optimizer states)
208
+ 2. resuming training using your audio dataset by specifying `--checkpoint_path` that includes the checkpoints when launching `train.py`
209
+
210
+ ## Training Details of BigVGAN-v2
211
+
212
+ Compared to the original BigVGAN, the pretrained checkpoints of BigVGAN-v2 used `batch_size=32` with a longer `segment_size=65536` and are trained using 8 A100 GPUs.
213
+
214
+ Note that the BigVGAN-v2 `json` config files in `./configs` use `batch_size=4` as default to fit in a single A100 GPU for training. You can fine-tune the models adjusting `batch_size` depending on your GPUs.
215
+
216
+ When training BigVGAN-v2 from scratch with small batch size, it can potentially encounter the early divergence problem mentioned in the paper. In such case, we recommend lowering the `clip_grad_norm` value (e.g. `100`) for the early training iterations (e.g. 20000 steps) and increase the value to the default `500`.
217
+
218
+ ## Evaluation Results of BigVGAN-v2
219
+
220
+ Below are the objective results of the 24kHz model (`bigvgan_v2_24khz_100band_256x`) obtained from the LibriTTS `dev` sets. BigVGAN-v2 shows noticeable improvements of the metrics. The model also exhibits reduced perceptual artifacts, especially for non-speech audio.
221
+
222
+ | Model | Dataset | Steps | PESQ(↑) | M-STFT(↓) | MCD(↓) | Periodicity(↓) | V/UV F1(↑) |
223
+ |:----------:|:-----------------------:|:-----:|:---------:|:----------:|:----------:|:--------------:|:----------:|
224
+ | BigVGAN | LibriTTS | 1M | 4.027 | 0.7997 | 0.3745 | 0.1018 | 0.9598 |
225
+ | BigVGAN | LibriTTS | 5M | 4.256 | 0.7409 | 0.2988 | 0.0809 | 0.9698 |
226
+ | BigVGAN-v2 | Large-scale Compilation | 3M | 4.359 | 0.7134 | 0.3060 | 0.0621 | 0.9777 |
227
+ | BigVGAN-v2 | Large-scale Compilation | 5M | **4.362** | **0.7026** | **0.2903** | **0.0593** | **0.9793** |
228
+
229
+ ## Speed Benchmark
230
+
231
+ Below are the speed and VRAM usage benchmark results of BigVGAN from `tests/test_cuda_vs_torch_model.py`, using `bigvgan_v2_24khz_100band_256x` as a reference model.
232
+
233
+ | GPU | num_mel_frame | use_cuda_kernel | Speed (kHz) | Real-time Factor | VRAM (GB) |
234
+ |:--------------------------:|:-------------:|:---------------:|:-----------:|:----------------:|:---------:|
235
+ | NVIDIA A100 | 256 | False | 1672.1 | 69.7x | 1.3 |
236
+ | | | True | 3916.5 | 163.2x | 1.3 |
237
+ | | 2048 | False | 1899.6 | 79.2x | 1.7 |
238
+ | | | True | 5330.1 | 222.1x | 1.7 |
239
+ | | 16384 | False | 1973.8 | 82.2x | 5.0 |
240
+ | | | True | 5761.7 | 240.1x | 4.4 |
241
+ | NVIDIA GeForce RTX 3080 | 256 | False | 841.1 | 35.0x | 1.3 |
242
+ | | | True | 1598.1 | 66.6x | 1.3 |
243
+ | | 2048 | False | 929.9 | 38.7x | 1.7 |
244
+ | | | True | 1971.3 | 82.1x | 1.6 |
245
+ | | 16384 | False | 943.4 | 39.3x | 5.0 |
246
+ | | | True | 2026.5 | 84.4x | 3.9 |
247
+ | NVIDIA GeForce RTX 2080 Ti | 256 | False | 515.6 | 21.5x | 1.3 |
248
+ | | | True | 811.3 | 33.8x | 1.3 |
249
+ | | 2048 | False | 576.5 | 24.0x | 1.7 |
250
+ | | | True | 1023.0 | 42.6x | 1.5 |
251
+ | | 16384 | False | 589.4 | 24.6x | 5.0 |
252
+ | | | True | 1068.1 | 44.5x | 3.2 |
253
+
254
+ ## Acknowledgements
255
+
256
+ We thank Vijay Anand Korthikanti and Kevin J. Shih for their generous support in implementing the CUDA kernel for inference.
257
+
258
+ ## References
259
+
260
+ - [HiFi-GAN](https://github.com/jik876/hifi-gan) (for generator and multi-period discriminator)
261
+ - [Snake](https://github.com/EdwardDixon/snake) (for periodic activation)
262
+ - [Alias-free-torch](https://github.com/junjun3518/alias-free-torch) (for anti-aliasing)
263
+ - [Julius](https://github.com/adefossez/julius) (for low-pass filter)
264
+ - [UnivNet](https://github.com/mindslab-ai/univnet) (for multi-resolution discriminator)
265
+ - [descript-audio-codec](https://github.com/descriptinc/descript-audio-codec) and [vocos](https://github.com/gemelo-ai/vocos) (for multi-band multi-scale STFT discriminator and multi-scale mel spectrogram loss)
266
+ - [Amphion](https://github.com/open-mmlab/Amphion) (for multi-scale sub-band CQT discriminator)
BigVGAN/activations.py ADDED
@@ -0,0 +1,126 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Implementation adapted from https://github.com/EdwardDixon/snake under the MIT license.
2
+ # LICENSE is in incl_licenses directory.
3
+
4
+ import torch
5
+ from torch import nn, sin, pow
6
+ from torch.nn import Parameter
7
+
8
+
9
+ class Snake(nn.Module):
10
+ """
11
+ Implementation of a sine-based periodic activation function
12
+ Shape:
13
+ - Input: (B, C, T)
14
+ - Output: (B, C, T), same shape as the input
15
+ Parameters:
16
+ - alpha - trainable parameter
17
+ References:
18
+ - This activation function is from this paper by Liu Ziyin, Tilman Hartwig, Masahito Ueda:
19
+ https://arxiv.org/abs/2006.08195
20
+ Examples:
21
+ >>> a1 = snake(256)
22
+ >>> x = torch.randn(256)
23
+ >>> x = a1(x)
24
+ """
25
+
26
+ def __init__(
27
+ self, in_features, alpha=1.0, alpha_trainable=True, alpha_logscale=False
28
+ ):
29
+ """
30
+ Initialization.
31
+ INPUT:
32
+ - in_features: shape of the input
33
+ - alpha: trainable parameter
34
+ alpha is initialized to 1 by default, higher values = higher-frequency.
35
+ alpha will be trained along with the rest of your model.
36
+ """
37
+ super(Snake, self).__init__()
38
+ self.in_features = in_features
39
+
40
+ # Initialize alpha
41
+ self.alpha_logscale = alpha_logscale
42
+ if self.alpha_logscale: # Log scale alphas initialized to zeros
43
+ self.alpha = Parameter(torch.zeros(in_features) * alpha)
44
+ else: # Linear scale alphas initialized to ones
45
+ self.alpha = Parameter(torch.ones(in_features) * alpha)
46
+
47
+ self.alpha.requires_grad = alpha_trainable
48
+
49
+ self.no_div_by_zero = 0.000000001
50
+
51
+ def forward(self, x):
52
+ """
53
+ Forward pass of the function.
54
+ Applies the function to the input elementwise.
55
+ Snake ∶= x + 1/a * sin^2 (xa)
56
+ """
57
+ alpha = self.alpha.unsqueeze(0).unsqueeze(-1) # Line up with x to [B, C, T]
58
+ if self.alpha_logscale:
59
+ alpha = torch.exp(alpha)
60
+ x = x + (1.0 / (alpha + self.no_div_by_zero)) * pow(sin(x * alpha), 2)
61
+
62
+ return x
63
+
64
+
65
+ class SnakeBeta(nn.Module):
66
+ """
67
+ A modified Snake function which uses separate parameters for the magnitude of the periodic components
68
+ Shape:
69
+ - Input: (B, C, T)
70
+ - Output: (B, C, T), same shape as the input
71
+ Parameters:
72
+ - alpha - trainable parameter that controls frequency
73
+ - beta - trainable parameter that controls magnitude
74
+ References:
75
+ - This activation function is a modified version based on this paper by Liu Ziyin, Tilman Hartwig, Masahito Ueda:
76
+ https://arxiv.org/abs/2006.08195
77
+ Examples:
78
+ >>> a1 = snakebeta(256)
79
+ >>> x = torch.randn(256)
80
+ >>> x = a1(x)
81
+ """
82
+
83
+ def __init__(
84
+ self, in_features, alpha=1.0, alpha_trainable=True, alpha_logscale=False
85
+ ):
86
+ """
87
+ Initialization.
88
+ INPUT:
89
+ - in_features: shape of the input
90
+ - alpha - trainable parameter that controls frequency
91
+ - beta - trainable parameter that controls magnitude
92
+ alpha is initialized to 1 by default, higher values = higher-frequency.
93
+ beta is initialized to 1 by default, higher values = higher-magnitude.
94
+ alpha will be trained along with the rest of your model.
95
+ """
96
+ super(SnakeBeta, self).__init__()
97
+ self.in_features = in_features
98
+
99
+ # Initialize alpha
100
+ self.alpha_logscale = alpha_logscale
101
+ if self.alpha_logscale: # Log scale alphas initialized to zeros
102
+ self.alpha = Parameter(torch.zeros(in_features) * alpha)
103
+ self.beta = Parameter(torch.zeros(in_features) * alpha)
104
+ else: # Linear scale alphas initialized to ones
105
+ self.alpha = Parameter(torch.ones(in_features) * alpha)
106
+ self.beta = Parameter(torch.ones(in_features) * alpha)
107
+
108
+ self.alpha.requires_grad = alpha_trainable
109
+ self.beta.requires_grad = alpha_trainable
110
+
111
+ self.no_div_by_zero = 0.000000001
112
+
113
+ def forward(self, x):
114
+ """
115
+ Forward pass of the function.
116
+ Applies the function to the input elementwise.
117
+ SnakeBeta ∶= x + 1/b * sin^2 (xa)
118
+ """
119
+ alpha = self.alpha.unsqueeze(0).unsqueeze(-1) # Line up with x to [B, C, T]
120
+ beta = self.beta.unsqueeze(0).unsqueeze(-1)
121
+ if self.alpha_logscale:
122
+ alpha = torch.exp(alpha)
123
+ beta = torch.exp(beta)
124
+ x = x + (1.0 / (beta + self.no_div_by_zero)) * pow(sin(x * alpha), 2)
125
+
126
+ return x
BigVGAN/alias_free_activation/cuda/__init__.py ADDED
File without changes
BigVGAN/alias_free_activation/cuda/activation1d.py ADDED
@@ -0,0 +1,77 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright (c) 2024 NVIDIA CORPORATION.
2
+ # Licensed under the MIT license.
3
+
4
+ import torch
5
+ import torch.nn as nn
6
+ from BigVGAN.alias_free_activation.torch.resample import UpSample1d, DownSample1d
7
+
8
+ # load fused CUDA kernel: this enables importing anti_alias_activation_cuda
9
+ from BigVGAN.alias_free_activation.cuda import load
10
+
11
+ anti_alias_activation_cuda = load.load()
12
+
13
+
14
class FusedAntiAliasActivation(torch.autograd.Function):
    """
    Fused upsample -> snake activation -> downsample via a custom CUDA kernel.

    Assumes filter size 12, replication padding on upsampling/downsampling, and
    logscale alpha/beta parameters as inputs. The hyperparameters are hard-coded
    in the kernel to maximize speed.
    NOTE: The fused kernel is incorrect for Activation1d with different hyperparameters.
    """

    @staticmethod
    def forward(ctx, inputs, up_ftr, down_ftr, alpha, beta):
        # Inference-only path: nothing is saved on ctx for backward.
        activation_results = anti_alias_activation_cuda.forward(
            inputs, up_ftr, down_ftr, alpha, beta
        )

        return activation_results

    @staticmethod
    def backward(ctx, output_grads):
        # Gradients are not implemented for the fused kernel; use the non-fused
        # torch path (Activation1d(fused=False)) for training.
        # (Fix: removed the unreachable `return` that followed the raise.)
        raise NotImplementedError
33
+
34
+
35
class Activation1d(nn.Module):
    """
    Upsample -> activation -> downsample wrapper with an optional fused CUDA path.

    When `fused` is True, the filtering and the snake activation run inside
    FusedAntiAliasActivation's CUDA kernel (no autograd backward is defined
    for it in this file); otherwise the three steps run as separate torch ops.
    """

    def __init__(
        self,
        activation,
        up_ratio: int = 2,
        down_ratio: int = 2,
        up_kernel_size: int = 12,
        down_kernel_size: int = 12,
        fused: bool = True,
    ):
        super().__init__()
        self.up_ratio = up_ratio
        self.down_ratio = down_ratio
        self.act = activation
        self.upsample = UpSample1d(up_ratio, up_kernel_size)
        self.downsample = DownSample1d(down_ratio, down_kernel_size)

        self.fused = fused  # Whether to use fused CUDA kernel or not

    def forward(self, x):
        """Apply the anti-aliased activation; x: [B, C, T] -> [B, C, T]."""
        if not self.fused:
            x = self.upsample(x)
            x = self.act(x)
            x = self.downsample(x)
            return x
        else:
            if self.act.__class__.__name__ == "Snake":
                beta = self.act.alpha.data  # Snake uses same params for alpha and beta
            else:
                beta = (
                    self.act.beta.data
                )  # Snakebeta uses different params for alpha and beta
            alpha = self.act.alpha.data
            if (
                not self.act.alpha_logscale
            ):  # Exp baked into cuda kernel, cancel it out with a log
                alpha = torch.log(alpha)
                beta = torch.log(beta)

            # Pass raw tensors (.data) and precomputed filters to the kernel.
            x = FusedAntiAliasActivation.apply(
                x, self.upsample.filter, self.downsample.lowpass.filter, alpha, beta
            )
            return x
BigVGAN/alias_free_activation/cuda/anti_alias_activation.cpp ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ /* coding=utf-8
2
+ * Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
3
+ *
4
+ * Licensed under the Apache License, Version 2.0 (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ *
8
+ * http://www.apache.org/licenses/LICENSE-2.0
9
+ *
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+
17
+ #include <torch/extension.h>
18
+
19
/* Forward declaration of the CUDA implementation (defined in anti_alias_activation_cuda.cu). */
extern "C" torch::Tensor fwd_cuda(torch::Tensor const &input, torch::Tensor const &up_filter, torch::Tensor const &down_filter, torch::Tensor const &alpha, torch::Tensor const &beta);

/* Python bindings: exposes the fused anti-alias activation forward pass as `forward`. */
PYBIND11_MODULE(TORCH_EXTENSION_NAME, m) {
  m.def("forward", &fwd_cuda, "Anti-Alias Activation forward (CUDA)");
}
BigVGAN/alias_free_activation/cuda/anti_alias_activation_cuda.cu ADDED
@@ -0,0 +1,246 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ /* coding=utf-8
2
+ * Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
3
+ *
4
+ * Licensed under the Apache License, Version 2.0 (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ *
8
+ * http://www.apache.org/licenses/LICENSE-2.0
9
+ *
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+
17
+ #include <ATen/ATen.h>
18
+ #include <cuda.h>
19
+ #include <cuda_runtime.h>
20
+ #include <cuda_fp16.h>
21
+ #include <cuda_profiler_api.h>
22
+ #include <ATen/cuda/CUDAContext.h>
23
+ #include <torch/extension.h>
24
+ #include "type_shim.h"
25
+ #include <assert.h>
26
+ #include <cfloat>
27
+ #include <limits>
28
+ #include <stdint.h>
29
+ #include <c10/macros/Macros.h>
30
+
31
+ namespace
32
+ {
33
+ // Hard-coded hyperparameters
34
+ // WARP_SIZE and WARP_BATCH must match the return values batches_per_warp and
35
+ constexpr int ELEMENTS_PER_LDG_STG = 1; //(WARP_ITERATIONS < 4) ? 1 : 4;
36
+ constexpr int BUFFER_SIZE = 32;
37
+ constexpr int FILTER_SIZE = 12;
38
+ constexpr int HALF_FILTER_SIZE = 6;
39
+ constexpr int UPSAMPLE_REPLICATION_PAD = 5; // 5 on each side, matching torch impl
40
+ constexpr int DOWNSAMPLE_REPLICATION_PAD_LEFT = 5; // matching torch impl
41
+ constexpr int DOWNSAMPLE_REPLICATION_PAD_RIGHT = 6; // matching torch impl
42
+
43
/* Fused anti-aliased snake activation kernel:
 * FIR upsample (with replication padding) -> snake activation -> strided FIR
 * downsample, all in registers. Each thread handles BUFFER_SIZE output samples
 * of one (batch, channel) row; grid = (seq blocks, channels, batches).
 * alpha/beta are per-channel parameters stored in log scale (exp applied here).
 * Filter size and padding amounts are fixed by the constants above. */
template <typename input_t, typename output_t, typename acc_t>
__global__ void anti_alias_activation_forward(
    output_t *dst,
    const input_t *src,
    const input_t *up_ftr,
    const input_t *down_ftr,
    const input_t *alpha,
    const input_t *beta,
    int batch_size,
    int channels,
    int seq_len)
{
    // Up and downsample filters
    input_t up_filter[FILTER_SIZE];
    input_t down_filter[FILTER_SIZE];

    // Load data from global memory including extra indices reserved for replication paddings
    input_t elements[2 * FILTER_SIZE + 2 * BUFFER_SIZE + 2 * UPSAMPLE_REPLICATION_PAD] = {0};
    input_t intermediates[2 * FILTER_SIZE + 2 * BUFFER_SIZE + DOWNSAMPLE_REPLICATION_PAD_LEFT + DOWNSAMPLE_REPLICATION_PAD_RIGHT] = {0};

    // Output stores downsampled output before writing to dst
    output_t output[BUFFER_SIZE];

    // blockDim/threadIdx = (128, 1, 1)
    // gridDim/blockIdx = (seq_blocks, channels, batches)
    int block_offset = (blockIdx.x * 128 * BUFFER_SIZE + seq_len * (blockIdx.y + gridDim.y * blockIdx.z));
    int local_offset = threadIdx.x * BUFFER_SIZE;
    int seq_offset = blockIdx.x * 128 * BUFFER_SIZE + local_offset;

    // intermediate have double the seq_len
    int intermediate_local_offset = threadIdx.x * BUFFER_SIZE * 2;
    int intermediate_seq_offset = blockIdx.x * 128 * BUFFER_SIZE * 2 + intermediate_local_offset;

    // Get values needed for replication padding before moving pointer
    const input_t *right_most_pntr = src + (seq_len * (blockIdx.y + gridDim.y * blockIdx.z));
    input_t seq_left_most_value = right_most_pntr[0];
    input_t seq_right_most_value = right_most_pntr[seq_len - 1];

    // Move src and dst pointers
    src += block_offset + local_offset;
    dst += block_offset + local_offset;

    // Alpha and beta values for snake activatons. Applies exp by default
    alpha = alpha + blockIdx.y;
    input_t alpha_val = expf(alpha[0]);
    beta = beta + blockIdx.y;
    input_t beta_val = expf(beta[0]);

    // Cache filter taps in registers.
    #pragma unroll
    for (int it = 0; it < FILTER_SIZE; it += 1)
    {
        up_filter[it] = up_ftr[it];
        down_filter[it] = down_ftr[it];
    }

    // Apply replication padding for upsampling, matching torch impl
    // (even slots get 2 * sample, matching the zero-insertion + gain of the
    // torch conv_transpose upsampler; odd slots stay 0)
    #pragma unroll
    for (int it = -HALF_FILTER_SIZE; it < BUFFER_SIZE + HALF_FILTER_SIZE; it += 1)
    {
        int element_index = seq_offset + it; // index for element
        if ((element_index < 0) && (element_index >= -UPSAMPLE_REPLICATION_PAD))
        {
            elements[2 * (HALF_FILTER_SIZE + it)] = 2 * seq_left_most_value;
        }
        if ((element_index >= seq_len) && (element_index < seq_len + UPSAMPLE_REPLICATION_PAD))
        {
            elements[2 * (HALF_FILTER_SIZE + it)] = 2 * seq_right_most_value;
        }
        if ((element_index >= 0) && (element_index < seq_len))
        {
            elements[2 * (HALF_FILTER_SIZE + it)] = 2 * src[it];
        }
    }

    // Apply upsampling strided convolution and write to intermediates. It reserves DOWNSAMPLE_REPLICATION_PAD_LEFT for replication padding of the downsampilng conv later
    #pragma unroll
    for (int it = 0; it < (2 * BUFFER_SIZE + 2 * FILTER_SIZE); it += 1)
    {
        input_t acc = 0.0;
        int element_index = intermediate_seq_offset + it; // index for intermediate
        #pragma unroll
        for (int f_idx = 0; f_idx < FILTER_SIZE; f_idx += 1)
        {
            if ((element_index + f_idx) >= 0)
            {
                acc += up_filter[f_idx] * elements[it + f_idx];
            }
        }
        intermediates[it + DOWNSAMPLE_REPLICATION_PAD_LEFT] = acc;
    }

    // Apply activation function. It reserves DOWNSAMPLE_REPLICATION_PAD_LEFT and DOWNSAMPLE_REPLICATION_PAD_RIGHT for replication padding of the downsampilng conv later
    // Snake: x += 1/(beta + eps) * sin^2(alpha * x)
    double no_div_by_zero = 0.000000001;
    #pragma unroll
    for (int it = 0; it < 2 * BUFFER_SIZE + 2 * FILTER_SIZE; it += 1)
    {
        intermediates[it + DOWNSAMPLE_REPLICATION_PAD_LEFT] += (1.0 / (beta_val + no_div_by_zero)) * sinf(intermediates[it + DOWNSAMPLE_REPLICATION_PAD_LEFT] * alpha_val) * sinf(intermediates[it + DOWNSAMPLE_REPLICATION_PAD_LEFT] * alpha_val);
    }

    // Apply replication padding before downsampling conv from intermediates
    #pragma unroll
    for (int it = 0; it < DOWNSAMPLE_REPLICATION_PAD_LEFT; it += 1)
    {
        intermediates[it] = intermediates[DOWNSAMPLE_REPLICATION_PAD_LEFT];
    }
    #pragma unroll
    for (int it = DOWNSAMPLE_REPLICATION_PAD_LEFT + 2 * BUFFER_SIZE + 2 * FILTER_SIZE; it < DOWNSAMPLE_REPLICATION_PAD_LEFT + 2 * BUFFER_SIZE + 2 * FILTER_SIZE + DOWNSAMPLE_REPLICATION_PAD_RIGHT; it += 1)
    {
        intermediates[it] = intermediates[DOWNSAMPLE_REPLICATION_PAD_LEFT + 2 * BUFFER_SIZE + 2 * FILTER_SIZE - 1];
    }

    // Apply downsample strided convolution (assuming stride=2) from intermediates
    #pragma unroll
    for (int it = 0; it < BUFFER_SIZE; it += 1)
    {
        input_t acc = 0.0;
        #pragma unroll
        for (int f_idx = 0; f_idx < FILTER_SIZE; f_idx += 1)
        {
            // Add constant DOWNSAMPLE_REPLICATION_PAD_RIGHT to match torch implementation
            acc += down_filter[f_idx] * intermediates[it * 2 + f_idx + DOWNSAMPLE_REPLICATION_PAD_RIGHT];
        }
        output[it] = acc;
    }

    // Write output to dst (guarded so threads past the sequence end write nothing)
    #pragma unroll
    for (int it = 0; it < BUFFER_SIZE; it += ELEMENTS_PER_LDG_STG)
    {
        int element_index = seq_offset + it;
        if (element_index < seq_len)
        {
            dst[it] = output[it];
        }
    }

}
180
+
181
/* Host-side launcher: computes the launch configuration (128 threads per
 * block, 4096 output samples per block; grid = [seq blocks, channels,
 * batches]) and launches the kernel on the current PyTorch CUDA stream. */
template <typename input_t, typename output_t, typename acc_t>
void dispatch_anti_alias_activation_forward(
    output_t *dst,
    const input_t *src,
    const input_t *up_ftr,
    const input_t *down_ftr,
    const input_t *alpha,
    const input_t *beta,
    int batch_size,
    int channels,
    int seq_len)
{
    if (seq_len == 0)
    {
        // Nothing to do for an empty sequence.
        return;
    }
    else
    {
        // Use 128 threads per block to maximize gpu utilization
        constexpr int threads_per_block = 128;
        constexpr int seq_len_per_block = 4096;
        int blocks_per_seq_len = (seq_len + seq_len_per_block - 1) / seq_len_per_block;
        dim3 blocks(blocks_per_seq_len, channels, batch_size);
        dim3 threads(threads_per_block, 1, 1);

        anti_alias_activation_forward<input_t, output_t, acc_t>
            <<<blocks, threads, 0, at::cuda::getCurrentCUDAStream()>>>(dst, src, up_ftr, down_ftr, alpha, beta, batch_size, channels, seq_len);
    }
}
210
+ }
211
+
212
/* Entry point bound to Python (see anti_alias_activation.cpp). Allocates the
 * output tensor and dispatches the templated launcher on the input's scalar
 * type (float / half / bfloat16). Output has the same shape as the input. */
extern "C" torch::Tensor fwd_cuda(torch::Tensor const &input, torch::Tensor const &up_filter, torch::Tensor const &down_filter, torch::Tensor const &alpha, torch::Tensor const &beta)
{
    // Input is a 3d tensor with dimensions [batches, channels, seq_len]
    const int batches = input.size(0);
    const int channels = input.size(1);
    const int seq_len = input.size(2);

    // Output
    auto act_options = input.options().requires_grad(false);

    torch::Tensor anti_alias_activation_results =
        torch::empty({batches, channels, seq_len}, act_options);

    // Raw pointers handed to the typed launcher after the dtype dispatch below.
    void *input_ptr = static_cast<void *>(input.data_ptr());
    void *up_filter_ptr = static_cast<void *>(up_filter.data_ptr());
    void *down_filter_ptr = static_cast<void *>(down_filter.data_ptr());
    void *alpha_ptr = static_cast<void *>(alpha.data_ptr());
    void *beta_ptr = static_cast<void *>(beta.data_ptr());
    void *anti_alias_activation_results_ptr = static_cast<void *>(anti_alias_activation_results.data_ptr());

    // Instantiates the call for float, half and bfloat16 (see type_shim.h).
    DISPATCH_FLOAT_HALF_AND_BFLOAT(
        input.scalar_type(),
        "dispatch anti alias activation_forward",
        dispatch_anti_alias_activation_forward<scalar_t, scalar_t, float>(
            reinterpret_cast<scalar_t *>(anti_alias_activation_results_ptr),
            reinterpret_cast<const scalar_t *>(input_ptr),
            reinterpret_cast<const scalar_t *>(up_filter_ptr),
            reinterpret_cast<const scalar_t *>(down_filter_ptr),
            reinterpret_cast<const scalar_t *>(alpha_ptr),
            reinterpret_cast<const scalar_t *>(beta_ptr),
            batches,
            channels,
            seq_len););
    return anti_alias_activation_results;
}
BigVGAN/alias_free_activation/cuda/compat.h ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ /* coding=utf-8
2
+ * Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
3
+ *
4
+ * Licensed under the Apache License, Version 2.0 (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ *
8
+ * http://www.apache.org/licenses/LICENSE-2.0
9
+ *
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+
17
+ /* This code is copied from NVIDIA apex:
18
+ * https://github.com/NVIDIA/apex
19
+ * with minor changes. */
20
+
21
+ #ifndef TORCH_CHECK
22
+ #define TORCH_CHECK AT_CHECK
23
+ #endif
24
+
25
+ #ifdef VERSION_GE_1_3
26
+ #define DATA_PTR data_ptr
27
+ #else
28
+ #define DATA_PTR data
29
+ #endif
BigVGAN/alias_free_activation/cuda/load.py ADDED
@@ -0,0 +1,86 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright (c) 2024 NVIDIA CORPORATION.
2
+ # Licensed under the MIT license.
3
+
4
+ import os
5
+ import pathlib
6
+ import subprocess
7
+
8
+ from torch.utils import cpp_extension
9
+
10
+ """
11
+ Setting this param to a list has a problem of generating different compilation commands (with different order of architectures) and leading to recompilation of fused kernels.
12
+ Set it to an empty string to avoid recompilation and assign arch flags explicitly in extra_cuda_cflags below.
13
+ """
14
+ os.environ["TORCH_CUDA_ARCH_LIST"] = ""
15
+
16
+
17
def load():
    """
    JIT-compile and import the fused anti-alias activation CUDA extension.

    Returns:
        The compiled `anti_alias_activation_cuda` module exposing `forward`.
    """
    # Check if cuda 11 is installed for compute capability 8.0
    cc_flag = []
    _, bare_metal_major, _ = _get_cuda_bare_metal_version(cpp_extension.CUDA_HOME)
    if int(bare_metal_major) >= 11:
        cc_flag.append("-gencode")
        cc_flag.append("arch=compute_80,code=sm_80")

    # Build path
    srcpath = pathlib.Path(__file__).parent.absolute()
    buildpath = srcpath / "build"
    _create_build_dir(buildpath)

    # Helper function to build the kernels.
    # (Fix: renamed from the misspelled `_cpp_extention_load_helper`.)
    def _cpp_extension_load_helper(name, sources, extra_cuda_flags):
        return cpp_extension.load(
            name=name,
            sources=sources,
            build_directory=buildpath,
            extra_cflags=[
                "-O3",
            ],
            extra_cuda_cflags=[
                "-O3",
                "-gencode",
                "arch=compute_70,code=sm_70",
                "--use_fast_math",
            ]
            + extra_cuda_flags
            + cc_flag,
            verbose=True,
        )

    extra_cuda_flags = [
        "-U__CUDA_NO_HALF_OPERATORS__",
        "-U__CUDA_NO_HALF_CONVERSIONS__",
        "--expt-relaxed-constexpr",
        "--expt-extended-lambda",
    ]

    sources = [
        srcpath / "anti_alias_activation.cpp",
        srcpath / "anti_alias_activation_cuda.cu",
    ]
    anti_alias_activation_cuda = _cpp_extension_load_helper(
        "anti_alias_activation_cuda", sources, extra_cuda_flags
    )

    return anti_alias_activation_cuda
66
+
67
+
68
+ def _get_cuda_bare_metal_version(cuda_dir):
69
+ raw_output = subprocess.check_output(
70
+ [cuda_dir + "/bin/nvcc", "-V"], universal_newlines=True
71
+ )
72
+ output = raw_output.split()
73
+ release_idx = output.index("release") + 1
74
+ release = output[release_idx].split(".")
75
+ bare_metal_major = release[0]
76
+ bare_metal_minor = release[1][0]
77
+
78
+ return raw_output, bare_metal_major, bare_metal_minor
79
+
80
+
81
+ def _create_build_dir(buildpath):
82
+ try:
83
+ os.mkdir(buildpath)
84
+ except OSError:
85
+ if not os.path.isdir(buildpath):
86
+ print(f"Creation of the build directory {buildpath} failed")
BigVGAN/alias_free_activation/cuda/type_shim.h ADDED
@@ -0,0 +1,92 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ /* coding=utf-8
2
+ * Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
3
+ *
4
+ * Licensed under the Apache License, Version 2.0 (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ *
8
+ * http://www.apache.org/licenses/LICENSE-2.0
9
+ *
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+
17
+ #include <ATen/ATen.h>
18
+ #include "compat.h"
19
+
20
/* Expands __VA_ARGS__ with `scalar_t` bound to the C++ type matching the
 * runtime ScalarType TYPE (float / at::Half / at::BFloat16); any other dtype
 * raises via AT_ERROR. */
#define DISPATCH_FLOAT_HALF_AND_BFLOAT(TYPE, NAME, ...) \
    switch (TYPE) \
    { \
    case at::ScalarType::Float: \
    { \
        using scalar_t = float; \
        __VA_ARGS__; \
        break; \
    } \
    case at::ScalarType::Half: \
    { \
        using scalar_t = at::Half; \
        __VA_ARGS__; \
        break; \
    } \
    case at::ScalarType::BFloat16: \
    { \
        using scalar_t = at::BFloat16; \
        __VA_ARGS__; \
        break; \
    } \
    default: \
        AT_ERROR(#NAME, " not implemented for '", toString(TYPE), "'"); \
    }

/* Two-type variant binding `scalar_t_in` and `scalar_t_out`. Note (visible in
 * the cases below): TYPEOUT is only consulted when TYPEIN is Float; for Half
 * and BFloat16 inputs the output type is forced to match the input type. */
#define DISPATCH_FLOAT_HALF_AND_BFLOAT_INOUT_TYPES(TYPEIN, TYPEOUT, NAME, ...) \
    switch (TYPEIN) \
    { \
    case at::ScalarType::Float: \
    { \
        using scalar_t_in = float; \
        switch (TYPEOUT) \
        { \
        case at::ScalarType::Float: \
        { \
            using scalar_t_out = float; \
            __VA_ARGS__; \
            break; \
        } \
        case at::ScalarType::Half: \
        { \
            using scalar_t_out = at::Half; \
            __VA_ARGS__; \
            break; \
        } \
        case at::ScalarType::BFloat16: \
        { \
            using scalar_t_out = at::BFloat16; \
            __VA_ARGS__; \
            break; \
        } \
        default: \
            AT_ERROR(#NAME, " not implemented for '", toString(TYPEOUT), "'"); \
        } \
        break; \
    } \
    case at::ScalarType::Half: \
    { \
        using scalar_t_in = at::Half; \
        using scalar_t_out = at::Half; \
        __VA_ARGS__; \
        break; \
    } \
    case at::ScalarType::BFloat16: \
    { \
        using scalar_t_in = at::BFloat16; \
        using scalar_t_out = at::BFloat16; \
        __VA_ARGS__; \
        break; \
    } \
    default: \
        AT_ERROR(#NAME, " not implemented for '", toString(TYPEIN), "'"); \
    }
BigVGAN/alias_free_activation/torch/__init__.py ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ # Adapted from https://github.com/junjun3518/alias-free-torch under the Apache License 2.0
2
+ # LICENSE is in incl_licenses directory.
3
+
4
+ from .filter import *
5
+ from .resample import *
6
+ from .act import *
BigVGAN/alias_free_activation/torch/act.py ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Adapted from https://github.com/junjun3518/alias-free-torch under the Apache License 2.0
2
+ # LICENSE is in incl_licenses directory.
3
+
4
+ import torch.nn as nn
5
+ from BigVGAN.alias_free_activation.torch.resample import UpSample1d, DownSample1d
6
+
7
+
8
class Activation1d(nn.Module):
    """
    Anti-aliased activation: upsample the signal, apply the wrapped activation,
    then downsample back, suppressing aliasing introduced by the nonlinearity.
    """

    def __init__(
        self,
        activation,
        up_ratio: int = 2,
        down_ratio: int = 2,
        up_kernel_size: int = 12,
        down_kernel_size: int = 12,
    ):
        super().__init__()
        self.up_ratio = up_ratio
        self.down_ratio = down_ratio
        self.act = activation
        self.upsample = UpSample1d(up_ratio, up_kernel_size)
        self.downsample = DownSample1d(down_ratio, down_kernel_size)

    def forward(self, x):
        """x: [B, C, T] -> [B, C, T]."""
        return self.downsample(self.act(self.upsample(x)))
BigVGAN/alias_free_activation/torch/filter.py ADDED
@@ -0,0 +1,101 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Adapted from https://github.com/junjun3518/alias-free-torch under the Apache License 2.0
2
+ # LICENSE is in incl_licenses directory.
3
+
4
+ import torch
5
+ import torch.nn as nn
6
+ import torch.nn.functional as F
7
+ import math
8
+
9
# Use the native implementation when available; otherwise fall back to a
# manual definition for older torch versions.
if hasattr(torch, "sinc"):
    sinc = torch.sinc
else:
    # This code is adopted from adefossez's julius.core.sinc under the MIT License
    # https://adefossez.github.io/julius/julius/core.html
    # LICENSE is in incl_licenses directory.
    def sinc(x: torch.Tensor):
        """
        Implementation of sinc, i.e. sin(pi * x) / (pi * x)
        __Warning__: Different to julius.sinc, the input is multiplied by `pi`!
        """
        one = torch.tensor(1.0, device=x.device, dtype=x.dtype)
        return torch.where(x == 0, one, torch.sin(math.pi * x) / math.pi / x)
25
+
26
+
27
+ # This code is adopted from adefossez's julius.lowpass.LowPassFilters under the MIT License
28
+ # https://adefossez.github.io/julius/julius/lowpass.html
29
+ # LICENSE is in incl_licenses directory.
30
def kaiser_sinc_filter1d(
    cutoff, half_width, kernel_size
):  # return filter [1,1,kernel_size]
    """
    Build a Kaiser-windowed sinc lowpass filter.

    Args:
        cutoff: Normalized cutoff frequency in [0, 0.5]. A cutoff of 0 yields
            the all-zero filter.
        half_width: Transition-band half width; controls the Kaiser beta.
        kernel_size: Number of taps (even and odd sizes both supported).

    Returns:
        Tensor of shape [1, 1, kernel_size] normalized to unit DC gain
        (except for cutoff == 0, which returns zeros).
    """
    even = kernel_size % 2 == 0
    half_size = kernel_size // 2

    # For kaiser window: estimate stopband attenuation A from the transition
    # width, then derive the window's beta parameter (standard Kaiser design).
    delta_f = 4 * half_width
    A = 2.285 * (half_size - 1) * math.pi * delta_f + 7.95
    if A > 50.0:
        beta = 0.1102 * (A - 8.7)
    elif A >= 21.0:
        beta = 0.5842 * (A - 21) ** 0.4 + 0.07886 * (A - 21.0)
    else:
        beta = 0.0
    window = torch.kaiser_window(kernel_size, beta=beta, periodic=False)

    # ratio = 0.5/cutoff -> 2 * cutoff = 1 / ratio
    # Sample times: half-sample offsets for even sizes, integers for odd sizes.
    if even:
        time = torch.arange(-half_size, half_size) + 0.5
    else:
        time = torch.arange(kernel_size) - half_size
    if cutoff == 0:
        # Fix: return zeros directly. Previously the zero filter fell through to
        # the normalization below and 0/0 produced a filter full of NaNs.
        filter_ = torch.zeros_like(time)
    else:
        filter_ = 2 * cutoff * window * sinc(2 * cutoff * time)
        """
        Normalize filter to have sum = 1, otherwise we will have a small leakage of the constant component in the input signal.
        """
        filter_ /= filter_.sum()
    filter = filter_.view(1, 1, kernel_size)

    return filter
63
+
64
+
65
class LowPassFilter1d(nn.Module):
    """
    Depthwise 1D lowpass filtering with a Kaiser-windowed sinc kernel.

    The same kernel is applied to every channel of a [B, C, T] input via a
    grouped convolution; optional replication padding keeps lengths aligned.
    """

    def __init__(
        self,
        cutoff=0.5,
        half_width=0.6,
        stride: int = 1,
        padding: bool = True,
        padding_mode: str = "replicate",
        kernel_size: int = 12,
    ):
        """
        kernel_size should be even number for stylegan3 setup, in this implementation, odd number is also possible.
        """
        super().__init__()
        if cutoff < -0.0:
            raise ValueError("Minimum cutoff must be larger than zero.")
        if cutoff > 0.5:
            raise ValueError("A cutoff above 0.5 does not make sense.")
        self.kernel_size = kernel_size
        self.even = kernel_size % 2 == 0
        # Asymmetric padding keeps the output length consistent for even kernels.
        self.pad_left = kernel_size // 2 - int(self.even)
        self.pad_right = kernel_size // 2
        self.stride = stride
        self.padding = padding
        self.padding_mode = padding_mode
        kernel = kaiser_sinc_filter1d(cutoff, half_width, kernel_size)
        self.register_buffer("filter", kernel)

    def forward(self, x):
        """Filter x of shape [B, C, T]; returns [B, C, T // stride]."""
        _, channels, _ = x.shape

        if self.padding:
            x = F.pad(x, (self.pad_left, self.pad_right), mode=self.padding_mode)
        return F.conv1d(
            x, self.filter.expand(channels, -1, -1), stride=self.stride, groups=channels
        )
BigVGAN/alias_free_activation/torch/resample.py ADDED
@@ -0,0 +1,58 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Adapted from https://github.com/junjun3518/alias-free-torch under the Apache License 2.0
2
+ # LICENSE is in incl_licenses directory.
3
+
4
+ import torch.nn as nn
5
+ from torch.nn import functional as F
6
+ from BigVGAN.alias_free_activation.torch.filter import LowPassFilter1d
7
+ from BigVGAN.alias_free_activation.torch.filter import kaiser_sinc_filter1d
8
+
9
+
10
class UpSample1d(nn.Module):
    """
    `ratio`x upsampling via a transposed convolution with a Kaiser-sinc
    anti-imaging filter; replication padding plus trimming keeps the output
    length exactly `ratio * T`.
    """

    def __init__(self, ratio=2, kernel_size=None):
        super().__init__()
        self.ratio = ratio
        self.kernel_size = (
            int(6 * ratio // 2) * 2 if kernel_size is None else kernel_size
        )
        self.stride = ratio
        self.pad = self.kernel_size // ratio - 1
        # Amounts trimmed after the transposed conv to undo its edge effects.
        self.pad_left = self.pad * self.stride + (self.kernel_size - self.stride) // 2
        self.pad_right = (
            self.pad * self.stride + (self.kernel_size - self.stride + 1) // 2
        )
        kernel = kaiser_sinc_filter1d(
            cutoff=0.5 / ratio, half_width=0.6 / ratio, kernel_size=self.kernel_size
        )
        self.register_buffer("filter", kernel)

    def forward(self, x):
        """x: [B, C, T] -> [B, C, T * ratio]."""
        _, channels, _ = x.shape

        padded = F.pad(x, (self.pad, self.pad), mode="replicate")
        # Gain of `ratio` compensates for the zero-insertion of the stride.
        up = self.ratio * F.conv_transpose1d(
            padded, self.filter.expand(channels, -1, -1), stride=self.stride, groups=channels
        )
        return up[..., self.pad_left : -self.pad_right]
39
+
40
+
41
class DownSample1d(nn.Module):
    """`ratio`x downsampling implemented as a strided Kaiser-sinc lowpass."""

    def __init__(self, ratio=2, kernel_size=None):
        super().__init__()
        self.ratio = ratio
        self.kernel_size = (
            int(6 * ratio // 2) * 2 if kernel_size is None else kernel_size
        )
        # The lowpass both filters and decimates (stride == ratio).
        self.lowpass = LowPassFilter1d(
            cutoff=0.5 / ratio,
            half_width=0.6 / ratio,
            stride=ratio,
            kernel_size=self.kernel_size,
        )

    def forward(self, x):
        """x: [B, C, T] -> [B, C, T // ratio]."""
        return self.lowpass(x)
BigVGAN/bigvgan.py ADDED
@@ -0,0 +1,494 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright (c) 2024 NVIDIA CORPORATION.
2
+ # Licensed under the MIT license.
3
+
4
+ # Adapted from https://github.com/jik876/hifi-gan under the MIT license.
5
+ # LICENSE is in incl_licenses directory.
6
+
7
+ import os
8
+ import json
9
+ from pathlib import Path
10
+ from typing import Optional, Union, Dict
11
+
12
+ import torch
13
+ import torch.nn as nn
14
+ from torch.nn import Conv1d, ConvTranspose1d
15
+ from torch.nn.utils import weight_norm, remove_weight_norm
16
+
17
+ # import activations
18
+ from . import activations
19
+ from .utils import init_weights, get_padding
20
+ from .alias_free_activation.torch.act import Activation1d as TorchActivation1d
21
+ from .env import AttrDict
22
+
23
+ from huggingface_hub import PyTorchModelHubMixin, hf_hub_download
24
+
25
+
26
def load_hparams_from_json(path) -> AttrDict:
    """Load model hyperparameters from a JSON config file into an AttrDict."""
    # json.load reads and parses the file handle directly (no intermediate string).
    with open(path) as f:
        return AttrDict(json.load(f))
30
+
31
+
32
class AMPBlock1(torch.nn.Module):
    """
    AMPBlock applies Snake / SnakeBeta activation functions with trainable parameters that control periodicity, defined for each layer.
    AMPBlock1 has additional self.convs2 that contains additional Conv1d layers with a fixed dilation=1 followed by each layer in self.convs1

    Args:
        h (AttrDict): Hyperparameters.
        channels (int): Number of convolution channels.
        kernel_size (int): Size of the convolution kernel. Default is 3.
        dilation (tuple): Dilation rates for the convolutions. Each dilation layer has two convolutions. Default is (1, 3, 5).
        activation (str): Activation function type. Should be either 'snake' or 'snakebeta'. Default is None.
    """

    def __init__(
        self,
        h: AttrDict,
        channels: int,
        kernel_size: int = 3,
        dilation: tuple = (1, 3, 5),
        activation: str = None,
    ):
        super().__init__()

        self.h = h

        # Dilated convolutions, one per dilation rate.
        self.convs1 = nn.ModuleList(
            [
                weight_norm(
                    Conv1d(
                        channels,
                        channels,
                        kernel_size,
                        stride=1,
                        dilation=d,
                        padding=get_padding(kernel_size, d),
                    )
                )
                for d in dilation
            ]
        )
        self.convs1.apply(init_weights)

        # Companion convolutions with fixed dilation=1, one after each convs1 layer.
        self.convs2 = nn.ModuleList(
            [
                weight_norm(
                    Conv1d(
                        channels,
                        channels,
                        kernel_size,
                        stride=1,
                        dilation=1,
                        padding=get_padding(kernel_size, 1),
                    )
                )
                for _ in range(len(dilation))
            ]
        )
        self.convs2.apply(init_weights)

        self.num_layers = len(self.convs1) + len(
            self.convs2
        )  # Total number of conv layers

        # Select which Activation1d, lazy-load cuda version to ensure backward compatibility
        if self.h.get("use_cuda_kernel", False):
            # Fix: package-relative import, consistent with the `from .` imports at
            # the top of this file; the previous absolute import
            # (`from alias_free_activation...`) fails when run inside the package.
            from .alias_free_activation.cuda.activation1d import (
                Activation1d as CudaActivation1d,
            )

            Activation1d = CudaActivation1d
        else:
            Activation1d = TorchActivation1d

        # Activation functions
        if activation == "snake":
            self.activations = nn.ModuleList(
                [
                    Activation1d(
                        activation=activations.Snake(
                            channels, alpha_logscale=h.snake_logscale
                        )
                    )
                    for _ in range(self.num_layers)
                ]
            )
        elif activation == "snakebeta":
            self.activations = nn.ModuleList(
                [
                    Activation1d(
                        activation=activations.SnakeBeta(
                            channels, alpha_logscale=h.snake_logscale
                        )
                    )
                    for _ in range(self.num_layers)
                ]
            )
        else:
            raise NotImplementedError(
                "activation incorrectly specified. check the config file and look for 'activation'."
            )

    def forward(self, x):
        """Residual block: activation -> dilated conv -> activation -> conv, plus skip."""
        acts1, acts2 = self.activations[::2], self.activations[1::2]
        for c1, c2, a1, a2 in zip(self.convs1, self.convs2, acts1, acts2):
            xt = a1(x)
            xt = c1(xt)
            xt = a2(xt)
            xt = c2(xt)
            x = xt + x

        return x

    def remove_weight_norm(self):
        """Strip weight normalization from all convolutions (for inference/export)."""
        for l in self.convs1:
            remove_weight_norm(l)
        for l in self.convs2:
            remove_weight_norm(l)
149
+
150
+
151
+ class AMPBlock2(torch.nn.Module):
152
+ """
153
+ AMPBlock applies Snake / SnakeBeta activation functions with trainable parameters that control periodicity, defined for each layer.
154
+ Unlike AMPBlock1, AMPBlock2 does not contain extra Conv1d layers with fixed dilation=1
155
+
156
+ Args:
157
+ h (AttrDict): Hyperparameters.
158
+ channels (int): Number of convolution channels.
159
+ kernel_size (int): Size of the convolution kernel. Default is 3.
160
+ dilation (tuple): Dilation rates for the convolutions. Each dilation layer has two convolutions. Default is (1, 3, 5).
161
+ activation (str): Activation function type. Should be either 'snake' or 'snakebeta'. Default is None.
162
+ """
163
+
164
+ def __init__(
165
+ self,
166
+ h: AttrDict,
167
+ channels: int,
168
+ kernel_size: int = 3,
169
+ dilation: tuple = (1, 3, 5),
170
+ activation: str = None,
171
+ ):
172
+ super().__init__()
173
+
174
+ self.h = h
175
+
176
+ self.convs = nn.ModuleList(
177
+ [
178
+ weight_norm(
179
+ Conv1d(
180
+ channels,
181
+ channels,
182
+ kernel_size,
183
+ stride=1,
184
+ dilation=d,
185
+ padding=get_padding(kernel_size, d),
186
+ )
187
+ )
188
+ for d in dilation
189
+ ]
190
+ )
191
+ self.convs.apply(init_weights)
192
+
193
+ self.num_layers = len(self.convs) # Total number of conv layers
194
+
195
+ # Select which Activation1d, lazy-load cuda version to ensure backward compatibility
196
+ if self.h.get("use_cuda_kernel", False):
197
+ from alias_free_activation.cuda.activation1d import (
198
+ Activation1d as CudaActivation1d,
199
+ )
200
+
201
+ Activation1d = CudaActivation1d
202
+ else:
203
+ Activation1d = TorchActivation1d
204
+
205
+ # Activation functions
206
+ if activation == "snake":
207
+ self.activations = nn.ModuleList(
208
+ [
209
+ Activation1d(
210
+ activation=activations.Snake(
211
+ channels, alpha_logscale=h.snake_logscale
212
+ )
213
+ )
214
+ for _ in range(self.num_layers)
215
+ ]
216
+ )
217
+ elif activation == "snakebeta":
218
+ self.activations = nn.ModuleList(
219
+ [
220
+ Activation1d(
221
+ activation=activations.SnakeBeta(
222
+ channels, alpha_logscale=h.snake_logscale
223
+ )
224
+ )
225
+ for _ in range(self.num_layers)
226
+ ]
227
+ )
228
+ else:
229
+ raise NotImplementedError(
230
+ "activation incorrectly specified. check the config file and look for 'activation'."
231
+ )
232
+
233
+ def forward(self, x):
234
+ for c, a in zip(self.convs, self.activations):
235
+ xt = a(x)
236
+ xt = c(xt)
237
+ x = xt + x
238
+ return x
239
+
240
+ def remove_weight_norm(self):
241
+ for l in self.convs:
242
+ remove_weight_norm(l)
243
+
244
+
245
+ class BigVGAN(
246
+ torch.nn.Module,
247
+ PyTorchModelHubMixin,
248
+ library_name="bigvgan",
249
+ repo_url="https://github.com/NVIDIA/BigVGAN",
250
+ docs_url="https://github.com/NVIDIA/BigVGAN/blob/main/README.md",
251
+ pipeline_tag="audio-to-audio",
252
+ license="mit",
253
+ tags=["neural-vocoder", "audio-generation", "arxiv:2206.04658"],
254
+ ):
255
+ """
256
+ BigVGAN is a neural vocoder model that applies anti-aliased periodic activation for residual blocks (resblocks).
257
+ New in BigVGAN-v2: it can optionally use optimized CUDA kernels for AMP (anti-aliased multi-periodicity) blocks.
258
+
259
+ Args:
260
+ h (AttrDict): Hyperparameters.
261
+ use_cuda_kernel (bool): If set to True, loads optimized CUDA kernels for AMP. This should be used for inference only, as training is not supported with CUDA kernels.
262
+
263
+ Note:
264
+ - The `use_cuda_kernel` parameter should be used for inference only, as training with CUDA kernels is not supported.
265
+ - Ensure that the activation function is correctly specified in the hyperparameters (h.activation).
266
+ """
267
+
268
+ def __init__(self, h: AttrDict, use_cuda_kernel: bool = False):
269
+ super().__init__()
270
+ self.h = h
271
+ self.h["use_cuda_kernel"] = use_cuda_kernel
272
+
273
+ # Select which Activation1d, lazy-load cuda version to ensure backward compatibility
274
+ if self.h.get("use_cuda_kernel", False):
275
+ from alias_free_activation.cuda.activation1d import (
276
+ Activation1d as CudaActivation1d,
277
+ )
278
+
279
+ Activation1d = CudaActivation1d
280
+ else:
281
+ Activation1d = TorchActivation1d
282
+
283
+ self.num_kernels = len(h.resblock_kernel_sizes)
284
+ self.num_upsamples = len(h.upsample_rates)
285
+
286
+ # Pre-conv
287
+ self.conv_pre = weight_norm(
288
+ Conv1d(h.num_mels, h.upsample_initial_channel, 7, 1, padding=3)
289
+ )
290
+
291
+ # Define which AMPBlock to use. BigVGAN uses AMPBlock1 as default
292
+ if h.resblock == "1":
293
+ resblock_class = AMPBlock1
294
+ elif h.resblock == "2":
295
+ resblock_class = AMPBlock2
296
+ else:
297
+ raise ValueError(
298
+ f"Incorrect resblock class specified in hyperparameters. Got {h.resblock}"
299
+ )
300
+
301
+ # Transposed conv-based upsamplers. does not apply anti-aliasing
302
+ self.ups = nn.ModuleList()
303
+ for i, (u, k) in enumerate(zip(h.upsample_rates, h.upsample_kernel_sizes)):
304
+ self.ups.append(
305
+ nn.ModuleList(
306
+ [
307
+ weight_norm(
308
+ ConvTranspose1d(
309
+ h.upsample_initial_channel // (2**i),
310
+ h.upsample_initial_channel // (2 ** (i + 1)),
311
+ k,
312
+ u,
313
+ padding=(k - u) // 2,
314
+ )
315
+ )
316
+ ]
317
+ )
318
+ )
319
+
320
+ # Residual blocks using anti-aliased multi-periodicity composition modules (AMP)
321
+ self.resblocks = nn.ModuleList()
322
+ for i in range(len(self.ups)):
323
+ ch = h.upsample_initial_channel // (2 ** (i + 1))
324
+ for j, (k, d) in enumerate(
325
+ zip(h.resblock_kernel_sizes, h.resblock_dilation_sizes)
326
+ ):
327
+ self.resblocks.append(
328
+ resblock_class(h, ch, k, d, activation=h.activation)
329
+ )
330
+
331
+ # Post-conv
332
+ activation_post = (
333
+ activations.Snake(ch, alpha_logscale=h.snake_logscale)
334
+ if h.activation == "snake"
335
+ else (
336
+ activations.SnakeBeta(ch, alpha_logscale=h.snake_logscale)
337
+ if h.activation == "snakebeta"
338
+ else None
339
+ )
340
+ )
341
+ if activation_post is None:
342
+ raise NotImplementedError(
343
+ "activation incorrectly specified. check the config file and look for 'activation'."
344
+ )
345
+
346
+ self.activation_post = Activation1d(activation=activation_post)
347
+
348
+ # Whether to use bias for the final conv_post. Default to True for backward compatibility
349
+ self.use_bias_at_final = h.get("use_bias_at_final", True)
350
+ self.conv_post = weight_norm(
351
+ Conv1d(ch, 1, 7, 1, padding=3, bias=self.use_bias_at_final)
352
+ )
353
+
354
+ # Weight initialization
355
+ for i in range(len(self.ups)):
356
+ self.ups[i].apply(init_weights)
357
+ self.conv_post.apply(init_weights)
358
+
359
+ # Final tanh activation. Defaults to True for backward compatibility
360
+ self.use_tanh_at_final = h.get("use_tanh_at_final", True)
361
+
362
+ def forward(self, x):
363
+ # Pre-conv
364
+ x = self.conv_pre(x)
365
+
366
+ for i in range(self.num_upsamples):
367
+ # Upsampling
368
+ for i_up in range(len(self.ups[i])):
369
+ x = self.ups[i][i_up](x)
370
+ # AMP blocks
371
+ xs = None
372
+ for j in range(self.num_kernels):
373
+ if xs is None:
374
+ xs = self.resblocks[i * self.num_kernels + j](x)
375
+ else:
376
+ xs += self.resblocks[i * self.num_kernels + j](x)
377
+ x = xs / self.num_kernels
378
+
379
+ # Post-conv
380
+ x = self.activation_post(x)
381
+ x = self.conv_post(x)
382
+ # Final tanh activation
383
+ if self.use_tanh_at_final:
384
+ x = torch.tanh(x)
385
+ else:
386
+ x = torch.clamp(x, min=-1.0, max=1.0) # Bound the output to [-1, 1]
387
+
388
+ return x
389
+
390
+ def remove_weight_norm(self):
391
+ try:
392
+ print("Removing weight norm...")
393
+ for l in self.ups:
394
+ for l_i in l:
395
+ remove_weight_norm(l_i)
396
+ for l in self.resblocks:
397
+ l.remove_weight_norm()
398
+ remove_weight_norm(self.conv_pre)
399
+ remove_weight_norm(self.conv_post)
400
+ except ValueError:
401
+ print("[INFO] Model already removed weight norm. Skipping!")
402
+ pass
403
+
404
+ # Additional methods for huggingface_hub support
405
+ def _save_pretrained(self, save_directory: Path) -> None:
406
+ """Save weights and config.json from a Pytorch model to a local directory."""
407
+
408
+ model_path = save_directory / "bigvgan_generator.pt"
409
+ torch.save({"generator": self.state_dict()}, model_path)
410
+
411
+ config_path = save_directory / "config.json"
412
+ with open(config_path, "w") as config_file:
413
+ json.dump(self.h, config_file, indent=4)
414
+
415
+ @classmethod
416
+ def _from_pretrained(
417
+ cls,
418
+ *,
419
+ model_id: str,
420
+ revision: str,
421
+ cache_dir: str,
422
+ force_download: bool,
423
+ proxies: Optional[Dict],
424
+ resume_download: bool,
425
+ local_files_only: bool,
426
+ token: Union[str, bool, None],
427
+ map_location: str = "cpu", # Additional argument
428
+ strict: bool = False, # Additional argument
429
+ use_cuda_kernel: bool = False,
430
+ **model_kwargs,
431
+ ):
432
+ """Load Pytorch pretrained weights and return the loaded model."""
433
+
434
+ # Download and load hyperparameters (h) used by BigVGAN
435
+ if os.path.isdir(model_id):
436
+ print("Loading config.json from local directory")
437
+ config_file = os.path.join(model_id, "config.json")
438
+ else:
439
+ config_file = hf_hub_download(
440
+ repo_id=model_id,
441
+ filename="config.json",
442
+ revision=revision,
443
+ cache_dir=cache_dir,
444
+ force_download=force_download,
445
+ proxies=proxies,
446
+ resume_download=resume_download,
447
+ token=token,
448
+ local_files_only=local_files_only,
449
+ )
450
+ h = load_hparams_from_json(config_file)
451
+
452
+ # instantiate BigVGAN using h
453
+ if use_cuda_kernel:
454
+ print(
455
+ f"[WARNING] You have specified use_cuda_kernel=True during BigVGAN.from_pretrained(). Only inference is supported (training is not implemented)!"
456
+ )
457
+ print(
458
+ f"[WARNING] You need nvcc and ninja installed in your system that matches your PyTorch build is using to build the kernel. If not, the model will fail to initialize or generate incorrect waveform!"
459
+ )
460
+ print(
461
+ f"[WARNING] For detail, see the official GitHub repository: https://github.com/NVIDIA/BigVGAN?tab=readme-ov-file#using-custom-cuda-kernel-for-synthesis"
462
+ )
463
+ model = cls(h, use_cuda_kernel=use_cuda_kernel)
464
+
465
+ # Download and load pretrained generator weight
466
+ if os.path.isdir(model_id):
467
+ print("Loading weights from local directory")
468
+ model_file = os.path.join(model_id, "bigvgan_generator.pt")
469
+ else:
470
+ print(f"Loading weights from {model_id}")
471
+ model_file = hf_hub_download(
472
+ repo_id=model_id,
473
+ filename="bigvgan_generator.pt",
474
+ revision=revision,
475
+ cache_dir=cache_dir,
476
+ force_download=force_download,
477
+ proxies=proxies,
478
+ resume_download=resume_download,
479
+ token=token,
480
+ local_files_only=local_files_only,
481
+ )
482
+
483
+ checkpoint_dict = torch.load(model_file, map_location=map_location)
484
+
485
+ try:
486
+ model.load_state_dict(checkpoint_dict["generator"])
487
+ except RuntimeError:
488
+ print(
489
+ f"[INFO] the pretrained checkpoint does not contain weight norm. Loading the checkpoint after removing weight norm!"
490
+ )
491
+ model.remove_weight_norm()
492
+ model.load_state_dict(checkpoint_dict["generator"])
493
+
494
+ return model
BigVGAN/configs/bigvgan_22khz_80band.json ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "resblock": "1",
3
+ "num_gpus": 0,
4
+ "batch_size": 32,
5
+ "learning_rate": 0.0001,
6
+ "adam_b1": 0.8,
7
+ "adam_b2": 0.99,
8
+ "lr_decay": 0.9999996,
9
+ "seed": 1234,
10
+
11
+ "upsample_rates": [4,4,2,2,2,2],
12
+ "upsample_kernel_sizes": [8,8,4,4,4,4],
13
+ "upsample_initial_channel": 1536,
14
+ "resblock_kernel_sizes": [3,7,11],
15
+ "resblock_dilation_sizes": [[1,3,5], [1,3,5], [1,3,5]],
16
+
17
+ "activation": "snakebeta",
18
+ "snake_logscale": true,
19
+
20
+ "resolutions": [[1024, 120, 600], [2048, 240, 1200], [512, 50, 240]],
21
+ "mpd_reshapes": [2, 3, 5, 7, 11],
22
+ "use_spectral_norm": false,
23
+ "discriminator_channel_mult": 1,
24
+
25
+ "segment_size": 8192,
26
+ "num_mels": 80,
27
+ "num_freq": 1025,
28
+ "n_fft": 1024,
29
+ "hop_size": 256,
30
+ "win_size": 1024,
31
+
32
+ "sampling_rate": 22050,
33
+
34
+ "fmin": 0,
35
+ "fmax": 8000,
36
+ "fmax_for_loss": null,
37
+
38
+ "num_workers": 4,
39
+
40
+ "dist_config": {
41
+ "dist_backend": "nccl",
42
+ "dist_url": "tcp://localhost:54321",
43
+ "world_size": 1
44
+ }
45
+ }
BigVGAN/configs/bigvgan_24khz_100band.json ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "resblock": "1",
3
+ "num_gpus": 0,
4
+ "batch_size": 32,
5
+ "learning_rate": 0.0001,
6
+ "adam_b1": 0.8,
7
+ "adam_b2": 0.99,
8
+ "lr_decay": 0.9999996,
9
+ "seed": 1234,
10
+
11
+ "upsample_rates": [4,4,2,2,2,2],
12
+ "upsample_kernel_sizes": [8,8,4,4,4,4],
13
+ "upsample_initial_channel": 1536,
14
+ "resblock_kernel_sizes": [3,7,11],
15
+ "resblock_dilation_sizes": [[1,3,5], [1,3,5], [1,3,5]],
16
+
17
+ "activation": "snakebeta",
18
+ "snake_logscale": true,
19
+
20
+ "resolutions": [[1024, 120, 600], [2048, 240, 1200], [512, 50, 240]],
21
+ "mpd_reshapes": [2, 3, 5, 7, 11],
22
+ "use_spectral_norm": false,
23
+ "discriminator_channel_mult": 1,
24
+
25
+ "segment_size": 8192,
26
+ "num_mels": 100,
27
+ "num_freq": 1025,
28
+ "n_fft": 1024,
29
+ "hop_size": 256,
30
+ "win_size": 1024,
31
+
32
+ "sampling_rate": 24000,
33
+
34
+ "fmin": 0,
35
+ "fmax": 12000,
36
+ "fmax_for_loss": null,
37
+
38
+ "num_workers": 4,
39
+
40
+ "dist_config": {
41
+ "dist_backend": "nccl",
42
+ "dist_url": "tcp://localhost:54321",
43
+ "world_size": 1
44
+ }
45
+ }
BigVGAN/configs/bigvgan_base_22khz_80band.json ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "resblock": "1",
3
+ "num_gpus": 0,
4
+ "batch_size": 32,
5
+ "learning_rate": 0.0001,
6
+ "adam_b1": 0.8,
7
+ "adam_b2": 0.99,
8
+ "lr_decay": 0.9999996,
9
+ "seed": 1234,
10
+
11
+ "upsample_rates": [8,8,2,2],
12
+ "upsample_kernel_sizes": [16,16,4,4],
13
+ "upsample_initial_channel": 512,
14
+ "resblock_kernel_sizes": [3,7,11],
15
+ "resblock_dilation_sizes": [[1,3,5], [1,3,5], [1,3,5]],
16
+
17
+ "activation": "snakebeta",
18
+ "snake_logscale": true,
19
+
20
+ "resolutions": [[1024, 120, 600], [2048, 240, 1200], [512, 50, 240]],
21
+ "mpd_reshapes": [2, 3, 5, 7, 11],
22
+ "use_spectral_norm": false,
23
+ "discriminator_channel_mult": 1,
24
+
25
+ "segment_size": 8192,
26
+ "num_mels": 80,
27
+ "num_freq": 1025,
28
+ "n_fft": 1024,
29
+ "hop_size": 256,
30
+ "win_size": 1024,
31
+
32
+ "sampling_rate": 22050,
33
+
34
+ "fmin": 0,
35
+ "fmax": 8000,
36
+ "fmax_for_loss": null,
37
+
38
+ "num_workers": 4,
39
+
40
+ "dist_config": {
41
+ "dist_backend": "nccl",
42
+ "dist_url": "tcp://localhost:54321",
43
+ "world_size": 1
44
+ }
45
+ }
BigVGAN/configs/bigvgan_base_24khz_100band.json ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "resblock": "1",
3
+ "num_gpus": 0,
4
+ "batch_size": 32,
5
+ "learning_rate": 0.0001,
6
+ "adam_b1": 0.8,
7
+ "adam_b2": 0.99,
8
+ "lr_decay": 0.9999996,
9
+ "seed": 1234,
10
+
11
+ "upsample_rates": [8,8,2,2],
12
+ "upsample_kernel_sizes": [16,16,4,4],
13
+ "upsample_initial_channel": 512,
14
+ "resblock_kernel_sizes": [3,7,11],
15
+ "resblock_dilation_sizes": [[1,3,5], [1,3,5], [1,3,5]],
16
+
17
+ "activation": "snakebeta",
18
+ "snake_logscale": true,
19
+
20
+ "resolutions": [[1024, 120, 600], [2048, 240, 1200], [512, 50, 240]],
21
+ "mpd_reshapes": [2, 3, 5, 7, 11],
22
+ "use_spectral_norm": false,
23
+ "discriminator_channel_mult": 1,
24
+
25
+ "segment_size": 8192,
26
+ "num_mels": 100,
27
+ "num_freq": 1025,
28
+ "n_fft": 1024,
29
+ "hop_size": 256,
30
+ "win_size": 1024,
31
+
32
+ "sampling_rate": 24000,
33
+
34
+ "fmin": 0,
35
+ "fmax": 12000,
36
+ "fmax_for_loss": null,
37
+
38
+ "num_workers": 4,
39
+
40
+ "dist_config": {
41
+ "dist_backend": "nccl",
42
+ "dist_url": "tcp://localhost:54321",
43
+ "world_size": 1
44
+ }
45
+ }
BigVGAN/configs/bigvgan_v2_22khz_80band_256x.json ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "resblock": "1",
3
+ "num_gpus": 0,
4
+ "batch_size": 4,
5
+ "learning_rate": 0.0001,
6
+ "adam_b1": 0.8,
7
+ "adam_b2": 0.99,
8
+ "lr_decay": 0.9999996,
9
+ "seed": 1234,
10
+
11
+ "upsample_rates": [4,4,2,2,2,2],
12
+ "upsample_kernel_sizes": [8,8,4,4,4,4],
13
+ "upsample_initial_channel": 1536,
14
+ "resblock_kernel_sizes": [3,7,11],
15
+ "resblock_dilation_sizes": [[1,3,5], [1,3,5], [1,3,5]],
16
+
17
+ "use_tanh_at_final": false,
18
+ "use_bias_at_final": false,
19
+
20
+ "activation": "snakebeta",
21
+ "snake_logscale": true,
22
+
23
+ "use_cqtd_instead_of_mrd": true,
24
+ "cqtd_filters": 128,
25
+ "cqtd_max_filters": 1024,
26
+ "cqtd_filters_scale": 1,
27
+ "cqtd_dilations": [1, 2, 4],
28
+ "cqtd_hop_lengths": [512, 256, 256],
29
+ "cqtd_n_octaves": [9, 9, 9],
30
+ "cqtd_bins_per_octaves": [24, 36, 48],
31
+
32
+ "mpd_reshapes": [2, 3, 5, 7, 11],
33
+ "use_spectral_norm": false,
34
+ "discriminator_channel_mult": 1,
35
+
36
+ "use_multiscale_melloss": true,
37
+ "lambda_melloss": 15,
38
+
39
+ "clip_grad_norm": 500,
40
+
41
+ "segment_size": 65536,
42
+ "num_mels": 80,
43
+ "num_freq": 1025,
44
+ "n_fft": 1024,
45
+ "hop_size": 256,
46
+ "win_size": 1024,
47
+
48
+ "sampling_rate": 22050,
49
+
50
+ "fmin": 0,
51
+ "fmax": null,
52
+ "fmax_for_loss": null,
53
+
54
+ "num_workers": 4,
55
+
56
+ "dist_config": {
57
+ "dist_backend": "nccl",
58
+ "dist_url": "tcp://localhost:54321",
59
+ "world_size": 1
60
+ }
61
+ }
BigVGAN/configs/bigvgan_v2_22khz_80band_fmax8k_256x.json ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "resblock": "1",
3
+ "num_gpus": 0,
4
+ "batch_size": 4,
5
+ "learning_rate": 0.0001,
6
+ "adam_b1": 0.8,
7
+ "adam_b2": 0.99,
8
+ "lr_decay": 0.9999996,
9
+ "seed": 1234,
10
+
11
+ "upsample_rates": [4,4,2,2,2,2],
12
+ "upsample_kernel_sizes": [8,8,4,4,4,4],
13
+ "upsample_initial_channel": 1536,
14
+ "resblock_kernel_sizes": [3,7,11],
15
+ "resblock_dilation_sizes": [[1,3,5], [1,3,5], [1,3,5]],
16
+
17
+ "use_tanh_at_final": false,
18
+ "use_bias_at_final": false,
19
+
20
+ "activation": "snakebeta",
21
+ "snake_logscale": true,
22
+
23
+ "use_cqtd_instead_of_mrd": true,
24
+ "cqtd_filters": 128,
25
+ "cqtd_max_filters": 1024,
26
+ "cqtd_filters_scale": 1,
27
+ "cqtd_dilations": [1, 2, 4],
28
+ "cqtd_hop_lengths": [512, 256, 256],
29
+ "cqtd_n_octaves": [9, 9, 9],
30
+ "cqtd_bins_per_octaves": [24, 36, 48],
31
+
32
+ "mpd_reshapes": [2, 3, 5, 7, 11],
33
+ "use_spectral_norm": false,
34
+ "discriminator_channel_mult": 1,
35
+
36
+ "use_multiscale_melloss": true,
37
+ "lambda_melloss": 15,
38
+
39
+ "clip_grad_norm": 500,
40
+
41
+ "segment_size": 65536,
42
+ "num_mels": 80,
43
+ "num_freq": 1025,
44
+ "n_fft": 1024,
45
+ "hop_size": 256,
46
+ "win_size": 1024,
47
+
48
+ "sampling_rate": 22050,
49
+
50
+ "fmin": 0,
51
+ "fmax": 8000,
52
+ "fmax_for_loss": null,
53
+
54
+ "num_workers": 4,
55
+
56
+ "dist_config": {
57
+ "dist_backend": "nccl",
58
+ "dist_url": "tcp://localhost:54321",
59
+ "world_size": 1
60
+ }
61
+ }
BigVGAN/configs/bigvgan_v2_24khz_100band_256x.json ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "resblock": "1",
3
+ "num_gpus": 0,
4
+ "batch_size": 4,
5
+ "learning_rate": 0.0001,
6
+ "adam_b1": 0.8,
7
+ "adam_b2": 0.99,
8
+ "lr_decay": 0.9999996,
9
+ "seed": 1234,
10
+
11
+ "upsample_rates": [4,4,2,2,2,2],
12
+ "upsample_kernel_sizes": [8,8,4,4,4,4],
13
+ "upsample_initial_channel": 1536,
14
+ "resblock_kernel_sizes": [3,7,11],
15
+ "resblock_dilation_sizes": [[1,3,5], [1,3,5], [1,3,5]],
16
+
17
+ "use_tanh_at_final": false,
18
+ "use_bias_at_final": false,
19
+
20
+ "activation": "snakebeta",
21
+ "snake_logscale": true,
22
+
23
+ "use_cqtd_instead_of_mrd": true,
24
+ "cqtd_filters": 128,
25
+ "cqtd_max_filters": 1024,
26
+ "cqtd_filters_scale": 1,
27
+ "cqtd_dilations": [1, 2, 4],
28
+ "cqtd_hop_lengths": [512, 256, 256],
29
+ "cqtd_n_octaves": [9, 9, 9],
30
+ "cqtd_bins_per_octaves": [24, 36, 48],
31
+
32
+ "mpd_reshapes": [2, 3, 5, 7, 11],
33
+ "use_spectral_norm": false,
34
+ "discriminator_channel_mult": 1,
35
+
36
+ "use_multiscale_melloss": true,
37
+ "lambda_melloss": 15,
38
+
39
+ "clip_grad_norm": 500,
40
+
41
+ "segment_size": 65536,
42
+ "num_mels": 100,
43
+ "num_freq": 1025,
44
+ "n_fft": 1024,
45
+ "hop_size": 256,
46
+ "win_size": 1024,
47
+
48
+ "sampling_rate": 24000,
49
+
50
+ "fmin": 0,
51
+ "fmax": null,
52
+ "fmax_for_loss": null,
53
+
54
+ "num_workers": 4,
55
+
56
+ "dist_config": {
57
+ "dist_backend": "nccl",
58
+ "dist_url": "tcp://localhost:54321",
59
+ "world_size": 1
60
+ }
61
+ }
BigVGAN/configs/bigvgan_v2_44khz_128band_256x.json ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "resblock": "1",
3
+ "num_gpus": 0,
4
+ "batch_size": 4,
5
+ "learning_rate": 0.0001,
6
+ "adam_b1": 0.8,
7
+ "adam_b2": 0.99,
8
+ "lr_decay": 0.9999996,
9
+ "seed": 1234,
10
+
11
+ "upsample_rates": [4,4,2,2,2,2],
12
+ "upsample_kernel_sizes": [8,8,4,4,4,4],
13
+ "upsample_initial_channel": 1536,
14
+ "resblock_kernel_sizes": [3,7,11],
15
+ "resblock_dilation_sizes": [[1,3,5], [1,3,5], [1,3,5]],
16
+
17
+ "use_tanh_at_final": false,
18
+ "use_bias_at_final": false,
19
+
20
+ "activation": "snakebeta",
21
+ "snake_logscale": true,
22
+
23
+ "use_cqtd_instead_of_mrd": true,
24
+ "cqtd_filters": 128,
25
+ "cqtd_max_filters": 1024,
26
+ "cqtd_filters_scale": 1,
27
+ "cqtd_dilations": [1, 2, 4],
28
+ "cqtd_hop_lengths": [512, 256, 256],
29
+ "cqtd_n_octaves": [9, 9, 9],
30
+ "cqtd_bins_per_octaves": [24, 36, 48],
31
+
32
+ "mpd_reshapes": [2, 3, 5, 7, 11],
33
+ "use_spectral_norm": false,
34
+ "discriminator_channel_mult": 1,
35
+
36
+ "use_multiscale_melloss": true,
37
+ "lambda_melloss": 15,
38
+
39
+ "clip_grad_norm": 500,
40
+
41
+ "segment_size": 65536,
42
+ "num_mels": 128,
43
+ "num_freq": 1025,
44
+ "n_fft": 1024,
45
+ "hop_size": 256,
46
+ "win_size": 1024,
47
+
48
+ "sampling_rate": 44100,
49
+
50
+ "fmin": 0,
51
+ "fmax": null,
52
+ "fmax_for_loss": null,
53
+
54
+ "num_workers": 4,
55
+
56
+ "dist_config": {
57
+ "dist_backend": "nccl",
58
+ "dist_url": "tcp://localhost:54321",
59
+ "world_size": 1
60
+ }
61
+ }
BigVGAN/configs/bigvgan_v2_44khz_128band_512x.json ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "resblock": "1",
3
+ "num_gpus": 0,
4
+ "batch_size": 4,
5
+ "learning_rate": 0.0001,
6
+ "adam_b1": 0.8,
7
+ "adam_b2": 0.99,
8
+ "lr_decay": 0.9999996,
9
+ "seed": 1234,
10
+
11
+ "upsample_rates": [8,4,2,2,2,2],
12
+ "upsample_kernel_sizes": [16,8,4,4,4,4],
13
+ "upsample_initial_channel": 1536,
14
+ "resblock_kernel_sizes": [3,7,11],
15
+ "resblock_dilation_sizes": [[1,3,5], [1,3,5], [1,3,5]],
16
+
17
+ "use_tanh_at_final": false,
18
+ "use_bias_at_final": false,
19
+
20
+ "activation": "snakebeta",
21
+ "snake_logscale": true,
22
+
23
+ "use_cqtd_instead_of_mrd": true,
24
+ "cqtd_filters": 128,
25
+ "cqtd_max_filters": 1024,
26
+ "cqtd_filters_scale": 1,
27
+ "cqtd_dilations": [1, 2, 4],
28
+ "cqtd_hop_lengths": [512, 256, 256],
29
+ "cqtd_n_octaves": [9, 9, 9],
30
+ "cqtd_bins_per_octaves": [24, 36, 48],
31
+
32
+ "mpd_reshapes": [2, 3, 5, 7, 11],
33
+ "use_spectral_norm": false,
34
+ "discriminator_channel_mult": 1,
35
+
36
+ "use_multiscale_melloss": true,
37
+ "lambda_melloss": 15,
38
+
39
+ "clip_grad_norm": 500,
40
+
41
+ "segment_size": 65536,
42
+ "num_mels": 128,
43
+ "num_freq": 2049,
44
+ "n_fft": 2048,
45
+ "hop_size": 512,
46
+ "win_size": 2048,
47
+
48
+ "sampling_rate": 44100,
49
+
50
+ "fmin": 0,
51
+ "fmax": null,
52
+ "fmax_for_loss": null,
53
+
54
+ "num_workers": 4,
55
+
56
+ "dist_config": {
57
+ "dist_backend": "nccl",
58
+ "dist_url": "tcp://localhost:54321",
59
+ "world_size": 1
60
+ }
61
+ }
BigVGAN/demo/__init__.py ADDED
File without changes
BigVGAN/demo/app.py ADDED
@@ -0,0 +1,441 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright (c) 2024 NVIDIA CORPORATION.
2
+ # Licensed under the MIT license.
3
+
4
+ import spaces
5
+ import gradio as gr
6
+ import pandas as pd
7
+ import torch
8
+ import os
9
+ import sys
10
+
11
+ # to import modules from parent_dir
12
+ parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
13
+ sys.path.append(parent_dir)
14
+
15
+ from meldataset import get_mel_spectrogram, MAX_WAV_VALUE
16
+ from bigvgan import BigVGAN
17
+ import librosa
18
+ import numpy as np
19
+ from utils import plot_spectrogram
20
+ import PIL
21
+
22
+ if torch.cuda.is_available():
23
+ device = torch.device("cuda")
24
+ torch.backends.cudnn.benchmark = False
25
+ print(f"using GPU")
26
+ else:
27
+ device = torch.device("cpu")
28
+ print(f"using CPU")
29
+
30
+
31
+ def inference_gradio(input, model_choice): # Input is audio waveform in [T, channel]
32
+ sr, audio = input # Unpack input to sampling rate and audio itself
33
+ audio = np.transpose(audio) # Transpose to [channel, T] for librosa
34
+ audio = audio / MAX_WAV_VALUE # Convert int16 to float range used by BigVGAN
35
+
36
+ model = dict_model[model_choice]
37
+
38
+ if sr != model.h.sampling_rate: # Convert audio to model's sampling rate
39
+ audio = librosa.resample(audio, orig_sr=sr, target_sr=model.h.sampling_rate)
40
+ if len(audio.shape) == 2: # Stereo
41
+ audio = librosa.to_mono(audio) # Convert to mono if stereo
42
+ audio = librosa.util.normalize(audio) * 0.95
43
+
44
+ output, spec_gen = inference_model(
45
+ audio, model
46
+ ) # Output is generated audio in ndarray, int16
47
+
48
+ spec_plot_gen = plot_spectrogram(spec_gen)
49
+
50
+ output_audio = (model.h.sampling_rate, output) # Tuple for gr.Audio output
51
+
52
+ buffer = spec_plot_gen.canvas.buffer_rgba()
53
+ output_image = PIL.Image.frombuffer(
54
+ "RGBA", spec_plot_gen.canvas.get_width_height(), buffer, "raw", "RGBA", 0, 1
55
+ )
56
+
57
+ return output_audio, output_image
58
+
59
+
60
+ @spaces.GPU(duration=120)
61
+ def inference_model(audio_input, model):
62
+ # Load model to device
63
+ model.to(device)
64
+
65
+ with torch.inference_mode():
66
+ wav = torch.FloatTensor(audio_input)
67
+ # Compute mel spectrogram from the ground truth audio
68
+ spec_gt = get_mel_spectrogram(wav.unsqueeze(0), model.h).to(device)
69
+
70
+ y_g_hat = model(spec_gt)
71
+
72
+ audio_gen = y_g_hat.squeeze().cpu()
73
+ spec_gen = get_mel_spectrogram(audio_gen.unsqueeze(0), model.h)
74
+ audio_gen = audio_gen.numpy() # [T], float [-1, 1]
75
+ audio_gen = (audio_gen * MAX_WAV_VALUE).astype("int16") # [T], int16
76
+ spec_gen = spec_gen.squeeze().numpy() # [C, T_frame]
77
+
78
+ # Unload to CPU
79
+ model.to("cpu")
80
+ # Delete GPU tensor
81
+ del spec_gt, y_g_hat
82
+
83
+ return audio_gen, spec_gen
84
+
85
+
86
+ css = """
87
+ a {
88
+ color: inherit;
89
+ text-decoration: underline;
90
+ }
91
+ .gradio-container {
92
+ font-family: 'IBM Plex Sans', sans-serif;
93
+ }
94
+ .gr-button {
95
+ color: white;
96
+ border-color: #000000;
97
+ background: #000000;
98
+ }
99
+ input[type='range'] {
100
+ accent-color: #000000;
101
+ }
102
+ .dark input[type='range'] {
103
+ accent-color: #dfdfdf;
104
+ }
105
+ .container {
106
+ max-width: 730px;
107
+ margin: auto;
108
+ padding-top: 1.5rem;
109
+ }
110
+ #gallery {
111
+ min-height: 22rem;
112
+ margin-bottom: 15px;
113
+ margin-left: auto;
114
+ margin-right: auto;
115
+ border-bottom-right-radius: .5rem !important;
116
+ border-bottom-left-radius: .5rem !important;
117
+ }
118
+ #gallery>div>.h-full {
119
+ min-height: 20rem;
120
+ }
121
+ .details:hover {
122
+ text-decoration: underline;
123
+ }
124
+ .gr-button {
125
+ white-space: nowrap;
126
+ }
127
+ .gr-button:focus {
128
+ border-color: rgb(147 197 253 / var(--tw-border-opacity));
129
+ outline: none;
130
+ box-shadow: var(--tw-ring-offset-shadow), var(--tw-ring-shadow), var(--tw-shadow, 0 0 #0000);
131
+ --tw-border-opacity: 1;
132
+ --tw-ring-offset-shadow: var(--tw-ring-inset) 0 0 0 var(--tw-ring-offset-width) var(--tw-ring-offset-color);
133
+ --tw-ring-shadow: var(--tw-ring-inset) 0 0 0 calc(3px var(--tw-ring-offset-width)) var(--tw-ring-color);
134
+ --tw-ring-color: rgb(191 219 254 / var(--tw-ring-opacity));
135
+ --tw-ring-opacity: .5;
136
+ }
137
+ #advanced-btn {
138
+ font-size: .7rem !important;
139
+ line-height: 19px;
140
+ margin-top: 12px;
141
+ margin-bottom: 12px;
142
+ padding: 2px 8px;
143
+ border-radius: 14px !important;
144
+ }
145
+ #advanced-options {
146
+ margin-bottom: 20px;
147
+ }
148
+ .footer {
149
+ margin-bottom: 45px;
150
+ margin-top: 35px;
151
+ text-align: center;
152
+ border-bottom: 1px solid #e5e5e5;
153
+ }
154
+ .footer>p {
155
+ font-size: .8rem;
156
+ display: inline-block;
157
+ padding: 0 10px;
158
+ transform: translateY(10px);
159
+ background: white;
160
+ }
161
+ .dark .footer {
162
+ border-color: #303030;
163
+ }
164
+ .dark .footer>p {
165
+ background: #0b0f19;
166
+ }
167
+ .acknowledgments h4{
168
+ margin: 1.25em 0 .25em 0;
169
+ font-weight: bold;
170
+ font-size: 115%;
171
+ }
172
+ #container-advanced-btns{
173
+ display: flex;
174
+ flex-wrap: wrap;
175
+ justify-content: space-between;
176
+ align-items: center;
177
+ }
178
+ .animate-spin {
179
+ animation: spin 1s linear infinite;
180
+ }
181
+ @keyframes spin {
182
+ from {
183
+ transform: rotate(0deg);
184
+ }
185
+ to {
186
+ transform: rotate(360deg);
187
+ }
188
+ }
189
+ #share-btn-container {
190
+ display: flex; padding-left: 0.5rem !important; padding-right: 0.5rem !important; background-color: #000000; justify-content: center; align-items: center; border-radius: 9999px !important; width: 13rem;
191
+ margin-top: 10px;
192
+ margin-left: auto;
193
+ }
194
+ #share-btn {
195
+ all: initial; color: #ffffff;font-weight: 600; cursor:pointer; font-family: 'IBM Plex Sans', sans-serif; margin-left: 0.5rem !important; padding-top: 0.25rem !important; padding-bottom: 0.25rem !important;right:0;
196
+ }
197
+ #share-btn * {
198
+ all: unset;
199
+ }
200
+ #share-btn-container div:nth-child(-n+2){
201
+ width: auto !important;
202
+ min-height: 0px !important;
203
+ }
204
+ #share-btn-container .wrap {
205
+ display: none !important;
206
+ }
207
+ .gr-form{
208
+ flex: 1 1 50%; border-top-right-radius: 0; border-bottom-right-radius: 0;
209
+ }
210
+ #prompt-container{
211
+ gap: 0;
212
+ }
213
+ #generated_id{
214
+ min-height: 700px
215
+ }
216
+ #setting_id{
217
+ margin-bottom: 12px;
218
+ text-align: center;
219
+ font-weight: 900;
220
+ }
221
+ """
222
+
223
+ # Script for loading the models
224
+
225
# Script for loading the models

# All pretrained BigVGAN checkpoints exposed by this demo, keyed by their
# Hugging Face repo suffix under the "nvidia/" namespace.
LIST_MODEL_ID = [
    "bigvgan_24khz_100band",
    "bigvgan_base_24khz_100band",
    "bigvgan_22khz_80band",
    "bigvgan_base_22khz_80band",
    "bigvgan_v2_22khz_80band_256x",
    "bigvgan_v2_22khz_80band_fmax8k_256x",
    "bigvgan_v2_24khz_100band_256x",
    "bigvgan_v2_44khz_128band_256x",
    "bigvgan_v2_44khz_128band_512x",
]

# Preloaded generator modules and their hyperparameter configs, keyed by model
# name so inference can look them up without reloading from the hub.
dict_model = {}
dict_config = {}


def _load_generator(name):
    """Download a pretrained BigVGAN generator and prepare it for inference."""
    net = BigVGAN.from_pretrained("nvidia/" + name)
    net.remove_weight_norm()  # inference-only: fold weight norm into the weights
    net.eval()
    return net


for _model_name in LIST_MODEL_ID:
    _generator = _load_generator(_model_name)
    dict_model[_model_name] = _generator
    # `h` is the hyperparameter config attached by BigVGAN.from_pretrained.
    dict_config[_model_name] = _generator.h
248
+
249
+ # Script for Gradio UI
250
+
251
# Script for Gradio UI

# Top-level Blocks app; `css` is the page stylesheet defined earlier in this
# script, and `inference_gradio(audio, model_name)` is assumed to be defined
# earlier as well (it is wired to the Submit button below).
iface = gr.Blocks(css=css, title="BigVGAN - Demo")

with iface:
    # Page header: title plus paper / code / demo / project links.
    gr.HTML(
        """
        <div style="text-align: center; max-width: 900px; margin: 0 auto;">
        <div
        style="
        display: inline-flex;
        align-items: center;
        gap: 0.8rem;
        font-size: 1.5rem;
        "
        >
        <h1 style="font-weight: 700; margin-bottom: 7px; line-height: normal;">
        BigVGAN: A Universal Neural Vocoder with Large-Scale Training
        </h1>
        </div>
        <p style="margin-bottom: 10px; font-size: 125%">
        <a href="https://arxiv.org/abs/2206.04658">[Paper]</a> <a href="https://github.com/NVIDIA/BigVGAN">[Code]</a> <a href="https://bigvgan-demo.github.io/">[Demo]</a> <a href="https://research.nvidia.com/labs/adlr/projects/bigvgan/">[Project page]</a>
        </p>
        </div>
        """
    )
    # News section describing the BigVGAN-v2 release highlights.
    gr.HTML(
        """
        <div>
        <h3>News</h3>
        <p>[Jul 2024] We release BigVGAN-v2 along with pretrained checkpoints. Below are the highlights:</p>
        <ul>
        <li>Custom CUDA kernel for inference: we provide a fused upsampling + activation kernel written in CUDA for accelerated inference speed. Our test shows 1.5 - 3x faster speed on a single A100 GPU.</li>
        <li>Improved discriminator and loss: BigVGAN-v2 is trained using a <a href="https://arxiv.org/abs/2311.14957" target="_blank">multi-scale sub-band CQT discriminator</a> and a <a href="https://arxiv.org/abs/2306.06546" target="_blank">multi-scale mel spectrogram loss</a>.</li>
        <li>Larger training data: BigVGAN-v2 is trained using datasets containing diverse audio types, including speech in multiple languages, environmental sounds, and instruments.</li>
        <li>We provide pretrained checkpoints of BigVGAN-v2 using diverse audio configurations, supporting up to 44 kHz sampling rate and 512x upsampling ratio. See the table below for the link.</li>
        </ul>
        </div>
        """
    )
    # Model overview figure.
    gr.HTML(
        """
        <div>
        <h3>Model Overview</h3>
        BigVGAN is a universal neural vocoder model that generates audio waveforms using mel spectrogram as inputs.
        <center><img src="https://user-images.githubusercontent.com/15963413/218609148-881e39df-33af-4af9-ab95-1427c4ebf062.png" width="800" style="margin-top: 20px; border-radius: 15px;"></center>
        </div>
        """
    )
    # Input panel: model selector + audio upload + submit.
    with gr.Accordion("Input"):

        model_choice = gr.Dropdown(
            label="Select the model to use",
            info="The default model is bigvgan_v2_24khz_100band_256x",
            value="bigvgan_v2_24khz_100band_256x",
            choices=[m for m in LIST_MODEL_ID],
            interactive=True,
        )

        audio_input = gr.Audio(
            label="Input Audio", elem_id="input-audio", interactive=True
        )

        button = gr.Button("Submit")

    # Output panel: synthesized waveform and its mel spectrogram.
    with gr.Accordion("Output"):
        with gr.Column():
            output_audio = gr.Audio(label="Output Audio", elem_id="output-audio")
            output_image = gr.Image(
                label="Output Mel Spectrogram", elem_id="output-image-gen"
            )

    # Wire the Submit button to the inference function defined earlier.
    button.click(
        inference_gradio,
        inputs=[audio_input, model_choice],
        outputs=[output_audio, output_image],
        concurrency_limit=10,
    )

    # Clickable example inputs; 24 kHz clips use the 24 kHz model, 44 kHz
    # clips use the 44 kHz model.
    gr.Examples(
        [
            [
                os.path.join(os.path.dirname(__file__), "examples/jensen_24k.wav"),
                "bigvgan_v2_24khz_100band_256x",
            ],
            [
                os.path.join(os.path.dirname(__file__), "examples/libritts_24k.wav"),
                "bigvgan_v2_24khz_100band_256x",
            ],
            [
                os.path.join(os.path.dirname(__file__), "examples/queen_24k.wav"),
                "bigvgan_v2_24khz_100band_256x",
            ],
            [
                os.path.join(os.path.dirname(__file__), "examples/dance_24k.wav"),
                "bigvgan_v2_24khz_100band_256x",
            ],
            [
                os.path.join(os.path.dirname(__file__), "examples/megalovania_24k.wav"),
                "bigvgan_v2_24khz_100band_256x",
            ],
            [
                os.path.join(os.path.dirname(__file__), "examples/hifitts_44k.wav"),
                "bigvgan_v2_44khz_128band_256x",
            ],
            [
                os.path.join(os.path.dirname(__file__), "examples/musdbhq_44k.wav"),
                "bigvgan_v2_44khz_128band_256x",
            ],
            [
                os.path.join(os.path.dirname(__file__), "examples/musiccaps1_44k.wav"),
                "bigvgan_v2_44khz_128band_256x",
            ],
            [
                os.path.join(os.path.dirname(__file__), "examples/musiccaps2_44k.wav"),
                "bigvgan_v2_44khz_128band_256x",
            ],
        ],
        fn=inference_gradio,
        inputs=[audio_input, model_choice],
        outputs=[output_audio, output_image],
    )

    # Define the data for the table
    # Static checkpoint comparison table (one row per pretrained model).
    data = {
        "Model Name": [
            "bigvgan_v2_44khz_128band_512x",
            "bigvgan_v2_44khz_128band_256x",
            "bigvgan_v2_24khz_100band_256x",
            "bigvgan_v2_22khz_80band_256x",
            "bigvgan_v2_22khz_80band_fmax8k_256x",
            "bigvgan_24khz_100band",
            "bigvgan_base_24khz_100band",
            "bigvgan_22khz_80band",
            "bigvgan_base_22khz_80band",
        ],
        "Sampling Rate": [
            "44 kHz",
            "44 kHz",
            "24 kHz",
            "22 kHz",
            "22 kHz",
            "24 kHz",
            "24 kHz",
            "22 kHz",
            "22 kHz",
        ],
        "Mel band": [128, 128, 100, 80, 80, 100, 100, 80, 80],
        "fmax": [22050, 22050, 12000, 11025, 8000, 12000, 12000, 8000, 8000],
        "Upsampling Ratio": [512, 256, 256, 256, 256, 256, 256, 256, 256],
        "Parameters": [
            "122M",
            "112M",
            "112M",
            "112M",
            "112M",
            "112M",
            "14M",
            "112M",
            "14M",
        ],
        "Dataset": [
            "Large-scale Compilation",
            "Large-scale Compilation",
            "Large-scale Compilation",
            "Large-scale Compilation",
            "Large-scale Compilation",
            "LibriTTS",
            "LibriTTS",
            "LibriTTS + VCTK + LJSpeech",
            "LibriTTS + VCTK + LJSpeech",
        ],
        "Fine-Tuned": ["No", "No", "No", "No", "No", "No", "No", "No", "No"],
    }

    base_url = "https://huggingface.co/nvidia/"

    df = pd.DataFrame(data)
    # Turn each model name into a link to its Hugging Face model card.
    df["Model Name"] = df["Model Name"].apply(
        lambda x: f'<a href="{base_url}{x}">{x}</a>'
    )

    # Render the table as raw HTML (escape=False keeps the anchor tags).
    html_table = gr.HTML(
        f"""
        <div style="text-align: center;">
        {df.to_html(index=False, escape=False, classes='border="1" cellspacing="0" cellpadding="5" style="margin-left: auto; margin-right: auto;')}
        <p><b>NOTE: The v1 models are trained using speech audio datasets ONLY! (24kHz models: LibriTTS, 22kHz models: LibriTTS + VCTK + LJSpeech).</b></p>
        </div>
        """
    )

iface.queue()
iface.launch()
BigVGAN/demo/examples/dance_24k.wav ADDED
Binary file (480 kB). View file
 
BigVGAN/demo/examples/hifitts_44k.wav ADDED
Binary file (570 kB). View file
 
BigVGAN/demo/examples/jensen_24k.wav ADDED
Binary file (480 kB). View file
 
BigVGAN/demo/examples/libritts_24k.wav ADDED
Binary file (282 kB). View file
 
BigVGAN/demo/examples/megalovania_24k.wav ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7970ac637e680876d48ad84e9185db1b21da01929fe46d855e8794bd83d14c20
3
+ size 1548328
BigVGAN/demo/examples/musdbhq_44k.wav ADDED
Binary file (918 kB). View file
 
BigVGAN/demo/examples/musiccaps1_44k.wav ADDED
Binary file (887 kB). View file
 
BigVGAN/demo/examples/musiccaps2_44k.wav ADDED
Binary file (887 kB). View file
 
BigVGAN/demo/examples/queen_24k.wav ADDED
Binary file (480 kB). View file
 
BigVGAN/demo/requirements.txt ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ torch
2
+ numpy
3
+ librosa>=0.8.1
4
+ scipy
5
+ tensorboard
6
+ soundfile
7
+ matplotlib
8
+ pesq
9
+ auraloss
10
+ tqdm
11
+ nnAudio
12
+ ninja
13
+ huggingface_hub>=0.23.4
14
+ gradio>=4.38.1
15
+ spaces>=0.28.3
BigVGAN/discriminators.py ADDED
@@ -0,0 +1,651 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright (c) 2024 NVIDIA CORPORATION.
2
+ # Licensed under the MIT license.
3
+
4
+ # Adapted from https://github.com/jik876/hifi-gan under the MIT license.
5
+ # LICENSE is in incl_licenses directory.
6
+
7
+
8
+ import torch
9
+ import torch.nn.functional as F
10
+ import torch.nn as nn
11
+ from torch.nn import Conv2d
12
+ from torch.nn.utils import weight_norm, spectral_norm
13
+ from torchaudio.transforms import Spectrogram, Resample
14
+
15
+ from env import AttrDict
16
+ from utils import get_padding
17
+ import typing
18
+ from typing import Optional, List, Union, Dict, Tuple
19
+
20
+
21
class DiscriminatorP(torch.nn.Module):
    """HiFi-GAN-style multi-period sub-discriminator.

    Folds a 1D waveform into a 2D grid of shape (time / period, period) and
    runs stacked 2D convolutions over it, so this discriminator sees the
    signal's structure at one particular periodicity.

    Args:
        h: Hyperparameter config; reads `discriminator_channel_mult`.
        period: Reshape period for this sub-discriminator.
        kernel_size: Conv kernel size along the time axis.
        stride: Conv stride along the time axis.
        use_spectral_norm: Use spectral norm instead of weight norm.
    """

    def __init__(
        self,
        h: AttrDict,
        period: int,  # annotation fixed (was List[int]): used as a scalar in `t % self.period`
        kernel_size: int = 5,
        stride: int = 3,
        use_spectral_norm: bool = False,
    ):
        super().__init__()
        self.period = period
        # Channel multiplier shared across all discriminators in the config.
        self.d_mult = h.discriminator_channel_mult
        norm_f = weight_norm if not use_spectral_norm else spectral_norm

        # NOTE(review): padding uses get_padding(5, 1) with a hard-coded 5
        # rather than kernel_size — inherited from the original HiFi-GAN code,
        # so it is only "same" padding for the default kernel_size=5.
        self.convs = nn.ModuleList(
            [
                norm_f(
                    Conv2d(
                        1,
                        int(32 * self.d_mult),
                        (kernel_size, 1),
                        (stride, 1),
                        padding=(get_padding(5, 1), 0),
                    )
                ),
                norm_f(
                    Conv2d(
                        int(32 * self.d_mult),
                        int(128 * self.d_mult),
                        (kernel_size, 1),
                        (stride, 1),
                        padding=(get_padding(5, 1), 0),
                    )
                ),
                norm_f(
                    Conv2d(
                        int(128 * self.d_mult),
                        int(512 * self.d_mult),
                        (kernel_size, 1),
                        (stride, 1),
                        padding=(get_padding(5, 1), 0),
                    )
                ),
                norm_f(
                    Conv2d(
                        int(512 * self.d_mult),
                        int(1024 * self.d_mult),
                        (kernel_size, 1),
                        (stride, 1),
                        padding=(get_padding(5, 1), 0),
                    )
                ),
                norm_f(
                    Conv2d(
                        int(1024 * self.d_mult),
                        int(1024 * self.d_mult),
                        (kernel_size, 1),
                        1,
                        padding=(2, 0),
                    )
                ),
            ]
        )
        # Final 1-channel projection producing the per-position score map.
        self.conv_post = norm_f(
            Conv2d(int(1024 * self.d_mult), 1, (3, 1), 1, padding=(1, 0))
        )

    def forward(self, x: torch.Tensor) -> Tuple[torch.Tensor, List[torch.Tensor]]:
        """Return (flattened scores, list of intermediate feature maps)."""
        fmap = []

        # 1d to 2d: reflect-pad so time is divisible by `period`, then fold.
        b, c, t = x.shape
        if t % self.period != 0:  # pad first
            n_pad = self.period - (t % self.period)
            x = F.pad(x, (0, n_pad), "reflect")
            t = t + n_pad
        x = x.view(b, c, t // self.period, self.period)

        for l in self.convs:
            x = l(x)
            x = F.leaky_relu(x, 0.1)
            fmap.append(x)  # feature maps feed the feature-matching loss
        x = self.conv_post(x)
        fmap.append(x)
        x = torch.flatten(x, 1, -1)

        return x, fmap
108
+
109
+
110
class MultiPeriodDiscriminator(torch.nn.Module):
    """Ensemble of DiscriminatorP modules, one per period in h.mpd_reshapes."""

    def __init__(self, h: AttrDict):
        super().__init__()
        self.mpd_reshapes = h.mpd_reshapes
        print(f"mpd_reshapes: {self.mpd_reshapes}")
        sub_discriminators = [
            DiscriminatorP(h, period, use_spectral_norm=h.use_spectral_norm)
            for period in self.mpd_reshapes
        ]
        self.discriminators = nn.ModuleList(sub_discriminators)

    def forward(self, y: torch.Tensor, y_hat: torch.Tensor) -> Tuple[
        List[torch.Tensor],
        List[torch.Tensor],
        List[List[torch.Tensor]],
        List[List[torch.Tensor]],
    ]:
        """Score real (y) and generated (y_hat) audio with every sub-discriminator.

        Returns per-discriminator score lists and feature-map lists:
        (real scores, generated scores, real feature maps, generated feature maps).
        """
        y_d_rs, y_d_gs = [], []
        fmap_rs, fmap_gs = [], []
        for disc in self.discriminators:
            score_real, feats_real = disc(y)
            score_gen, feats_gen = disc(y_hat)
            y_d_rs.append(score_real)
            fmap_rs.append(feats_real)
            y_d_gs.append(score_gen)
            fmap_gs.append(feats_gen)

        return y_d_rs, y_d_gs, fmap_rs, fmap_gs
141
+
142
+
143
class DiscriminatorR(nn.Module):
    """Single-resolution STFT magnitude discriminator (one MRD branch).

    Computes a magnitude spectrogram at a fixed (n_fft, hop, win) resolution
    and scores it with a stack of 2D convolutions.
    """

    def __init__(self, cfg: AttrDict, resolution: List[int]):
        # annotation fixed (was List[List[int]]): each branch receives one
        # [n_fft, hop_length, win_length] triple.
        super().__init__()

        self.resolution = resolution
        assert (
            len(self.resolution) == 3
        ), f"MRD layer requires list with len=3, got {self.resolution}"
        self.lrelu_slope = 0.1

        norm_f = weight_norm if cfg.use_spectral_norm == False else spectral_norm
        # Optional MRD-specific overrides of the global discriminator settings.
        if hasattr(cfg, "mrd_use_spectral_norm"):
            print(
                f"[INFO] overriding MRD use_spectral_norm as {cfg.mrd_use_spectral_norm}"
            )
            norm_f = (
                weight_norm if cfg.mrd_use_spectral_norm == False else spectral_norm
            )
        self.d_mult = cfg.discriminator_channel_mult
        if hasattr(cfg, "mrd_channel_mult"):
            print(f"[INFO] overriding mrd channel multiplier as {cfg.mrd_channel_mult}")
            self.d_mult = cfg.mrd_channel_mult

        self.convs = nn.ModuleList(
            [
                norm_f(nn.Conv2d(1, int(32 * self.d_mult), (3, 9), padding=(1, 4))),
                norm_f(
                    nn.Conv2d(
                        int(32 * self.d_mult),
                        int(32 * self.d_mult),
                        (3, 9),
                        stride=(1, 2),
                        padding=(1, 4),
                    )
                ),
                norm_f(
                    nn.Conv2d(
                        int(32 * self.d_mult),
                        int(32 * self.d_mult),
                        (3, 9),
                        stride=(1, 2),
                        padding=(1, 4),
                    )
                ),
                norm_f(
                    nn.Conv2d(
                        int(32 * self.d_mult),
                        int(32 * self.d_mult),
                        (3, 9),
                        stride=(1, 2),
                        padding=(1, 4),
                    )
                ),
                norm_f(
                    nn.Conv2d(
                        int(32 * self.d_mult),
                        int(32 * self.d_mult),
                        (3, 3),
                        padding=(1, 1),
                    )
                ),
            ]
        )
        # Final 1-channel projection producing the score map.
        self.conv_post = norm_f(
            nn.Conv2d(int(32 * self.d_mult), 1, (3, 3), padding=(1, 1))
        )

    def forward(self, x: torch.Tensor) -> Tuple[torch.Tensor, List[torch.Tensor]]:
        """Return (flattened scores, list of intermediate feature maps)."""
        fmap = []

        x = self.spectrogram(x)
        x = x.unsqueeze(1)  # add channel dim: [B, F, T] -> [B, 1, F, T]
        for l in self.convs:
            x = l(x)
            x = F.leaky_relu(x, self.lrelu_slope)
            fmap.append(x)
        x = self.conv_post(x)
        fmap.append(x)
        x = torch.flatten(x, 1, -1)

        return x, fmap

    def spectrogram(self, x: torch.Tensor) -> torch.Tensor:
        """Magnitude STFT of `x` at this branch's resolution.

        Reflect-pads so that center=False framing still covers the signal.
        """
        n_fft, hop_length, win_length = self.resolution
        x = F.pad(
            x,
            (int((n_fft - hop_length) / 2), int((n_fft - hop_length) / 2)),
            mode="reflect",
        )
        x = x.squeeze(1)
        # NOTE(review): no `window` is passed, so torch.stft uses a rectangular
        # window; this matches upstream BigVGAN but triggers a warning in newer
        # torch versions — confirm intent before changing.
        x = torch.stft(
            x,
            n_fft=n_fft,
            hop_length=hop_length,
            win_length=win_length,
            center=False,
            return_complex=True,
        )
        x = torch.view_as_real(x)  # [B, F, TT, 2]
        mag = torch.norm(x, p=2, dim=-1)  # [B, F, TT]

        return mag
245
+
246
+
247
class MultiResolutionDiscriminator(nn.Module):
    """Ensemble of DiscriminatorR branches, one per STFT resolution in cfg.resolutions."""

    def __init__(self, cfg, debug=False):
        super().__init__()
        self.resolutions = cfg.resolutions
        assert (
            len(self.resolutions) == 3
        ), f"MRD requires list of list with len=3, each element having a list with len=3. Got {self.resolutions}"
        branches = [DiscriminatorR(cfg, resolution) for resolution in self.resolutions]
        self.discriminators = nn.ModuleList(branches)

    def forward(self, y: torch.Tensor, y_hat: torch.Tensor) -> Tuple[
        List[torch.Tensor],
        List[torch.Tensor],
        List[List[torch.Tensor]],
        List[List[torch.Tensor]],
    ]:
        """Score real (y) and generated (y_hat) audio at every STFT resolution.

        Returns (real scores, generated scores, real feature maps,
        generated feature maps), one entry per resolution branch.
        """
        y_d_rs, y_d_gs = [], []
        fmap_rs, fmap_gs = [], []

        for branch in self.discriminators:
            score_real, feats_real = branch(x=y)
            score_gen, feats_gen = branch(x=y_hat)
            y_d_rs.append(score_real)
            fmap_rs.append(feats_real)
            y_d_gs.append(score_gen)
            fmap_gs.append(feats_gen)

        return y_d_rs, y_d_gs, fmap_rs, fmap_gs
278
+
279
+
280
+ # Method based on descript-audio-codec: https://github.com/descriptinc/descript-audio-codec
281
+ # Modified code adapted from https://github.com/gemelo-ai/vocos under the MIT license.
282
+ # LICENSE is in incl_licenses directory.
283
class DiscriminatorB(nn.Module):
    """Single-scale multi-band STFT discriminator (one MBD branch).

    Splits a complex spectrogram into frequency bands (given as fractions of
    the bin range), convolves each band with its own conv stack, concatenates
    the band outputs along frequency, and projects to a score map.

    Based on descript-audio-codec; code adapted from vocos (MIT license).
    """

    def __init__(
        self,
        window_length: int,
        channels: int = 32,
        hop_factor: float = 0.25,
        bands: Tuple[Tuple[float, float], ...] = (
            (0.0, 0.1),
            (0.1, 0.25),
            (0.25, 0.5),
            (0.5, 0.75),
            (0.75, 1.0),
        ),
    ):
        super().__init__()
        self.window_length = window_length
        self.hop_factor = hop_factor
        # power=None keeps the complex STFT (real+imag become 2 input channels).
        self.spec_fn = Spectrogram(
            n_fft=window_length,
            hop_length=int(window_length * hop_factor),
            win_length=window_length,
            power=None,
        )
        # Number of frequency bins of the one-sided spectrum (despite the name).
        n_fft = window_length // 2 + 1
        # Convert fractional band edges into absolute bin indices.
        bands = [(int(b[0] * n_fft), int(b[1] * n_fft)) for b in bands]
        self.bands = bands
        # Factory for one per-band conv stack (fresh parameters per call).
        convs = lambda: nn.ModuleList(
            [
                weight_norm(nn.Conv2d(2, channels, (3, 9), (1, 1), padding=(1, 4))),
                weight_norm(
                    nn.Conv2d(channels, channels, (3, 9), (1, 2), padding=(1, 4))
                ),
                weight_norm(
                    nn.Conv2d(channels, channels, (3, 9), (1, 2), padding=(1, 4))
                ),
                weight_norm(
                    nn.Conv2d(channels, channels, (3, 9), (1, 2), padding=(1, 4))
                ),
                weight_norm(
                    nn.Conv2d(channels, channels, (3, 3), (1, 1), padding=(1, 1))
                ),
            ]
        )
        self.band_convs = nn.ModuleList([convs() for _ in range(len(self.bands))])

        # Final 1-channel projection over the re-joined bands.
        self.conv_post = weight_norm(
            nn.Conv2d(channels, 1, (3, 3), (1, 1), padding=(1, 1))
        )

    def spectrogram(self, x: torch.Tensor) -> List[torch.Tensor]:
        """Complex spectrogram of `x`, split into this branch's frequency bands."""
        # Remove DC offset
        x = x - x.mean(dim=-1, keepdims=True)
        # Peak normalize the volume of input audio
        x = 0.8 * x / (x.abs().max(dim=-1, keepdim=True)[0] + 1e-9)
        x = self.spec_fn(x)
        x = torch.view_as_real(x)  # complex -> trailing real/imag channel pair
        x = x.permute(0, 3, 2, 1)  # [B, F, T, C] -> [B, C, T, F]
        # Split into bands
        x_bands = [x[..., b[0] : b[1]] for b in self.bands]
        return x_bands

    def forward(self, x: torch.Tensor) -> Tuple[torch.Tensor, List[torch.Tensor]]:
        """Return (score map, list of intermediate feature maps)."""
        x_bands = self.spectrogram(x.squeeze(1))
        fmap = []
        x = []

        # Each band goes through its own conv stack; intermediate activations
        # (after the first layer) feed the feature-matching loss.
        for band, stack in zip(x_bands, self.band_convs):
            for i, layer in enumerate(stack):
                band = layer(band)
                band = torch.nn.functional.leaky_relu(band, 0.1)
                if i > 0:
                    fmap.append(band)
            x.append(band)

        # Re-join bands along the frequency axis before the final projection.
        x = torch.cat(x, dim=-1)
        x = self.conv_post(x)
        fmap.append(x)

        return x, fmap
362
+
363
+
364
+ # Method based on descript-audio-codec: https://github.com/descriptinc/descript-audio-codec
365
+ # Modified code adapted from https://github.com/gemelo-ai/vocos under the MIT license.
366
+ # LICENSE is in incl_licenses directory.
367
class MultiBandDiscriminator(nn.Module):
    """
    Multi-band multi-scale STFT discriminator, with the architecture based on https://github.com/descriptinc/descript-audio-codec.
    and the modified code adapted from https://github.com/gemelo-ai/vocos.
    """

    def __init__(
        self,
        h,
    ):
        super().__init__()
        # fft_sizes (list[int]): Tuple of window lengths for FFT. Defaults to [2048, 1024, 512] if not set in h.
        self.fft_sizes = h.get("mbd_fft_sizes", [2048, 1024, 512])
        branches = [DiscriminatorB(window_length=size) for size in self.fft_sizes]
        self.discriminators = nn.ModuleList(branches)

    def forward(self, y: torch.Tensor, y_hat: torch.Tensor) -> Tuple[
        List[torch.Tensor],
        List[torch.Tensor],
        List[List[torch.Tensor]],
        List[List[torch.Tensor]],
    ]:
        """Score real (y) and generated (y_hat) audio at every FFT size.

        Returns (real scores, generated scores, real feature maps,
        generated feature maps), one entry per FFT size.
        """
        y_d_rs, y_d_gs = [], []
        fmap_rs, fmap_gs = [], []

        for branch in self.discriminators:
            score_real, feats_real = branch(x=y)
            score_gen, feats_gen = branch(x=y_hat)
            y_d_rs.append(score_real)
            fmap_rs.append(feats_real)
            y_d_gs.append(score_gen)
            fmap_gs.append(feats_gen)

        return y_d_rs, y_d_gs, fmap_rs, fmap_gs
404
+
405
+
406
+ # Adapted from https://github.com/open-mmlab/Amphion/blob/main/models/vocoders/gan/discriminator/mssbcqtd.py under the MIT license.
407
+ # LICENSE is in incl_licenses directory.
408
# Adapted from https://github.com/open-mmlab/Amphion/blob/main/models/vocoders/gan/discriminator/mssbcqtd.py under the MIT license.
# LICENSE is in incl_licenses directory.
class DiscriminatorCQT(nn.Module):
    """Single-scale sub-band CQT discriminator (one MSSBCQTD branch).

    Resamples the input 2x, takes a complex CQT, applies one pre-conv per
    octave sub-band, re-joins the octaves, and scores the result with a
    dilated conv stack.
    """

    def __init__(self, cfg: AttrDict, hop_length: int, n_octaves: int, bins_per_octave: int):
        super().__init__()
        self.cfg = cfg

        self.filters = cfg["cqtd_filters"]
        self.max_filters = cfg["cqtd_max_filters"]
        self.filters_scale = cfg["cqtd_filters_scale"]
        self.kernel_size = (3, 9)
        self.dilations = cfg["cqtd_dilations"]
        self.stride = (1, 2)

        self.in_channels = cfg["cqtd_in_channels"]
        self.out_channels = cfg["cqtd_out_channels"]
        self.fs = cfg["sampling_rate"]
        self.hop_length = hop_length
        self.n_octaves = n_octaves
        self.bins_per_octave = bins_per_octave

        # Lazy-load nnAudio so it is only required when CQTD is actually used.
        from nnAudio import features

        # sr doubled because forward() resamples the audio 2x before the CQT.
        self.cqt_transform = features.cqt.CQT2010v2(
            sr=self.fs * 2,
            hop_length=self.hop_length,
            n_bins=self.bins_per_octave * self.n_octaves,
            bins_per_octave=self.bins_per_octave,
            output_format="Complex",
            pad_mode="constant",
        )

        # One pre-processing conv per octave sub-band (applied before merging).
        self.conv_pres = nn.ModuleList()
        for _ in range(self.n_octaves):
            self.conv_pres.append(
                nn.Conv2d(
                    self.in_channels * 2,
                    self.in_channels * 2,
                    kernel_size=self.kernel_size,
                    padding=self.get_2d_padding(self.kernel_size),
                )
            )

        self.convs = nn.ModuleList()

        # First conv: (real, imag) channel pair -> base filter count.
        self.convs.append(
            nn.Conv2d(
                self.in_channels * 2,
                self.filters,
                kernel_size=self.kernel_size,
                padding=self.get_2d_padding(self.kernel_size),
            )
        )

        # Dilated conv stack with channel counts growing by filters_scale,
        # capped at max_filters.
        in_chs = min(self.filters_scale * self.filters, self.max_filters)
        for i, dilation in enumerate(self.dilations):
            out_chs = min(
                (self.filters_scale ** (i + 1)) * self.filters, self.max_filters
            )
            self.convs.append(
                weight_norm(
                    nn.Conv2d(
                        in_chs,
                        out_chs,
                        kernel_size=self.kernel_size,
                        stride=self.stride,
                        dilation=(dilation, 1),
                        padding=self.get_2d_padding(self.kernel_size, (dilation, 1)),
                    )
                )
            )
            in_chs = out_chs
        # Closing square-kernel conv after the dilated stack.
        out_chs = min(
            (self.filters_scale ** (len(self.dilations) + 1)) * self.filters,
            self.max_filters,
        )
        self.convs.append(
            weight_norm(
                nn.Conv2d(
                    in_chs,
                    out_chs,
                    kernel_size=(self.kernel_size[0], self.kernel_size[0]),
                    padding=self.get_2d_padding(
                        (self.kernel_size[0], self.kernel_size[0])
                    ),
                )
            )
        )

        # Final projection to the output score channels.
        self.conv_post = weight_norm(
            nn.Conv2d(
                out_chs,
                self.out_channels,
                kernel_size=(self.kernel_size[0], self.kernel_size[0]),
                padding=self.get_2d_padding((self.kernel_size[0], self.kernel_size[0])),
            )
        )

        self.activation = torch.nn.LeakyReLU(negative_slope=0.1)
        # 2x upsampling applied before the CQT (matches sr=self.fs * 2 above).
        self.resample = Resample(orig_freq=self.fs, new_freq=self.fs * 2)

        self.cqtd_normalize_volume = self.cfg.get("cqtd_normalize_volume", False)
        if self.cqtd_normalize_volume:
            print(
                f"[INFO] cqtd_normalize_volume set to True. Will apply DC offset removal & peak volume normalization in CQTD!"
            )

    def get_2d_padding(
        self,
        kernel_size: typing.Tuple[int, int],
        dilation: typing.Tuple[int, int] = (1, 1),
    ):
        """'Same'-style padding for a 2D kernel with optional dilation."""
        return (
            ((kernel_size[0] - 1) * dilation[0]) // 2,
            ((kernel_size[1] - 1) * dilation[1]) // 2,
        )

    def forward(self, x: torch.Tensor) -> Tuple[torch.Tensor, List[torch.Tensor]]:
        # annotation fixed: torch.Tensor (was torch.tensor, the factory function)
        """Return (score map, list of intermediate feature maps)."""
        fmap = []

        if self.cqtd_normalize_volume:
            # Remove DC offset
            x = x - x.mean(dim=-1, keepdims=True)
            # Peak normalize the volume of input audio
            x = 0.8 * x / (x.abs().max(dim=-1, keepdim=True)[0] + 1e-9)

        x = self.resample(x)

        z = self.cqt_transform(x)

        # Complex CQT -> separate amplitude/phase channels.
        z_amplitude = z[:, :, :, 0].unsqueeze(1)
        z_phase = z[:, :, :, 1].unsqueeze(1)

        z = torch.cat([z_amplitude, z_phase], dim=1)
        z = torch.permute(z, (0, 1, 3, 2))  # [B, C, W, T] -> [B, C, T, W]

        # Pre-convolve each octave sub-band separately, then re-join.
        latent_z = []
        for i in range(self.n_octaves):
            latent_z.append(
                self.conv_pres[i](
                    z[
                        :,
                        :,
                        :,
                        i * self.bins_per_octave : (i + 1) * self.bins_per_octave,
                    ]
                )
            )
        latent_z = torch.cat(latent_z, dim=-1)

        for i, l in enumerate(self.convs):
            latent_z = l(latent_z)

            latent_z = self.activation(latent_z)
            fmap.append(latent_z)

        latent_z = self.conv_post(latent_z)

        return latent_z, fmap
566
+
567
+
568
class MultiScaleSubbandCQTDiscriminator(nn.Module):
    """Multi-scale sub-band CQT discriminator (BigVGAN-v2).

    Wraps several DiscriminatorCQT branches, each using a different CQT
    resolution (hop length, octave count, bins per octave).
    """

    def __init__(self, cfg: AttrDict):
        super().__init__()

        self.cfg = cfg
        # Using get with defaults
        # NOTE: this writes the resolved defaults back into the shared cfg
        # object, so later readers of cfg see the filled-in values.
        self.cfg["cqtd_filters"] = self.cfg.get("cqtd_filters", 32)
        self.cfg["cqtd_max_filters"] = self.cfg.get("cqtd_max_filters", 1024)
        self.cfg["cqtd_filters_scale"] = self.cfg.get("cqtd_filters_scale", 1)
        self.cfg["cqtd_dilations"] = self.cfg.get("cqtd_dilations", [1, 2, 4])
        self.cfg["cqtd_in_channels"] = self.cfg.get("cqtd_in_channels", 1)
        self.cfg["cqtd_out_channels"] = self.cfg.get("cqtd_out_channels", 1)
        # Multi-scale params to loop over
        self.cfg["cqtd_hop_lengths"] = self.cfg.get("cqtd_hop_lengths", [512, 256, 256])
        self.cfg["cqtd_n_octaves"] = self.cfg.get("cqtd_n_octaves", [9, 9, 9])
        self.cfg["cqtd_bins_per_octaves"] = self.cfg.get(
            "cqtd_bins_per_octaves", [24, 36, 48]
        )

        # One DiscriminatorCQT per (hop, octaves, bins-per-octave) triple.
        self.discriminators = nn.ModuleList(
            [
                DiscriminatorCQT(
                    self.cfg,
                    hop_length=self.cfg["cqtd_hop_lengths"][i],
                    n_octaves=self.cfg["cqtd_n_octaves"][i],
                    bins_per_octave=self.cfg["cqtd_bins_per_octaves"][i],
                )
                for i in range(len(self.cfg["cqtd_hop_lengths"]))
            ]
        )

    def forward(self, y: torch.Tensor, y_hat: torch.Tensor) -> Tuple[
        List[torch.Tensor],
        List[torch.Tensor],
        List[List[torch.Tensor]],
        List[List[torch.Tensor]],
    ]:
        """Score real (y) and generated (y_hat) audio with every CQT branch.

        Returns (real scores, generated scores, real feature maps,
        generated feature maps), one entry per branch.
        """

        y_d_rs = []
        y_d_gs = []
        fmap_rs = []
        fmap_gs = []

        for disc in self.discriminators:
            y_d_r, fmap_r = disc(y)
            y_d_g, fmap_g = disc(y_hat)
            y_d_rs.append(y_d_r)
            fmap_rs.append(fmap_r)
            y_d_gs.append(y_d_g)
            fmap_gs.append(fmap_g)

        return y_d_rs, y_d_gs, fmap_rs, fmap_gs
620
+
621
+
622
class CombinedDiscriminator(nn.Module):
    """
    Wrapper of chaining multiple discrimiantor architectures.
    Example: combine mbd and cqtd as a single class
    """

    def __init__(self, list_discriminator: List[nn.Module]):
        super().__init__()
        # NOTE(review): attribute name keeps the original (misspelled)
        # "discrimiantor" so state_dict keys stay checkpoint-compatible.
        self.discrimiantor = nn.ModuleList(list_discriminator)

    def forward(self, y: torch.Tensor, y_hat: torch.Tensor) -> Tuple[
        List[torch.Tensor],
        List[torch.Tensor],
        List[List[torch.Tensor]],
        List[List[torch.Tensor]],
    ]:
        """Run every wrapped discriminator and flatten their outputs.

        Each wrapped discriminator returns four lists; the lists are
        concatenated across discriminators so the caller sees one flat
        ensemble: (real scores, generated scores, real feature maps,
        generated feature maps).
        """
        scores_real: List[torch.Tensor] = []
        scores_gen: List[torch.Tensor] = []
        feats_real: List[List[torch.Tensor]] = []
        feats_gen: List[List[torch.Tensor]] = []

        for disc in self.discrimiantor:
            d_real, d_gen, f_real, f_gen = disc(y, y_hat)
            scores_real.extend(d_real)
            scores_gen.extend(d_gen)
            feats_real.extend(f_real)
            feats_gen.extend(f_gen)

        return scores_real, scores_gen, feats_real, feats_gen
BigVGAN/env.py ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Adapted from https://github.com/jik876/hifi-gan under the MIT license.
2
+ # LICENSE is in incl_licenses directory.
3
+
4
+ import os
5
+ import shutil
6
+
7
+
8
class AttrDict(dict):
    """Dict whose entries are also reachable as attributes (d.key == d["key"])."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Aliasing __dict__ to the mapping itself makes attribute access and
        # item access share one underlying storage.
        self.__dict__ = self
12
+
13
+
14
def build_env(config, config_name, path):
    """Copy the config file into experiment directory `path` as `config_name`.

    Creates `path` if needed. Skips the copy when `config` already points at
    the destination (so re-running against a prepared directory is a no-op).
    """
    destination = os.path.join(path, config_name)
    if config != destination:
        os.makedirs(path, exist_ok=True)
        shutil.copyfile(config, destination)
BigVGAN/filelists/LibriTTS/dev-clean.txt ADDED
@@ -0,0 +1,115 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ dev-clean/1272/128104/1272_128104_000001_000000|A 'JOLLY' ART CRITIC
2
+ dev-clean/1272/141231/1272_141231_000007_000003|And when he attacked, it was always there to beat him aside.
3
+ dev-clean/1272/141231/1272_141231_000033_000002|If anything, he was pressing the attack.
4
+ dev-clean/1462/170138/1462_170138_000012_000002|Dear me, Mac, the girl couldn't possibly be better, you know."
5
+ dev-clean/1462/170142/1462_170142_000002_000005|Alexander did not sit down.
6
+ dev-clean/1462/170142/1462_170142_000029_000001|"I meant to, but somehow I couldn't.
7
+ dev-clean/1462/170142/1462_170142_000046_000004|The sight of you, Bartley, to see you living and happy and successful-can I never make you understand what that means to me?" She pressed his shoulders gently.
8
+ dev-clean/1462/170145/1462_170145_000012_000003|There is a letter for you there, in my desk drawer.
9
+ dev-clean/1462/170145/1462_170145_000033_000000|She felt the strength leap in the arms that held her so lightly.
10
+ dev-clean/1673/143397/1673_143397_000031_000007|He attempted to remove or intimidate the leaders by a common sentence, of acquittal or condemnation; he invested his representatives at Ephesus with ample power and military force; he summoned from either party eight chosen deputies to a free and candid conference in the neighborhood of the capital, far from the contagion of popular frenzy.
11
+ dev-clean/174/168635/174_168635_000040_000000|To teach Cosette to read, and to let her play, this constituted nearly the whole of Jean Valjean's existence.
12
+ dev-clean/174/50561/174_50561_000058_000001|They have the end of the game to themselves.)
13
+ dev-clean/174/84280/174_84280_000015_000000|And perhaps in this story I have said enough for you to understand why Mary has identified herself with something world-wide, has added to herself a symbolical value, and why it is I find in the whole crowded spectacle of mankind, a quality that is also hers, a sense of fine things entangled and stifled and unable to free themselves from the ancient limiting jealousies which law and custom embody.
14
+ dev-clean/1919/142785/1919_142785_000063_000000|[Illustration: SHALOT.]
15
+ dev-clean/1919/142785/1919_142785_000131_000001|Cut the bread into thin slices, place them in a cool oven overnight, and when thoroughly dry and crisp, roll them down into fine crumbs.
16
+ dev-clean/1988/147956/1988_147956_000016_000009|He was neatly dressed.
17
+ dev-clean/1988/148538/1988_148538_000015_000007|These persons then displayed towards each other precisely the same puerile jealousies which animate the men of democracies, the same eagerness to snatch the smallest advantages which their equals contested, and the same desire to parade ostentatiously those of which they were in possession.
18
+ dev-clean/1988/24833/1988_24833_000028_000003|He's taking the kid for a walk when a thunderstorm blows up.
19
+ dev-clean/1988/24833/1988_24833_000059_000000|"Doesn't pay enough?" Pop asks.
20
+ dev-clean/1993/147149/1993_147149_000051_000002|So leaving kind messages to George and Jane Wilson, and hesitating whether she might dare to send a few kind words to Jem, and deciding that she had better not, she stepped out into the bright morning light, so fresh a contrast to the darkened room where death had been.
21
+ dev-clean/1993/147965/1993_147965_000003_000004|I suppose, in the crowded clutter of their cave, the old man had come to believe that peace and order had vanished from the earth, or existed only in the old world he had left so far behind.
22
+ dev-clean/1993/147966/1993_147966_000020_000003|We found the chickens asleep; perhaps they thought night had come to stay.
23
+ dev-clean/2035/147960/2035_147960_000019_000001|He is all over Jimmy's boots. I scream for him to run, but he just hit and hit that snake like he was crazy."
24
+ dev-clean/2035/147961/2035_147961_000011_000002|He grew more and more excited, and kept pointing all around his bed, as if there were things there and he wanted mr Shimerda to see them.
25
+ dev-clean/2035/147961/2035_147961_000025_000002|Beside a frozen pond something happened to the other sledge; peter saw it plainly.
26
+ dev-clean/2035/152373/2035_152373_000010_000007|saint Aidan, the Apostle of Northumbria, had refused the late Egfrid's father absolution, on one occasion, until he solemnly promised to restore their freedom to certain captives of this description.
27
+ dev-clean/2086/149214/2086_149214_000005_000002|It is a legend prolonging itself, from an epoch now gray in the distance, down into our own broad daylight, and bringing along with it some of its legendary mist, which the reader, according to his pleasure, may either disregard, or allow it to float almost imperceptibly about the characters and events for the sake of a picturesque effect.
28
+ dev-clean/2086/149220/2086_149220_000016_000003|In short, I make pictures out of sunshine; and, not to be too much dazzled with my own trade, I have prevailed with Miss Hepzibah to let me lodge in one of these dusky gables.
29
+ dev-clean/2086/149220/2086_149220_000028_000000|Phoebe was on the point of retreating, but turned back, with some hesitation; for she did not exactly comprehend his manner, although, on better observation, its feature seemed rather to be lack of ceremony than any approach to offensive rudeness.
30
+ dev-clean/2277/149874/2277_149874_000007_000001|Her husband asked a few questions and sat down to read the evening paper.
31
+ dev-clean/2277/149896/2277_149896_000007_000006|He saw only her pretty face and neat figure and wondered why life was not arranged so that such joy as he found with her could be steadily maintained.
32
+ dev-clean/2277/149896/2277_149896_000025_000008|He jangled it fiercely several times in succession, but without avail.
33
+ dev-clean/2277/149897/2277_149897_000023_000000|"Well?" said Hurstwood.
34
+ dev-clean/2277/149897/2277_149897_000046_000002|He troubled over many little details and talked perfunctorily to everybody.
35
+ dev-clean/2412/153954/2412_153954_000004_000005|Even in middle age they were still comely, and the old grey haired women at their cottage doors had a dignity, not to say majesty, of their own.
36
+ dev-clean/2428/83699/2428_83699_000009_000000|Now it is a remarkable thing that I have always had an extraordinary predilection for the name Madge.
37
+ dev-clean/2428/83699/2428_83699_000024_000004|I had long been wishing that an old-fashioned Christmas had been completely extinct before I had thought of adventuring in quest of one.
38
+ dev-clean/2428/83699/2428_83699_000047_000000|"Perhaps you had better come inside."
39
+ dev-clean/2428/83705/2428_83705_000015_000004|I did not want any unpleasantness; and I am quite sure there would have been unpleasantness had I demurred.
40
+ dev-clean/2428/83705/2428_83705_000034_000002|"And what," inquired mrs Macpherson, "has Mary Ann given you?"
41
+ dev-clean/251/118436/251_118436_000017_000001|This man was clad in a brown camel hair robe and sandals, and a green turban was on his head. His expression was tranquil, his gaze impersonal.
42
+ dev-clean/251/136532/251_136532_000000_000003|Fitzgerald was still trying to find out how the germ had been transmitted.
43
+ dev-clean/251/136532/251_136532_000020_000004|Without question, he had become, overnight, the most widely known archaeologist in history.
44
+ dev-clean/251/137823/251_137823_000025_000001|Or grazed, at least," Tom added thankfully.
45
+ dev-clean/251/137823/251_137823_000054_000002|The two girls were as much upset as Tom's mother.
46
+ dev-clean/2803/154320/2803_154320_000017_000004|Think of Lady Glenarvan; think of Mary Grant!"
47
+ dev-clean/2803/154328/2803_154328_000028_000000|Wilson and Olbinett joined their companions, and all united to dig through the wall-john with his dagger, the others with stones taken from the ground, or with their nails, while Mulrady, stretched along the ground, watched the native guard through a crevice of the matting.
48
+ dev-clean/2803/154328/2803_154328_000080_000003|Where chance led them, but at any rate they were free.
49
+ dev-clean/2803/161169/2803_161169_000011_000019|What do you think of that from the coal tar.
50
+ dev-clean/2902/9008/2902_9008_000009_000001|He was a Greek, also, but of a more common, and, perhaps, lower type; dark and fiery, thin and graceful; his delicate figure and cheeks, wasted by meditation, harmonised well with the staid and simple philosophic cloak which he wore as a sign of his profession.
51
+ dev-clean/2902/9008/2902_9008_000048_000003|For aught I know or care, the plot may be an exactly opposite one, and the Christians intend to murder all the Jews.
52
+ dev-clean/3000/15664/3000_15664_000013_000004|These volcanic caves are not wanting in interest, and it is well to light a pitch pine torch and take a walk in these dark ways of the underworld whenever opportunity offers, if for no other reason to see with new appreciation on returning to the sunshine the beauties that lie so thick about us.
53
+ dev-clean/3000/15664/3000_15664_000029_000002|Thus the Shasta River issues from a large lake like spring in Shasta Valley, and about two thirds of the volume of the McCloud gushes forth in a grand spring on the east side of the mountain, a few miles back from its immediate base.
54
+ dev-clean/3170/137482/3170_137482_000010_000004|The nobility, the merchants, even workmen in good circumstances, are never seen in the 'magazzino', for cleanliness is not exactly worshipped in such places.
55
+ dev-clean/3170/137482/3170_137482_000037_000001|He was celebrated in Venice not only for his eloquence and his great talents as a statesman, but also for the gallantries of his youth.
56
+ dev-clean/3536/23268/3536_23268_000028_000000|"It is not the first time, I believe, you have acted contrary to that, Miss Milner," replied mrs Horton, and affected a tenderness of voice, to soften the harshness of her words.
57
+ dev-clean/3576/138058/3576_138058_000019_000003|He wondered to see the lance leaning against the tree, the shield on the ground, and Don Quixote in armour and dejected, with the saddest and most melancholy face that sadness itself could produce; and going up to him he said, "Be not so cast down, good man, for you have not fallen into the hands of any inhuman Busiris, but into Roque Guinart's, which are more merciful than cruel."
58
+ dev-clean/3752/4943/3752_4943_000026_000002|Lie quiet!"
59
+ dev-clean/3752/4943/3752_4943_000056_000002|His flogging wouldn't have killed a flea."
60
+ dev-clean/3752/4944/3752_4944_000031_000000|"Well now!" said Meekin, with asperity, "I don't agree with you. Everybody seems to be against that poor fellow-Captain Frere tried to make me think that his letters contained a hidden meaning, but I don't believe they did.
61
+ dev-clean/3752/4944/3752_4944_000063_000003|He'd rather kill himself."
62
+ dev-clean/3752/4944/3752_4944_000094_000000|"The Government may go to----, and you, too!" roared Burgess.
63
+ dev-clean/3853/163249/3853_163249_000058_000000|"I've done it, mother: tell me you're not sorry."
64
+ dev-clean/3853/163249/3853_163249_000125_000004|Help me to be brave and strong, David: don't let me complain or regret, but show me what lies beyond, and teach me to believe that simply doing the right is reward and happiness enough."
65
+ dev-clean/5338/24615/5338_24615_000004_000003|It had been built at a period when castles were no longer necessary, and when the Scottish architects had not yet acquired the art of designing a domestic residence.
66
+ dev-clean/5338/284437/5338_284437_000031_000001|A powerful ruler ought to be rich and to live in a splendid palace.
67
+ dev-clean/5536/43358/5536_43358_000012_000001|Being a natural man, the Indian was intensely poetical.
68
+ dev-clean/5536/43359/5536_43359_000015_000000|The family was not only the social unit, but also the unit of government.
69
+ dev-clean/5694/64025/5694_64025_000004_000006|Our regiment was the advance guard on Saturday evening, and did a little skirmishing; but General Gladden's brigade passed us and assumed a position in our immediate front.
70
+ dev-clean/5694/64029/5694_64029_000006_000005|I read it, and looked up to hand it back to him, when I discovered that he had a pistol cocked and leveled in my face, and says he, "Drop that gun; you are my prisoner." I saw there was no use in fooling about it.
71
+ dev-clean/5694/64029/5694_64029_000024_000002|The ground was literally covered with blue coats dead; and, if I remember correctly, there were eighty dead horses.
72
+ dev-clean/5694/64038/5694_64038_000015_000002|I could not imagine what had become of him.
73
+ dev-clean/5895/34615/5895_34615_000013_000003|Man can do nothing to create beauty, but everything to produce ugliness.
74
+ dev-clean/5895/34615/5895_34615_000025_000000|With this exception, Gwynplaine's laugh was everlasting.
75
+ dev-clean/5895/34622/5895_34622_000029_000002|In the opposite corner was the kitchen.
76
+ dev-clean/5895/34629/5895_34629_000021_000005|The sea is a wall; and if Voltaire-a thing which he very much regretted when it was too late-had not thrown a bridge over to Shakespeare, Shakespeare might still be in England, on the other side of the wall, a captive in insular glory.
77
+ dev-clean/6241/61943/6241_61943_000020_000000|My uncle came out of his cabin pale, haggard, thin, but full of enthusiasm, his eyes dilated with pleasure and satisfaction.
78
+ dev-clean/6241/61946/6241_61946_000014_000000|The rugged summits of the rocky hills were dimly visible on the edge of the horizon, through the misty fogs; every now and then some heavy flakes of snow showed conspicuous in the morning light, while certain lofty and pointed rocks were first lost in the grey low clouds, their summits clearly visible above, like jagged reefs rising from a troublous sea.
79
+ dev-clean/6241/61946/6241_61946_000051_000001|Then my uncle, myself, and guide, two boatmen and the four horses got into a very awkward flat bottom boat.
80
+ dev-clean/6295/64301/6295_64301_000010_000002|The music was broken, and Joseph left alone with the dumb instruments.
81
+ dev-clean/6313/66125/6313_66125_000020_000002|"Are you hurt?"
82
+ dev-clean/6313/66125/6313_66125_000053_000000|"Are you ready?"
83
+ dev-clean/6313/66129/6313_66129_000011_000001|"Cold water is the most nourishing thing we've touched since last night."
84
+ dev-clean/6313/66129/6313_66129_000045_000004|Of course, dogs can't follow the trail of an animal as well, now, as they could with snow on the ground.
85
+ dev-clean/6313/66129/6313_66129_000081_000000|Stacy dismounted and removed the hat carefully to one side.
86
+ dev-clean/6313/76958/6313_76958_000029_000000|Instantly there was a chorus of yells and snarls from the disturbed cowpunchers, accompanied by dire threats as to what they would do to the gopher did he ever disturb their rest in that way again.
87
+ dev-clean/6313/76958/6313_76958_000073_000001|"Those fellows have to go out.
88
+ dev-clean/6319/275224/6319_275224_000014_000001|And what is the matter with the beautiful straggling branches, that they are to be cut off as fast as they appear?
89
+ dev-clean/6319/57405/6319_57405_000019_000000|"It is rather a silly thing to do," said Deucalion; "and yet there can be no harm in it, and we shall see what will happen."
90
+ dev-clean/6319/64726/6319_64726_000017_000002|Then the prince took the princess by the hand; she was dressed in great splendour, but he did not hint that she looked as he had seen pictures of his great grandmother look; he thought her all the more charming for that.
91
+ dev-clean/6345/93302/6345_93302_000000_000001|All LibriVox recordings are in the public domain.
92
+ dev-clean/6345/93302/6345_93302_000049_000000|The fine tact of a noble woman seemed to have deserted her.
93
+ dev-clean/6345/93302/6345_93302_000073_000000|So she said-
94
+ dev-clean/6345/93306/6345_93306_000024_000002|What is it?
95
+ dev-clean/652/130737/652_130737_000031_000001|Good aroma.
96
+ dev-clean/7850/111771/7850_111771_000009_000001|After various flanking movements and costly assaults, the problem of taking Lee narrowed itself down to a siege of Petersburg.
97
+ dev-clean/7850/281318/7850_281318_000012_000000|She began to show them how to weave the bits of things together into nests, as they should be made.
98
+ dev-clean/7850/286674/7850_286674_000006_000001|You would think that, with six legs apiece and three joints in each leg, they might walk quite fast, yet they never did.
99
+ dev-clean/7850/73752/7850_73752_000006_000003|What a Neapolitan ball was his career then!
100
+ dev-clean/7976/105575/7976_105575_000009_000000|The burying party the next morning found nineteen dead Rebels lying together at one place.
101
+ dev-clean/7976/105575/7976_105575_000017_000000|Our regiment now pursued the flying Rebels with great vigor.
102
+ dev-clean/7976/110124/7976_110124_000021_000001|"We two are older and wiser than you are. It is for us to determine what shall be done.
103
+ dev-clean/7976/110124/7976_110124_000053_000002|The doors were strong and held securely.
104
+ dev-clean/7976/110523/7976_110523_000027_000000|"We will go in here," said Hansel, "and have a glorious feast.
105
+ dev-clean/8297/275154/8297_275154_000008_000000|Was this man-haggard, pallid, shabby, looking at him piteously with bloodshot eyes-the handsome, pleasant, prosperous brother whom he remembered?
106
+ dev-clean/8297/275154/8297_275154_000024_000011|Tell me where my wife is living now?"
107
+ dev-clean/8297/275155/8297_275155_000013_000006|What a perfect gentleman!"
108
+ dev-clean/8297/275155/8297_275155_000037_000000|"Say thoroughly worthy of the course forced upon me and my daughter by your brother's infamous conduct-and you will be nearer the mark!"
109
+ dev-clean/8297/275156/8297_275156_000013_000005|No more of it now.
110
+ dev-clean/84/121123/84_121123_000009_000000|But in less than five minutes the staircase groaned beneath an extraordinary weight.
111
+ dev-clean/84/121123/84_121123_000054_000000|It was something terrible to witness the silent agony, the mute despair of Noirtier, whose tears silently rolled down his cheeks.
112
+ dev-clean/84/121550/84_121550_000064_000000|And lo! a sudden lustre ran across On every side athwart the spacious forest, Such that it made me doubt if it were lightning.
113
+ dev-clean/84/121550/84_121550_000156_000000|Nor prayer for inspiration me availed, By means of which in dreams and otherwise I called him back, so little did he heed them.
114
+ dev-clean/84/121550/84_121550_000247_000000|Thus Beatrice; and I, who at the feet Of her commandments all devoted was, My mind and eyes directed where she willed.
115
+ dev-clean/8842/302203/8842_302203_000001_000001|And I remember that on the ninth day, being overcome with intolerable pain, a thought came into my mind concerning my lady: but when it had a little nourished this thought, my mind returned to its brooding over mine enfeebled body.
BigVGAN/filelists/LibriTTS/dev-other.txt ADDED
@@ -0,0 +1,93 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ dev-other/116/288045/116_288045_000003_000000|PART one
2
+ dev-other/116/288045/116_288045_000034_000001|He was only an idol.
3
+ dev-other/116/288047/116_288047_000002_000002|Observing the sun, the moon, and the stars overhead, the primitive man wished to account for them.
4
+ dev-other/116/288048/116_288048_000001_000000|Let me now give an idea of the method I propose to follow in the study of this subject.
5
+ dev-other/116/288048/116_288048_000020_000003|Leaving out Judas, and counting Matthias, who was elected in his place, we have thirteen apostles.
6
+ dev-other/1255/138279/1255_138279_000012_000000|"One.
7
+ dev-other/1255/138279/1255_138279_000049_000001|Will it be by banns or license?"
8
+ dev-other/1255/74899/1255_74899_000020_000000|"Pardon me.
9
+ dev-other/1255/90407/1255_90407_000006_000001|But, as the rain gave not the least sign of cessation, he observed: 'I think we shall have to go back.'
10
+ dev-other/1255/90407/1255_90407_000039_000002|Into it they plodded without pause, crossing the harbour bridge about midnight, wet to the skin.
11
+ dev-other/1255/90413/1255_90413_000023_000001|'Now what the devil this means I cannot tell,' he said to himself, reflecting stock still for a moment on the stairs.
12
+ dev-other/1585/131718/1585_131718_000025_000009|Edison marginalized documents extensively.
13
+ dev-other/1630/141772/1630_141772_000000_000002|Suddenly he again felt that he was alive and suffering from a burning, lacerating pain in his head.
14
+ dev-other/1630/141772/1630_141772_000039_000000|The quiet home life and peaceful happiness of Bald Hills presented itself to him.
15
+ dev-other/1630/73710/1630_73710_000019_000003|I almost wish papa would return, though I dread to see him.
16
+ dev-other/1630/96099/1630_96099_000033_000001|Why did you not follow him?
17
+ dev-other/1650/157641/1650_157641_000037_000001|mr w m
18
+ dev-other/1650/173551/1650_173551_000025_000000|Pierre went into that gloomy study which he had entered with such trepidation in his benefactor's lifetime.
19
+ dev-other/1651/136854/1651_136854_000046_000005|I have, however, this of gratitude, that I think of you with regard, when I do not, perhaps, give the proofs which I ought, of being, Sir,
20
+ dev-other/1686/142278/1686_142278_000015_000000|'No! not doubts as to religion; not the slightest injury to that.' He paused.
21
+ dev-other/1686/142278/1686_142278_000042_000001|Margaret was nearly upset again into a burst of crying.
22
+ dev-other/1701/141759/1701_141759_000001_000001|Not till midwinter was the count at last handed a letter addressed in his son's handwriting.
23
+ dev-other/1701/141759/1701_141759_000048_000000|"Why should you be ashamed?"
24
+ dev-other/1701/141760/1701_141760_000013_000003|"I only sent you the note yesterday by Bolkonski-an adjutant of Kutuzov's, who's a friend of mine.
25
+ dev-other/1701/141760/1701_141760_000056_000000|In spite of Prince Andrew's disagreeable, ironical tone, in spite of the contempt with which Rostov, from his fighting army point of view, regarded all these little adjutants on the staff of whom the newcomer was evidently one, Rostov felt confused, blushed, and became silent.
26
+ dev-other/2506/13150/2506_13150_000022_000000|--Nay-if you don't believe me, you may read the chapter for your pains.
27
+ dev-other/3660/172182/3660_172182_000012_000007|And a year, and a second, and a third, he proceeded thus, until his fame had flown over the face of the kingdom.
28
+ dev-other/3660/172183/3660_172183_000011_000000|So the maiden went forward, keeping in advance of Geraint, as he had desired her; and it grieved him as much as his wrath would permit, to see a maiden so illustrious as she having so much trouble with the care of the horses.
29
+ dev-other/3660/172183/3660_172183_000019_000040|Come with me to the court of a son in law of my sister, which is near here, and thou shalt have the best medical assistance in the kingdom."
30
+ dev-other/3660/6517/3660_6517_000036_000002|Bright sunshine.
31
+ dev-other/3660/6517/3660_6517_000059_000005|Not a single one has lost his good spirits.
32
+ dev-other/3663/172005/3663_172005_000022_000000|She must cross the Slide Brook valley, if possible, and gain the mountain opposite.
33
+ dev-other/3663/172528/3663_172528_000016_000008|He had been brought by my very dear friend Luca Martini, who passed the larger portion of the day with me.
34
+ dev-other/3915/57461/3915_57461_000018_000001|In a fit of madness I was tempted to kill and rob you.
35
+ dev-other/3915/98647/3915_98647_000018_000006|Thus the old custom is passing away.
36
+ dev-other/4323/13259/4323_13259_000009_000011|What would Jesus do?
37
+ dev-other/4323/13259/4323_13259_000020_000003|It seems she had been recently converted during the evangelist's meetings, and was killed while returning from one of the meetings in company with other converts and some of her friends.
38
+ dev-other/4323/18416/4323_18416_000019_000001|So she was asked to sing at musicales and receptions without end, until Alexia exclaimed at last, "They are all raving, stark mad over her, and it's all Polly's own fault, the whole of it."
39
+ dev-other/4323/18416/4323_18416_000050_000000|"I know, child; you think your old Grandpapa does just about right," said mr King soothingly, and highly gratified.
40
+ dev-other/4323/18416/4323_18416_000079_000002|"And I can't tolerate any thoughts I cannot speak."
41
+ dev-other/4323/55228/4323_55228_000028_000000|"Pete told you that I didn't care for any girl, only to paint?" demanded Bertram, angry and mystified.
42
+ dev-other/4323/55228/4323_55228_000071_000000|There was another silence.
43
+ dev-other/4570/102353/4570_102353_000001_000000|CHAPTER four.
44
+ dev-other/4570/14911/4570_14911_000009_000002|EYES-Brown, dark hazel or hazel, not deep set nor bulgy, and with a mild expression.
45
+ dev-other/4570/56594/4570_56594_000012_000000|"'No,' says the gentleman.
46
+ dev-other/4831/18525/4831_18525_000028_000000|"Oh! isn't it 'Oats, Peas, Beans, and Barley grow'?" cried Polly, as they watched them intently.
47
+ dev-other/4831/18525/4831_18525_000078_000001|"I want to write, too, I do," she cried, very much excited.
48
+ dev-other/4831/18525/4831_18525_000122_000000|"O dear me!" exclaimed Polly, softly, for she couldn't even yet get over that dreadful beginning.
49
+ dev-other/4831/25894/4831_25894_000022_000003|The other days were very much like this; sometimes they made more, sometimes less, but Tommo always 'went halves;' and Tessa kept on, in spite of cold and weariness, for her plans grew as her earnings increased, and now she hoped to get useful things, instead of candy and toys alone.
50
+ dev-other/4831/29134/4831_29134_000001_000000|The session was drawing toward its close.
51
+ dev-other/4831/29134/4831_29134_000018_000000|"So this poor little boy grew up to be a man, and had to go out in the world, far from home and friends to earn his living.
52
+ dev-other/5543/27761/5543_27761_000019_000000|Her mother went to hide.
53
+ dev-other/5543/27761/5543_27761_000065_000000|"Agathya says so, madam," answered Fedosya; "it's she that knows."
54
+ dev-other/5543/27761/5543_27761_000107_000000|"Sima, my dear, don't agitate yourself," said Sergey Modestovich in a whisper.
55
+ dev-other/5849/50873/5849_50873_000026_000000|"He has promised to do so."
56
+ dev-other/5849/50873/5849_50873_000074_000000|"The boy did it!
57
+ dev-other/5849/50962/5849_50962_000010_000000|"It's a schooner," said mr Bingham to mr Minturn, "and she has a very heavy cargo."
58
+ dev-other/5849/50963/5849_50963_000009_000003|Well, it was a long, slow job to drag those heavy logs around that point, and just when we were making headway, along comes a storm that drove the schooner and canoes out of business."
59
+ dev-other/5849/50964/5849_50964_000018_000001|There were the shells to be looked after, the fish nets, besides Downy, the duck, and Snoop, the cat.
60
+ dev-other/6123/59150/6123_59150_000016_000001|He kicked him two or three times with his heel in the face.
61
+ dev-other/6123/59186/6123_59186_000008_000000|"Catering care" is an appalling phrase.
62
+ dev-other/6267/53049/6267_53049_000007_000001|"I'd better be putting my grey matter into that algebra instead of wasting it plotting for a party dress that I certainly can't get.
63
+ dev-other/6267/53049/6267_53049_000045_000001|I am named after her."
64
+ dev-other/6267/65525/6267_65525_000018_000000|Dear mr Lincoln:
65
+ dev-other/6267/65525/6267_65525_000045_000006|You can't mistake it."
66
+ dev-other/6455/66379/6455_66379_000020_000002|(Deal, sir, if you please; better luck next time.)"
67
+ dev-other/6455/67803/6455_67803_000038_000000|"Yes," he answered.
68
+ dev-other/6467/56885/6467_56885_000012_000001|As you are so generously taking her on trust, may she never cause you a moment's regret.
69
+ dev-other/6467/97061/6467_97061_000010_000000|A terrible battle ensued, in which both kings performed prodigies of valour.
70
+ dev-other/6841/88291/6841_88291_000006_000006|One stood waiting for them to finish, a sheaf of long j h stamping irons in his hand.
71
+ dev-other/6841/88291/6841_88291_000019_000006|Cries arose in a confusion: "Marker" "Hot iron!" "Tally one!" Dust eddied and dissipated.
72
+ dev-other/6841/88294/6841_88294_000010_000003|Usually I didn't bother with his talk, for it didn't mean anything, but something in his voice made me turn.
73
+ dev-other/6841/88294/6841_88294_000048_000000|He stood there looking straight at me without winking or offering to move.
74
+ dev-other/700/122866/700_122866_000006_000003|You've been thirteen for a month, so I suppose it doesn't seem such a novelty to you as it does to me.
75
+ dev-other/700/122866/700_122866_000023_000006|Ruby Gillis is rather sentimental.
76
+ dev-other/700/122867/700_122867_000012_000004|My career is closed.
77
+ dev-other/700/122867/700_122867_000033_000003|At the end of the week Marilla said decidedly:
78
+ dev-other/700/122868/700_122868_000015_000003|mrs Lynde says that all play acting is abominably wicked."
79
+ dev-other/700/122868/700_122868_000038_000001|And Ruby is in hysterics-oh, Anne, how did you escape?"
80
+ dev-other/7601/101622/7601_101622_000018_000002|The very girls themselves set them on:
81
+ dev-other/7601/175351/7601_175351_000031_000008|Still, during the nights which followed the fifteenth of August, darkness was never profound; although the sun set, he still gave sufficient light by refraction.
82
+ dev-other/7641/96252/7641_96252_000003_000006|For these are careful only for themselves, for their own egoism, just like the bandit, from whom they are only distinguished by the absurdity of their means.
83
+ dev-other/7641/96670/7641_96670_000013_000001|The mist lifted suddenly and she saw three strangers in the palace courtyard.
84
+ dev-other/7641/96684/7641_96684_000009_000000|"What years of happiness have been mine, O Apollo, through your friendship for me," said Admetus.
85
+ dev-other/7641/96684/7641_96684_000031_000002|How noble it was of Admetus to bring him into his house and give entertainment to him while such sorrow was upon him.
86
+ dev-other/7697/105815/7697_105815_000048_000002|And they brought out the jaw bone of an ass with which Samson did such great feats, and the sling and stone with which David slew Goliath of Gath.
87
+ dev-other/8173/294714/8173_294714_000006_000001|"Don't spoil my pleasure in seeing you again by speaking of what can never be! Have you still to be told how it is that you find me here alone with my child?"
88
+ dev-other/8173/294714/8173_294714_000027_000001|What was there to prevent her from insuring her life, if she pleased, and from so disposing of the insurance as to give Van Brandt a direct interest in her death?
89
+ dev-other/8254/115543/8254_115543_000034_000000|"Yes, and how he orders every one about him.
90
+ dev-other/8254/84205/8254_84205_000029_000000|"I'm not afraid of them hitting me, my lad," said Griggs confidently. "Being shot at by fellows with bows and arrows sounds bad enough, but there's not much risk here."
91
+ dev-other/8254/84205/8254_84205_000073_000000|"Right; I do, neighbour, and it's very handsome of you to offer me the chance to back out.
92
+ dev-other/8288/274162/8288_274162_000023_000000|"Exactly.
93
+ dev-other/8288/274162/8288_274162_000078_000000|"So much the worse.
BigVGAN/filelists/LibriTTS/parse_libritts.py ADDED
@@ -0,0 +1,77 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright (c) 2024 NVIDIA CORPORATION.
2
+ # Licensed under the MIT license.
3
+
4
+ import os, glob
5
+
6
+
7
def get_wav_and_text_filelist(data_root, data_type, subsample=1):
    """Collect wav paths and normalized transcripts for one LibriTTS split.

    Args:
        data_root: Root directory containing the split directories.
        data_type: Split name (e.g. "dev-clean"); wavs are expected two
            directory levels below it (speaker/chapter/utterance.wav).
        subsample: Keep every `subsample`-th utterance (1 keeps all).

    Returns:
        (wav_list, txt_list): parallel lists of extension-less wav paths
        relative to `data_root` and their transcript strings.
    """
    # NOTE: glob's "**" is non-recursive without recursive=True, so this
    # pattern matches exactly the two-level speaker/chapter layout.
    wav_paths = sorted(glob.glob(os.path.join(data_root, data_type, "**/**/*.wav")))
    # os.path.relpath is robust where the previous
    # `path.replace(data_root, "")[1:]` was not: str.replace would also hit
    # a data_root substring appearing later in the path, and the [1:] slice
    # assumed no trailing separator on data_root.
    wav_list = [os.path.relpath(path, data_root) for path in wav_paths]
    wav_list = wav_list[::subsample]

    txt_list = []
    for wav_file in wav_list:
        txt_file = wav_file.replace(".wav", ".normalized.txt")
        with open(os.path.join(data_root, txt_file), "r") as f_txt:
            # Transcripts are single-line files; keep only the text.
            txt_list.append(f_txt.readline().strip("\n"))

    wav_list = [path.replace(".wav", "") for path in wav_list]
    return wav_list, txt_list
25
+
26
+
27
def write_filelist(output_path, wav_list, txt_list):
    """Write paired entries to `output_path`, one "wav|text" line each."""
    with open(output_path, "w") as f:
        for idx, wav in enumerate(wav_list):
            f.write(f"{wav}|{txt_list[idx]}\n")
32
+
33
+
34
def _require_dir(path):
    """Raise if *path* is missing.

    Replaces the previous `assert os.path.exists(...)` checks: asserts are
    stripped under `python -O`, so input validation must use a real raise.
    """
    if not os.path.exists(path):
        raise FileNotFoundError(
            f"path {path} not found. make sure the path is accessible by creating the symbolic link using the following command: "
            f"ln -s /path/to/your/{path} {path}"
        )


if __name__ == "__main__":

    data_root = "filelists/LibriTTS"

    # Dev and test sets. Subsample each set to get ~100 utterances.
    data_type_list = ["dev-clean", "dev-other", "test-clean", "test-other"]
    subsample_list = [50, 50, 50, 50]
    for data_type, subsample in zip(data_type_list, subsample_list):
        print(f"processing {data_type}")
        _require_dir(os.path.join(data_root, data_type))
        wav_list, txt_list = get_wav_and_text_filelist(data_root, data_type, subsample)
        write_filelist(os.path.join(data_root, data_type + ".txt"), wav_list, txt_list)

    # Training and seen speaker validation datasets (libritts-full):
    # train-clean-100 + train-clean-360 + train-other-500
    wav_list_train, txt_list_train = [], []
    for data_type in ["train-clean-100", "train-clean-360", "train-other-500"]:
        print(f"processing {data_type}")
        _require_dir(os.path.join(data_root, data_type))
        wav_list, txt_list = get_wav_and_text_filelist(data_root, data_type)
        wav_list_train.extend(wav_list)
        txt_list_train.extend(txt_list)

    # Split the training set so that the seen speaker validation set
    # contains ~100 utterances; the slice-delete removes exactly the
    # entries copied into the validation lists.
    subsample_val = 3000
    wav_list_val = wav_list_train[::subsample_val]
    txt_list_val = txt_list_train[::subsample_val]
    del wav_list_train[::subsample_val]
    del txt_list_train[::subsample_val]
    write_filelist(
        os.path.join(data_root, "train-full.txt"), wav_list_train, txt_list_train
    )
    write_filelist(os.path.join(data_root, "val-full.txt"), wav_list_val, txt_list_val)

    print("done")
BigVGAN/filelists/LibriTTS/test-clean.txt ADDED
@@ -0,0 +1,97 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ test-clean/1089/134686/1089_134686_000001_000001|He hoped there would be stew for dinner, turnips and carrots and bruised potatoes and fat mutton pieces to be ladled out in thick peppered flour fattened sauce. Stuff it into you, his belly counselled him.
2
+ test-clean/1089/134686/1089_134686_000020_000001|We can scut the whole hour.
3
+ test-clean/1089/134691/1089_134691_000004_000001|Yet her mistrust pricked him more keenly than his father's pride and he thought coldly how he had watched the faith which was fading down in his soul ageing and strengthening in her eyes.
4
+ test-clean/1089/134691/1089_134691_000027_000004|Now, at the name of the fabulous artificer, he seemed to hear the noise of dim waves and to see a winged form flying above the waves and slowly climbing the air.
5
+ test-clean/1188/133604/1188_133604_000018_000002|There are just four touches-fine as the finest penmanship-to do that beak; and yet you will find that in the peculiar paroquettish mumbling and nibbling action of it, and all the character in which this nibbling beak differs from the tearing beak of the eagle, it is impossible to go farther or be more precise.
6
+ test-clean/121/121726/121_121726_000046_000003|Tied to a woman.
7
+ test-clean/121/127105/121_127105_000024_000000|He laughed for the first time.
8
+ test-clean/1284/1180/1284_1180_000001_000000|The Crooked Magician
9
+ test-clean/1284/1181/1284_1181_000005_000000|The head of the Patchwork Girl was the most curious part of her.
10
+ test-clean/1320/122612/1320_122612_000019_000005|It is true that the horses are here, but the Hurons are gone; let us, then, hunt for the path by which they parted."
11
+ test-clean/1320/122612/1320_122612_000056_000002|Then he reappeared, creeping along the earth, from which his dress was hardly distinguishable, directly in the rear of his intended captive.
12
+ test-clean/1580/141083/1580_141083_000012_000000|"The first page on the floor, the second in the window, the third where you left it," said he.
13
+ test-clean/1580/141083/1580_141083_000041_000003|Above were three students, one on each story.
14
+ test-clean/1580/141083/1580_141083_000063_000001|Holmes held it out on his open palm in the glare of the electric light.
15
+ test-clean/1580/141083/1580_141083_000110_000001|Where were you when you began to feel bad?"
16
+ test-clean/1580/141084/1580_141084_000024_000002|Pencils, too, and knives-all was satisfactory.
17
+ test-clean/1580/141084/1580_141084_000060_000001|"I frankly admit that I am unable to prove it.
18
+ test-clean/1580/141084/1580_141084_000085_000000|"Good heavens! have you nothing to add?" cried Soames.
19
+ test-clean/1995/1826/1995_1826_000022_000001|Miss Taylor did not know much about cotton, but at least one more remark seemed called for.
20
+ test-clean/1995/1836/1995_1836_000016_000001|No, of course there was no immediate danger; but when people were suddenly thrust beyond their natural station, filled with wild ideas and impossible ambitions, it meant terrible danger to Southern white women.
21
+ test-clean/1995/1837/1995_1837_000024_000000|He heard that she was down stairs and ran to meet her with beating heart.
22
+ test-clean/2300/131720/2300_131720_000016_000005|Having travelled around the world, I had cultivated an indifference to any special difficulties of that kind.
23
+ test-clean/2300/131720/2300_131720_000030_000005|I telephoned again, and felt something would happen, but fortunately it did not.
24
+ test-clean/237/126133/237_126133_000002_000004|It got to be noticed finally; and one and all redoubled their exertions to make everything twice as pleasant as ever!
25
+ test-clean/237/126133/237_126133_000049_000000|But the chubby face didn't look up brightly, as usual: and the next moment, without a bit of warning, Phronsie sprang past them all, even Polly, and flung herself into mr King's arms, in a perfect torrent of sobs.
26
+ test-clean/237/134493/237_134493_000008_000003|Alexandra lets you sleep late.
27
+ test-clean/237/134500/237_134500_000001_000001|Frank sat up until a late hour reading the Sunday newspapers.
28
+ test-clean/237/134500/237_134500_000014_000000|"I don't know all of them, but I know lindens are.
29
+ test-clean/237/134500/237_134500_000034_000000|She sighed despondently.
30
+ test-clean/260/123286/260_123286_000019_000002|Therefore don't talk to me about views and prospects."
31
+ test-clean/260/123286/260_123286_000049_000005|He shakes his head negatively.
32
+ test-clean/260/123288/260_123288_000016_000002|It rushes on from the farthest recesses of the vast cavern.
33
+ test-clean/260/123288/260_123288_000043_000001|I could just see my uncle at full length on the raft, and Hans still at his helm and spitting fire under the action of the electricity which has saturated him.
34
+ test-clean/2830/3979/2830_3979_000007_000000|PREFACE
35
+ test-clean/2830/3980/2830_3980_000018_000001|Humble man that he was, he will not now take a back seat.
36
+ test-clean/2961/961/2961_961_000004_000037|Then your city did bravely, and won renown over the whole earth.
37
+ test-clean/2961/961/2961_961_000023_000003|But violent as were the internal and alimentary fluids, the tide became still more violent when the body came into contact with flaming fire, or the solid earth, or gliding waters, or the stormy wind; the motions produced by these impulses pass through the body to the soul and have the name of sensations.
38
+ test-clean/3570/5694/3570_5694_000009_000003|The canon of reputability is at hand and seizes upon such innovations as are, according to its standard, fit to survive.
39
+ test-clean/3570/5695/3570_5695_000001_000003|But the middle class wife still carries on the business of vicarious leisure, for the good name of the household and its master.
40
+ test-clean/3570/5695/3570_5695_000009_000005|Considered by itself simply-taken in the first degree-this added provocation to which the artisan and the urban laboring classes are exposed may not very seriously decrease the amount of savings; but in its cumulative action, through raising the standard of decent expenditure, its deterrent effect on the tendency to save cannot but be very great.
41
+ test-clean/3570/5696/3570_5696_000011_000006|For this is the basis of award of the instinct of workmanship, and that instinct is the court of final appeal in any question of economic truth or adequacy.
42
+ test-clean/3729/6852/3729_6852_000004_000003|In order to please her, I spoke to her of the Abbe Conti, and I had occasion to quote two lines of that profound writer.
43
+ test-clean/4077/13754/4077_13754_000002_000000|The troops, once in Utah, had to be provisioned; and everything the settlers could spare was eagerly bought at an unusual price. The gold changed hands.
44
+ test-clean/4446/2271/4446_2271_000003_000004|There's everything in seeing Hilda while she's fresh in a part.
45
+ test-clean/4446/2271/4446_2271_000020_000001|Lady Westmere is very fond of Hilda."
46
+ test-clean/4446/2273/4446_2273_000008_000002|I've no need for fine clothes in Mac's play this time, so I can afford a few duddies for myself.
47
+ test-clean/4446/2273/4446_2273_000027_000004|She did my blouses beautifully the last time I was there, and was so delighted to see me again.
48
+ test-clean/4446/2273/4446_2273_000046_000001|"Aren't you afraid to let the wind low like that on your neck?
49
+ test-clean/4446/2275/4446_2275_000013_000000|Hilda was pale by this time, and her eyes were wide with fright.
50
+ test-clean/4446/2275/4446_2275_000038_000006|"You want to tell me that you can only see me like this, as old friends do, or out in the world among people?
51
+ test-clean/4507/16021/4507_16021_000011_000000|It engenders a whole world, la pegre, for which read theft, and a hell, la pegrenne, for which read hunger.
52
+ test-clean/4507/16021/4507_16021_000030_000001|Facts form one of these, and ideas the other.
53
+ test-clean/4970/29093/4970_29093_000010_000000|Delightful illusion of paint and tinsel and silk attire, of cheap sentiment and high and mighty dialogue!
54
+ test-clean/4970/29093/4970_29093_000047_000000|"Never mind the map.
55
+ test-clean/4970/29095/4970_29095_000021_000000|"I will practice it."
56
+ test-clean/4970/29095/4970_29095_000055_000002|He took it with him from the Southern Hotel, when he went to walk, and read it over and again in an unfrequented street as he stumbled along.
57
+ test-clean/4992/41797/4992_41797_000014_000002|He keeps the thou shalt not commandments first rate, Hen Lord does!
58
+ test-clean/4992/41806/4992_41806_000020_000001|Thou who settest the solitary in families, bless the life that is sheltered here.
59
+ test-clean/5105/28241/5105_28241_000004_000004|The late astounding events, however, had rendered Procope manifestly uneasy, and not the less so from his consciousness that the count secretly partook of his own anxiety.
60
+ test-clean/5142/33396/5142_33396_000004_000004|At the prow I carved the head with open mouth and forked tongue thrust out.
61
+ test-clean/5142/33396/5142_33396_000039_000000|"The thralls were bringing in a great pot of meat.
62
+ test-clean/5142/36377/5142_36377_000013_000003|I liked Naomi Colebrook at first sight; liked her pleasant smile; liked her hearty shake of the hand when we were presented to each other.
63
+ test-clean/5639/40744/5639_40744_000003_000006|Mother! dear father! do you hear me?
64
+ test-clean/5639/40744/5639_40744_000022_000000|Just then Leocadia came to herself, and embracing the cross seemed changed into a sea of tears, and the gentleman remained in utter bewilderment, until his wife had repeated to him, from beginning to end, Leocadia's whole story; and he believed it, through the blessed dispensation of Heaven, which had confirmed it by so many convincing testimonies.
65
+ test-clean/5683/32865/5683_32865_000018_000000|Well, it was pretty-French, I dare say-a little set of tablets-a toy-the cover of enamel, studded in small jewels, with a slender border of symbolic flowers, and with a heart in the centre, a mosaic of little carbuncles, rubies, and other red and crimson stones, placed with a view to light and shade.
66
+ test-clean/5683/32866/5683_32866_000005_000000|'Did you see that?' said Wylder in my ear, with a chuckle; and, wagging his head, he added, rather loftily for him, 'Miss Brandon, I reckon, has taken your measure, Master Stanley, as well as i I wonder what the deuce the old dowager sees in him.
67
+ test-clean/5683/32866/5683_32866_000047_000002|I was not a bit afraid of being found out.
68
+ test-clean/5683/32879/5683_32879_000036_000002|Be he near, or be he far, I regard his very name with horror.'
69
+ test-clean/6829/68769/6829_68769_000011_000000|So as soon as breakfast was over the next morning Beth and Kenneth took one of the automobiles, the boy consenting unwillingly to this sort of locomotion because it would save much time.
70
+ test-clean/6829/68769/6829_68769_000051_000001|One morning she tried to light the fire with kerosene, and lost her sight.
71
+ test-clean/6829/68769/6829_68769_000089_000001|Why should you do all this?"
72
+ test-clean/6829/68771/6829_68771_000018_000003|A speakers' stand, profusely decorated, had been erected on the lawn, and hundreds of folding chairs provided for seats.
73
+ test-clean/6930/75918/6930_75918_000000_000001|Night.
74
+ test-clean/6930/81414/6930_81414_000041_000001|Here is his scarf, which has evidently been strained, and on it are spots of blood, while all around are marks indicating a struggle.
75
+ test-clean/7021/79740/7021_79740_000010_000006|I observe that, when you both wish for the same thing, you don't quarrel for it and try to pull it away from one another; but one waits like a lady until the other has done with it.
76
+ test-clean/7021/85628/7021_85628_000017_000000|"I am going to the court ball," answered Anders.
77
+ test-clean/7127/75946/7127_75946_000022_000002|It is necessary, therefore, that he should comply."
78
+ test-clean/7127/75946/7127_75946_000061_000001|Disdainful of a success of which Madame showed no acknowledgement, he thought of nothing but boldly regaining the marked preference of the princess.
79
+ test-clean/7127/75947/7127_75947_000035_000000|"Quite true, and I believe you are right.
80
+ test-clean/7176/88083/7176_88083_000002_000003|He was too imposing in appearance, too gorgeous in apparel, too bold and vigilant in demeanor to be so misunderstood.
81
+ test-clean/7176/88083/7176_88083_000017_000000|Immediately over his outstretched gleaming head flew the hawk.
82
+ test-clean/7176/92135/7176_92135_000011_000000|And, so on in the same vein for some thirty lines.
83
+ test-clean/7176/92135/7176_92135_000074_000001|Tea, please, Matthews.
84
+ test-clean/7729/102255/7729_102255_000011_000003|The Free State Hotel served as barracks.
85
+ test-clean/7729/102255/7729_102255_000028_000009|They were squads of Kansas militia, companies of "peaceful emigrants," or gangs of irresponsible outlaws, to suit the chance, the whim, or the need of the moment.
86
+ test-clean/8230/279154/8230_279154_000003_000002|In the present lecture I shall attempt the analysis of memory knowledge, both as an introduction to the problem of knowledge in general, and because memory, in some form, is presupposed in almost all other knowledge.
87
+ test-clean/8230/279154/8230_279154_000013_000003|One of these is context.
88
+ test-clean/8230/279154/8230_279154_000027_000000|A further stage is RECOGNITION.
89
+ test-clean/8455/210777/8455_210777_000022_000003|And immediately on his sitting down, there got up a gentleman to whom I had not been introduced before this day, and gave the health of Mrs Neverbend and the ladies of Britannula.
90
+ test-clean/8455/210777/8455_210777_000064_000001|Government that he shall be treated with all respect, and that those honours shall be paid to him which are due to the President of a friendly republic.
91
+ test-clean/8463/287645/8463_287645_000023_000001|For instance, Jacob Taylor was noticed on the record book as being twenty three years of age, and the name of his master was entered as "William Pollit;" but as Jacob had never been allowed to learn to read, he might have failed in giving a correct pronunciation of the name.
92
+ test-clean/8463/294825/8463_294825_000048_000000|- CENTIMETER Roughly two fifths of an inch
93
+ test-clean/8463/294828/8463_294828_000046_000001|Conseil did them in a flash, and I was sure the lad hadn't missed a thing, because he classified shirts and suits as expertly as birds and mammals.
94
+ test-clean/8555/284447/8555_284447_000018_000002|The poor Queen, by the way, was seldom seen, as she passed all her time playing solitaire with a deck that was one card short, hoping that before she had lived her entire six hundred years she would win the game.
95
+ test-clean/8555/284447/8555_284447_000049_000000|Now, indeed, the Boolooroo was as angry as he was amazed.
96
+ test-clean/8555/284449/8555_284449_000039_000000|When the courtiers and the people assembled saw the goat they gave a great cheer, for the beast had helped to dethrone their wicked Ruler.
97
+ test-clean/8555/292519/8555_292519_000041_000000|She was alone that night. He had broken into her courtyard. Above the gurgling gutters he heard- surely- a door unchained?
BigVGAN/filelists/LibriTTS/test-other.txt ADDED
@@ -0,0 +1,103 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ test-other/1688/142285/1688_142285_000000_000000|'Margaret!' said mr Hale, as he returned from showing his guest downstairs; 'I could not help watching your face with some anxiety, when mr Thornton made his confession of having been a shop boy.
2
+ test-other/1688/142285/1688_142285_000046_000000|'No, mamma; that Anne Buckley would never have done.'
3
+ test-other/1998/15444/1998_15444_000012_000000|Simple filtration will sometimes suffice to separate the required substance; in other cases dialysis will be necessary, in order that crystalloid substances may be separated from colloid bodies.
4
+ test-other/1998/29454/1998_29454_000021_000001|Fried eggs and bacon-he had one egg and the man had three-bread and butter-and if the bread was thick, so was the butter-and as many cups of tea as you liked to say thank you for.
5
+ test-other/1998/29454/1998_29454_000053_000001|It almost looked, Dickie thought, as though he had brought them out for some special purpose.
6
+ test-other/1998/29455/1998_29455_000022_000000|It was a wonderful day.
7
+ test-other/1998/29455/1998_29455_000082_000003|But 'e's never let it out."
8
+ test-other/2414/128292/2414_128292_000003_000000|"What!" said he, "have not the most ludicrous things always happened to us old anchorites and saints?
9
+ test-other/2609/156975/2609_156975_000036_000004|The cruel fate of his people and the painful experience in Egypt that had driven him into the wilderness prepared his mind to receive this training.
10
+ test-other/3005/163389/3005_163389_000017_000001|And they laughed all the time, and that made the duke mad; and everybody left, anyway, before the show was over, but one boy which was asleep.
11
+ test-other/3005/163390/3005_163390_000023_000021|S'pose people left money laying around where he was what did he do?
12
+ test-other/3005/163391/3005_163391_000021_000000|"It's a pretty long journey.
13
+ test-other/3005/163399/3005_163399_000013_000002|When we got there she set me down in a split bottomed chair, and set herself down on a little low stool in front of me, holding both of my hands, and says:
14
+ test-other/3005/163399/3005_163399_000045_000000|He sprung to the window at the head of the bed, and that give mrs Phelps the chance she wanted.
15
+ test-other/3080/5040/3080_5040_000000_000010|You have no such ladies in Ireland?
16
+ test-other/3331/159605/3331_159605_000006_000002|I could do so much for all at home how I should enjoy that!" And Polly let her thoughts revel in the luxurious future her fancy painted.
17
+ test-other/3331/159605/3331_159605_000082_000000|"Who got up that nice idea, I should like to know?" demanded Polly, as Fanny stopped for breath.
18
+ test-other/3528/168656/3528_168656_000003_000003|She told wonders of the Abbey of Fontevrault,--that it was like a city, and that there were streets in the monastery.
19
+ test-other/3528/168669/3528_168669_000030_000000|A silence ensued.
20
+ test-other/3528/168669/3528_168669_000075_000000|"Like yourself, reverend Mother."
21
+ test-other/3528/168669/3528_168669_000123_000000|"But the commissary of police-"
22
+ test-other/3528/168669/3528_168669_000137_000000|"That is well."
23
+ test-other/3528/168669/3528_168669_000164_000008|I shall have my lever.
24
+ test-other/3538/142836/3538_142836_000021_000003|However, as late as the reigns of our two last Georges, fabulous sums were often expended upon fanciful desserts.
25
+ test-other/3538/163619/3538_163619_000054_000000|'Now he says that you are to make haste and throw yourself overboard,' answered the step mother.
26
+ test-other/3538/163622/3538_163622_000069_000000|So they travelled onwards again, for many and many a mile, over hill and dale.
27
+ test-other/3538/163624/3538_163624_000038_000000|Then Sigurd went down into that deep place, and dug many pits in it, and in one of the pits he lay hidden with his sword drawn.
28
+ test-other/367/130732/367_130732_000002_000001|Probably nowhere in San Francisco could one get lobster better served than in the Old Delmonico restaurant of the days before the fire.
29
+ test-other/3764/168670/3764_168670_000003_000000|"But you, Father Madeleine?"
30
+ test-other/3764/168670/3764_168670_000043_000000|"Yes."
31
+ test-other/3764/168670/3764_168670_000083_000005|He grumbled:--
32
+ test-other/3764/168671/3764_168671_000012_000003|He did what he liked with him.
33
+ test-other/3764/168671/3764_168671_000046_000000|"Comrade!" cried Fauchelevent.
34
+ test-other/3997/180294/3997_180294_000023_000000|Then, when God allows love to a courtesan, that love, which at first seems like a pardon, becomes for her almost without penitence.
35
+ test-other/3997/180294/3997_180294_000065_000001|The count will be coming back, and there is nothing to be gained by his finding you here."
36
+ test-other/3997/180297/3997_180297_000034_000004|For these people we have to be merry when they are merry, well when they want to sup, sceptics like themselves.
37
+ test-other/3997/182399/3997_182399_000014_000003|Oh, my, no!
38
+ test-other/4198/61336/4198_61336_000000_000003|It is significant to note in this connection that the new king was an unswerving adherent of the cult of Ashur, by the adherents of which he was probably strongly supported.
39
+ test-other/4198/61336/4198_61336_000033_000001|Nabonassar had died and was succeeded by his son Nabu nadin zeri, who, after reigning for two years, was slain in a rebellion.
40
+ test-other/4294/14317/4294_14317_000022_000011|I do not condescend to smite you. He looked at me submissively and said nothing.
41
+ test-other/4294/35475/4294_35475_000018_000001|At last they reached a wide chasm that bounded the Ogre's domain.
42
+ test-other/4294/35475/4294_35475_000050_000002|They said, "We are only waiting to lay some wily plan to capture the Ogre."
43
+ test-other/4294/9934/4294_9934_000025_000000|"Gold; here it is."
44
+ test-other/4350/10919/4350_10919_000006_000000|"Immediately, princess.
45
+ test-other/4350/9170/4350_9170_000005_000001|Authority, in the sense in which the word is ordinarily understood, is a means of forcing a man to act in opposition to his desires.
46
+ test-other/4350/9170/4350_9170_000056_000000|But the fatal significance of universal military service, as the manifestation of the contradiction inherent in the social conception of life, is not only apparent in that.
47
+ test-other/4852/28311/4852_28311_000031_000001|After a step or two, not finding his friend beside him, he turned.
48
+ test-other/4852/28319/4852_28319_000013_000002|mr Wicker waited patiently beside him for a few moments for Chris to get up his courage.
49
+ test-other/533/1066/533_1066_000008_000000|"I mean," he persisted, "do you feel as though you could go through with something rather unusual?"
50
+ test-other/533/131562/533_131562_000018_000000|mr Huntingdon then went up stairs.
51
+ test-other/5442/41168/5442_41168_000002_000001|Sergey Ivanovitch, waiting till the malignant gentleman had finished speaking, said that he thought the best solution would be to refer to the act itself, and asked the secretary to find the act.
52
+ test-other/5442/41169/5442_41169_000003_000000|"He's such a blackguard!
53
+ test-other/5442/41169/5442_41169_000030_000000|"And with what he made he'd increase his stock, or buy some land for a trifle, and let it out in lots to the peasants," Levin added, smiling. He had evidently more than once come across those commercial calculations.
54
+ test-other/5484/24317/5484_24317_000040_000006|Let us hope that you will make this three leaved clover the luck promising four leaved one.
55
+ test-other/5484/24318/5484_24318_000015_000002|The blood of these innocent men would be on his head if he did not listen to her representations.
56
+ test-other/5484/24318/5484_24318_000068_000001|He was appearing before his companions only to give truth its just due.
57
+ test-other/5764/299665/5764_299665_000041_000004|He saw the seeds that man had planted wither and perish, but he sent no rain.
58
+ test-other/5764/299665/5764_299665_000070_000000|Think of the egotism of a man who believes that an infinite being wants his praise!
59
+ test-other/5764/299665/5764_299665_000102_000000|The first stone is that matter-substance-cannot be destroyed, cannot be annihilated.
60
+ test-other/5764/299665/5764_299665_000134_000000|You cannot reform these people with tracts and talk.
61
+ test-other/6070/63485/6070_63485_000025_000003|Hand me the cash, and I will hand you the pocketbook."
62
+ test-other/6070/86744/6070_86744_000027_000000|"Have you bachelor's apartments there?
63
+ test-other/6070/86745/6070_86745_000001_000002|Two windows only of the pavilion faced the street; three other windows looked into the court, and two at the back into the garden.
64
+ test-other/6128/63240/6128_63240_000012_000002|Neither five nor fifteen, and yet not ten exactly, but either nine or eleven.
65
+ test-other/6128/63240/6128_63240_000042_000002|mrs Luna explained to her sister that her freedom of speech was caused by his being a relation-though, indeed, he didn't seem to know much about them.
66
+ test-other/6128/63244/6128_63244_000002_000000|"I can't talk to those people, I can't!" said Olive Chancellor, with a face which seemed to plead for a remission of responsibility.
67
+ test-other/6432/63722/6432_63722_000026_000000|"Not the least in the world-not as much as you do," was the cool answer.
68
+ test-other/6432/63722/6432_63722_000050_000004|Queen Elizabeth was very fond of watches and clocks, and her friends, knowing that, used to present her with beautiful specimens. Some of the watches of her day were made in the form of crosses, purses, little books, and even skulls."
69
+ test-other/6432/63722/6432_63722_000080_000003|When it does it will create a sensation."
70
+ test-other/6432/63723/6432_63723_000026_000000|"No; but he will, or I'll sue him and get judgment.
71
+ test-other/6432/63723/6432_63723_000057_000000|"Then for the love of-"
72
+ test-other/6432/63723/6432_63723_000080_000000|"Hello, Harry!
73
+ test-other/6938/70848/6938_70848_000046_000003|Show me the source!"
74
+ test-other/6938/70848/6938_70848_000104_000000|With biting sarcasm he went on to speak of the Allied diplomats, till then contemptuous of Russia's invitation to an armistice, which had been accepted by the Central Powers.
75
+ test-other/7105/2330/7105_2330_000021_000000|"He won't go unless he has a brass band.
76
+ test-other/7105/2340/7105_2340_000015_000001|We feel that we must live on cream for the rest of our lives.
77
+ test-other/7902/96591/7902_96591_000008_000001|I did not come to frighten you; you frightened me."
78
+ test-other/7902/96591/7902_96591_000048_000000|"No," he thought to himself, "I don't believe they would kill me, but they would knock me about."
79
+ test-other/7902/96592/7902_96592_000024_000001|Once out of that room he could ran, and by daylight the smugglers dare not hunt him down.
80
+ test-other/7902/96592/7902_96592_000063_000000|"What for?" cried Ram.
81
+ test-other/7902/96594/7902_96594_000014_000001|These fellows are very cunning, but we shall be too many for them one of these days."
82
+ test-other/7902/96594/7902_96594_000062_000001|Keep a sharp look out on the cliff to see if Mr Raystoke is making signals for a boat.
83
+ test-other/7902/96595/7902_96595_000039_000000|The man shook his head, and stared as if he didn't half understand the drift of what was said.
84
+ test-other/7975/280057/7975_280057_000009_000000|Naturally we were Southerners in sympathy and in fact.
85
+ test-other/7975/280057/7975_280057_000025_000004|On reaching the camp the first person I saw whom I knew was Cole Younger.
86
+ test-other/7975/280076/7975_280076_000013_000001|I will give you this outline and sketch of my whereabouts and actions at the time of certain robberies with which I am charged.
87
+ test-other/7975/280084/7975_280084_000007_000000|But between the time we broke camp and the time they reached the bridge the three who went ahead drank a quart of whisky, and there was the initial blunder at Northfield.
88
+ test-other/7975/280085/7975_280085_000005_000002|Some of the boys wanted to kill him, on the theory that "dead men tell no tales," while others urged binding him and leaving him in the woods.
89
+ test-other/8131/117016/8131_117016_000005_000000|The Stonewall gang numbered perhaps five hundred.
90
+ test-other/8131/117016/8131_117016_000025_000001|"And don't let them get away!"
91
+ test-other/8131/117016/8131_117016_000047_000006|I can always go back to Earth, and I'll try to take you along.
92
+ test-other/8131/117017/8131_117017_000005_000000|Gordon hit the signal switch, and the Marspeaker let out a shrill whistle.
93
+ test-other/8131/117017/8131_117017_000020_000003|There's no graft out here."
94
+ test-other/8131/117029/8131_117029_000007_000002|Wrecks were being broken up, with salvageable material used for newer homes. Gordon came to a row of temporary bubbles, individual dwellings built like the dome, but opaque for privacy.
95
+ test-other/8131/117029/8131_117029_000023_000004|But there'll be pushers as long as weak men turn to drugs, and graft as long as voters allow the thing to get out of their hands.
96
+ test-other/8188/269288/8188_269288_000018_000000|A few moments later there came a tap at the door.
97
+ test-other/8188/269288/8188_269288_000053_000001|"Do you want to kill me?
98
+ test-other/8188/269290/8188_269290_000035_000001|"But now, Leslie, what is the trouble?
99
+ test-other/8188/269290/8188_269290_000065_000000|"I don't think she is quite well," replied Leslie.
100
+ test-other/8280/266249/8280_266249_000030_000000|The ladies were weary, and retired to their state rooms shortly after tea, but the gentlemen sought the open air again and paced the deck for some time.
101
+ test-other/8280/266249/8280_266249_000113_000000|It was the last game of cards for that trip.
102
+ test-other/8461/278226/8461_278226_000026_000000|Laura thanked the French artist and then took her husband's arm and walked away with him.
103
+ test-other/8461/281231/8461_281231_000029_000002|Before long the towering flames had surmounted every obstruction, and rose to the evening skies one huge and burning beacon, seen far and wide through the adjacent country; tower after tower crashed down, with blazing roof and rafter.
BigVGAN/filelists/LibriTTS/train-full.txt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:40fa291ec58520a7832baefbe934d40cd0a4d8aa516894357a4d6f5ac3dcfb23
3
+ size 51789567
BigVGAN/filelists/LibriTTS/val-full.txt ADDED
@@ -0,0 +1,119 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ train-clean-100/103/1241/103_1241_000000_000001|matthew Cuthbert is surprised
2
+ train-clean-100/1594/135914/1594_135914_000033_000001|He told them, that having taken refuge in a small village, he there fell sick; that some charitable peasants had taken care of him, but finding he did not recover, a camel driver had undertaken to carry him to the hospital at Bagdad.
3
+ train-clean-100/233/155990/233_155990_000018_000002|I did, however, receive aid from the Emperor of Germany.
4
+ train-clean-100/3240/131231/3240_131231_000041_000003|Some persons, thinking them to be sea fishes, placed them in salt water, according to mr Roberts.
5
+ train-clean-100/40/222/40_222_000026_000000|"No, read it yourself," cried Catherine, whose second thoughts were clearer.
6
+ train-clean-100/4406/16882/4406_16882_000014_000002|Then they set me upon a horse with my wounded child in my lap, and there being no furniture upon the horse's back, as we were going down a steep hill we both fell over the horse's head, at which they, like inhumane creatures, laughed, and rejoiced to see it, though I thought we should there have ended our days, as overcome with so many difficulties.
7
+ train-clean-100/5393/19218/5393_19218_000115_000000|"Where is it going then?"
8
+ train-clean-100/6147/34606/6147_34606_000013_000008|One was "a dancing master;" that is to say he made the rustics frisk about by pricking the calves of their legs with the point of his sword.
9
+ train-clean-100/6848/76049/6848_76049_000003_000007|But suppose she was not all ordinary female person....
10
+ train-clean-100/7505/258964/7505_258964_000026_000007|During the Boer War horses and mules rose in price in the United States on account of British purchases.
11
+ train-clean-100/831/130739/831_130739_000015_000000|But enough of these revelations.
12
+ train-clean-100/887/123291/887_123291_000028_000000|Here the Professor laid hold of the fossil skeleton, and handled it with the skill of a dexterous showman.
13
+ train-clean-360/112/123216/112_123216_000035_000009|The wonderful day had come and Roy's violets had no place in it.
14
+ train-clean-360/1323/149236/1323_149236_000007_000004|It was vain to hope that mere words would quiet a nation which had not, in any age, been very amenable to control, and which was now agitated by hopes and resentments, such as great revolutions, following great oppressions, naturally engender.
15
+ train-clean-360/1463/134465/1463_134465_000058_000000|Both Sandy and I began to laugh.
16
+ train-clean-360/1748/1562/1748_1562_000067_000000|"Oh, Pocket, Pocket," said I; but by this time the party which had gone towards the house, rushed out again, shouting and screaming with laughter.
17
+ train-clean-360/1914/133440/1914_133440_000014_000001|With the last twenty or thirty feet of it a deadly nausea came upon me.
18
+ train-clean-360/207/143321/207_143321_000070_000002|The canoes were not on the river bank.
19
+ train-clean-360/2272/152267/2272_152267_000003_000001|After supper the knight shared his own bed with the leper.
20
+ train-clean-360/2517/135227/2517_135227_000006_000005|As I was anxious to witness some of their purely religious ceremonies, I wished to go.
21
+ train-clean-360/2709/158074/2709_158074_000054_000000|Meanwhile the women continued to protest.
22
+ train-clean-360/2929/86777/2929_86777_000009_000000|A long silence followed; the peach, like the grapes, fell to the ground.
23
+ train-clean-360/318/124224/318_124224_000022_000010|In spite of his prejudice against Edward, he could put himself into Mr Waller's place, and see the thing from his point of view.
24
+ train-clean-360/3368/170952/3368_170952_000006_000000|And can he be fearless of death, or will he choose death in battle rather than defeat and slavery, who believes the world below to be real and terrible?
25
+ train-clean-360/3549/9203/3549_9203_000005_000004|We must hope so. There are examples.
26
+ train-clean-360/3835/178028/3835_178028_000007_000001|That day Prince Vasili no longer boasted of his protege Kutuzov, but remained silent when the commander in chief was mentioned.
27
+ train-clean-360/3994/149798/3994_149798_000005_000002|Afterward we can visit the mountain and punish the cruel magician of the Flatheads."
28
+ train-clean-360/4257/6397/4257_6397_000009_000000|At that time Nostromo had been already long enough in the country to raise to the highest pitch Captain Mitchell's opinion of the extraordinary value of his discovery.
29
+ train-clean-360/454/134728/454_134728_000133_000000|After a week of physical anguish, Unrest and pain, and feverish heat, Toward the ending day a calm and lull comes on, Three hours of peace and soothing rest of brain.
30
+ train-clean-360/4848/28247/4848_28247_000026_000002|Had he gained this arduous height only to behold the rocks carpeted with ice and snow, and reaching interminably to the far off horizon?
31
+ train-clean-360/5039/1189/5039_1189_000091_000000|The Shaggy Man sat down again and seemed well pleased.
32
+ train-clean-360/5261/19373/5261_19373_000011_000001|Some cause was evidently at work on this distant planet, causing it to disagree with its motion as calculated according to the law of gravitation.
33
+ train-clean-360/5538/70919/5538_70919_000032_000001|Only one person in the world could have laid those discoloured pearls at his door in the dead of night. The black figure in the garden, with the chiffon fluttering about its head, was Evelina Grey-or what was left of her.
34
+ train-clean-360/5712/48848/5712_48848_000060_000003|Lily for the time had been raised to a pinnacle,--a pinnacle which might be dangerous, but which was, at any rate, lofty.
35
+ train-clean-360/5935/43322/5935_43322_000050_000002|I think too-yes, I think that on the whole the ritual is impressive.
36
+ train-clean-360/6115/58433/6115_58433_000007_000002|We must run the risk."
37
+ train-clean-360/6341/64956/6341_64956_000040_000000|"Why, papa, I thought we were going to have such a nice time, and she just spoiled it all."
38
+ train-clean-360/6509/67147/6509_67147_000028_000003|It "was n't done" in England.
39
+ train-clean-360/6694/70837/6694_70837_000027_000002|There an enormous smiling sailor stopped me, and when I showed my pass, just said, "If you were Saint Michael himself, comrade, you couldn't pass here!" Through the glass of the door I made out the distorted face and gesticulating arms of a French correspondent, locked in....
40
+ train-clean-360/6956/76046/6956_76046_000055_000001|Twelve hundred, fifteen hundred millions perhaps."
41
+ train-clean-360/7145/87280/7145_87280_000004_000003|This modern Ulysses made a masterful effort, but alas! had no ships to carry him away, and no wax with which to fill his ears.
42
+ train-clean-360/7314/77782/7314_77782_000011_000000|"Well, then, what in thunder is the matter with you?" cried the Lawyer, irritated.
43
+ train-clean-360/7525/92915/7525_92915_000034_000001|It was desperate, too, and lasted nearly all day-and it was one of the important battles of the world, although the numbers engaged in it were not large.
44
+ train-clean-360/7754/108640/7754_108640_000001_000004|Was I aware-was I fully aware of the discrepancy between us?
45
+ train-clean-360/7909/106369/7909_106369_000006_000002|And Colchian Aea lies at the edge of Pontus and of the world."
46
+ train-clean-360/8011/280922/8011_280922_000009_000000|He stretched out his hand, and all at once stroked my cheek.
47
+ train-clean-360/8176/115046/8176_115046_000027_000001|"Bless my soul, I never can understand it!"
48
+ train-clean-360/8459/292347/8459_292347_000015_000000|A woman near Gort, in Galway, says: 'There is a boy, now, of the Cloran's; but I wouldn't for the world let them think I spoke of him; it's two years since he came from America, and since that time he never went to Mass, or to church, or to fairs, or to market, or to stand on the cross roads, or to hurling, or to nothing.
49
+ train-clean-360/8699/291107/8699_291107_000003_000005|He leaned closer over it, regardless of the thin choking haze that spread about his face. In his attitude there was a rigidity of controlled excitement out of keeping with the seeming harmlessness of the experiment.
50
+ train-clean-360/8855/283242/8855_283242_000061_000000|"That couldn't be helped, grannie.
51
+ train-other-500/102/129232/102_129232_000050_000000|Is it otherwise in the newest romance?
52
+ train-other-500/1124/134775/1124_134775_000087_000001|Some of them are enclosed only by hedges, which lends a cheerful aspect to the street.
53
+ train-other-500/1239/138254/1239_138254_000010_000001|It was past twelve when all preparations were finished.
54
+ train-other-500/1373/132103/1373_132103_000056_000000|So they moved on.
55
+ train-other-500/1566/153036/1566_153036_000087_000003|You enter the river close by the trees, and then keep straight for the pile of stones, which is some fifty yards higher up, for the ford crosses the river at an angle."
56
+ train-other-500/1653/142352/1653_142352_000005_000002|If he should not come!
57
+ train-other-500/1710/133294/1710_133294_000023_000000|When the Indians were the sole inhabitants of the wilds from whence they have since been expelled, their wants were few.
58
+ train-other-500/1773/139602/1773_139602_000032_000001|When the rabbit saw that the badger was getting well, he thought of another plan by which he could compass the creature's death.
59
+ train-other-500/1920/1793/1920_1793_000037_000001|She has a little Blenheim lapdog, that she loves a thousand times more than she ever will me!"
60
+ train-other-500/2067/143535/2067_143535_000009_000002|Indeed, there, to the left, was a stone shelf with a little ledge to it three inches or so high, and on the shelf lay what I took to be a corpse; at any rate, it looked like one, with something white thrown over it.
61
+ train-other-500/2208/11020/2208_11020_000037_000001|It's at my place over there.'
62
+ train-other-500/2312/157868/2312_157868_000019_000002|I am the manager of the theatre, and I'm thundering glad that your first play has been produced at the 'New York,' sir.
63
+ train-other-500/2485/151992/2485_151992_000028_000005|At last he looked up at his wife and said, in a gentle tone:
64
+ train-other-500/2587/54186/2587_54186_000015_000000|Concerning the work as a whole he wrote to Clara while in the throes of composition: "This music now in me, and always such beautiful melodies!
65
+ train-other-500/2740/288813/2740_288813_000018_000003|But Philip had kept him apart, had banked him off, and yet drained him to the dregs.
66
+ train-other-500/2943/171001/2943_171001_000122_000000|The sound of his voice pronouncing her name aroused her.
67
+ train-other-500/3063/138651/3063_138651_000028_000000|But, as may be imagined, the unfortunate john was as much surprised by this rencounter as the other two.
68
+ train-other-500/3172/166439/3172_166439_000050_000000|And now at last was clear a thing that had puzzled greatly-the mechanism of that opening process by which sphere became oval disk, pyramid a four pointed star and-as I had glimpsed in the play of the Little Things about Norhala, could see now so plainly in the Keeper-the blocks took this inverted cruciform shape.
69
+ train-other-500/331/132019/331_132019_000038_000000|"I say, this is folly!
70
+ train-other-500/3467/166570/3467_166570_000054_000001|Does he never mention Orlando?"
71
+ train-other-500/3587/140711/3587_140711_000015_000001|O fie, mrs Jervis, said I, how could you serve me so? Besides, it looks too free both in me, and to him.
72
+ train-other-500/3675/187020/3675_187020_000026_000001|"I wonder what would be suitable?
73
+ train-other-500/3819/134146/3819_134146_000019_000001|Also the figure half hidden by the cupboard door-was a female figure, massive, and in flowing robes.
74
+ train-other-500/3912/77626/3912_77626_000003_000004|You may almost distinguish the figures on the clock that has just told the hour.
75
+ train-other-500/4015/63729/4015_63729_000058_000000|"It does."
76
+ train-other-500/413/22436/413_22436_000035_000003|I conjecture, the French squadron is bound for Malta and Alexandria, and the Spanish fleet for the attack of Minorca."
77
+ train-other-500/4218/41159/4218_41159_000028_000002|Yes? That worries Alexey.
78
+ train-other-500/4352/10940/4352_10940_000037_000002|He doesn't exist."
79
+ train-other-500/4463/26871/4463_26871_000023_000000|"I did not notice him following me," she said timidly.
80
+ train-other-500/4591/14356/4591_14356_000019_000000|"Within three days," cried the enchanter, loudly, "bring Rinaldo and Ricciardetto into the pass of Ronces Valles.
81
+ train-other-500/4738/291957/4738_291957_000000_000001|ODE ON THE SPRING.
82
+ train-other-500/4824/36029/4824_36029_000045_000003|And indeed Janet herself had taken no part in the politics, content merely to advise the combatants upon their demeanour.
83
+ train-other-500/4936/65528/4936_65528_000014_000007|I immediately responded, "Yes, they are most terrible struck on each other," and I said it in a tone that indicated I thought it a most beautiful and lovely thing that they should be so.
84
+ train-other-500/5019/38670/5019_38670_000017_000000|"Let me make you a present of the gloves," she said, with her irresistible smile.
85
+ train-other-500/5132/33409/5132_33409_000016_000001|They waited on the table in Valhalla.
86
+ train-other-500/52/121057/52_121057_000019_000000|"I," cried the steward with a strange expression.
87
+ train-other-500/5321/53046/5321_53046_000025_000003|I gather from what mrs joel said that she's rather touched in her mind too, and has an awful hankering to get home here-to this very house.
88
+ train-other-500/5429/210770/5429_210770_000029_000006|But this was not all.
89
+ train-other-500/557/129797/557_129797_000072_000001|The guns were manned, the gunners already kindling fuses, when the buccaneer fleet, whilst still heading for Palomas, was observed to bear away to the west.
90
+ train-other-500/572/128861/572_128861_000016_000002|My home was desolate.
91
+ train-other-500/5826/53497/5826_53497_000044_000001|If it be as you say, he will have shown himself noble, and his nobility will have consisted in this, that he has been willing to take that which he does not want, in order that he may succour one whom he loves.
92
+ train-other-500/5906/52158/5906_52158_000055_000000|The impression that he gets this knowledge or suspicion from the outside is due, the scientists say, to the fact that his thinking has proceeded at such lightning like speed that he was unable to watch the wheels go round.
93
+ train-other-500/6009/57639/6009_57639_000038_000000|This, friendly reader, is my only motive.
94
+ train-other-500/6106/58196/6106_58196_000007_000001|I tell you that you must make the dress.
95
+ train-other-500/6178/86034/6178_86034_000079_000004|Then she will grow calmer, and will know you again.
96
+ train-other-500/6284/63091/6284_63091_000133_000001|I don't want to go anywhere where anybody'll see me."
97
+ train-other-500/6436/104980/6436_104980_000009_000002|I guess you never heard about this house."
98
+ train-other-500/6540/232291/6540_232291_000017_000003|The girl was not wholly a savage.
99
+ train-other-500/6627/67844/6627_67844_000046_000002|The other girls had stopped talking, and now looked at Sylvia as if wondering what she would say.
100
+ train-other-500/6707/77351/6707_77351_000002_000006|But our first words I may give you, because though they conveyed nothing to me at the time, afterwards they meant much.
101
+ train-other-500/6777/76694/6777_76694_000013_000011|When they are forcibly put out of Garraway's on Saturday night-which they must be, for they never would go out of their own accord-where do they vanish until Monday morning?
102
+ train-other-500/690/133452/690_133452_000011_000000|Campany lifted his quill pen and pointed to a case of big leather bound volumes in a far corner of the room.
103
+ train-other-500/7008/34667/7008_34667_000032_000002|What had happened?
104
+ train-other-500/7131/92815/7131_92815_000039_000001|The cabman tried to pass to the left, but a heavy express wagon cut him off.
105
+ train-other-500/7220/77911/7220_77911_000005_000000|"Do?
106
+ train-other-500/7326/245693/7326_245693_000008_000000|Whether the Appetite Is a Special Power of the Soul?
107
+ train-other-500/7392/105672/7392_105672_000013_000005|Whoever, being required, refused to answer upon oath to any article of this act of settlement, was declared to be guilty of treason; and by this clause a species of political inquisition was established in the kingdom, as well as the accusations of treason multiplied to an unreasonable degree.
108
+ train-other-500/7512/98636/7512_98636_000017_000002|A man thus rarely makes provision for the future, and looks with scorn on foreign customs which seem to betoken a fear lest, in old age, ungrateful children may neglect their parents and cast them aside.
109
+ train-other-500/7654/258963/7654_258963_000007_000007|Egypt, for a time reduced to a semi desert condition, has only in the past century been restored to a certain extent by the use of new methods and a return to the old ones.
110
+ train-other-500/7769/99396/7769_99396_000020_000002|I had to go out once a day in search of food.
111
+ train-other-500/791/127519/791_127519_000086_000000|This was how it came about.
112
+ train-other-500/8042/113769/8042_113769_000021_000000|House the second.
113
+ train-other-500/8180/274725/8180_274725_000010_000000|"What fools men are in love matters," quoth Patty to herself-"at least most men!" with a thought backward to Mark's sensible choosing.
114
+ train-other-500/8291/282929/8291_282929_000031_000006|He's in a devil of a-Well, he needs the money, and I've got to get it for him. You know my word's good, Cooper."
115
+ train-other-500/8389/120181/8389_120181_000022_000000|"No," I answered.
116
+ train-other-500/8476/269293/8476_269293_000078_000001|Annie, in some wonder, went downstairs alone.
117
+ train-other-500/8675/295195/8675_295195_000004_000004|Everything had gone on prosperously with them, and they had reared many successive families of young Nutcrackers, who went forth to assume their places in the forest of life, and to reflect credit on their bringing up,--so that naturally enough they began to have a very easy way of considering themselves models of wisdom.
118
+ train-other-500/9000/282381/9000_282381_000016_000008|Bank facings seemed to indicate that the richest pay dirt lay at bed rock.
119
+ train-other-500/978/132494/978_132494_000017_000001|And what made you come at that very minute?
BigVGAN/incl_licenses/LICENSE_1 ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ MIT License
2
+
3
+ Copyright (c) 2020 Jungil Kong
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.