diff --git a/README.md b/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..479e3fc83d2cd89bd1801f454e6c1b6e7628d7f7
--- /dev/null
+++ b/README.md
@@ -0,0 +1,88 @@
+---
+tags:
+- espnet
+- audio
+- automatic-speech-recognition
+- speech-translation
+language: multilingual
+datasets:
+- owsm_v3.1_lowrestriction
+license: cc-by-4.0
+---
+
+## OWSM: Open Whisper-style Speech Model
+
+[OWSM](https://arxiv.org/abs/2309.13876) is an Open Whisper-style Speech Model from [CMU WAVLab](https://www.wavlab.org/). It reproduces Whisper-style training using publicly available data and the open-source toolkit [ESPnet](https://github.com/espnet/espnet).
+
+Our demo is available [here](https://huggingface.co/spaces/pyf98/OWSM_v3_demo). The [project page](https://www.wavlab.org/activities/2024/owsm/) contains various resources.
+
+[OWSM v3.1](https://arxiv.org/abs/2401.16658) is an improved version of OWSM v3 that significantly outperforms it on almost all evaluation benchmarks. We do not add any new training data; instead, we adopt a state-of-the-art speech encoder, [E-Branchformer](https://arxiv.org/abs/2210.00077).
+
+This is a small-sized model with 367M parameters, trained on 70k hours of public speech data with lower restrictions (compared with the full OWSM data). Please check our [project page](https://www.wavlab.org/activities/2024/owsm/) for more information. Specifically, it supports the following speech-to-text tasks (see the usage example below):
+- Speech recognition
+- Utterance-level alignment
+- Long-form transcription
+- Language identification
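+
+### Example usage
+
+The snippet below is a minimal, illustrative sketch of inference with ESPnet's `Speech2Text` interface. The model tag, audio file name, and decoding options are placeholders rather than values confirmed by this repository, and the layout of the returned hypotheses may differ across ESPnet versions.
+
+```python
+import soundfile as sf
+from espnet2.bin.s2t_inference import Speech2Text
+
+# Placeholder model tag: substitute this repository's actual Hugging Face ID.
+s2t = Speech2Text.from_pretrained(
+    model_tag="espnet/owsm_v3.1_ebf_small_lowrestriction",
+    device="cuda",      # or "cpu"
+    beam_size=5,        # illustrative decoding options
+    ctc_weight=0.0,
+    maxlenratio=0.0,
+    lang_sym="<eng>",   # language token (English here)
+    task_sym="<asr>",   # task token (speech recognition here)
+)
+
+# Read a 16 kHz mono waveform; OWSM operates on windows of up to 30 seconds,
+# matching the <0.00>...<30.00> timestamp tokens in the training config.
+speech, rate = sf.read("audio.wav")
+
+# The first element of the top hypothesis tuple is the decoded text
+# (the tuple layout may vary across ESPnet versions).
+text, *_ = s2t(speech)[0]
+print(text)
+```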
+
+### Citing OWSM, Branchformer, and ESPnet
+
+```bibtex
+@misc{peng2024owsm,
+  title={OWSM v3.1: Better and Faster Open Whisper-Style Speech Models based on E-Branchformer},
+  author={Yifan Peng and Jinchuan Tian and William Chen and Siddhant Arora and Brian Yan and Yui Sudo and Muhammad Shakeel and Kwanghee Choi and Jiatong Shi and Xuankai Chang and Jee-weon Jung and Shinji Watanabe},
+  year={2024},
+  eprint={2401.16658},
+  archivePrefix={arXiv},
+  primaryClass={cs.CL}
+}
+@inproceedings{owsm-asru23,
+  author={Peng, Yifan and Tian, Jinchuan and Yan, Brian and Berrebbi, Dan and Chang, Xuankai and Li, Xinjian and Shi, Jiatong and Arora, Siddhant and Chen, William and Sharma, Roshan and Zhang, Wangyou and Sudo, Yui and Shakeel, Muhammad and Jung, Jee-Weon and Maiti, Soumi and Watanabe, Shinji},
+  booktitle={2023 IEEE Automatic Speech Recognition and Understanding Workshop (ASRU)},
+  title={Reproducing Whisper-Style Training Using An Open-Source Toolkit And Publicly Available Data},
+  year={2023},
+  doi={10.1109/ASRU57964.2023.10389676}
+}
+@inproceedings{peng23b_interspeech,
+  author={Yifan Peng and Kwangyoun Kim and Felix Wu and Brian Yan and Siddhant Arora and William Chen and Jiyang Tang and Suwon Shon and Prashant Sridhar and Shinji Watanabe},
+  title={{A Comparative Study on E-Branchformer vs Conformer in Speech Recognition, Translation, and Understanding Tasks}},
+  year={2023},
+  booktitle={Proc. INTERSPEECH 2023},
+  pages={2208--2212},
+  doi={10.21437/Interspeech.2023-1194}
+}
+@inproceedings{kim2023branchformer,
+  title={E-Branchformer: Branchformer with Enhanced Merging for Speech Recognition},
+  author={Kim, Kwangyoun and Wu, Felix and Peng, Yifan and Pan, Jing and Sridhar, Prashant and Han, Kyu J and Watanabe, Shinji},
+  booktitle={2022 IEEE Spoken Language Technology Workshop (SLT)},
+  pages={84--91},
+  year={2023},
+  organization={IEEE}
+}
+@inproceedings{pmlr-v162-peng22a,
+  title={Branchformer: Parallel {MLP}-Attention Architectures to Capture Local and Global Context for Speech Recognition and Understanding},
+  author={Peng, Yifan and Dalmia, Siddharth and Lane, Ian and Watanabe, Shinji},
+  booktitle={Proceedings of the 39th International Conference on Machine Learning},
+  pages={17627--17643},
+  year={2022},
+  editor={Chaudhuri, Kamalika and Jegelka, Stefanie and Song, Le and Szepesvari, Csaba and Niu, Gang and Sabato, Sivan},
+  volume={162},
+  series={Proceedings of Machine Learning Research},
+  month={17--23 Jul},
+  publisher={PMLR},
+  pdf={https://proceedings.mlr.press/v162/peng22a/peng22a.pdf},
+  url={https://proceedings.mlr.press/v162/peng22a.html}
+}
+@inproceedings{watanabe2018espnet,
+  author={Shinji Watanabe and Takaaki Hori and Shigeki Karita and Tomoki Hayashi and Jiro Nishitoba and Yuya Unno and Nelson Yalta and Jahn Heymann and Matthew Wiesner and Nanxin Chen and Adithya Renduchintala and Tsubasa Ochiai},
+  title={{ESPnet}: End-to-End Speech Processing Toolkit},
+  year={2018},
+  booktitle={Proceedings of Interspeech},
+  pages={2207--2211},
+  doi={10.21437/Interspeech.2018-1456},
+  url={http://dx.doi.org/10.21437/Interspeech.2018-1456}
+}
+```
diff --git a/data/token_list/bpe_unigram50000/bpe.model b/data/token_list/bpe_unigram50000/bpe.model
new file mode 100644
index 0000000000000000000000000000000000000000..57f6cad77341e1c1f7ea472538544ca6d4c5cbe1
--- /dev/null
+++ b/data/token_list/bpe_unigram50000/bpe.model
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:af3103a5e6dbaea47c4ca88e2ae92e536ff335c40876de2b2963d2af52746453
+size 1053520
diff --git a/exp/s2t_stats_raw_bpe50000/train/feats_stats.npz b/exp/s2t_stats_raw_bpe50000/train/feats_stats.npz
new file mode 100644
index 0000000000000000000000000000000000000000..b4e32c14f9cac87d66cc9b9bce606eb6557c4fcd
--- /dev/null
+++ b/exp/s2t_stats_raw_bpe50000/train/feats_stats.npz
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f1171609665f1de3c8b99eb2f23392e69a32c6d7027361b9645e0ffaa5fdeae6
+size 1402
diff --git a/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/RESULTS.md b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/RESULTS.md
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/config.yaml b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/config.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..8e9f2f02677752679079b7ff0b9308f7237702b5
--- /dev/null
+++ b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/config.yaml
@@ -0,0 +1,50271 @@
+config: conf/train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn.yaml
+print_config: false
+log_level: INFO
+drop_last_iter: false
+dry_run: false
+iterator_type: sequence
+valid_iterator_type: null
+output_dir: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000
+ngpu: 1
+seed: 42
+num_workers: 6
+num_att_plot: 0
+dist_backend: nccl
+dist_init_method: file:///weka/home-pengyf/espnet-owsm-train/egs2/owsm_v3.1_licensefree_nosa/s2t1/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/.dist_init_d681a8ba-15d8-4a99-88f9-39beba562b1a
+dist_world_size: 16
+dist_rank: 0
+local_rank: 0
+dist_master_addr: null
+dist_master_port: null
+dist_launcher: slurm
+multiprocessing_distributed: true
+unused_parameters: false
+sharded_ddp: false
+cudnn_enabled: true
+cudnn_benchmark: false
+cudnn_deterministic: true
+collect_stats: false
+write_collected_feats: false
+max_epoch: 45
+patience: null
+val_scheduler_criterion:
+- valid
+- loss
+early_stopping_criterion:
+- valid
+- loss
+- min
+best_model_criterion:
+- - valid
+  - acc
+  - max
+- - valid
+  - total_count
+  - max
+keep_nbest_models: 5
+nbest_averaging_interval: 0
+grad_clip: 5.0
+grad_clip_type: 2.0
+grad_noise: false
+accum_grad: 1
+no_forward_run: false
+resume: true
+train_dtype: float32
+use_amp: true
+log_interval: 500
+use_matplotlib: true
+use_tensorboard: true
+create_graph_in_tensorboard: false
+use_wandb: true
+wandb_project: null
+wandb_id: null
+wandb_entity: null
+wandb_name: null
+wandb_model_log_interval: -1
+detect_anomaly: false
+use_lora: false
+save_lora_only: true
+lora_conf: {}
+pretrain_path: null
+init_param: []
+ignore_init_mismatch: false
+freeze_param: []
+num_iters_per_epoch: 15000
+batch_size: 256
+valid_batch_size: null
+batch_bins: 1000000
+valid_batch_bins: null
+train_shape_file:
+- exp/s2t_stats_raw_bpe50000/splits3/speech_shape
+- exp/s2t_stats_raw_bpe50000/splits3/text_prev_shape.bpe
+- exp/s2t_stats_raw_bpe50000/splits3/text_ctc_shape.bpe
+- exp/s2t_stats_raw_bpe50000/splits3/text_shape.bpe
+valid_shape_file:
+- exp/s2t_stats_raw_bpe50000/valid/speech_shape
+- exp/s2t_stats_raw_bpe50000/valid/text_prev_shape.bpe
+- exp/s2t_stats_raw_bpe50000/valid/text_ctc_shape.bpe
+- exp/s2t_stats_raw_bpe50000/valid/text_shape.bpe
+batch_type: unsorted
+valid_batch_type: null
+fold_length:
+- 80000
+- 150
+- 150
+- 150
+sort_in_batch: descending
+shuffle_within_batch: false
+sort_batch: descending
+multiple_iterator: true
+chunk_length: 500
+chunk_shift_ratio: 0.5
+num_cache_chunks: 1024
+chunk_excluded_key_prefixes: []
+chunk_default_fs: null
+train_data_path_and_name_and_type:
+- - exp/s2t_stats_raw_bpe50000/splits3/wav.scp
+  - speech
+  - kaldi_ark
+- - exp/s2t_stats_raw_bpe50000/splits3/text.prev
+  - text_prev
+  - text
+- - exp/s2t_stats_raw_bpe50000/splits3/text.ctc
+  - text_ctc
+  - text
+- - exp/s2t_stats_raw_bpe50000/splits3/text
+  - text
+  - text
+valid_data_path_and_name_and_type:
+- - dump/raw/dev/wav.scp
+  - speech
+  - kaldi_ark
+- - dump/raw/dev/text.prev
+  - text_prev
+  - text
+- - dump/raw/dev/text.ctc
+  - text_ctc
+  - text
+- - dump/raw/dev/text
+  - text
+  - text
+allow_variable_data_keys: false
+max_cache_size: 0.0
+max_cache_fd: 32
+allow_multi_rates: false
+valid_max_cache_size: null
+exclude_weight_decay: false
+exclude_weight_decay_conf: {}
+optim: adamw
+optim_conf:
+  lr: 0.0005
+  betas:
+  - 0.9
+  - 0.98
+  eps: 1.0e-06
+  weight_decay: 0.0
+scheduler: piecewiselinearwarmuplr
+scheduler_conf:
+  warmup_steps_list:
+  - 0
+  - 30000
+  - 60000
+  warmup_lr_list:
+  - 0.0
+  - 5.0e-05
+  - 0.0005
+token_list:
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- <0.00> +- <0.02> +- <0.04> +- <0.06> +- <0.08> +- <0.10> +- <0.12> +- <0.14> +- <0.16> +- <0.18> +- <0.20> +- <0.22> +- <0.24> +- <0.26> +- <0.28> +- <0.30> +- <0.32> +- <0.34> +- <0.36> +- <0.38> +- <0.40> +- <0.42> +- <0.44> +- <0.46> +- <0.48> +- <0.50> +- <0.52> +- <0.54> +- <0.56> +- <0.58> +- <0.60> +- <0.62> +- <0.64> +- <0.66> +- <0.68> +- <0.70> +- <0.72> +- <0.74> +- <0.76> +- <0.78> +- <0.80> +- <0.82> +- <0.84> +- <0.86> +- <0.88> +- <0.90> +- <0.92> +- <0.94> +- <0.96> +- <0.98> +- <1.00> +- <1.02> +- <1.04> +- <1.06> +- <1.08> +- <1.10> +- <1.12> +- <1.14> +- <1.16> +- <1.18> +- <1.20> +- <1.22> +- <1.24> +- <1.26> +- <1.28> +- <1.30> +- <1.32> +- <1.34> +- <1.36> +- <1.38> +- <1.40> +- <1.42> +- <1.44> +- <1.46> +- <1.48> +- <1.50> +- <1.52> +- <1.54> +- <1.56> +- <1.58> +- <1.60> +- <1.62> +- <1.64> +- <1.66> +- <1.68> +- <1.70> +- <1.72> +- <1.74> +- <1.76> +- <1.78> +- <1.80> +- <1.82> +- <1.84> +- <1.86> +- <1.88> +- <1.90> +- <1.92> +- <1.94> +- <1.96> +- <1.98> +- <2.00> +- <2.02> +- <2.04> +- <2.06> +- <2.08> +- <2.10> +- <2.12> +- <2.14> +- <2.16> +- <2.18> +- <2.20> +- <2.22> +- <2.24> +- <2.26> +- <2.28> +- <2.30> +- <2.32> +- <2.34> +- <2.36> +- <2.38> +- <2.40> +- <2.42> +- <2.44> +- <2.46> +- <2.48> +- <2.50> +- <2.52> +- <2.54> +- <2.56> +- <2.58> +- <2.60> +- <2.62> +- <2.64> +- <2.66> +- <2.68> +- <2.70> +- <2.72> +- <2.74> +- <2.76> +- <2.78> +- <2.80> +- <2.82> +- <2.84> +- <2.86> +- <2.88> +- <2.90> +- <2.92> +- <2.94> +- <2.96> +- <2.98> +- <3.00> +- <3.02> +- <3.04> +- <3.06> +- <3.08> +- <3.10> +- <3.12> +- <3.14> +- <3.16> +- <3.18> +- <3.20> +- <3.22> +- <3.24> +- <3.26> +- <3.28> +- <3.30> +- <3.32> +- <3.34> +- <3.36> +- <3.38> +- <3.40> +- <3.42> +- <3.44> +- <3.46> +- <3.48> +- <3.50> +- <3.52> +- <3.54> +- <3.56> +- <3.58> +- <3.60> +- <3.62> +- <3.64> +- <3.66> +- <3.68> +- <3.70> +- <3.72> +- <3.74> +- <3.76> +- <3.78> +- <3.80> +- <3.82> +- <3.84> +- <3.86> +- <3.88> +- <3.90> +- <3.92> +- <3.94> +- <3.96> +- <3.98> +- <4.00> +- <4.02> +- <4.04> +- <4.06> +- <4.08> +- <4.10> +- <4.12> +- <4.14> +- <4.16> +- <4.18> +- <4.20> +- <4.22> +- <4.24> +- <4.26> +- <4.28> +- <4.30> +- <4.32> +- <4.34> +- <4.36> +- <4.38> +- <4.40> +- <4.42> +- <4.44> +- <4.46> +- <4.48> +- <4.50> +- <4.52> +- <4.54> +- <4.56> +- <4.58> +- <4.60> +- <4.62> +- <4.64> +- <4.66> +- <4.68> +- <4.70> +- <4.72> +- <4.74> +- <4.76> +- <4.78> +- <4.80> +- <4.82> +- <4.84> +- <4.86> +- <4.88> +- <4.90> +- <4.92> +- <4.94> +- <4.96> +- <4.98> +- <5.00> +- <5.02> +- <5.04> +- <5.06> +- <5.08> +- <5.10> +- <5.12> +- <5.14> +- <5.16> +- <5.18> +- <5.20> +- <5.22> +- <5.24> +- <5.26> +- <5.28> +- <5.30> +- <5.32> +- <5.34> +- <5.36> +- <5.38> +- <5.40> +- <5.42> +- <5.44> +- <5.46> +- <5.48> +- <5.50> +- <5.52> +- <5.54> +- <5.56> +- <5.58> +- <5.60> +- <5.62> +- <5.64> +- <5.66> +- <5.68> +- <5.70> +- <5.72> +- <5.74> +- <5.76> +- <5.78> +- <5.80> +- <5.82> +- <5.84> +- <5.86> +- <5.88> +- <5.90> +- <5.92> +- <5.94> +- <5.96> +- <5.98> +- <6.00> +- <6.02> +- <6.04> +- <6.06> +- <6.08> +- <6.10> +- <6.12> +- <6.14> +- <6.16> +- 
<6.18> +- <6.20> +- <6.22> +- <6.24> +- <6.26> +- <6.28> +- <6.30> +- <6.32> +- <6.34> +- <6.36> +- <6.38> +- <6.40> +- <6.42> +- <6.44> +- <6.46> +- <6.48> +- <6.50> +- <6.52> +- <6.54> +- <6.56> +- <6.58> +- <6.60> +- <6.62> +- <6.64> +- <6.66> +- <6.68> +- <6.70> +- <6.72> +- <6.74> +- <6.76> +- <6.78> +- <6.80> +- <6.82> +- <6.84> +- <6.86> +- <6.88> +- <6.90> +- <6.92> +- <6.94> +- <6.96> +- <6.98> +- <7.00> +- <7.02> +- <7.04> +- <7.06> +- <7.08> +- <7.10> +- <7.12> +- <7.14> +- <7.16> +- <7.18> +- <7.20> +- <7.22> +- <7.24> +- <7.26> +- <7.28> +- <7.30> +- <7.32> +- <7.34> +- <7.36> +- <7.38> +- <7.40> +- <7.42> +- <7.44> +- <7.46> +- <7.48> +- <7.50> +- <7.52> +- <7.54> +- <7.56> +- <7.58> +- <7.60> +- <7.62> +- <7.64> +- <7.66> +- <7.68> +- <7.70> +- <7.72> +- <7.74> +- <7.76> +- <7.78> +- <7.80> +- <7.82> +- <7.84> +- <7.86> +- <7.88> +- <7.90> +- <7.92> +- <7.94> +- <7.96> +- <7.98> +- <8.00> +- <8.02> +- <8.04> +- <8.06> +- <8.08> +- <8.10> +- <8.12> +- <8.14> +- <8.16> +- <8.18> +- <8.20> +- <8.22> +- <8.24> +- <8.26> +- <8.28> +- <8.30> +- <8.32> +- <8.34> +- <8.36> +- <8.38> +- <8.40> +- <8.42> +- <8.44> +- <8.46> +- <8.48> +- <8.50> +- <8.52> +- <8.54> +- <8.56> +- <8.58> +- <8.60> +- <8.62> +- <8.64> +- <8.66> +- <8.68> +- <8.70> +- <8.72> +- <8.74> +- <8.76> +- <8.78> +- <8.80> +- <8.82> +- <8.84> +- <8.86> +- <8.88> +- <8.90> +- <8.92> +- <8.94> +- <8.96> +- <8.98> +- <9.00> +- <9.02> +- <9.04> +- <9.06> +- <9.08> +- <9.10> +- <9.12> +- <9.14> +- <9.16> +- <9.18> +- <9.20> +- <9.22> +- <9.24> +- <9.26> +- <9.28> +- <9.30> +- <9.32> +- <9.34> +- <9.36> +- <9.38> +- <9.40> +- <9.42> +- <9.44> +- <9.46> +- <9.48> +- <9.50> +- <9.52> +- <9.54> +- <9.56> +- <9.58> +- <9.60> +- <9.62> +- <9.64> +- <9.66> +- <9.68> +- <9.70> +- <9.72> +- <9.74> +- <9.76> +- <9.78> +- <9.80> +- <9.82> +- <9.84> +- <9.86> +- <9.88> +- <9.90> +- <9.92> +- <9.94> +- <9.96> +- <9.98> +- <10.00> +- <10.02> +- <10.04> +- <10.06> +- <10.08> +- <10.10> +- <10.12> +- <10.14> +- <10.16> +- <10.18> +- <10.20> +- <10.22> +- <10.24> +- <10.26> +- <10.28> +- <10.30> +- <10.32> +- <10.34> +- <10.36> +- <10.38> +- <10.40> +- <10.42> +- <10.44> +- <10.46> +- <10.48> +- <10.50> +- <10.52> +- <10.54> +- <10.56> +- <10.58> +- <10.60> +- <10.62> +- <10.64> +- <10.66> +- <10.68> +- <10.70> +- <10.72> +- <10.74> +- <10.76> +- <10.78> +- <10.80> +- <10.82> +- <10.84> +- <10.86> +- <10.88> +- <10.90> +- <10.92> +- <10.94> +- <10.96> +- <10.98> +- <11.00> +- <11.02> +- <11.04> +- <11.06> +- <11.08> +- <11.10> +- <11.12> +- <11.14> +- <11.16> +- <11.18> +- <11.20> +- <11.22> +- <11.24> +- <11.26> +- <11.28> +- <11.30> +- <11.32> +- <11.34> +- <11.36> +- <11.38> +- <11.40> +- <11.42> +- <11.44> +- <11.46> +- <11.48> +- <11.50> +- <11.52> +- <11.54> +- <11.56> +- <11.58> +- <11.60> +- <11.62> +- <11.64> +- <11.66> +- <11.68> +- <11.70> +- <11.72> +- <11.74> +- <11.76> +- <11.78> +- <11.80> +- <11.82> +- <11.84> +- <11.86> +- <11.88> +- <11.90> +- <11.92> +- <11.94> +- <11.96> +- <11.98> +- <12.00> +- <12.02> +- <12.04> +- <12.06> +- <12.08> +- <12.10> +- <12.12> +- <12.14> +- <12.16> +- <12.18> +- <12.20> +- <12.22> +- <12.24> +- <12.26> +- <12.28> +- <12.30> +- <12.32> +- <12.34> +- <12.36> +- <12.38> +- <12.40> +- <12.42> +- <12.44> +- <12.46> +- <12.48> +- <12.50> +- <12.52> +- <12.54> +- <12.56> +- <12.58> +- <12.60> +- <12.62> +- <12.64> +- <12.66> +- <12.68> +- <12.70> +- <12.72> +- <12.74> +- <12.76> +- <12.78> +- <12.80> +- <12.82> +- <12.84> +- <12.86> +- <12.88> +- <12.90> +- <12.92> +- <12.94> +- <12.96> +- 
<12.98> +- <13.00> +- <13.02> +- <13.04> +- <13.06> +- <13.08> +- <13.10> +- <13.12> +- <13.14> +- <13.16> +- <13.18> +- <13.20> +- <13.22> +- <13.24> +- <13.26> +- <13.28> +- <13.30> +- <13.32> +- <13.34> +- <13.36> +- <13.38> +- <13.40> +- <13.42> +- <13.44> +- <13.46> +- <13.48> +- <13.50> +- <13.52> +- <13.54> +- <13.56> +- <13.58> +- <13.60> +- <13.62> +- <13.64> +- <13.66> +- <13.68> +- <13.70> +- <13.72> +- <13.74> +- <13.76> +- <13.78> +- <13.80> +- <13.82> +- <13.84> +- <13.86> +- <13.88> +- <13.90> +- <13.92> +- <13.94> +- <13.96> +- <13.98> +- <14.00> +- <14.02> +- <14.04> +- <14.06> +- <14.08> +- <14.10> +- <14.12> +- <14.14> +- <14.16> +- <14.18> +- <14.20> +- <14.22> +- <14.24> +- <14.26> +- <14.28> +- <14.30> +- <14.32> +- <14.34> +- <14.36> +- <14.38> +- <14.40> +- <14.42> +- <14.44> +- <14.46> +- <14.48> +- <14.50> +- <14.52> +- <14.54> +- <14.56> +- <14.58> +- <14.60> +- <14.62> +- <14.64> +- <14.66> +- <14.68> +- <14.70> +- <14.72> +- <14.74> +- <14.76> +- <14.78> +- <14.80> +- <14.82> +- <14.84> +- <14.86> +- <14.88> +- <14.90> +- <14.92> +- <14.94> +- <14.96> +- <14.98> +- <15.00> +- <15.02> +- <15.04> +- <15.06> +- <15.08> +- <15.10> +- <15.12> +- <15.14> +- <15.16> +- <15.18> +- <15.20> +- <15.22> +- <15.24> +- <15.26> +- <15.28> +- <15.30> +- <15.32> +- <15.34> +- <15.36> +- <15.38> +- <15.40> +- <15.42> +- <15.44> +- <15.46> +- <15.48> +- <15.50> +- <15.52> +- <15.54> +- <15.56> +- <15.58> +- <15.60> +- <15.62> +- <15.64> +- <15.66> +- <15.68> +- <15.70> +- <15.72> +- <15.74> +- <15.76> +- <15.78> +- <15.80> +- <15.82> +- <15.84> +- <15.86> +- <15.88> +- <15.90> +- <15.92> +- <15.94> +- <15.96> +- <15.98> +- <16.00> +- <16.02> +- <16.04> +- <16.06> +- <16.08> +- <16.10> +- <16.12> +- <16.14> +- <16.16> +- <16.18> +- <16.20> +- <16.22> +- <16.24> +- <16.26> +- <16.28> +- <16.30> +- <16.32> +- <16.34> +- <16.36> +- <16.38> +- <16.40> +- <16.42> +- <16.44> +- <16.46> +- <16.48> +- <16.50> +- <16.52> +- <16.54> +- <16.56> +- <16.58> +- <16.60> +- <16.62> +- <16.64> +- <16.66> +- <16.68> +- <16.70> +- <16.72> +- <16.74> +- <16.76> +- <16.78> +- <16.80> +- <16.82> +- <16.84> +- <16.86> +- <16.88> +- <16.90> +- <16.92> +- <16.94> +- <16.96> +- <16.98> +- <17.00> +- <17.02> +- <17.04> +- <17.06> +- <17.08> +- <17.10> +- <17.12> +- <17.14> +- <17.16> +- <17.18> +- <17.20> +- <17.22> +- <17.24> +- <17.26> +- <17.28> +- <17.30> +- <17.32> +- <17.34> +- <17.36> +- <17.38> +- <17.40> +- <17.42> +- <17.44> +- <17.46> +- <17.48> +- <17.50> +- <17.52> +- <17.54> +- <17.56> +- <17.58> +- <17.60> +- <17.62> +- <17.64> +- <17.66> +- <17.68> +- <17.70> +- <17.72> +- <17.74> +- <17.76> +- <17.78> +- <17.80> +- <17.82> +- <17.84> +- <17.86> +- <17.88> +- <17.90> +- <17.92> +- <17.94> +- <17.96> +- <17.98> +- <18.00> +- <18.02> +- <18.04> +- <18.06> +- <18.08> +- <18.10> +- <18.12> +- <18.14> +- <18.16> +- <18.18> +- <18.20> +- <18.22> +- <18.24> +- <18.26> +- <18.28> +- <18.30> +- <18.32> +- <18.34> +- <18.36> +- <18.38> +- <18.40> +- <18.42> +- <18.44> +- <18.46> +- <18.48> +- <18.50> +- <18.52> +- <18.54> +- <18.56> +- <18.58> +- <18.60> +- <18.62> +- <18.64> +- <18.66> +- <18.68> +- <18.70> +- <18.72> +- <18.74> +- <18.76> +- <18.78> +- <18.80> +- <18.82> +- <18.84> +- <18.86> +- <18.88> +- <18.90> +- <18.92> +- <18.94> +- <18.96> +- <18.98> +- <19.00> +- <19.02> +- <19.04> +- <19.06> +- <19.08> +- <19.10> +- <19.12> +- <19.14> +- <19.16> +- <19.18> +- <19.20> +- <19.22> +- <19.24> +- <19.26> +- <19.28> +- <19.30> +- <19.32> +- <19.34> +- <19.36> +- <19.38> +- <19.40> +- <19.42> +- 
<19.44> +- <19.46> +- <19.48> +- <19.50> +- <19.52> +- <19.54> +- <19.56> +- <19.58> +- <19.60> +- <19.62> +- <19.64> +- <19.66> +- <19.68> +- <19.70> +- <19.72> +- <19.74> +- <19.76> +- <19.78> +- <19.80> +- <19.82> +- <19.84> +- <19.86> +- <19.88> +- <19.90> +- <19.92> +- <19.94> +- <19.96> +- <19.98> +- <20.00> +- <20.02> +- <20.04> +- <20.06> +- <20.08> +- <20.10> +- <20.12> +- <20.14> +- <20.16> +- <20.18> +- <20.20> +- <20.22> +- <20.24> +- <20.26> +- <20.28> +- <20.30> +- <20.32> +- <20.34> +- <20.36> +- <20.38> +- <20.40> +- <20.42> +- <20.44> +- <20.46> +- <20.48> +- <20.50> +- <20.52> +- <20.54> +- <20.56> +- <20.58> +- <20.60> +- <20.62> +- <20.64> +- <20.66> +- <20.68> +- <20.70> +- <20.72> +- <20.74> +- <20.76> +- <20.78> +- <20.80> +- <20.82> +- <20.84> +- <20.86> +- <20.88> +- <20.90> +- <20.92> +- <20.94> +- <20.96> +- <20.98> +- <21.00> +- <21.02> +- <21.04> +- <21.06> +- <21.08> +- <21.10> +- <21.12> +- <21.14> +- <21.16> +- <21.18> +- <21.20> +- <21.22> +- <21.24> +- <21.26> +- <21.28> +- <21.30> +- <21.32> +- <21.34> +- <21.36> +- <21.38> +- <21.40> +- <21.42> +- <21.44> +- <21.46> +- <21.48> +- <21.50> +- <21.52> +- <21.54> +- <21.56> +- <21.58> +- <21.60> +- <21.62> +- <21.64> +- <21.66> +- <21.68> +- <21.70> +- <21.72> +- <21.74> +- <21.76> +- <21.78> +- <21.80> +- <21.82> +- <21.84> +- <21.86> +- <21.88> +- <21.90> +- <21.92> +- <21.94> +- <21.96> +- <21.98> +- <22.00> +- <22.02> +- <22.04> +- <22.06> +- <22.08> +- <22.10> +- <22.12> +- <22.14> +- <22.16> +- <22.18> +- <22.20> +- <22.22> +- <22.24> +- <22.26> +- <22.28> +- <22.30> +- <22.32> +- <22.34> +- <22.36> +- <22.38> +- <22.40> +- <22.42> +- <22.44> +- <22.46> +- <22.48> +- <22.50> +- <22.52> +- <22.54> +- <22.56> +- <22.58> +- <22.60> +- <22.62> +- <22.64> +- <22.66> +- <22.68> +- <22.70> +- <22.72> +- <22.74> +- <22.76> +- <22.78> +- <22.80> +- <22.82> +- <22.84> +- <22.86> +- <22.88> +- <22.90> +- <22.92> +- <22.94> +- <22.96> +- <22.98> +- <23.00> +- <23.02> +- <23.04> +- <23.06> +- <23.08> +- <23.10> +- <23.12> +- <23.14> +- <23.16> +- <23.18> +- <23.20> +- <23.22> +- <23.24> +- <23.26> +- <23.28> +- <23.30> +- <23.32> +- <23.34> +- <23.36> +- <23.38> +- <23.40> +- <23.42> +- <23.44> +- <23.46> +- <23.48> +- <23.50> +- <23.52> +- <23.54> +- <23.56> +- <23.58> +- <23.60> +- <23.62> +- <23.64> +- <23.66> +- <23.68> +- <23.70> +- <23.72> +- <23.74> +- <23.76> +- <23.78> +- <23.80> +- <23.82> +- <23.84> +- <23.86> +- <23.88> +- <23.90> +- <23.92> +- <23.94> +- <23.96> +- <23.98> +- <24.00> +- <24.02> +- <24.04> +- <24.06> +- <24.08> +- <24.10> +- <24.12> +- <24.14> +- <24.16> +- <24.18> +- <24.20> +- <24.22> +- <24.24> +- <24.26> +- <24.28> +- <24.30> +- <24.32> +- <24.34> +- <24.36> +- <24.38> +- <24.40> +- <24.42> +- <24.44> +- <24.46> +- <24.48> +- <24.50> +- <24.52> +- <24.54> +- <24.56> +- <24.58> +- <24.60> +- <24.62> +- <24.64> +- <24.66> +- <24.68> +- <24.70> +- <24.72> +- <24.74> +- <24.76> +- <24.78> +- <24.80> +- <24.82> +- <24.84> +- <24.86> +- <24.88> +- <24.90> +- <24.92> +- <24.94> +- <24.96> +- <24.98> +- <25.00> +- <25.02> +- <25.04> +- <25.06> +- <25.08> +- <25.10> +- <25.12> +- <25.14> +- <25.16> +- <25.18> +- <25.20> +- <25.22> +- <25.24> +- <25.26> +- <25.28> +- <25.30> +- <25.32> +- <25.34> +- <25.36> +- <25.38> +- <25.40> +- <25.42> +- <25.44> +- <25.46> +- <25.48> +- <25.50> +- <25.52> +- <25.54> +- <25.56> +- <25.58> +- <25.60> +- <25.62> +- <25.64> +- <25.66> +- <25.68> +- <25.70> +- <25.72> +- <25.74> +- <25.76> +- <25.78> +- <25.80> +- <25.82> +- <25.84> +- <25.86> +- <25.88> +- 
<25.90> +- <25.92> +- <25.94> +- <25.96> +- <25.98> +- <26.00> +- <26.02> +- <26.04> +- <26.06> +- <26.08> +- <26.10> +- <26.12> +- <26.14> +- <26.16> +- <26.18> +- <26.20> +- <26.22> +- <26.24> +- <26.26> +- <26.28> +- <26.30> +- <26.32> +- <26.34> +- <26.36> +- <26.38> +- <26.40> +- <26.42> +- <26.44> +- <26.46> +- <26.48> +- <26.50> +- <26.52> +- <26.54> +- <26.56> +- <26.58> +- <26.60> +- <26.62> +- <26.64> +- <26.66> +- <26.68> +- <26.70> +- <26.72> +- <26.74> +- <26.76> +- <26.78> +- <26.80> +- <26.82> +- <26.84> +- <26.86> +- <26.88> +- <26.90> +- <26.92> +- <26.94> +- <26.96> +- <26.98> +- <27.00> +- <27.02> +- <27.04> +- <27.06> +- <27.08> +- <27.10> +- <27.12> +- <27.14> +- <27.16> +- <27.18> +- <27.20> +- <27.22> +- <27.24> +- <27.26> +- <27.28> +- <27.30> +- <27.32> +- <27.34> +- <27.36> +- <27.38> +- <27.40> +- <27.42> +- <27.44> +- <27.46> +- <27.48> +- <27.50> +- <27.52> +- <27.54> +- <27.56> +- <27.58> +- <27.60> +- <27.62> +- <27.64> +- <27.66> +- <27.68> +- <27.70> +- <27.72> +- <27.74> +- <27.76> +- <27.78> +- <27.80> +- <27.82> +- <27.84> +- <27.86> +- <27.88> +- <27.90> +- <27.92> +- <27.94> +- <27.96> +- <27.98> +- <28.00> +- <28.02> +- <28.04> +- <28.06> +- <28.08> +- <28.10> +- <28.12> +- <28.14> +- <28.16> +- <28.18> +- <28.20> +- <28.22> +- <28.24> +- <28.26> +- <28.28> +- <28.30> +- <28.32> +- <28.34> +- <28.36> +- <28.38> +- <28.40> +- <28.42> +- <28.44> +- <28.46> +- <28.48> +- <28.50> +- <28.52> +- <28.54> +- <28.56> +- <28.58> +- <28.60> +- <28.62> +- <28.64> +- <28.66> +- <28.68> +- <28.70> +- <28.72> +- <28.74> +- <28.76> +- <28.78> +- <28.80> +- <28.82> +- <28.84> +- <28.86> +- <28.88> +- <28.90> +- <28.92> +- <28.94> +- <28.96> +- <28.98> +- <29.00> +- <29.02> +- <29.04> +- <29.06> +- <29.08> +- <29.10> +- <29.12> +- <29.14> +- <29.16> +- <29.18> +- <29.20> +- <29.22> +- <29.24> +- <29.26> +- <29.28> +- <29.30> +- <29.32> +- <29.34> +- <29.36> +- <29.38> +- <29.40> +- <29.42> +- <29.44> +- <29.46> +- <29.48> +- <29.50> +- <29.52> +- <29.54> +- <29.56> +- <29.58> +- <29.60> +- <29.62> +- <29.64> +- <29.66> +- <29.68> +- <29.70> +- <29.72> +- <29.74> +- <29.76> +- <29.78> +- <29.80> +- <29.82> +- <29.84> +- <29.86> +- <29.88> +- <29.90> +- <29.92> +- <29.94> +- <29.96> +- <29.98> +- <30.00> +- ▁ +- ▁the +- ▁and +- ▁of +- s +- ▁to +- ▁a +- . +- ▁in +- ▁i +- '''' +- ▁he +- ▁that +- ▁was +- ▁it +- ▁his +- ',' +- ▁for +- ▁with +- '-' +- ▁is +- ▁you +- ▁as +- ▁had +- t +- ▁de +- ▁be +- ▁her +- ▁not +- e +- ▁but +- ▁on +- ▁at +- ▁she +- ▁by +- ▁have +- ▁they +- ▁him +- ▁this +- ▁which +- ▁all +- ▁from +- ▁la +- ▁so +- ▁me +- ▁my +- ▁were +- ▁we +- ▁one +- ▁no +- n +- a +- en +- d +- ▁an +- ▁or +- ▁are +- ▁said +- ▁there +- ▁their +- o +- ▁when +- ▁would +- ▁if +- ▁them +- ▁who +- ▁en +- ▁been +- ▁what +- ing +- er +- ▁will +- ▁do +- i +- ▁out +- ed +- m +- ▁man +- ▁up +- ▁then +- es +- ▁more +- ▁into +- ▁could +- y +- ▁now +- r +- ▁some +- ▁your +- ▁very +- ▁has +- ▁time +- ▁die +- ▁can +- re +- ly +- ▁about +- ▁que +- ▁little +- ▁than +- ▁like +- ▁only +- ▁un +- ▁our +- ▁over +- ▁upon +- ▁any +- ▁did +- ▁other +- ▁two +- '?' 
+- ▁und +- ▁l +- ▁know +- ▁see +- ▁well +- ▁y +- ▁before +- ▁after +- ▁made +- ▁d +- ▁der +- ▁good +- ▁el +- ▁down +- ▁its +- ▁should +- te +- ▁great +- ▁such +- ▁us +- ▁these +- ▁come +- ▁le +- ▁men +- ▁go +- ▁how +- ▁must +- ▁may +- ▁first +- ▁na +- ▁much +- ▁never +- ▁old +- ▁mr +- ▁where +- ▁came +- ▁way +- in +- ▁am +- u +- ’ +- ll +- ▁long +- ▁back +- ▁day +- ▁here +- ▁own +- ▁mu +- ▁himself +- ▁even +- ▁again +- ▁life +- ▁say +- ▁those +- ▁through +- ▁most +- ▁just +- ▁think +- ▁make +- ▁se +- ▁might +- ▁went +- ve +- ▁al +- ▁many +- ▁don +- ▁too +- ▁every +- ▁et +- ▁shall +- ▁thought +- ▁being +- ▁hand +- ▁away +- ▁still +- ▁La +- ▁den +- ▁without +- j +- na +- ▁while +- ▁people +- ▁les +- ▁take +- de +- ar +- ▁last +- ▁eyes +- ▁god +- ▁o +- 'on' +- ▁van +- ▁yet +- ▁ku +- ▁des +- al +- is +- ▁three +- ▁found +- ▁also +- ▁get +- ▁nothing +- ▁du +- ▁place +- ▁same +- ▁house +- ▁though +- ▁n +- to +- ▁under +- ▁new +- ▁let +- ▁once +- ▁The +- ▁another +- ▁ya +- ▁face +- ▁à +- ▁off +- ▁saw +- ▁things +- an +- ▁per +- '!' +- ▁young +- ▁war +- ▁father +- ▁head +- ▁es +- ▁tell +- ▁ever +- ▁er +- ▁te +- ▁right +- ▁zu +- l +- ▁del +- ▁far +- ▁night +- k +- ▁world +- ▁put +- ▁left +- ▁ne +- ▁I +- ta +- ▁work +- ▁because +- ▁con +- ▁part +- ▁took +- ▁give +- ▁look +- ▁love +- est +- le +- ▁why +- ▁di +- ▁always +- ▁het +- ▁A +- ▁looked +- ▁door +- z +- ▁thing +- ▁against +- as +- ▁years +- ▁mind +- la +- ne +- ▁een +- ▁home +- ▁seemed +- ▁sie +- ▁moment +- ▁going +- ▁asked +- ten +- ▁heart +- ▁got +- il +- ▁mrs +- ▁son +- ▁something +- ▁each +- se +- é +- ▁mother +- ▁told +- ▁knew +- ▁few +- ra +- man +- ▁sir +- ▁heard +- ga +- ▁find +- ▁das +- da +- ▁і +- ▁una +- ▁woman +- ▁between +- ge +- ▁oh +- st +- ▁met +- ▁done +- ▁half +- ▁wa +- ▁king +- ▁room +- ▁both +- ▁dat +- ▁water +- ▁side +- os +- ▁yes +- ▁als +- ▁better +- ▁ein +- ▁seen +- us +- ▁called +- ▁soon +- е +- ▁quite +- ▁name +- ▁va +- ▁course +- ▁want +- ▁ich +- ▁enough +- ▁having +- ro +- ▁il +- ▁не +- ▁ik +- ch +- ▁ko +- ▁whole +- ▁light +- ▁mit +- ▁nicht +- ▁miss +- do +- c +- 'no' +- ▁ni +- ▁ver +- ▁lord +- mo +- it +- ▁sich +- ▁whom +- ▁nor +- un +- ▁lady +- а +- ho +- ment +- ▁gave +- ness +- ▁El +- ▁von +- ▁set +- ▁almost +- ▁however +- ▁hands +- ▁days +- ▁end +- ▁began +- م +- ▁white +- ▁w +- я +- м +- ▁je +- f +- ▁turned +- ▁на +- ▁auf +- у +- ▁ha +- h +- ▁da +- ▁poor +- ▁among +- ▁ist +- ▁morning +- ▁until +- ▁ba +- ▁por +- ▁ma +- ▁su +- ka +- ▁felt +- ma +- ▁estas +- ▁words +- ▁" +- ▁op +- ▁kind +- ▁four +- ▁since +- ▁best +- ▁country +- ▁death +- ▁voice +- ▁si +- we +- ▁hundred +- ▁brought +- ▁small +- me +- й +- ▁next +- ers +- ▁word +- be +- 。 +- ▁perhaps +- ▁does +- ▁boy +- w +- ▁myself +- ▁stood +- ▁anything +- ▁girl +- ce +- ▁round +- ▁з +- ni +- ▁hij +- ▁ge +- va +- ▁est +- ▁herself +- ▁los +- 이 +- ▁u +- ▁dear +- ▁matter +- ▁thus +- b +- p +- ▁rather +- ▁taken +- ▁till +- ▁full +- wa +- den +- ▁dem +- ы +- ▁par +- ▁five +- je +- ▁present +- za +- ▁friend +- ▁sent +- ▁high +- ba +- ▁together +- ▁twenty +- li +- g +- ▁kaj +- ▁He +- ▁mi +- ▁air +- ▁general +- ▁thou +- el +- ▁true +- at +- ▁indeed +- ▁near +- ▁open +- he +- ▁rest +- ▁power +- ▁large +- ▁children +- ▁sure +- ke +- ▁keep +- ▁themselves +- ▁whose +- ko +- ▁fire +- ▁feet +- ▁wife +- ▁others +- men +- ▁given +- ▁cannot +- ▁nature +- sa +- ▁sea +- go +- ▁qui +- ▁money +- ti +- ▁certain +- ▁believe +- ▁help +- ▁gone +- ir +- ▁cried +- ▁along +- th +- ▁black +- ▁па +- ca +- ▁ў +- ▁used +- ▁c +- ▁second +- ▁case +- ▁point +- zi +- ▁often +- ▁child +- ▁passed +- 
ter +- ▁niet +- ha +- ▁sat +- ▁fact +- ▁hear +- or +- ▁year +- ▁alone +- ▁S +- ia +- ▁order +- ▁그 +- ▁known +- ri +- ▁therefore +- ▁during +- ▁use +- ▁call +- ▁li +- et +- lo +- т +- ▁im +- ó +- ▁dans +- ▁ten +- ▁zijn +- ▁behind +- em +- ▁lay +- ▁body +- ▁state +- ▁hope +- ▁really +- ی +- 가 +- ya +- ▁above +- ▁looking +- ▁pas +- ▁leave +- ▁ye +- х +- ton +- ▁answered +- ▁land +- ▁sa +- ▁replied +- ▁um +- and +- ▁b +- ▁city +- ▁red +- ▁thousand +- der +- ▁dead +- ▁lo +- ▁mean +- ▁six +- by +- ▁held +- ▁hard +- ▁within +- на +- ▁fell +- ▁around +- ▁less +- ▁earth +- ▁least +- ry +- ▁already +- ▁whether +- ur +- с +- ▁hour +- ن +- ▁speak +- ▁read +- ▁thy +- ▁itself +- co +- ▁és +- ▁became +- ▁Il +- '"' +- ▁women +- ▁reason +- ▁eine +- ▁거 +- ▁une +- ▁у +- ▁L +- ▁care +- ▁hat +- ci +- ':' +- ▁soul +- ▁says +- ▁friends +- ▁several +- ▁person +- ▁live +- ant +- ▁coming +- ▁short +- x +- ▁means +- ▁question +- ▁In +- ▁sort +- ▁times +- ▁pour +- ت +- ▁за +- ▁dark +- ка +- ▁wie +- oj +- ▁au +- ▁ground +- ▁possible +- di +- ie +- ▁form +- ▁human +- ▁amb +- ла +- ▁either +- sch +- ▁feel +- ki +- ▁business +- ▁town +- ▁lost +- ▁com +- ▁bed +- ▁turn +- ▁qu +- ▁En +- ш +- ▁zij +- ▁big +- nt +- ▁sur +- ▁Li +- ▁family +- ja +- ver +- ▁won +- ▁suddenly +- ▁become +- к +- ▁It +- ▁everything +- ▁manner +- ▁later +- aba +- ▁kept +- 는 +- ▁truth +- ▁master +- ▁aus +- ▁ser +- ▁sun +- à +- ▁bi +- um +- ▁letter +- ▁didn +- ▁wish +- ▁rose +- ▁spirit +- ▁evening +- ▁making +- ▁fear +- ▁need +- ation +- ▁close +- ▁returned +- ▁idea +- ze +- ▁brother +- ▁om +- ▁show +- ▁tu +- so +- 도 +- ▁strong +- ▁sense +- ▁church +- the +- ▁ask +- ▁sight +- ▁para +- ▁understand +- able +- ▁aan +- ▁sometimes +- ▁across +- ▁number +- ▁different +- 的 +- son +- ▁table +- ▁river +- ▁talk +- ل +- ▁bring +- р +- ent +- ▁vor +- am +- ▁towards +- ▁return +- vo +- ▁answer +- ▁beautiful +- ▁real +- ▁아 +- ▁else +- mi +- п +- ▁Le +- ity +- ▁fine +- ▁arm +- ▁public +- ▁ready +- ▁voor +- day +- ▁story +- ▁doubt +- ▁followed +- mu +- д +- ▁strange +- ▁p +- ▁auch +- ▁gu +- ▁aber +- ▁z +- ▁— +- v +- ▁stand +- ▁captain +- gen +- less +- ▁law +- ▁hold +- ▁sin +- ، +- ▁suppose +- ye +- ▁able +- ▁bad +- ▁spoke +- ▁што +- ▁run +- ▁sound +- tu +- лі +- ▁free +- ig +- ting +- ▁john +- ▁да +- land +- ste +- ▁Es +- ▁arms +- ▁cold +- н +- ▁ja +- ▁car +- 지 +- ▁book +- л +- ▁english +- ak +- ado +- ▁eye +- ▁front +- ▁za +- ▁art +- si +- ▁wind +- ▁pass +- ▁ad +- ling +- ▁deep +- ▁husband +- ▁received +- і +- ▁remember +- ▁cause +- id +- ▁А +- ▁past +- ▁horse +- ▁reached +- ▁ce +- ▁street +- та +- ▁cut +- ▁road +- ada +- du +- ▁haar +- ▁근데 +- ▁england +- ▁era +- ▁daß +- yo +- ▁У +- ▁appeared +- ▁hair +- ▁happy +- ▁saying +- га +- ▁wanted +- ▁pro +- ▁longer +- ▁gold +- ▁seems +- ه +- ▁early +- ▁vi +- ▁blood +- ▁che +- ▁안 +- ▁ought +- ▁subject +- aj +- ▁carried +- 을 +- hi +- ▁po +- ▁em +- 고 +- ▁low +- ▁Die +- ▁wild +- ▁taking +- ў +- ▁dan +- ▁clear +- ▁line +- ven +- zo +- ▁none +- ▁feeling +- bo +- ▁forward +- po +- ▁company +- ▁nach +- ▁further +- ▁eighteen +- ▁ran +- ▁continued +- ▁toward +- ▁certainly +- ر +- ▁change +- ▁hours +- ▁maar +- ра +- ▁window +- ▁dit +- ad +- ▁seven +- ▁common +- ▁seem +- ▁daughter +- pa +- ine +- ic +- ▁De +- wi +- ▁natural +- ▁fellow +- ▁beyond +- ▁á +- ku +- 에 +- ▁las +- ▁neither +- ▁doing +- ▁court +- ▁green +- ▁vous +- ▁force +- ▁meet +- ▁tried +- ны +- '".' 
+- ▁living +- ted +- ▁although +- ▁doctor +- ▁school +- ▁bu +- im +- ▁lui +- ▁interest +- ▁mal +- ▁opened +- ▁noch +- ▁play +- ку +- ▁position +- ▁O +- ▁account +- room +- ▁pretty +- ▁mir +- ▁fair +- ▁glad +- ▁fall +- ▁character +- ▁party +- bi +- ▁necessary +- ▁els +- ty +- ▁estis +- ▁blue +- ▁added +- ▁für +- ▁act +- ▁co +- ▁except +- q +- ▁plus +- ▁silence +- ▁miles +- ▁eight +- gi +- ▁government +- ▁self +- ▁sleep +- ▁ki +- ▁thirty +- ▁k +- кі +- ▁nearly +- ▁yo +- ▁nur +- ▁nu +- ▁bear +- ش +- ▁cap +- ؟ +- les +- ▁mar +- ▁foot +- ▁prince +- ▁mine +- que +- ken +- fa +- ▁bo +- ▁christ +- ▁entered +- ▁D +- zu +- ny +- ▁wall +- ▁following +- ▁ah +- ng +- ▁ab +- ren +- ▁knowledge +- ▁minutes +- ▁heaven +- ▁M +- ▁happened +- ▁view +- ▁late +- ▁non +- ▁Mu +- ▁lived +- ▁army +- ▁try +- ▁ara +- ▁а +- ▁chance +- ▁queen +- ▁ship +- ▁bit +- ▁hem +- ▁forth +- ▁Re +- ist +- ▁sister +- ▁attention +- ▁N +- ▁peace +- led +- ▁yourself +- ▁и +- ▁뭐 +- ▁pay +- ▁dr +- ▁french +- ▁über +- ▁어 +- ung +- ait +- ▁hall +- ▁ago +- ▁como +- ▁fifty +- да +- ley +- ▁Se +- tion +- ner +- ▁trouble +- ▁afraid +- ▁purpose +- ▁gentleman +- ▁service +- ▁می +- ▁age +- з +- ▁important +- ▁north +- ion +- ي +- и +- ▁distance +- ▁led +- vi +- ▁secret +- ▁third +- ▁save +- ▁Na +- jo +- ▁zoo +- ▁thinking +- ▁hi +- ut +- pe +- ck +- ▁с +- ▁walked +- au +- ▁wir +- ▁و +- ▁laid +- ▁south +- ek +- ▁length +- ish +- ▁died +- ▁comes +- ▁pleasure +- ▁standing +- ▁trees +- ▁пра +- ▁sweet +- ▁effect +- ж +- ю +- ▁slowly +- ▁probably +- ▁muri +- ▁wait +- ger +- 어 +- ▁wrong +- ▁smile +- ▁paper +- wo +- ▁whatever +- ▁heavy +- ana +- ▁london +- ▁гэта +- ita +- ▁desire +- ▁week +- ▁mary +- ▁dis +- ▁rich +- ▁deal +- ▁quiet +- ▁drew +- ▁pre +- tes +- ▁pri +- ▁faith +- не +- ▁instead +- ▁ill +- ina +- ▁wonder +- ▁st +- ▁simple +- ▁mon +- ▁lips +- ▁camp +- ▁tree +- nie +- ▁object +- tt +- ▁hardly +- ru +- ▁Per +- ▁stay +- ▁started +- ▁remained +- elle +- د +- clock +- ▁mouth +- í +- ▁T +- ▁post +- ▁stopped +- ▁top +- ▁caught +- era +- ▁immediately +- ▁history +- ez +- ▁send +- ▁C +- ▁exclaimed +- ▁U +- ▁uncle +- و +- ▁fast +- ▁No +- ▁strength +- ▁mij +- io +- ▁einen +- ai +- ▁sind +- ▁chief +- ▁joy +- ▁dinner +- ▁boys +- ▁g +- ▁mais +- ▁uit +- ▁food +- fe +- gu +- ▁kon +- ▁office +- ты +- ▁placed +- ▁getting +- ▁walk +- ▁Al +- ▁floor +- ▁moved +- ▁fa +- ▁bright +- ▁written +- ▁outside +- ▁zich +- ▁Ma +- ▁single +- ▁wood +- ▁grew +- ▁watch +- ▁E +- ▁в +- ▁по +- ▁nine +- ▁beauty +- ▁loved +- ▁anar +- ▁visit +- ز +- age +- ▁seeing +- ▁follow +- ▁ka +- ster +- ▁months +- vu +- ▁impossible +- ▁ад +- ▁Mi +- ▁broken +- ▁dog +- ard +- ▁Les +- ▁wat +- ▁forty +- ring +- end +- ▁easy +- ▁stone +- une +- ▁stop +- ▁werden +- ▁boat +- ▁greater +- ▁sit +- ▁v +- ні +- ва +- ▁please +- ▁figure +- ▁filled +- ▁Ha +- ▁hier +- ▁І +- ▁sitting +- ▁вы +- ▁eat +- eth +- ▁cast +- ▁laughed +- ▁Er +- 은 +- ▁evil +- ▁bar +- ▁battle +- ▁quickly +- ▁enemy +- ▁presence +- ful +- ▁waiting +- ▁according +- ▁ihm +- 서 +- ▁struck +- ле +- ▁chair +- ▁wo +- ▁wurde +- ▁avec +- ▁music +- ▁beginning +- ▁married +- ма +- ▁mich +- ▁makes +- ▁hatte +- ▁duty +- ▁sein +- ▁kwa +- ler +- ▁oku +- ▁west +- ▁plain +- ▁ihr +- cu +- ▁Ba +- ▁plan +- ▁막 +- ▁fight +- ▁més +- ▁regard +- ▁especially +- ▁due +- ▁perfect +- ik +- ▁danger +- ▁village +- ▁pa +- ▁below +- ▁bien +- ▁durch +- ate +- ▁particular +- ▁opinion +- ▁beside +- ч +- ▁ho +- г +- ▁showed +- ▁tears +- ble +- ▁step +- ▁france +- ▁vol +- ▁latter +- des +- ▁grand +- ham +- '...' 
+- ism +- ▁f +- ben +- ▁garden +- ▁girls +- ▁holy +- ▁brown +- che +- ▁states +- ▁afternoon +- ▁fer +- ▁besides +- lar +- ▁fresh +- ▁future +- аў +- ▁snow +- ▁appearance +- ▁silent +- ▁winter +- ▁wide +- ▁result +- mer +- ▁cross +- bu +- ▁Be +- ▁aba +- ды +- ب +- ura +- ▁conversation +- ▁entre +- ▁carry +- ▁sudden +- ▁horses +- ▁grace +- sh +- ▁tom +- ▁comme +- ▁hot +- ▁learned +- ▁kandi +- ▁merely +- ▁raised +- ▁arrived +- ▁wrote +- س +- ran +- ▁straight +- ▁thoughts +- ля +- ▁youth +- ▁knows +- ▁various +- A +- ць +- ▁Я +- ▁society +- ▁unto +- ▁leaving +- ▁action +- ▁meant +- ча +- ль +- ▁broke +- chen +- ▁sky +- ▁summer +- ▁terrible +- ▁born +- ▁condition +- bra +- ▁direction +- ▁tre +- ▁write +- ▁Ur +- ul +- zen +- ▁expression +- ▁dé +- ▁ke +- ▁unter +- ▁journey +- ▁An +- ▁wird +- ▁spring +- ▁trying +- ée +- ban +- lu +- gan +- ▁pale +- ber +- ft +- ▁madame +- ▁experience +- мі +- ▁books +- ▁easily +- ▁나 +- ▁seu +- 를 +- rs +- gel +- ▁main +- ▁Sa +- ▁giving +- ef +- ▁wenn +- és +- ▁jack +- ▁cy +- ▁entirely +- ▁minute +- ▁american +- ▁usual +- ▁umu +- ▁drink +- ▁george +- ▁kan +- ▁hill +- ▁corner +- her +- ли +- ▁pain +- pi +- fer +- ▁من +- up +- ▁gi +- ▁letters +- ▁Ni +- ▁aunt +- ▁middle +- ▁lives +- ▁Ne +- । +- ▁worth +- ▁expected +- ▁speaking +- ▁tone +- ▁twelve +- ▁h +- ▁nun +- nya +- car +- ▁chapter +- ▁waren +- ▁talking +- sten +- ▁covered +- ▁unless +- ▁inter +- lan +- ▁nie +- ▁train +- ▁fit +- ▁occasion +- ▁respect +- ▁afterwards +- ira +- ▁couldn +- ▁finally +- ▁ses +- ▁elle +- ò +- ▁warm +- ende +- ▁sub +- ▁trust +- ▁escape +- ▁spot +- ▁cry +- ; +- ▁grave +- ▁lot +- ▁bei +- ▁naar +- ▁lie +- ▁game +- ▁success +- в +- ▁그냥 +- ure +- ▁dream +- ▁turning +- ▁bank +- ▁reply +- ▁instant +- ▁jesus +- ji +- ▁به +- ▁command +- ▁K +- ▁influence +- á +- ▁sign +- ▁island +- ▁Але +- ▁appear +- ▁cette +- но +- ▁einem +- ▁ihn +- bar +- nu +- ▁내가 +- ▁forget +- ▁places +- ning +- ▁persons +- ▁mere +- ▁learn +- ous +- ▁soft +- ▁running +- ▁Ĝi +- ▁remain +- fu +- kin +- ▁field +- mar +- ▁charge +- ▁silver +- о +- ▁rock +- 만 +- ▁wouldn +- ▁한 +- ▁killed +- house +- ria +- ▁forest +- ari +- ▁surprise +- ▁Ad +- ▁simply +- ▁begin +- ▁marriage +- ▁march +- han +- ▁board +- cy +- len +- ▁news +- ▁considered +- ▁touch +- con +- ▁pleasant +- б +- ary +- ▁consider +- ▁scarcely +- per +- ▁laugh +- ▁class +- ▁period +- ▁einer +- ▁shot +- ▁move +- ▁wished +- è +- ▁ngo +- ▁reach +- ▁allowed +- ndo +- 다 +- ▁difficult +- morrow +- ▁bij +- ▁peter +- ció +- ▁generally +- lich +- ▁space +- ▁mark +- ▁closed +- ▁Vi +- ▁alle +- ▁safe +- ian +- ▁Ka +- ▁lead +- ▁private +- ▁race +- ville +- ▁dress +- ▁former +- my +- ▁Wa +- ▁breath +- ▁break +- ▁그런 +- ▁ari +- ▁beneath +- ▁picture +- ▁circumstances +- ▁uns +- ▁tea +- ▁We +- nd +- ▁sus +- cha +- ▁iron +- ▁glass +- ▁noble +- mal +- ▁observed +- ▁Do +- ▁ta +- ▁Der +- like +- ▁piece +- ▁somewhat +- té +- ▁B +- ▁scene +- ▁paid +- ▁bound +- ▁system +- ▁changed +- ▁discovered +- ▁darkness +- ا +- if +- ▁quick +- dan +- ق +- night +- ▁pot +- ▁judge +- ▁enter +- ▁Com +- ▁doch +- ▁się +- ▁divine +- ▁tra +- ам +- ▁personal +- don +- ▁special +- ▁neck +- ▁tall +- ▁sont +- lin +- ▁mountain +- ▁parts +- ▁united +- ▁ze +- ▁Un +- ▁vint +- ▁religion +- ▁guard +- 기 +- ▁пры +- ▁paris +- ▁greatest +- ▁isn +- ▁goes +- ▁ladies +- ▁fifteen +- ▁determined +- ▁believed +- ▁thank +- ▁walls +- ▁fortune +- ▁mis +- ▁sharp +- ры +- ▁vast +- ida +- ojn +- ▁wonderful +- ▁built +- ▁jo +- ▁seine +- ▁nous +- ▁ex +- ся +- ▁más +- ▁ب +- ▁broad +- ▁justice +- ▁rain +- ▁nog +- ies +- ▁hath +- ford +- ▁start +- 
▁kun +- ▁Sie +- ▁perfectly +- wood +- ▁decided +- ker +- ive +- ▁wel +- ▁gij +- ▁presently +- ▁ancient +- ك +- ▁modern +- ▁sixty +- ▁surely +- ▁colonel +- ▁yn +- ▁declared +- ▁steps +- ▁flowers +- sha +- це +- ▁ob +- ɛ +- ▁east +- ▁working +- ▁habit +- ▁likely +- ▁watched +- ‘ +- sen +- ron +- ▁wieder +- tic +- ▁sorry +- ▁exactly +- ▁진짜 +- ▁crowd +- ▁bw +- ▁understood +- ▁seat +- ▁zou +- ▁This +- ▁ru +- fi +- port +- ▁thick +- ▁lying +- ▁threw +- ▁station +- ▁social +- gar +- ▁deux +- ier +- ▁smiled +- ▁happiness +- ▁value +- mente +- ▁soldiers +- ▁На +- bre +- ▁dels +- una +- ▁worse +- ▁language +- ▁yellow +- op +- ▁cyane +- ▁як +- чы +- tor +- ▁sake +- ▁situation +- ع +- ▁study +- ▁offered +- ▁drawing +- ▁moon +- ama +- ▁monsieur +- ▁attempt +- ве +- ▁fond +- bel +- ▁shook +- ▁gran +- ▁Si +- ▁cat +- ▁spent +- ▁clothes +- ▁shore +- ▁pleased +- ▁passing +- ando +- tra +- ▁henry +- ix +- ▁ears +- sta +- ▁honor +- ▁narrow +- red +- ▁zum +- ▁proper +- ▁sobre +- ía +- ▁яго +- ▁Va +- ▁fixed +- ▁haben +- ▁expect +- ▁formed +- ▁meeting +- ▁animal +- ▁existence +- ite +- ▁promise +- ▁wine +- kan +- ol +- ▁leaves +- шы +- ем +- ▁tout +- ▁honour +- ▁nice +- ▁était +- ante +- де +- eg +- ▁P +- 야 +- ▁notice +- ▁spite +- ▁fallen +- but +- ▁mad +- ▁wise +- ▁receive +- ▁royal +- го +- ▁G +- ک +- ▁evidence +- ло +- ▁memory +- ▁dropped +- ▁è +- ▁fi +- field +- ▁cent +- ▁ourselves +- min +- dy +- ju +- ▁passion +- ach +- ▁mijn +- ▁bis +- ▁bird +- ▁york +- ▁Ver +- ▁supposed +- ▁christian +- ▁Das +- ▁sick +- ▁dos +- ▁prepared +- ▁golden +- ▁rise +- ▁duke +- ▁repeated +- ▁named +- ▁box +- tte +- ▁그래서 +- ▁She +- ali +- pen +- ▁vain +- ▁german +- tres +- ▁다 +- ▁drawn +- ▁shut +- ▁works +- ▁speech +- ▁finished +- ▁dry +- ▁creature +- ▁building +- of +- ▁square +- ▁castle +- ▁pe +- ▁dollars +- ▁birds +- ▁count +- ▁complete +- line +- ▁pocket +- ons +- ▁listen +- cho +- way +- ▁song +- ą +- ▁knowing +- igen +- ▁passage +- ▁kill +- ▁movement +- ▁bill +- 是 +- ▁difficulty +- ▁sad +- ▁fate +- ▁daar +- ex +- ▁Els +- ▁major +- ان +- ▁ubu +- ▁color +- ▁lower +- ▁lines +- ▁courage +- ▁spoken +- ▁officers +- ▁houses +- 대 +- ▁played +- ▁opportunity +- ɣ +- ah +- nes +- ▁deg +- dor +- yi +- ▁writing +- ▁Bu +- ▁gray +- ▁principal +- ▁president +- ▁allow +- ▁month +- ம் +- ▁hu +- ▁paul +- ag +- ▁officer +- one +- ▁mountains +- ere +- ▁weeks +- ▁servant +- ба +- ▁nga +- ▁bread +- cia +- ▁ear +- ▁princess +- ▁effort +- ▁ur +- ▁note +- ▁looks +- mon +- ▁cost +- ▁이제 +- ige +- ▁band +- ▁lake +- پ +- · +- ня +- ▁attack +- ▁property +- ▁mass +- ▁questions +- ▁comfort +- ▁guess +- ▁stranger +- ▁moral +- ▁F +- ▁glance +- ▁equal +- ▁inside +- ▁reading +- ▁marry +- ▁tan +- ▁health +- ▁ay +- ▁grass +- ç +- ▁higher +- can +- ê +- ▁fancy +- ▁pray +- ▁С +- ▁capital +- ▁win +- ḥ +- ▁proved +- ন +- ▁oder +- eur +- ▁را +- heid +- му +- ▁anxious +- mos +- ▁roman +- ▁surface +- ement +- ▁serious +- ▁holding +- ▁glory +- ▁casa +- ▁ti +- ▁animals +- ▁pen +- ▁tired +- ano +- ▁sword +- ▁original +- ▁altogether +- ▁path +- ▁pride +- sche +- ▁avait +- ро +- ities +- nce +- ▁thin +- ▁Mo +- tin +- ▁carefully +- ▁usually +- ▁hurt +- ▁ordered +- ▁hung +- ▁mehr +- shi +- ▁blow +- ▁nobody +- ▁ri +- ▁weather +- ▁Ta +- ▁அ +- ou +- ▁pure +- ▁meaning +- ▁pan +- ▁waited +- ▁beat +- ▁Ki +- ▁Elle +- ▁ibi +- ▁coast +- ▁national +- ▁example +- ▁indian +- ▁gentlemen +- ▁hotel +- ▁opposite +- ▁quietly +- ▁Sant +- ▁spread +- ▁caused +- ▁worked +- ▁prove +- ▁final +- ▁search +- ▁Aba +- ▁remembered +- ▁dare +- ныя +- ▁cases +- ▁produced +- tter +- ▁pounds +- ▁conduct +- 
art +- ▁gate +- 로 +- ▁promised +- ▁fish +- ud +- ▁kann +- ▁storm +- ▁rule +- ▁edge +- ▁breast +- king +- ▁weak +- ava +- ▁valley +- ▁ankaŭ +- ▁centre +- ▁talked +- ▁lose +- ▁possession +- ▁terror +- ▁proud +- ▁kuri +- tro +- ▁laws +- ف +- ▁در +- ire +- ▁Hi +- ▁grow +- ▁fingers +- ▁Co +- lie +- ▁ben +- ▁shoulder +- ▁dick +- ▁Te +- ated +- ▁curious +- ę +- ▁forgotten +- tan +- ▁ju +- ▁difference +- ▁calm +- ▁charles +- ▁support +- র +- ▁popular +- ú +- ах +- tel +- ме +- ▁seventy +- ▁breakfast +- ▁quarter +- ▁rode +- ▁drive +- ▁brave +- ▁matters +- ▁companion +- for +- ▁orders +- ▁So +- ▁alles +- ▁degree +- ▁seventeen +- ▁bottom +- ▁engaged +- om +- let +- ▁offer +- ▁serve +- ▁forced +- ▁clean +- 나 +- ▁shadow +- ▁control +- ▁america +- ▁ordinary +- ▁group +- ▁evidently +- cer +- ▁To +- ▁gar +- ▁william +- uka +- ▁col +- ▁police +- ▁draw +- ▁diese +- ▁Ge +- ▁dann +- ▁sand +- ▁everybody +- ▁palace +- ries +- ▁local +- ▁progress +- nen +- dos +- S +- ▁Su +- ▁bent +- well +- ▁served +- ▁“ +- ти +- ҡ +- ▁angry +- ise +- ▁opening +- ▁ook +- ł +- ▁stream +- ▁trade +- ä +- ▁cousin +- ▁political +- ▁surprised +- ▁Ku +- ▁watching +- ▁kingdom +- се +- ▁я +- ▁europe +- ▁fool +- ▁material +- ▁pu +- nou +- ха +- ile +- ▁greatly +- iz +- ▁waar +- ▁star +- nde +- ▁authority +- chi +- ▁mas +- ▁coat +- ido +- ▁kwi +- ▁bay +- ▁seva +- nia +- ḍ +- ▁sing +- ни +- ▁ball +- ▁gentle +- ند +- ging +- ту +- ▁individual +- ▁thrown +- ence +- lle +- ▁confidence +- ▁seek +- ▁sagte +- ▁information +- ot +- ▁Mar +- ▁walking +- ▁They +- ▁desert +- vis +- ▁Me +- ▁ways +- ▁settled +- ▁rising +- ▁prayer +- ▁거야 +- ▁finding +- ▁weight +- 는데 +- ▁shoulders +- res +- ▁servants +- ize +- ▁gun +- win +- ▁با +- ▁season +- ▁direct +- ▁nti +- val +- dem +- ▁shown +- nda +- ▁fashion +- side +- ▁ride +- ▁Ra +- ▁base +- ▁prison +- ▁bell +- ▁noticed +- ▁similar +- ▁empty +- ▁familiar +- за +- ▁touched +- ▁fort +- bla +- ▁doesn +- ern +- ▁loss +- hu +- ▁frank +- ▁ideas +- ▁legs +- ▁sought +- ▁instance +- ▁از +- ▁heads +- ▁kuba +- ▁dir +- ▁port +- ▁sufficient +- ▁happen +- nni +- ació +- ▁fait +- ▁Гэта +- sse +- kel +- hen +- ▁explained +- ischen +- ▁delight +- น +- ▁ice +- ▁smoke +- ▁woods +- ▁dieser +- ▁schon +- ▁ён +- ▁interesting +- ▁taste +- ▁content +- los +- ▁couple +- ▁nation +- ▁affairs +- ▁judgment +- tar +- ▁british +- 해 +- ▁mag +- ▁catch +- ▁min +- ▁pli +- ▁failed +- ▁burst +- ▁wij +- ▁amount +- ▁false +- ▁james +- ▁rate +- ▁naturally +- ▁nta +- ▁immer +- ь +- ▁actually +- ▁members +- ▁blind +- ▁alive +- ▁telling +- 면 +- ▁explain +- ▁minister +- ▁whispered +- ส +- ▁fat +- ▁professor +- tre +- ges +- ▁feelings +- ▁slow +- ▁aside +- fo +- ▁lu +- vin +- ▁denn +- ▁раз +- то +- ▁pity +- ▁ou +- ▁És +- ▁growing +- ▁было +- ▁prevent +- ▁buy +- ▁loud +- ▁streets +- ▁refused +- ▁share +- ▁nose +- ▁task +- ▁century +- ▁otherwise +- ▁agreed +- ally +- ▁baby +- ns +- ▁chi +- ▁pero +- ▁religious +- ▁obliged +- ย +- ale +- ▁considerable +- ▁tender +- ▁add +- ▁Di +- ▁intended +- ▁names +- ▁grey +- ▁ces +- ▁indians +- ero +- af +- ship +- ▁advantage +- ff +- ey +- ▁imagine +- ▁murder +- cher +- ▁pointed +- ▁bow +- ▁mighty +- ▁excellent +- ▁troops +- ▁twice +- 리 +- ▁keeping +- cht +- ▁aniran +- ▁park +- ▁Ro +- ▁gives +- ▁priest +- ▁carrer +- ▁Tu +- ▁bon +- ▁apparently +- ▁distant +- ▁són +- lla +- ▁그거 +- ▁Je +- ▁required +- ▁stock +- ▁windows +- itat +- ▁af +- ▁bag +- خ +- ▁게 +- rwa +- ▁brain +- ▁تو +- ▁brief +- ▁gathered +- use +- ▁Pro +- ▁eene +- ▁rome +- ▁때 +- ▁noise +- ▁club +- ▁report +- ance +- ▁좀 +- ▁bat +- jn +- ▁suggested +- 요 +- ▁dressed 
+- [... `token_list` continues: tens of thousands of multilingual SentencePiece/BPE subword entries spanning Latin, Cyrillic, Arabic, Hangul, Tamil, CJK, and other scripts, elided here for readability; the complete vocabulary ships with the model's ESPnet training config ...]
+- ▁rwose +- ▁unjust +- ▁atlantic +- omo +- ▁worried +- ▁draught +- ▁recollect +- kya +- ੀ +- ▁haunted +- ▁extend +- ▁Bur +- isten +- ▁parece +- point +- undi +- үү +- ▁зна +- ▁20 +- ▁throng +- ▁autres +- ▁chariot +- ▁boven +- ▁amy +- გ +- rien +- franc +- iq +- بوو +- ่า +- irwa +- ics +- ▁confi +- نے +- 강 +- ▁nick +- ander +- ▁stato +- ▁hause +- tric +- ▁bac +- ▁Tanzania +- ▁thrill +- ▁مو +- ▁hog +- ▁viz +- ▁배 +- erung +- ▁hue +- ▁cultivated +- unga +- 성 +- nti +- তে +- ▁Tamen +- ۋا +- ▁zelfs +- ization +- ▁què +- ffen +- ereza +- ▁estar +- ▁aza +- насць +- ▁aged +- هی +- ▁specially +- ▁pupil +- sanzwe +- ▁sweetness +- mission +- ▁briefly +- из +- ▁Eine +- 산 +- ▁які +- pole +- ▁angrily +- hof +- nud +- ▁ganzen +- ection +- ▁kitty +- tangira +- nette +- ▁fiercely +- ▁solche +- łem +- ▁watu +- ▁яму +- ▁Paris +- ▁France +- ▁tame +- 크 +- ▁augenblick +- 여 +- rique +- lev +- ▁eighteenth +- nem +- ▁wissen +- ▁공 +- 同 +- ▁traffic +- ▁smallest +- ▁doll +- rier +- ▁تە +- ▁negroes +- ▁hired +- ▁charlotte +- არ +- ground +- eaux +- டை +- นี้ +- ▁victims +- வை +- ▁cou +- vic +- ▁disappear +- 발 +- ▁yake +- ▁fortunes +- emp +- ▁rosy +- ▁compte +- ▁genera +- tour +- ▁barren +- 南 +- ▁invent +- wacht +- ▁пла +- ▁youthful +- شی +- tale +- ▁rider +- ▁دوست +- ▁subsequent +- ▁workers +- ▁pavement +- ▁blaze +- ▁Teil +- kaba +- െ +- bris +- neuf +- inc +- ▁бай +- сем +- ▁erected +- ▁pul +- ▁faculty +- ▁stirring +- 本 +- ▁bewildered +- ▁resemblance +- Ma +- ▁nieder +- தா +- ▁dreary +- wydd +- ▁overhead +- ▁vera +- ▁triste +- ▁чалавек +- א +- ▁steht +- ▁آیا +- ▁turkey +- ▁sabe +- ▁victoria +- ▁innumerable +- ▁London +- ▁madre +- ▁celle +- ▁commence +- glich +- ▁tribunal +- nus +- ▁swords +- chter +- ▁cargo +- kri +- ▁fain +- ▁tracta +- ▁keys +- прав +- ▁hurrying +- ▁йо +- ▁Ва +- ktor +- ▁veins +- ство +- ▁gihugu +- tekereza +- ▁unser +- ▁doubts +- ▁dessen +- ▁Gre +- ▁persona +- ▁zona +- ▁kuvuga +- ▁reserved +- ▁Many +- ▁rebellion +- ▁substantial +- neɣ +- ske +- ▁inama +- ▁кра +- ▁Yu +- oth +- ▁skirt +- bazo +- ▁heraus +- ▁complexion +- imba +- ▁City +- 区 +- act +- menya +- ać +- para +- iĝo +- lumin +- mora +- ▁stately +- ▁forbidden +- ▁axe +- otto +- asen +- sobola +- ۔ +- ▁literally +- ▁needle +- ең +- кон +- тыр +- ▁abundance +- roy +- 州 +- ▁sermon +- ▁keinen +- rán +- ▁dacht +- ▁elephant +- ▁centro +- ▁sails +- culture +- ▁uburyo +- ▁Tor +- ▁އަ +- ▁financial +- اش +- ▁yabo +- atge +- ▁doubted +- rè +- ▁buiten +- ▁tank +- ▁Neu +- ▁cooking +- ▁unit +- ▁suited +- сна +- ▁floated +- يا +- ನ +- ▁rows +- bili +- ▁hip +- کی +- ▁potatoes +- ▁cavern +- ▁fowl +- éc +- ▁halb +- rius +- ▁nancy +- nyu +- ▁Port +- ▁reste +- ▁nomo +- υ +- ▁그랬 +- tuma +- tun +- zzo +- rag +- ▁remorse +- ▁relieve +- ্যা +- ischer +- în +- isation +- ▁unnecessary +- ▁hizo +- stadt +- тур +- ນ +- ▁illusion +- ний +- arra +- ▁herbert +- ▁nuit +- ▁panic +- ▁geb +- 었 +- ▁assigned +- 新 +- ▁bits +- ▁separation +- ▁это +- bwe +- ▁accent +- ▁사람이 +- ▁bonne +- ders +- ▁거의 +- ▁steward +- ▁hurriedly +- ▁entertain +- ▁Gen +- ▁그치 +- ▁resta +- equip +- ▁quina +- ân +- 체 +- нем +- ▁skies +- uten +- ▁comb +- ▁echoed +- ▁només +- ▁bol +- ▁regularly +- nini +- ▁mute +- ▁resolve +- nine +- ▁hejuru +- ▁될 +- ▁Tie +- ▁amiable +- ▁kimwe +- enfant +- ▁tots +- ▁converted +- ▁studying +- ン +- ▁tusschen +- َّ +- ▁towers +- dus +- ата +- že +- dou +- pra +- ddwa +- praw +- ▁我 +- ▁francisco +- ▁onward +- ▁kuva +- ▁drie +- ▁ни +- ▁setanta +- ына +- ▁уз +- ▁split +- ▁Als +- enti +- loo +- rò +- ▁sire +- posa +- ▁بار +- ▁rip +- ▁gilt +- ▁iri +- ▁thyself +- ▁mah +- ▁assez +- 
▁skull +- ▁preach +- ▁traveller +- ▁bills +- ▁urge +- ▁snap +- 양 +- 會 +- agit +- ▁Mil +- ▁sup +- ▁ph +- ▁remembering +- пад +- ▁poems +- بی +- ▁mucho +- beera +- ▁cautiously +- idade +- cept +- ▁보고 +- ▁rubbed +- ▁Sur +- ▁dwarf +- ▁chains +- ▁fundamental +- ▁போ +- ު +- ▁beam +- ▁invariably +- ▁woo +- ▁bernard +- раў +- equi +- ▁mnie +- 더라 +- jó +- ▁doom +- ▁shops +- liegen +- shot +- ▁crash +- uß +- гьы +- ▁같은데 +- ▁martha +- ▁compass +- ▁좋아 +- ching +- stehen +- ▁sob +- ▁tú +- ั +- ▁сама +- ▁muse +- ▁exchanged +- ulu +- نه +- ▁conf +- sea +- ▁brit +- ▁gast +- rir +- ▁miriam +- ▁Ang +- ▁convince +- ▁tenen +- ୍ +- ब +- ▁saviour +- ▁muntu +- iwe +- ▁conveyed +- ▁yiwen +- ให้ +- even +- ▁ancestors +- ▁fried +- ары +- ▁Firefox +- ▁encuentra +- innen +- ▁multe +- seven +- श +- ▁encara +- ▁jewel +- ▁ئەو +- ▁durante +- ▁elect +- ▁bands +- خت +- ▁thoughtfully +- 오 +- ▁republican +- yle +- ▁merchants +- ▁purchased +- ▁bak +- ▁trousers +- ▁tuta +- ▁courtesy +- ▁weinig +- char +- ▁superstition +- ▁vuitanta +- ▁friday +- ▁agents +- ▁chicago +- ▁tragic +- ▁awhile +- ▁span +- ▁lowest +- sili +- limi +- ▁avond +- ೆ +- ▁Gran +- 근 +- odd +- ▁invite +- ▁kas +- ▁sublime +- ▁많 +- pier +- ▁gehört +- ▁swallowed +- ▁ус +- ல்ல +- schi +- лап +- ▁answers +- ▁warrior +- ▁temperament +- ▁neces +- ▁deceive +- nwa +- ▁infinitely +- ▁ac +- ▁provision +- uche +- ▁fourteenth +- ▁dealing +- ▁alex +- ▁maxim +- ▁Nda +- لَ +- ▁opportunities +- ▁zyn +- ▁edition +- ▁salon +- 主 +- ▁unworthy +- schap +- ▁eagerness +- ▁suspicions +- ▁United +- 작 +- wali +- ▁swinging +- ▁vacant +- ▁catherine +- ▁hesitate +- ▁allem +- യ +- лет +- cada +- duka +- ▁omdat +- ayi +- ▁ber +- ▁mysteries +- ▁failing +- ezo +- ▁buffalo +- sley +- ▁pueblo +- ▁adult +- 公 +- ▁recommend +- ▁Quina +- ▁voix +- treten +- ▁incredible +- ▁wretch +- sail +- ▁video +- child +- what +- ад +- hut +- ates +- ▁hora +- ▁rero +- gos +- ▁decay +- ަ +- Imana +- ▁애들 +- கி +- ▁dio +- ▁baker +- ▁grasped +- ▁bureau +- ▁levant +- ▁unseen +- ▁estimate +- ▁alsof +- uh +- ▁맞 +- ▁charlie +- ▁tenderly +- ▁eer +- ▁tire +- ▁noranta +- organiz +- ▁insects +- ▁advocate +- 时 +- ▁جو +- ▁clergyman +- ▁petite +- ▁hecho +- baka +- ▁pagan +- ù +- ▁것 +- lect +- ▁glittering +- ▁eternity +- ▁knelt +- ▁identity +- ▁pupils +- ▁climbing +- ▁rim +- aff +- ▁jungle +- ▁Une +- ▁libro +- ▁ringing +- ▁publication +- ▁bigger +- ▁epi +- ▁sprak +- 北 +- кла +- chant +- ▁suspended +- ▁spe +- ▁연 +- လ +- ▁cats +- ▁fitting +- ▁pag +- ▁자기 +- amategeko +- ▁conclude +- ▁Mais +- шка +- ▁muscles +- linda +- 也 +- prov +- ▁dependent +- ▁screamed +- ▁hace +- ▁Vol +- ▁drum +- 来 +- ▁conce +- 特 +- ▁specific +- ▁deaf +- ▁ক +- wul +- ▁Gal +- ▁بۇ +- ▁toda +- siga +- ▁zullen +- lav +- پی +- ▁Х +- fashe +- ▁persisted +- ▁attendant +- ▁rey +- 든 +- ▁Kom +- ফ +- ▁goose +- ▁sources +- ▁swan +- ▁foster +- ▁strongest +- ▁tunnel +- fic +- iriza +- hour +- ▁އެ +- awen +- 十 +- ▁народ +- ▁mejor +- urwa +- ▁eerste +- ▁نە +- core +- ▁jewish +- ▁coffin +- ▁melted +- anno +- ▁sour +- ▁quaint +- φ +- ▁exciting +- ▁neben +- ▁River +- ▁autor +- nnes +- ▁dirt +- ▁impressions +- ▁알아 +- hle +- munt +- ▁daniel +- ▁resort +- ▁좋은 +- ▁occurrence +- ▁Jes +- ▁acquire +- зу +- ▁Pas +- ▁everlasting +- ▁rum +- bis +- ▁acute +- gekommen +- ▁vow +- moi +- ▁sheriff +- amendement +- kera +- ▁kost +- ▁adams +- ▁improve +- ▁proprietor +- ▁کە +- ▁tricks +- ▁appar +- ▁voort +- ▁witnessed +- ▁plague +- ▁clothed +- lden +- гал +- ▁rejected +- odi +- ایی +- setzen +- ▁football +- ▁dock +- ▁Bau +- 得 +- ▁awfully +- ▁chiefs +- ▁poly +- únic +- 学 +- ▁gigantic +- ▁amazing +- 
tado +- ▁awaiting +- ▁offers +- ibu +- ▁반 +- ▁columns +- ▁cars +- ures +- ▁Pla +- ▁notable +- اه +- ▁일단 +- ▁Peter +- ▁bella +- lma +- ▁magazine +- ▁jardin +- ▁morgan +- ▁doctors +- lick +- ▁blossom +- eja +- ▁parlor +- коў +- ▁pushing +- eww +- ruf +- ▁för +- ▁righteousness +- ન +- ▁gens +- ▁swimming +- ▁banda +- ▁banquet +- ▁ब +- gegeben +- ▁wordt +- erin +- ▁reden +- ▁4 +- ▁hasty +- ▁discharge +- مەن +- ▁Dan +- ▁fig +- nit +- ▁echter +- lice +- ▁dismay +- ▁shocked +- 심 +- ▁pobre +- 定 +- اس +- ▁ruler +- ▁komunumo +- mise +- ▁했는데 +- geza +- sinn +- ▁strife +- эт +- lichkeit +- '19' +- rous +- வு +- ▁patent +- anti +- coloured +- ▁동 +- ▁mod +- ို +- ▁bacon +- yita +- ▁clearing +- lv +- wand +- phon +- imo +- hound +- ▁ruled +- fie +- ▁Arm +- ▁gloves +- yin +- angu +- tlich +- ▁radiant +- nyarwanda +- etti +- ▁wu +- ▁arrows +- ▁explosion +- ▁volta +- heure +- elde +- ▁tub +- 용 +- brun +- ▁congregation +- ▁집 +- ▁japan +- ▁annual +- ▁무슨 +- ▁destin +- cident +- ▁ша +- ซ +- ▁gan +- ĝa +- ▁Dies +- ▁concept +- tak +- ▁yanjye +- muntu +- ▁plates +- রা +- ▁все +- ▁raison +- ũ +- ት +- ▁scheen +- ▁probability +- iens +- ĉ +- ▁Win +- esco +- sehen +- 利 +- ▁pied +- amo +- utse +- ▁effet +- szy +- ▁condemn +- igne +- ▁lodging +- ▁brood +- так +- ▁egyptian +- ▁escort +- ▁provi +- ▁Marc +- ▁subsequently +- ▁scripture +- ул +- ▁bonds +- ▁tumult +- ▁intentions +- ▁shoe +- ▁expensive +- 감 +- lp +- ▁viu +- ▁vis +- ▁rises +- ▁requested +- ▁ине +- ▁yawe +- tius +- ▁quelle +- ▁carved +- палі +- ular +- পা +- pira +- ▁pump +- লা +- ▁mourning +- ▁mule +- utu +- ough +- ▁mechanical +- ுக்கு +- ▁kir +- etto +- ▁spoon +- ▁indem +- ▁sticks +- ▁smiles +- ▁assisted +- vide +- жи +- ▁tools +- 就 +- ▁mm +- ис +- dine +- ▁winning +- giving +- ▁jug +- ▁ŝ +- ▁Bon +- illon +- ▁waiter +- ▁economy +- ▁venus +- ▁abide +- ambi +- ▁squirrel +- ▁sexual +- ▁gehen +- ▁bows +- platz +- ▁demanda +- ▁numéro +- ▁fuss +- bile +- seite +- ▁rightly +- lish +- ▁нічога +- θ +- bü +- ▁dismal +- tische +- ▁এ +- رت +- ▁hervor +- onia +- ▁vari +- ▁oldest +- ▁strive +- ▁groß +- وم +- ▁pé +- trees +- ▁conjecture +- ▁William +- ▁kubi +- ▁É +- ▁প্র +- ▁nad +- east +- ▁Tur +- рат +- ▁plough +- ▁purity +- ▁washing +- ▁Rue +- ▁seeds +- lief +- stal +- ▁compassion +- nego +- ▁fantastic +- ▁sama +- ▁kugeza +- ▁pastor +- making +- ▁decree +- ▁clad +- arn +- ▁sac +- ▁bass +- kintu +- ▁lamps +- '`' +- či +- цэ +- ▁mart +- ▁County +- ▁faults +- cular +- ▁بول +- izza +- ▁totally +- ▁owl +- kende +- বে +- pas +- volu +- тор +- ssy +- ▁Poste +- zima +- شە +- ಕ +- 천 +- ▁سا +- ▁좋 +- ▁subdued +- 水 +- ▁glacier +- ▁explore +- taf +- ழ +- anima +- ▁conquer +- ▁forgetting +- ▁verte +- າ +- lop +- ▁highness +- ▁mede +- нік +- ▁functions +- ▁stain +- ▁Dimecres +- guha +- fia +- дер +- ▁cinc +- ▁gedaan +- ▁picking +- ▁guided +- assa +- vig +- ▁einge +- ologie +- ▁dean +- ▁likeness +- ▁ужо +- 係 +- 였 +- ▁heathen +- hom +- ▁forbid +- wara +- ibindi +- ▁happier +- ▁Ak +- gger +- گا +- ales +- gaz +- él +- ▁Val +- く +- ▁latest +- cir +- ளி +- лыҡ +- kuu +- ▁cob +- اع +- насці +- 인데 +- фар +- typ +- aho +- ruk +- ▁deur +- ▁heights +- დ +- embe +- ▁iemand +- cida +- bound +- ▁hommes +- mpf +- ▁allies +- ▁Martí +- ▁High +- korera +- ▁contrived +- ▁abana +- тыя +- pil +- hundert +- ▁matthew +- ▁monstrous +- 县 +- ▁abend +- ▁hielt +- gation +- ▁revenue +- test +- ▁sunny +- ır +- قی +- estudi +- ▁Nie +- nej +- ▁holl +- ▁japanese +- cido +- ▁municipal +- unge +- ▁archbishop +- ind +- ▁examples +- ▁يا +- ▁carpenter +- ▁okwe +- ▁бул +- ыць +- ос +- ▁bereits +- ноў +- ▁cuatro +- 
▁nineteenth +- рак +- ▁whe +- ▁exhibition +- ▁evolution +- ishwa +- gevoel +- ▁gezicht +- ▁dolor +- ▁Camp +- ▁douglas +- inter +- сця +- ▁algo +- ▁scared +- see +- ▁tidings +- nov +- polo +- ▁enfin +- aigua +- ▁meinem +- ▁fulfilled +- ▁persian +- цяг +- ▁alternative +- ▁thorn +- وە +- cord +- ▁நி +- ▁College +- ைய +- ▁könnte +- ▁چی +- ▁stump +- ggle +- বা +- ▁près +- ▁pleaded +- ກ +- ▁тым +- diri +- fé +- ▁extract +- ▁faculties +- esprit +- ▁voll +- ▁unnatural +- ▁시간 +- ▁gilbert +- ▁다른 +- ▁haber +- ▁particulars +- 마 +- 명 +- idos +- ▁naj +- vingt +- ▁schön +- נ +- ▁circuit +- ▁organized +- tiques +- ▁producing +- ancy +- ▁vir +- ▁representation +- 국 +- ▁unique +- ▁bidding +- tam +- ▁आ +- vind +- mag +- Q +- ▁await +- ▁ки +- ▁ele +- etzt +- garagaza +- ▁notre +- fangen +- త +- ▁bother +- cil +- ▁exhibited +- kke +- ▁rust +- nama +- аба +- ▁quadr +- igas +- ▁месца +- rust +- ▁expenses +- تم +- ▁lump +- ▁luego +- ▁prevail +- یە +- ▁annoyed +- ▁spake +- மை +- ▁savages +- ▁Fre +- ته +- ▁Vall +- ▁selection +- ▁withdraw +- ”. +- ▁despise +- inu +- rg +- ▁meadows +- aten +- puesto +- ▁stad +- enga +- ▁Ger +- ек +- ▁ҡа +- ▁dine +- ▁dreaded +- âme +- ▁ikibazo +- cic +- ▁float +- ▁crowned +- ▁Pra +- ▁odor +- stelle +- ▁marching +- жым +- ▁않 +- ▁lime +- ▁casting +- ramo +- ▁polished +- ▁akaba +- ▁Bas +- ▁dealt +- ▁emerged +- tol +- ég +- δ +- edd +- ▁Durant +- ▁ober +- rey +- ĝis +- ▁borrow +- ▁sylvia +- ▁blush +- uwe +- ▁tw +- fect +- ▁presume +- ▁hafi +- ▁humility +- ▁también +- دان +- ▁sho +- ▁parcel +- ▁clergy +- ▁agitated +- ▁fashionable +- ▁magistrate +- ▁hinaus +- شو +- ण +- ▁personage +- ▁emphasis +- ▁senior +- teko +- وان +- ▁ox +- дума +- wegen +- ▁원 +- ▁kirk +- লে +- gres +- embo +- ▁sweeping +- чыць +- ▁chef +- ▁sung +- ▁brig +- ▁seines +- ▁был +- ரா +- ▁yavuze +- ▁Beth +- ▁grade +- ▁stamped +- ▁infantry +- ▁shriek +- ▁twe +- คน +- له +- lut +- ▁constitute +- ▁sting +- ▁worldly +- ▁pearl +- ridge +- ▁hospitality +- ▁employer +- ▁cursed +- ▁vicar +- gize +- zier +- onder +- ▁balls +- lagen +- voca +- ún +- ▁hijo +- ▁plank +- liga +- ▁déjà +- ▁heures +- ▁shouts +- рын +- ▁youngest +- ▁gew +- ▁verder +- ▁bel +- rya +- ▁definition +- dag +- ەکە +- ▁successfully +- baye +- ▁mondo +- ▁massive +- аваць +- ensi +- ▁mwi +- ▁Ĉi +- ында +- ▁신 +- тү +- ▁ascertain +- ▁вось +- ите +- ▁liet +- ▁Van +- ▁relationship +- cré +- kker +- ▁writings +- acre +- dian +- hält +- ▁pang +- ▁ста +- ▁Quel +- dı +- nait +- umugabo +- ▁peg +- ▁approved +- vos +- 加 +- ▁bruce +- ▁disturbance +- ▁sentimental +- hren +- ▁차 +- det +- ▁grupo +- ▁invalid +- ured +- ▁vaguely +- ▁wonderfully +- tico +- aquesta +- shin +- ▁tyrant +- шке +- ▁roared +- ▁joyous +- ▁spreken +- ▁chart +- ▁fee +- ▁hood +- ▁blake +- কে +- ▁ese +- ▁engage +- ▁player +- hita +- ▁peak +- ▁conven +- ▁imitation +- nton +- dó +- ▁matin +- under +- ▁aunque +- ▁tightly +- ▁quant +- ▁ہو +- ▁eloquence +- το +- ▁sits +- ote +- ound +- ▁colon +- ▁في +- ▁types +- ▁asserted +- ▁fifteenth +- ▁fr +- tio +- ▁startling +- ӗ +- یں +- ڕ +- ▁dona +- ▁chicken +- ▁ĉefe +- ▁هیچ +- ▁arriving +- ▁boiled +- ▁devas +- ▁complaint +- ू +- ▁marion +- reɣ +- ▁competition +- force +- ▁antes +- କ +- ▁gris +- ▁barrel +- oph +- ▁passer +- ▁spider +- лас +- soma +- ▁luke +- скіх +- white +- ▁welfare +- ▁misschien +- ijwe +- rap +- ▁кол +- ރ +- ▁Grand +- ▁uses +- ordre +- ▁role +- ▁دارد +- ways +- ▁saloon +- چە +- jin +- ▁brigade +- führen +- ▁decline +- ▁кон +- ▁별로 +- ▁meetings +- ▁Ara +- how +- ▁terme +- ▁یه +- road +- ▁golf +- ▁pasture +- owa +- ▁greece +- ▁autre +- kauf +- ▁tas +- ▁sonst 
+- shinzwe +- mwa +- uter +- ▁formula +- ▁Arbeit +- ▁circular +- ▁encourage +- ▁bamu +- ум +- ▁albert +- rp +- liv +- lama +- ▁plat +- ▁scholar +- ▁examining +- роб +- ▁awaited +- ▁imperfect +- ਿ +- ▁сябе +- jana +- three +- ▁wars +- oke +- ▁cert +- ▁eerst +- ▁utterance +- ▁gus +- ▁Tot +- ▁plantation +- ▁coloured +- ▁oficial +- ▁feminine +- ▁whispering +- ▁Für +- іцца +- haga +- 행 +- egg +- ▁داد +- ▁Pen +- eby +- ▁bist +- үн +- ▁Mira +- ▁valleys +- ▁solomon +- ▁무 +- ▁Berlin +- ▁beautifully +- ▁ethel +- rama +- ▁voy +- hurst +- ected +- ▁teil +- ▁episode +- раз +- ▁trova +- licher +- 日 +- gare +- ▁foes +- tare +- ▁му +- аг +- ▁incidents +- შ +- ▁유 +- ▁tug +- ▁raft +- ▁Robert +- ие +- ▁raz +- ▁estu +- ▁patro +- oba +- ▁helmet +- ▁restrain +- 포 +- ▁wolves +- ▁strand +- ▁tropical +- ▁bild +- heden +- ▁charms +- иш +- ▁стала +- ▁exhibit +- ▁medal +- ▁lads +- ▁mineral +- ents +- nyama +- raba +- office +- ▁panel +- या +- ▁queda +- ▁mirth +- ران +- tere +- ▁luncheon +- iment +- ▁Spe +- mpe +- ▁إ +- ▁attach +- ▁sixteenth +- zir +- ▁bantu +- ава +- leta +- ▁justly +- ڵ +- ▁excursion +- ▁pear +- ▁undertaking +- ▁영화 +- uke +- just +- ɣen +- ▁eloquent +- 民 +- ís +- дом +- ▁кры +- fit +- kaj +- ▁relate +- ▁twelfth +- ▁einfach +- சு +- ▁accordance +- ▁verstand +- chang +- ▁chambre +- ▁loin +- ▁flowed +- ▁verg +- ▁genug +- ▁butcher +- ਲ +- ▁zooals +- wamu +- ▁transfer +- koko +- komen +- ▁pauvre +- ▁arribar +- ▁год +- ▁방 +- ▁Bri +- laf +- venir +- ▁stadt +- ▁spots +- ▁anthony +- ▁와 +- ί +- وو +- ▁তিনি +- шт +- ден +- gs +- ▁earned +- ▁esper +- ێک +- ▁mines +- ▁yell +- ▁roaring +- gn +- eron +- ▁exercised +- ▁environment +- ▁snatched +- teri +- ▁justify +- ▁carries +- vill +- ▁наш +- ▁endeavoured +- ▁ark +- ▁raid +- ▁kurz +- ddu +- 밖에 +- ▁juist +- ▁rascal +- ▁South +- সা +- ▁fy +- ▁Cla +- ▁oliver +- onda +- ▁factory +- ▁spreading +- ▁nigh +- ▁mustn +- 질 +- 德 +- ▁geh +- ▁재밌 +- pfa +- ▁agua +- ▁umwana +- ▁알 +- ▁Hey +- ▁wink +- ilo +- ▁fueron +- ▁echo +- ▁partir +- ▁cependant +- sze +- ▁rector +- ▁abandi +- ▁친구 +- ▁bafite +- ▁Spiel +- awe +- لم +- zze +- ▁begon +- ▁geven +- ▁resentment +- êr +- ▁வ +- ▁demon +- rre +- ▁horace +- ▁geloof +- ▁Но +- ▁travail +- ▁volumes +- ▁approval +- ▁constance +- ▁depended +- кән +- bika +- ▁donner +- ▁중 +- ▁bois +- ▁feu +- ▁soixante +- 미 +- ▁wari +- ▁tau +- meza +- তি +- ▁scrap +- kali +- lijke +- 역 +- ▁birthday +- ▁owed +- ▁thicket +- ▁trench +- валася +- ਨ +- ref +- essa +- ▁Fel +- ▁Unter +- 所 +- wang +- ▁М +- ▁expressions +- ▁дэ +- yana +- ▁kujya +- шта +- ▁soir +- ▁amuse +- ▁Quin +- ▁talents +- ▁singer +- ▁kaum +- ▁Para +- ▁stud +- ▁saa +- 두 +- ▁cells +- subi +- ध +- ▁woorden +- ▁domain +- вой +- ▁donna +- ▁coldly +- ▁conform +- ▁pluck +- meester +- ▁Ра +- ▁baga +- ▁prendre +- rire +- ▁olw +- ▁demonstration +- kto +- stück +- cock +- ▁Ser +- ▁token +- cott +- ▁charges +- ▁manufacture +- bred +- yant +- bali +- ▁llama +- ▁assertion +- ▁دارم +- kita +- üb +- ▁nchi +- ▁dresses +- nehmen +- lij +- ▁marshal +- tique +- verse +- ัน +- ▁feeding +- ▁komt +- ▁애 +- ▁foam +- ▁grau +- ▁trente +- ain +- ios +- ял +- раб +- ▁guerre +- ▁respected +- ▁ayen +- прац +- ▁fools +- temp +- တ +- ▁mädchen +- ▁dad +- ▁appearing +- waka +- ▁constru +- ▁louder +- ▁needn +- ▁recommended +- kol +- ▁pathetic +- ellen +- ▁Hari +- ▁mü +- ologist +- lement +- ▁armour +- ▁пакуль +- ▁speeches +- tron +- ▁tents +- cció +- houses +- ▁interpretation +- 与 +- arri +- كە +- osi +- ог +- vou +- ▁tribute +- ▁sheets +- tige +- champ +- മ +- spect +- ▁communicate +- ▁chambers +- ▁transform +- ▁quella +- τα +- ust 
+- mdash +- ▁nei +- ▁morir +- ▁gregory +- ▁ladyship +- schiff +- ▁Seg +- ▁på +- ▁Dat +- ▁North +- сан +- ▁dora +- ▁patients +- ▁мин +- ckte +- сце +- ▁decidedly +- tali +- ▁sache +- ▁fist +- utilitza +- ▁arnold +- wise +- ▁baixa +- ▁dome +- ▁Op +- koresha +- aciones +- rell +- ▁punto +- ▁nunca +- ▁triumphant +- ာ +- кер +- ▁Jahr +- ▁Цяпер +- ِي +- ▁gemacht +- fro +- ▁genoeg +- ▁tint +- ▁virtuous +- ▁creed +- களை +- ▁wits +- gress +- ▁чу +- う +- ▁tooth +- ▁preaching +- ▁continuous +- ▁jaw +- ▁Vila +- ▁urgent +- gebracht +- ▁cows +- ▁nome +- zó +- ▁wese +- ▁dura +- ▁guru +- ▁10 +- ▁confer +- ▁கு +- sum +- ▁kontraŭ +- ▁differences +- ▁chatter +- ▁Во +- ▁praying +- ▁captive +- garuka +- enta +- ▁chemical +- noj +- ▁دا +- ▁candles +- eixen +- رم +- ▁دی +- ▁pren +- ▁paths +- ži +- uve +- ▁housekeeper +- ▁brandy +- cono +- ▁jerk +- ▁abstract +- ▁General +- ▁beer +- baha +- ▁pouring +- aco +- cá +- find +- ifying +- ▁verdad +- ree +- ▁horns +- ziehen +- ▁loop +- hou +- renge +- ▁habitual +- ▁veces +- ▁richmond +- ▁riot +- ▁appealed +- ▁Yo +- ▁effected +- rle +- bora +- ▁menace +- ▁stretching +- ▁Cy +- satz +- ▁cabeza +- lled +- னி +- િ +- யில் +- ▁fills +- bby +- ▁Af +- ▁endured +- ▁divorce +- ▁impressive +- ▁пере +- ▁devote +- ▁orator +- bere +- ▁switch +- mek +- ▁commons +- power +- ▁jag +- ▁alongside +- သ +- fy +- appel +- ▁alguna +- ▁patiently +- ▁beaucoup +- ▁bleef +- ▁esther +- ▁pow +- quen +- ▁пасля +- ▁impress +- ▁treball +- ▁ambitious +- idas +- һә +- ▁mason +- ▁아직 +- ▁militar +- dorf +- tragen +- ▁thereupon +- 유 +- ▁pulse +- ▁logical +- ▁abge +- ▁rachel +- тры +- ҡо +- undu +- ▁blanket +- ▁eran +- 里 +- lly +- ect +- ▁orchard +- gero +- 理 +- ট +- ▁zat +- ▁augment +- ▁compound +- ▁تر +- ▁sustain +- gebiet +- ▁indignant +- ▁stooped +- ▁tact +- ئی +- ▁스 +- ▁submitted +- ▁discern +- ▁documents +- rish +- ase +- ▁unconsciously +- ▁travellers +- ▁Castell +- ▁Este +- ranga +- ▁yaba +- 高 +- ambo +- has +- ▁해서 +- тат +- ้ +- ▁frog +- posició +- née +- ope +- fasha +- ▁loro +- zorg +- dora +- ▁اون +- ną +- ▁yali +- ▁initial +- dog +- ▁prose +- ▁ĝis +- eḥ +- ▁wahr +- ▁favourable +- ▁serene +- cara +- ีย +- іў +- rated +- ▁Dimarts +- сла +- парт +- stern +- zte +- lated +- ศ +- zioni +- ▁bart +- ▁streak +- ▁hebrew +- ▁будуць +- ▁wanneer +- ▁dignified +- ▁whereby +- nelle +- ▁shift +- ▁ingenious +- ەت +- ▁Ку +- kaza +- ▁huit +- ▁graves +- ▁shrugged +- wee +- ▁glare +- ▁animated +- lub +- ▁côté +- langen +- kir +- ufu +- gem +- ▁stillness +- ▁hunted +- iku +- ▁authors +- ebb +- ▁música +- ▁ezi +- tures +- ▁scout +- ▁continues +- ▁adapted +- ▁gente +- ▁excite +- ▁jaroj +- ▁poble +- ▁kelkaj +- ங்கள் +- gull +- ▁ап +- dium +- ▁미 +- ▁morality +- tiu +- роў +- ▁deliberate +- ▁어떤 +- ▁nouvelle +- ▁musket +- olu +- hale +- ▁crowds +- ▁finden +- 터 +- kie +- ▁nobility +- 하니까 +- alla +- ▁pearls +- ▁cliffs +- iver +- وں +- ▁flee +- ▁Ez +- ▁willow +- ûn +- ▁deepest +- ▁sustained +- ▁stehen +- ṭṭ +- schritt +- ▁trains +- nay +- പ +- ▁tute +- یی +- ▁prosper +- ▁Az +- ▁ras +- ▁voz +- яв +- horse +- ▁Ха +- 직 +- ▁پێ +- rono +- ▁amen +- ▁sci +- bwi +- larga +- kubi +- ▁polític +- ▁présent +- ▁miller +- ▁Ankaŭ +- ▁sabbath +- ▁너가 +- ▁readiness +- дыр +- ▁frighten +- ilia +- 노 +- ▁hereafter +- ▁பு +- ▁statt +- ▁algun +- uy +- ▁oars +- ▁familie +- espera +- ស +- ▁complained +- daw +- registr +- ▁harmless +- ىسى +- ქ +- নে +- ▁suicide +- rons +- ▁komm +- kis +- кры +- ▁despised +- kus +- ▁luther +- ▁인제 +- bee +- ▁கா +- ▁carbon +- viol +- ▁eva +- lande +- ssin +- ▁bona +- ▁sealed +- chus +- न् +- ▁buttons +- sul +- paro +- 
▁proves +- ▁continua +- into +- ▁strained +- ateur +- ނު +- ான் +- ▁welcomed +- ▁parla +- cchi +- leɣ +- ▁oppose +- rme +- ▁papier +- furi +- ▁ўсе +- ▁ernst +- ▁almighty +- glio +- blu +- ▁aufge +- ät +- ▁dip +- шо +- ▁Ту +- zien +- ▁Cre +- 克 +- ▁outline +- ээ +- ▁goat +- ▁artists +- ▁punch +- ▁kat +- 개 +- ▁bees +- ▁primary +- ▁mois +- ▁হ +- ▁ہے +- ▁오늘 +- ▁lhe +- нак +- زا +- kti +- મ +- ▁possibilities +- ▁cai +- rol +- ▁dernier +- ▁monks +- нд +- ▁Nya +- ސް +- ▁underneath +- ▁sympathetic +- late +- ▁superiority +- ▁rejoiced +- ੇ +- cade +- etta +- iff +- ▁convict +- then +- ▁damned +- ▁sombre +- uu +- ▁premi +- ruka +- ebla +- bier +- ▁lazy +- ▁Bor +- ▁apostle +- ▁Ant +- بر +- kreis +- ன்ற +- چا +- ▁angles +- ▁promising +- ▁embarrassment +- dell +- ▁sworn +- boden +- ▁radical +- ▁baa +- ▁gener +- ▁encouragement +- nuncia +- ▁blick +- ▁translation +- kop +- ▁zeg +- shop +- ▁violin +- اپ +- gone +- ฉัน +- 호 +- ▁activities +- ▁thirteenth +- lust +- ▁argue +- culo +- ▁stimme +- ก็ +- ▁clue +- ▁Juli +- tez +- 보다 +- ▁dispose +- ▁herz +- ▁그건 +- ▁bog +- ▁consum +- ▁Ils +- ▁rencontre +- try +- ▁leute +- بو +- 教 +- ▁herald +- فی +- sión +- ▁comfortably +- ▁señora +- tering +- ▁groupe +- cient +- ▁솔직히 +- ев +- ▁apples +- ▁babi +- flo +- ▁के +- nemen +- ▁downward +- ▁attending +- łu +- ▁wußte +- ▁slice +- ▁flashing +- উ +- ▁az +- coming +- ▁selling +- ▁conte +- ladi +- 拉 +- ಲ +- ▁비 +- ▁domina +- ▁mona +- ▁clinging +- gora +- ▁الم +- ▁hunters +- ▁distrust +- ▁kugirango +- ▁battery +- ▁fearing +- ▁holiness +- estre +- grund +- ▁dira +- ▁wambaye +- ▁observa +- ▁buli +- క +- ▁ascended +- gesetzt +- ggs +- ▁rural +- ▁mga +- ▁invasion +- ▁дә +- jem +- ▁compact +- ▁armor +- ▁creeping +- ▁fué +- эй +- 하면 +- ▁प +- ▁pleasantly +- ▁gabriel +- ▁vita +- ▁Cer +- guru +- ĝi +- 及 +- ▁preached +- ▁administra +- eto +- лай +- 왔 +- lov +- ▁dart +- ҳ +- ▁yr +- yim +- ии +- ▁honestly +- ▁realised +- ▁vrij +- ▁엄마 +- ▁workmen +- வர் +- ▁dive +- camp +- ▁sprung +- comb +- 업 +- ▁detect +- ▁cluster +- овы +- ▁practised +- zahl +- nahme +- bos +- pres +- ▁missionary +- ▁gordon +- ▁crust +- ▁reverse +- ▁breathless +- ▁logic +- ਤ +- tani +- ▁designs +- өр +- ddling +- ▁maud +- ▁saber +- ▁kye +- ▁Martin +- ▁reap +- ▁blazing +- ania +- ▁max +- ក +- ▁mujer +- ▁yella +- рен +- houd +- ▁manifestation +- ▁shaft +- て +- ▁abbey +- ▁요즘 +- 被 +- iles +- ▁Rus +- ▁mehrere +- ▁canadian +- ▁sets +- ▁lingered +- glia +- ▁weighed +- ▁iba +- ▁tamen +- ▁liable +- ▁Hier +- lein +- ▁piled +- sobanu +- ▁roughly +- ▁си +- ▁farmers +- zim +- ▁complicated +- йын +- ▁ensuite +- dder +- ▁ela +- ▁possessions +- ▁мар +- ▁languages +- 前 +- kata +- limu +- ▁attacks +- ެއް +- ▁attachment +- ▁praised +- bij +- ▁Тут +- ▁acres +- ubwi +- ▁wiped +- лык +- ▁Там +- ▁Its +- tino +- flam +- arde +- renga +- ▁schoon +- ▁petty +- yaka +- sieg +- ▁robes +- ▁Ban +- rage +- plant +- ngen +- үл +- ▁embargo +- ▁Umugabo +- ▁kati +- ред +- hart +- ▁hacia +- dol +- ▁Bro +- ▁imyaka +- ▁merciful +- ▁twentieth +- ыў +- ▁castell +- rijk +- autres +- ccion +- ▁거기서 +- ▁admission +- ның +- ▁freed +- trau +- ▁philadelphia +- ▁eigentlich +- ▁سے +- таш +- ▁wickedness +- ец +- rog +- ં +- ▁puff +- 食 +- tul +- эл +- ▁desperately +- نە +- 된 +- ▁hose +- ▁ghastly +- ▁avaient +- нат +- enge +- ▁anni +- ▁marine +- ▁Cas +- ҭа +- angwa +- meye +- нә +- ▁proclaimed +- evi +- ▁있지 +- nts +- ▁gag +- ന +- ения +- ▁ditch +- ▁frail +- ▁ebb +- رد +- rot +- zes +- ▁morris +- ու +- ▁upwards +- iser +- ▁veu +- ▁slechts +- ▁einzige +- ▁Film +- ▁reporter +- ▁йы +- ▁pap +- 티 +- ously +- vivi +- nych +- ▁wag +- 
▁და +- baut +- ▁5 +- enne +- ▁tailor +- ▁yacht +- ▁tutti +- ▁exalted +- ▁frenchman +- ▁drill +- ▁soit +- ▁bunny +- üh +- ав +- sitz +- ▁دست +- таў +- bing +- ▁accuse +- ▁lame +- lucht +- ▁garment +- ències +- ▁uzi +- ▁offensive +- osos +- ▁mach +- ▁vicious +- baro +- ▁vot +- ▁piety +- emos +- nwen +- enca +- ▁flush +- tava +- ▁Ḥ +- ▁allowing +- ▁resigned +- corn +- کار +- ▁destination +- ▁stealing +- geführt +- nier +- ▁dicho +- ▁alfred +- ▁conqueror +- ▁eĉ +- ▁exploit +- ▁peer +- ▁가는 +- ▁assent +- git +- ▁goede +- ▁aye +- mouth +- year +- brid +- свет +- ▁pinch +- ▁Ay +- beck +- ▁obtaining +- bereit +- һ +- ▁consulted +- ▁retour +- ję +- 던데 +- ▁lakes +- ▁tarde +- ▁malice +- ▁Té +- ▁graham +- clav +- ▁annie +- dá +- ▁Вось +- ▁هە +- list +- ▁hield +- matic +- ▁allan +- ▁serait +- folg +- ▁genial +- ▁ankoraŭ +- ட்டு +- ▁ultimate +- spekt +- ▁muto +- ▁gwe +- ▁nabi +- ▁comprend +- ▁как +- ▁gest +- haye +- inta +- ▁visions +- ▁groaned +- ▁excellence +- ▁growled +- lha +- ▁Auto +- މ +- ▁알바 +- bber +- ught +- ▁elderly +- berries +- ▁continu +- ▁wol +- ▁tran +- ▁instructed +- py +- ▁positions +- ulation +- ▁bravely +- forme +- ▁versuch +- ▁bitte +- ▁zouden +- ర +- uɣal +- ▁strengthen +- ▁Oh +- kov +- စ +- ▁맨날 +- histoire +- ▁donkey +- lers +- ▁haut +- ▁gezegd +- ▁achieved +- ▁kure +- гор +- abord +- ▁rede +- tritt +- ▁plea +- بەر +- ▁suffice +- ▁gem +- ▁knot +- gl +- ▁vicinity +- ▁seasons +- гын +- ▁condu +- 겠다 +- ▁American +- ▁gleaming +- 度 +- ▁warmly +- ხ +- ▁asks +- ▁inheritance +- ▁dol +- ▁shawl +- ▁enfants +- ▁aṭas +- ▁mae +- ▁guitar +- ▁desirous +- ▁본 +- ▁essa +- ▁significa +- lok +- nger +- த்தை +- stieg +- ▁fiel +- ▁corresponding +- ▁enthusiastic +- ▁segui +- sell +- ▁ministry +- byi +- àn +- ▁pouvait +- тара +- chal +- ▁бит +- ▁defiance +- buch +- ▁frère +- ▁twain +- ▁perd +- ▁junge +- ▁ash +- ▁endeavor +- ▁diferents +- ▁خیلی +- kumi +- ▁tray +- ▁viene +- ação +- ▁berg +- ▁astonishing +- eller +- ▁steer +- quis +- ▁людзі +- ▁sorrows +- ▁peggy +- ▁frown +- ▁thro +- ▁continual +- ▁mbili +- ▁constable +- rup +- ▁adapt +- ▁drain +- খ +- cit +- ▁westward +- keneye +- ▁transaction +- ▁pitiful +- ▁representatives +- ▁loan +- ▁gust +- itud +- kina +- ▁weten +- ▁commonplace +- ▁recognise +- ▁grund +- guin +- ▁segon +- ▁seventeenth +- ▁russ +- mez +- ĉe +- ▁applause +- ▁quantities +- lid +- ▁tough +- தே +- ▁Ни +- ▁bishops +- ▁Sha +- ▁handful +- ▁hazard +- ▁espèce +- ▁keeper +- ▁праз +- ▁prophets +- ▁гар +- ▁wort +- ▁blankets +- gled +- pression +- ▁рабіць +- ▁curt +- ▁waarom +- ▁Afrika +- ▁oo +- ▁않아 +- لي +- ▁charley +- ▁berlin +- lah +- ▁tutto +- nard +- ▁affirm +- ▁sacrament +- ▁waking +- ▁allowance +- ▁cort +- ṛa +- ▁mum +- ▁meisje +- ▁Nel +- ▁wonders +- ▁souvent +- ▁thieves +- 내 +- 인가 +- ▁attempting +- ▁sechs +- ▁allusion +- ▁embraced +- ▁лә +- ▁pledge +- ного +- wart +- ▁ئەم +- rren +- ▁venu +- tanu +- шчы +- obli +- ▁admiring +- ▁rejoicing +- ▁lawrence +- ▁compromise +- ▁symptoms +- ▁accusation +- ▁theories +- ▁Parti +- ▁contemplation +- ▁kuza +- ▁apron +- भ +- غان +- ▁lid +- ject +- ▁bate +- ▁mantle +- زان +- schw +- ▁behaved +- rato +- ▁horseback +- ▁놀 +- ▁approve +- ▁nouveau +- ▁deine +- bourg +- produkt +- ▁creator +- ್ +- ▁meals +- ட் +- 케 +- ▁thursday +- ▁lawful +- рон +- ▁vrai +- bly +- فَ +- ເ +- ▁gent +- ▁civilized +- ▁orphan +- ▁carelessly +- ▁duc +- ▁traced +- ▁sparkling +- ▁hostess +- ▁transparent +- ▁Mag +- ▁loko +- ▁letzte +- ▁casual +- ▁darted +- ▁trumpet +- ▁african +- ▁machines +- bringen +- cated +- raw +- deel +- ري +- ▁کنید +- із +- ▁manhood +- ▁таму +- schau +- ्या +- ވަ 
+- ▁preĝejo +- kind +- ▁ceux +- ode +- ▁quoted +- riz +- ҟа +- ▁austria +- ▁repeating +- grave +- شن +- 외 +- ▁stuart +- duct +- vita +- ფ +- ▁frock +- ▁mira +- лаш +- cation +- imbu +- 海 +- لىك +- ▁notions +- ▁ornament +- ▁perdu +- ▁transferred +- ▁Ɛ +- ▁drifted +- ▁influences +- ▁borrowed +- ▁attraction +- Р +- мәй +- ▁spi +- ▁behave +- ▁assassin +- ▁bursting +- iy +- ▁dusty +- ▁싶어 +- ▁communicated +- ▁uri +- qq +- ▁laden +- ▁opge +- ( +- ▁skeleton +- تون +- ▁або +- ▁declaring +- ▁psalm +- ▁inscription +- дж +- dera +- கள +- ▁namen +- ▁perished +- eid +- impa +- wich +- mpli +- ▁mississippi +- ▁hanged +- ▁cog +- ▁jene +- скія +- ▁location +- ▁irre +- ހަ +- ▁meditation +- ▁sullen +- ▁보니까 +- tian +- ▁executive +- ▁spectacles +- ▁biza +- ્ +- ▁consistent +- dau +- ▁yett +- და +- ▁announcement +- ▁hop +- өл +- ▁하면 +- ▁sadness +- ." +- ში +- ▁prosperous +- ▁Wan +- сть +- مه +- ▁passe +- ді +- ▁confidential +- ▁daran +- dition +- ▁respecting +- ▁أن +- bû +- liest +- 디 +- lika +- ▁Ariko +- ▁Oli +- igna +- ▁rouge +- ▁glances +- ▁hagati +- ▁фа +- સ +- ándose +- ▁customary +- ▁adjoining +- yamba +- ▁babies +- ▁squadron +- année +- ▁wasser +- ▁இந்த +- ▁Ул +- ▁가고 +- ▁extending +- thought +- ▁distressed +- ▁sentences +- ▁setzte +- ▁evils +- scheiden +- ▁margin +- ▁returns +- ▁circles +- ▁Sy +- 文 +- கை +- god +- ▁우리가 +- plan +- аюць +- ▁вас +- ▁legion +- ▁treason +- ный +- ▁lleva +- ▁apartments +- тен +- mother +- ீ +- ▁거는 +- ▁قو +- ▁German +- iba +- ▁갔다 +- ▁publik +- ▁fam +- ▁whatsoever +- ▁warfare +- ▁plunder +- ▁plaça +- hend +- ▁errors +- ஸ் +- ▁política +- ۋە +- ▁wiser +- moni +- ▁sleepy +- ▁governed +- ▁lieve +- ▁marrying +- પ +- тәр +- wesen +- ▁essentially +- ▁milieu +- ▁weigh +- ▁chaque +- ▁includes +- ▁monta +- ▁snatch +- nci +- ▁juice +- tec +- гад +- ▁avenge +- ncy +- ▁produ +- ▁tien +- 这 +- ▁ubuzima +- ▁maître +- کردن +- kend +- ▁hunne +- sí +- 키 +- fungu +- 았 +- ј +- ▁gardener +- сту +- ▁didst +- ▁camel +- ▁salva +- ▁refined +- ▁pete +- ▁vue +- ores +- кар +- फ +- ▁lodgings +- ▁caso +- zed +- ▁meg +- ▁nacional +- won +- ▁será +- ▁balloon +- ▁quasi +- ▁delicacy +- play +- stairs +- 통 +- ▁nem +- ▁decisive +- ▁Del +- ailleurs +- 明 +- ▁dainty +- pata +- ▁maakte +- れ +- ▁hurled +- pó +- ▁Stra +- ▁pew +- 安 +- ▁Mark +- gren +- ▁aange +- ▁serves +- ▁quoi +- ▁hombres +- ▁tongues +- zera +- ▁Га +- 약 +- ▁questioning +- ▁pip +- cena +- ბ +- вым +- ▁ticket +- ▁chanced +- ▁plunge +- ▁profond +- кая +- ▁vehicle +- ▁dominion +- pad +- acco +- ▁ridicule +- ▁contributed +- ▁paces +- ▁stalk +- 길 +- ▁splendour +- uḍ +- gina +- ▁beter +- licht +- ▁ה +- rif +- ▁homeward +- Aba +- ந +- ▁avez +- ▁소 +- ▁haw +- 亚 +- ▁있을 +- ▁baggage +- ▁generosity +- ▁merits +- ▁shapes +- ▁laatste +- ▁Està +- amento +- 하지 +- spel +- aji +- ▁apostles +- ▁henceforth +- handed +- ▁defended +- ▁grit +- proc +- cry +- lou +- ▁Ɣ +- shyira +- with +- ные +- ▁liking +- acce +- straße +- issant +- ▁converse +- ▁ricevis +- กับ +- ɣur +- ▁vine +- ▁пя +- ▁corruption +- abandi +- ▁bira +- ▁persecution +- ▁phenomenon +- ▁gravel +- கு +- ▁quivering +- ▁dicht +- ▁Ag +- цый +- ▁affliction +- ▁rimwe +- ctive +- ק +- ▁gesagt +- ▁anticipated +- ▁duel +- klu +- ▁democratic +- ▁battles +- ▁Air +- 能 +- ▁bluff +- ▁gef +- cita +- зя +- ▁pension +- ▁shelf +- ▁dam +- pak +- lino +- ▁blunder +- цу +- ע +- ▁Most +- ▁argued +- ▁imposed +- ▁supernatural +- ▁umbrella +- ▁uwa +- wacu +- ▁ўз +- twara +- ▁discretion +- ▁dimly +- cents +- extrem +- lini +- ▁cosas +- jahr +- ▁hinge +- ভ +- ▁cot +- ▁eyebrows +- ▁eleanor +- ▁falsehood +- ▁daher +- ▁Mary +- defini +- amt 
+- laub +- ▁Ҡа +- ▁abound +- ▁grandeur +- ▁secondly +- mod +- ▁bez +- ▁ahubwo +- аваны +- ▁gene +- fes +- rca +- 에는 +- lisa +- eder +- ▁cora +- ▁trifling +- ▁neighbouring +- ▁precise +- яўляецца +- ▁obligation +- ▁litter +- ▁blik +- phone +- ▁줄 +- anyi +- ▁acaba +- ▁stray +- ▁fuel +- ▁bottles +- vey +- ▁sais +- ▁같 +- ▁resurrection +- ▁venerable +- estima +- ▁provincial +- ▁venice +- versa +- ▁만나 +- ▁consumed +- ▁translated +- ▁teu +- ނ +- nywa +- ▁cove +- ór +- ▁vegetables +- ▁attributed +- coat +- ▁hob +- ▁apparatus +- '200' +- ▁philosophers +- ▁traveling +- ▁beth +- ▁hiyo +- ▁lined +- ائ +- ▁quoth +- ▁kumenya +- ▁extravagant +- ▁harness +- zion +- ▁breach +- ▁counting +- ▁zulk +- ▁sternly +- дэ +- lec +- turo +- ▁fuhr +- кава +- ▁socrates +- ▁phase +- mä +- yaga +- ▁نیست +- ▁naught +- ▁attendants +- ▁gelijk +- ▁programa +- ▁separa +- கோ +- ▁jaws +- ▁descending +- ▁pharaoh +- ▁intensity +- riza +- тр +- rée +- aran +- ▁prolonged +- 음 +- ▁worm +- denken +- ▁technical +- dden +- дин +- ▁owners +- ▁encontr +- ▁strikes +- ▁disregard +- ▁puerta +- ondo +- ▁гэтыя +- ▁accomplishment +- mount +- ▁مع +- ▁pels +- ▁weird +- ▁fragment +- ▁jasper +- ▁sincerity +- legt +- лам +- றி +- qe +- сын +- ▁rumor +- cian +- 구나 +- wir +- ▁fum +- ▁gahunda +- ▁proudly +- cyaha +- love +- vision +- ▁pretence +- ▁whim +- ประ +- ▁maggie +- ił +- cier +- ▁буду +- arian +- ný +- ▁vy +- ▁سی +- ▁estates +- ▁kettle +- ▁stations +- ▁cough +- ▁людзей +- ▁Nun +- ▁mwana +- ▁사람들 +- ▁باید +- ranye +- ▁زۆر +- 而 +- ▁mocht +- inen +- ▁rwego +- த்தில் +- ▁Monsieur +- ▁latitude +- ▁Ji +- ▁miracles +- ▁gorgeous +- မ +- ▁closet +- ▁solely +- ▁toast +- še +- яс +- гла +- ebi +- lch +- print +- ře +- ▁кү +- zelve +- ▁partit +- ▁basa +- ▁divisions +- ▁peasants +- ▁camino +- avais +- ▁mouths +- ío +- ▁naye +- ▁mystic +- gambo +- ▁ای +- shaped +- ▁Ent +- wandel +- ็ +- ▁leagues +- ▁família +- ню +- ▁distributed +- rite +- cò +- ску +- ▁instinctively +- ▁orleans +- mid +- arms +- ▁alley +- Л +- ▁woord +- моў +- ). 
+- ▁energetic +- ▁terrific +- ▁사실 +- ▁그걸 +- ▁Qu +- ▁lumber +- ▁suppressed +- ▁honorable +- ▁focus +- ▁eut +- 후 +- үр +- ▁kö +- ▁reduce +- пар +- ▁schau +- ▁jahre +- 목 +- нен +- 由 +- ▁lettre +- ▁말이야 +- konstru +- ▁georgia +- ▁brightly +- раш +- ▁Мин +- ▁ça +- La +- ▁George +- ngera +- gier +- ถ +- ▁slid +- ając +- ▁consisting +- ес +- سم +- ێت +- ▁달 +- laden +- terra +- ▁circus +- ▁cotxe +- ffi +- kha +- йым +- ▁loaf +- ▁analysis +- ▁libre +- सा +- ▁bake +- ▁sports +- ▁package +- ▁buena +- ▁zimmer +- mari +- 其 +- ▁Quant +- ▁나오 +- gleich +- ▁Just +- bizi +- ▁joc +- marca +- cze +- ▁insect +- ▁paw +- rida +- ▁kennen +- च +- ▁divin +- ▁sicher +- ▁пред +- urukundo +- ▁exterior +- èr +- front +- ▁شده +- ▁doute +- cción +- nett +- ▁fireplace +- ▁interruption +- ▁тады +- ▁dearly +- ▁flatter +- ▁zaidi +- écria +- дын +- шь +- блі +- вет +- ▁domini +- ▁такі +- ▁hatch +- ▁franklin +- 세 +- 한데 +- ▁oppressed +- ▁privileges +- уп +- ▁veure +- lion +- ▁oud +- ап +- ▁tira +- makuru +- ▁tafel +- ▁clare +- ▁mothers +- ▁reins +- ▁alguns +- ▁voilà +- ▁digging +- ▁kvar +- tojn +- ▁пара +- foje +- ▁spending +- зна +- ▁Kiel +- ▁pelo +- ▁occurs +- об +- ager +- ▁objected +- ▁produces +- ▁Ал +- lton +- ▁brace +- ▁dopo +- reta +- ▁하지 +- ▁laut +- ▁systems +- lassen +- fas +- ibility +- ै +- ▁jumping +- slag +- ▁간 +- ▁helm +- rew +- ดี +- esi +- niej +- ▁merri +- blick +- фи +- ▁connu +- ▁successive +- regel +- ▁tastes +- వ +- き +- ▁pioneer +- ▁brightness +- ▁factor +- ▁summons +- лыш +- tawa +- 져 +- ▁stan +- ▁nuko +- chung +- ▁kro +- ▁scoundrel +- ▁warum +- ▁ёй +- дні +- ▁blocks +- vina +- ▁dragging +- roll +- ▁excel +- ▁kazi +- ▁treasury +- če +- ▁pill +- gelijk +- ▁sledge +- ▁skins +- quí +- thorpe +- ▁cooked +- itwa +- utsch +- ▁equality +- ▁hark +- uzu +- ▁refusal +- ▁twist +- ▁краіны +- ▁concerns +- bó +- ים +- likuwa +- उ +- ▁appreciation +- 者 +- ▁portions +- пада +- bru +- ▁excessive +- จาก +- ▁disco +- ▁products +- ▁ஏ +- ▁reis +- ▁بد +- ▁distribu +- ▁restraint +- ▁하는데 +- word +- ▁irresistible +- shima +- ▁letzten +- ▁Tal +- nym +- ▁faithfully +- ▁cyose +- ▁kamen +- ք +- ▁jeden +- ▁curve +- ▁cómo +- ▁sheer +- ▁carta +- ▁Ek +- ▁haupt +- ▁hopeful +- ▁automobile +- ▁Not +- ▁jeanne +- ▁moves +- 來 +- yitibwa +- ▁rack +- doj +- ▁tuesday +- ัง +- ▁수도 +- 날 +- aq +- ▁করে +- ▁Dilluns +- ▁caravan +- ▁spray +- ▁நா +- ança +- ▁cinco +- ▁Kandi +- 걸 +- ▁zog +- bracht +- pte +- iten +- ▁shudder +- ில் +- ▁rely +- ▁spit +- ▁simpl +- мет +- ▁jimmie +- ▁device +- fekt +- ▁bliss +- ▁ведае +- turn +- ▁germ +- iana +- 무 +- ▁neuen +- ▁leon +- ▁inventor +- igan +- ▁заб +- ык +- ▁ص +- ▁granite +- eko +- ಂ +- פ +- 合 +- ▁sands +- ▁grinned +- ▁Bel +- ▁ziel +- ▁verloren +- ▁gush +- ▁몰라 +- தை +- ▁seemingly +- kta +- ▁obedient +- ▁pedra +- ▁accurate +- ▁orden +- ▁threat +- ▁northward +- colored +- ▁groom +- fet +- ▁bags +- ramu +- ▁prote +- tangiye +- ▁cakes +- ▁এবং +- ходзіць +- 박 +- лын +- ▁zie +- ▁bridle +- ▁Ev +- ▁mock +- ▁gau +- oda +- ḥa +- ▁alto +- ▁treachery +- sun +- cina +- truc +- ம்ப +- чных +- ▁Pan +- ▁choked +- ▁Anem +- ▁illustrious +- ▁Hor +- ▁withdrawn +- ▁hermit +- ▁kurya +- ▁stubborn +- ▁visto +- به +- ▁loyalty +- ём +- faction +- ▁equipment +- ▁sohn +- pane +- manyi +- ▁wezen +- baki +- cule +- ют +- ▁sever +- ▁haughty +- ▁Two +- ▁screw +- เธอ +- wil +- ▁leaping +- ▁substitute +- ▁tavern +- ▁casi +- ния +- ▁doctrines +- ▁spoiled +- ▁Fra +- ҭ +- ▁compel +- ▁aren +- որ +- він +- ▁districts +- دم +- gine +- uq +- ▁kneeling +- ▁renew +- rma +- ▁slide +- cola +- ▁Far +- rimu +- ▁aught +- neh +- ▁nightingale +- стро +- autobús 
+- ▁peculiarly +- ▁doit +- ▁placid +- ▁Alle +- ▁characteristics +- ▁rotten +- ▁kicked +- ▁Те +- kuzi +- ▁trivial +- impe +- ண்ட +- ▁detected +- ▁comer +- ছে +- ▁loath +- city +- ▁sauce +- ود +- 와 +- ▁adorn +- ▁ibibazo +- ս +- әр +- ▁discharged +- ▁soothing +- vuze +- ▁trials +- ▁ми +- ▁properties +- lite +- ▁조 +- ▁Non +- ashaka +- ▁organs +- ▁pierced +- ▁stockings +- ▁tasted +- buk +- ▁crois +- rach +- ח +- ▁expressing +- ▁finn +- ▁reflections +- ▁hull +- ▁zult +- ▁편 +- ▁Dur +- ▁classic +- ▁Roman +- ▁unkind +- ▁tiam +- ▁맛있 +- ▁hymn +- ▁До +- ▁destaca +- ▁disposal +- hoo +- ▁هست +- nova +- taro +- ▁edgar +- ▁feat +- rgen +- ▁dret +- ▁strove +- כ +- 초 +- riya +- ▁ledge +- grade +- ▁بىلەن +- ppi +- ▁splash +- ▁мог +- ▁femmes +- ograph +- ▁compris +- ▁працяг +- Uni +- ▁honesty +- ▁homoj +- ▁architecture +- ый +- ▁적 +- ваецца +- ▁superintendent +- ▁overwhelmed +- ▁tonight +- ▁fiend +- ▁crab +- ▁finer +- iller +- ▁diris +- fini +- ▁Grund +- ▁annoyance +- ldi +- ▁प्र +- ▁intently +- ▁stride +- ▁Ah +- eki +- ▁cradle +- ▁preci +- ▁vede +- ▁rainbow +- enz +- ▁begann +- ▁dined +- ▁Schw +- avoir +- ▁нешта +- ▁benjamin +- 面 +- ▁dirigi +- ▁дене +- mming +- mple +- ppel +- ▁Wenn +- ▁eric +- ۋ +- ▁anterior +- ▁questa +- ▁trenches +- ▁crush +- وت +- ር +- ▁efficient +- ▁roast +- 님 +- anye +- ▁momentary +- ▁sect +- แล้ว +- ▁fitz +- ▁clubs +- ▁duly +- ▁membre +- тро +- 立 +- ▁soap +- rc +- ▁isle +- jer +- ▁akazi +- ▁baptism +- chron +- dores +- ▁salary +- ▁아니라 +- uso +- 巴 +- ▁adequate +- ▁Ass +- 싸 +- 意 +- ▁чо +- ▁campo +- tono +- pä +- ▁Plan +- ▁coil +- ▁sleeve +- ▁brengen +- ▁extension +- ▁pirates +- ુ +- rov +- ▁инде +- ▁schlecht +- labor +- gr +- ▁convenience +- ▁programme +- ▁gorge +- idades +- ▁gerald +- tausend +- ▁penalty +- مُ +- ▁பா +- fol +- dha +- 색 +- ▁gill +- ▁conductor +- ▁nails +- рг +- rima +- ▁logs +- ▁borders +- ▁misma +- ▁ghosts +- bé +- firma +- bahn +- ए +- ▁cordial +- ▁righteous +- кой +- ▁keenly +- ▁вялікі +- pple +- ▁candid +- ▁recruit +- ର +- ▁bug +- ▁isaac +- لەر +- eɛ +- chief +- ▁stepping +- wijze +- ▁тры +- médi +- ▁layer +- third +- ▁drawer +- ▁inspection +- leb +- ів +- ▁natürlich +- ▁lemon +- ▁Пасля +- lines +- ▁James +- ▁severity +- ாள் +- тка +- ▁oba +- ▁denk +- rh +- trop +- ▁polish +- ▁delayed +- ▁procured +- ▁mau +- கொண்ட +- gemaakt +- ▁oven +- dienst +- 온 +- 发 +- ▁বি +- pê +- ▁hörte +- clou +- тэр +- чым +- ▁тор +- ▁완전 +- ▁crop +- dung +- geschichte +- ▁prices +- ▁causing +- ▁darf +- ▁어제 +- orna +- 任 +- alter +- ös +- ▁nari +- 性 +- ook +- ется +- ▁chip +- och +- ▁jed +- ▁intrigue +- ▁label +- altres +- ndwa +- ▁hamlet +- ▁persist +- ▁expedient +- bita +- fertig +- hawe +- grand +- ▁wisely +- ▁byiza +- ▁lighter +- ▁potent +- ▁carrera +- ▁dagger +- posta +- ▁patriot +- ▁intolerable +- ▁그래가지고 +- ▁vexed +- tini +- ▁naam +- tempe +- ▁sham +- ▁Jahre +- ▁weariness +- ▁countrymen +- ▁elevation +- ▁vegetable +- еи +- aged +- ▁했어 +- яць +- remo +- kni +- ▁Kinder +- ▁antwoord +- ваюць +- ▁ekster +- ▁다시 +- ▁ҡара +- ▁ribbon +- eter +- ▁bred +- ▁realise +- ▁poll +- ▁numero +- ение +- ▁Seine +- ख +- lett +- fond +- genomen +- ▁storms +- trat +- 했어 +- што +- ▁groan +- င် +- chos +- мін +- ▁neb +- ▁taxes +- llar +- kiye +- ▁socialist +- ▁verdict +- ▁Kwa +- ▁influenced +- ▁receipt +- ▁forthwith +- ▁clark +- ▁satin +- kombe +- ▁eleventh +- ancia +- ▁адна +- itch +- ▁schooner +- bh +- aquell +- gab +- ▁sway +- ▁wednesday +- 보고 +- ▁attendance +- ▁ру +- ัก +- ▁denen +- ▁roland +- ▁traditions +- مل +- ▁visage +- ▁shattered +- ambu +- ▁jenny +- bou +- ▁spectators +- ▁novels +- had +- ▁reprit +- 
▁indulgence +- ▁philosophical +- ▁soldat +- iras +- ▁entry +- 体 +- ▁wales +- raad +- fran +- ▁connect +- ▁drunken +- bett +- ▁unexpectedly +- ▁battalion +- ާ +- ▁zijt +- ▁locks +- ppy +- ▁Nova +- ▁devils +- 싶 +- ▁пре +- ▁sneer +- ▁relating +- itz +- ▁pickwick +- ▁resulted +- ▁général +- ▁blijven +- cula +- న +- menti +- вацца +- ▁brad +- ▁wawe +- መ +- ▁schöne +- ▁Ces +- زن +- ▁embarrassed +- ▁pant +- ▁genau +- வர +- هُ +- ▁finance +- ▁sacrifices +- ▁altre +- ▁figura +- het +- abana +- ▁입 +- ▁hasten +- аецца +- ▁panting +- ubwo +- ດ +- ▁foremost +- mpel +- ▁chum +- ▁noisy +- ünde +- ▁babiri +- ▁kutoka +- ▁먹어 +- fik +- ▁submission +- ▁depression +- dolph +- denial +- pè +- activ +- ▁inhabited +- ▁상 +- ▁exemple +- rock +- ▁bil +- eria +- wall +- ▁forgiven +- ▁성 +- ವ +- cato +- ▁droit +- imiento +- ▁Dy +- だ +- badde +- ▁periods +- ▁fluid +- ▁peine +- ▁Mat +- ark +- пам +- ▁grievous +- ▁pry +- Est +- ▁breaks +- ▁athos +- 属 +- nies +- ▁wagen +- ކަ +- ▁imprisonment +- ▁tearing +- ީ +- ▁habla +- ▁stumbled +- ▁fancies +- ▁specimen +- ▁davant +- ▁submarine +- ▁blessings +- uwa +- ▁emergency +- ▁refrain +- ▁kort +- eira +- ▁galley +- ▁med +- ▁grieved +- last +- ▁shadowy +- ▁figli +- ▁distin +- ▁subjected +- தான் +- ▁pursuing +- 올 +- ▁Muri +- ▁볼 +- ▁identical +- یک +- ▁grab +- 해가지고 +- ▁cups +- ▁lydia +- ▁mieux +- ▁target +- ▁assign +- ▁equivalent +- ▁bullets +- ない +- ▁buying +- wright +- ▁riu +- ▁torch +- ▁massa +- ▁phrases +- ▁dachte +- ▁breadth +- ▁fairies +- ފަ +- school +- lette +- রে +- ▁rumour +- ▁antwortete +- ▁shrank +- ▁communion +- ▁û +- ▁unite +- شک +- ▁geben +- მა +- 차 +- ▁distribution +- ▁insane +- ▁darkened +- ერ +- ▁nabo +- ▁cylinder +- ▁ultimately +- ючы +- ▁mound +- ▁cedar +- ▁Australia +- ständig +- ▁uncommon +- ▁transformed +- ▁gasp +- மே +- bala +- öpf +- ▁venir +- ▁dienst +- lacht +- hot +- 린 +- cía +- ▁들어가 +- ▁appearances +- ▁striving +- ▁bounds +- ▁così +- ▁ок +- ▁vois +- ▁junior +- ▁crisp +- ង +- xes +- ▁micro +- ▁poles +- ▁crooked +- ▁circum +- dron +- reden +- ▁hanze +- ▁currently +- ▁bland +- вал +- 갖고 +- bril +- ▁wengi +- ▁rapport +- ▁خوب +- ▁роз +- voll +- schrift +- ▁spark +- ▁alia +- ▁reed +- ▁hounds +- iciro +- kraft +- ▁colonial +- ▁manifested +- ▁tiel +- ნ +- ▁kaiser +- ވެ +- hura +- ▁llarg +- abyo +- ▁ravine +- ▁partially +- ▁lark +- ▁falta +- ▁மா +- ▁plume +- ▁feverish +- ▁noir +- elte +- ąc +- giu +- ▁rwo +- ▁faisait +- ım +- нең +- ▁reaches +- ▁ibikorwa +- igh +- ாய் +- ▁gesp +- ▁damsel +- ▁адказ +- ▁superb +- ▁Well +- ▁Dia +- dini +- ▁oxen +- 现 +- ould +- ▁zacht +- ▁etwa +- 力 +- ▁lacking +- ▁southward +- ▁furiously +- uo +- дна +- ▁victorious +- siz +- ydi +- ▁sollten +- ▁reject +- ▁arte +- aña +- чна +- ▁lever +- ▁petits +- ▁Las +- ▁impamvu +- ▁phantom +- ▁술 +- ות +- ▁mourn +- ▁shades +- rino +- ▁Actualment +- ▁explaining +- ▁quote +- ▁processes +- ▁divert +- ▁iom +- 代 +- face +- ▁uli +- ▁tornar +- ▁constitutional +- ▁engines +- пры +- ▁rubber +- ▁bron +- tischen +- ▁już +- ▁pyramid +- bwenge +- ▁مح +- ыс +- ▁lewis +- ▁furnace +- ▁mugihe +- ▁gekomen +- ▁stripped +- हा +- ▁banker +- ▁edel +- kia +- ▁stress +- ▁verde +- ▁himmel +- ▁indulge +- ▁coral +- 울 +- ▁shrine +- trice +- ▁manda +- ▁rounded +- sept +- ▁profess +- ▁vraag +- ▁foliage +- ▁extremity +- ▁gaining +- “. 
+- ▁ornaments +- ▁conversion +- ▁nap +- 있 +- ▁tisa +- mov +- ▁byari +- ▁dentro +- jou +- ▁выступ +- ▁robbers +- aquestes +- ▁Mac +- ან +- bund +- ▁ekzistas +- ▁contradict +- empre +- ▁joys +- ހު +- tada +- ▁preference +- yor +- эш +- ▁plötzlich +- ▁assuredly +- ▁legislature +- ▁clam +- ▁hof +- ▁obstacle +- nyura +- ▁wald +- ▁outfit +- vad +- formed +- ▁roam +- ▁earthquake +- ▁rags +- ▁sanction +- tang +- ▁staggered +- ogen +- ▁defined +- ▁Hol +- ▁interposed +- ▁sara +- ▁gezien +- ▁seule +- ▁covers +- ▁pest +- ▁wakati +- 프 +- arte +- ▁confounded +- ▁settlers +- ▁wondrous +- ▁obstinate +- ция +- ▁conventional +- gesellschaft +- ▁ہے۔ +- rere +- след +- ▁darüber +- 하면은 +- ▁contradiction +- ▁봤 +- ▁travels +- ま +- ▁horsemen +- refu +- ▁lions +- wd +- ▁Cel +- ▁basil +- ▁deposited +- ▁negoci +- 복 +- ▁sped +- fanya +- sted +- 嘅 +- 次 +- ▁davis +- ▁ropes +- ▁führte +- դ +- ▁archer +- ▁ĉiu +- ition +- ่ง +- ▁flint +- ▁repentance +- 하면서 +- gisha +- ▁Star +- ▁konata +- হা +- embre +- ров +- ▁temporal +- 추 +- ▁twinkle +- ▁fertile +- ▁suck +- ▁urban +- ▁hats +- 준 +- եր +- tou +- ▁assented +- ▁maka +- пля +- ▁indispensable +- ▁Perezida +- ▁begging +- cado +- chamber +- aid +- ▁rubbing +- ▁blossoms +- ▁swam +- bian +- ▁robinson +- aime +- ▁loneliness +- kuri +- ▁areas +- ▁reside +- folge +- 外 +- ▁gaat +- hér +- ▁announce +- ▁imprisoned +- ütz +- ▁gingen +- ▁seizing +- ▁discoveries +- ▁sec +- фа +- ▁uneasiness +- ▁зусім +- ▁guten +- ▁rattle +- ின் +- პ +- ▁gesehen +- ▁Yesu +- ▁romp +- ▁아니면 +- ▁tiun +- ▁afar +- دن +- ▁tener +- ▁touches +- ▁strings +- ▁forgiveness +- ▁courtyard +- ▁reigned +- ıl +- racy +- је +- rara +- hö +- ▁Charles +- yinza +- ▁survive +- ▁зараз +- ▁physic +- بِ +- тып +- read +- vý +- ▁suspense +- ▁aloft +- ште +- кала +- ▁سەر +- emu +- ▁kunst +- ▁trunks +- ▁있었 +- ਹ +- β +- нікі +- 過 +- ▁beams +- ▁concentrated +- ▁costly +- ▁plead +- ോ +- ▁remnant +- krank +- ▁wereld +- ▁Wy +- kamp +- ▁inspire +- ène +- body +- ئو +- leu +- ▁trim +- ▁robbery +- chas +- ▁discussing +- ฟ +- ▁mentre +- ▁hoarse +- ▁freight +- ▁جي +- зда +- tual +- ▁Nur +- neu +- ▁organism +- ▁determina +- ▁дзень +- ▁edges +- ▁قىل +- ▁void +- rana +- ▁sacrificed +- ▁advertisement +- ▁players +- komeje +- ▁repaired +- скага +- île +- ▁adorned +- ▁bored +- ткі +- gabe +- ▁statements +- ▁holmes +- ▁이러 +- ▁intimacy +- crea +- ▁disdain +- 하다 +- ehrt +- pá +- ▁kara +- вых +- ▁penetrate +- mac +- ▁cac +- ▁και +- داری +- trä +- ▁objections +- ▁clatter +- antic +- anche +- ▁vocal +- ▁repu +- lateral +- ▁adventurer +- ▁مە +- ▁cecilia +- 재 +- kala +- ▁desp +- சா +- ▁yw +- schlag +- yama +- ▁siaj +- ု +- gelegen +- மாக +- ▁doomed +- ▁crude +- ▁verme +- ▁rejoin +- dow +- ▁waistcoat +- ێکی +- eqq +- ทํา +- mies +- ▁World +- ًا +- ▁سال +- issait +- ▁softened +- ான +- ŭ +- ▁vainly +- cip +- ▁dafür +- 면서 +- ▁grimly +- ▁partake +- ▁públic +- чак +- klop +- нос +- 位 +- ▁hardy +- ▁خواهم +- ▁ernest +- ▁Ny +- assi +- ▁خا +- ▁pike +- ▁kitten +- ▁crawled +- ▁ami +- ▁conclusions +- 가지구 +- ▁igual +- ▁nursery +- ▁steeds +- ▁pipes +- ggi +- ▁Pal +- ▁wagons +- ▁cured +- ncia +- kanya +- legi +- mill +- ▁pleading +- ▁과 +- ▁inyuma +- morph +- ម +- 짜 +- 급 +- ▁quarante +- ▁barber +- ▁slopes +- ▁llibre +- tli +- tah +- ▁freund +- ▁presenta +- ▁abe +- ▁greet +- ▁hinein +- વ +- ▁splendor +- ▁remarkably +- 었는데 +- ▁bucket +- ▁flot +- ▁zooveel +- ▁flows +- kubwa +- ▁neighboring +- ▁የ +- ▁painfully +- ▁zer +- ▁recur +- ▁jaar +- ▁molts +- ▁muerte +- impo +- ス +- ބަ +- ▁persones +- ▁offend +- ▁akka +- acord +- ▁legitimate +- ▁migra +- combe +- ние +- 력 +- кт +- 
▁subordinate +- ▁Are +- ▁cherished +- nzo +- ▁engel +- ▁rye +- ▁né +- ▁elevated +- mula +- ▁valiant +- ▁кор +- ▁funds +- muka +- ▁signifas +- ▁scenery +- escu +- quel +- ▁lá +- ▁ĉirkaŭ +- ▁sobbing +- ▁Leta +- ▁summon +- тоў +- ▁coolly +- mena +- ▁proclamation +- ▁Haupt +- ▁signature +- ▁preceded +- ▁marius +- pio +- tika +- ▁vegades +- ▁bazi +- ▁bribe +- thu +- ▁Europe +- ▁springing +- ▁recovery +- ▁kissing +- going +- ▁sinner +- ▁cautious +- rist +- md +- ▁yielding +- ც +- ▁beech +- cious +- ▁êtes +- ▁carne +- ▁paragraph +- тура +- ▁fosse +- ▁instal +- ▁completa +- aves +- वि +- ▁guarantee +- ▁click +- ▁fos +- ▁lawyers +- zähl +- మ +- ▁때문에 +- ▁rugged +- igihugu +- ▁esa +- nął +- ▁bati +- ▁tourist +- sima +- 時 +- ▁чаго +- ▁garb +- bwiye +- ▁controlled +- zungu +- ▁unusually +- schloß +- ▁watchful +- ▁caps +- ▁boards +- ▁dua +- ▁respective +- фор +- ▁cinquante +- ▁abrupt +- थ +- nomen +- ▁passenger +- ▁zitten +- ▁fearless +- ▁món +- ▁herrn +- ▁nit +- gericht +- wadde +- ▁tumbled +- ▁tanta +- igte +- iere +- organ +- ▁Then +- ▁typical +- ▁possesses +- ▁definitely +- ▁tama +- ismus +- bald +- ▁Iyi +- mbere +- rut +- ▁salute +- 入 +- volle +- ▁seis +- ▁lava +- ▁aujourd +- ▁staan +- ▁refusing +- реш +- ▁fathom +- houden +- ▁공부 +- 四 +- ▁majestic +- chie +- plu +- ▁sólo +- ▁specimens +- ▁instincts +- ▁reckoned +- ▁이번에 +- ▁highland +- ßen +- ▁Ale +- ▁Tour +- ▁knocking +- ▁wallace +- ▁hector +- ▁mwiza +- ▁intensely +- ▁resume +- ▁stormy +- ▁corporal +- ▁otras +- ▁sensations +- ▁pitt +- ▁lament +- ▁paddle +- ▁convi +- ▁você +- ▁tyranny +- ▁gaily +- тыш +- ▁isolated +- impose +- ▁maintenant +- ▁cupboard +- ▁hir +- ▁nod +- ▁regal +- ▁befindet +- ▁removal +- ▁lw +- 이나 +- جی +- ält +- illas +- ▁ang +- vra +- 站 +- ▁여기 +- rmi +- ▁regent +- ▁telèfon +- ▁ivory +- ▁abbot +- ▁beforehand +- nac +- schaffen +- ▁Bay +- ▁되는데 +- әү +- ▁kisses +- مَ +- ▁кара +- ccio +- ▁disk +- ▁lapse +- য়ে +- wandi +- ▁hii +- ▁chronicle +- baho +- زى +- ▁babylon +- ▁chancellor +- jur +- мовы +- achtig +- ▁proofs +- mile +- ▁survived +- کو +- amara +- меж +- ▁speck +- ▁welcher +- йшлі +- gegangen +- ▁shuddered +- lb +- ▁telescope +- ▁marque +- ▁кил +- ▁ibya +- ▁backwards +- name +- ▁proclaim +- gali +- ▁earnestness +- labora +- stoff +- ▁corrupt +- ▁rusange +- fach +- ▁shrink +- ▁luz +- ▁лепш +- druck +- ▁uncertainty +- ▁herds +- umukobwa +- ▁cricket +- ▁shabby +- pion +- لِ +- ▁تۆ +- suit +- cart +- دو +- ▁bana +- ▁overwhelming +- ▁resign +- ▁estado +- beth +- ▁früh +- ▁amaso +- գ +- rec +- ▁moto +- ▁Бер +- ▁resignation +- ules +- ิน +- ▁dama +- origine +- ▁rapidity +- kono +- ▁modesty +- ▁права +- вання +- ▁array +- ▁partial +- ▁사람들이 +- ▁debts +- garten +- ▁Wat +- ▁prepara +- gend +- ▁bleeding +- ▁inflicted +- ▁Pere +- ▁angst +- hne +- ▁muito +- щи +- église +- ▁exclusive +- зо +- او +- kee +- iron +- дра +- ▁longue +- 士 +- ▁shaped +- spirited +- ▁exercises +- ▁sincerely +- িক +- ▁mà +- вялі +- ▁amateur +- ыч +- ▁hudson +- alina +- ▁slew +- koze +- 그 +- енә +- ▁glück +- ën +- ▁pep +- ใจ +- ▁erhalten +- ▁puritan +- ▁제일 +- ▁cling +- ▁Us +- ▁쓰 +- halb +- ending +- ышт +- อย่าง +- ▁decidi +- almente +- से +- stellt +- ▁fino +- ▁Jen +- ▁тип +- introdu +- ▁cris +- ▁scholars +- قان +- mada +- ntes +- ▁occhi +- ▁mogelijk +- stunde +- ▁Other +- گەن +- ▁perilous +- ▁그렇 +- zam +- ▁igihugu +- رس +- qqim +- tempered +- ▁protector +- あ +- ▁sembla +- ▁loft +- ▁nag +- ▁Sub +- ▁명 +- ▁ankle +- ▁kabiri +- ▁biscuit +- ▁diameter +- ▁vapor +- ckt +- қәа +- zette +- ▁presque +- ່ +- ▁grotesque +- ▁mules +- ▁architect +- ▁packet +- ▁rol +- ▁пі +- ▁Dis 
+- ▁predi +- ▁algunos +- ▁ваш +- ▁specio +- ▁habían +- sang +- ▁caroline +- équipe +- ▁May +- αν +- ▁lone +- 씩 +- ▁deem +- кө +- ▁meisten +- tib +- écri +- ▁heißt +- лт +- ▁его +- ತ +- ▁wien +- angle +- ▁volas +- ▁Mc +- ▁eastward +- 东 +- ▁kru +- ▁smote +- baix +- ться +- ▁roofs +- ▁strode +- 心 +- ▁dreadfully +- год +- ▁conspiracy +- mato +- ▁wegen +- ▁augustus +- аль +- tide +- ▁fishes +- ▁mature +- ▁East +- 二 +- छ +- preservation +- ەکان +- ece +- ԥ +- ▁Josep +- ść +- нуў +- dauer +- ▁nervously +- ▁divinity +- ▁часу +- rack +- ▁mug +- ▁regiono +- ▁rebels +- ▁essere +- yat +- ▁viola +- ▁vino +- ▁endowed +- ▁torre +- ▁deserves +- jí +- ▁ending +- ▁cœur +- ▁tops +- ▁beware +- ▁sections +- ▁fiddle +- ifies +- nian +- ▁gele +- ▁inasmuch +- ▁espi +- ▁ubwa +- gada +- ▁planning +- 此 +- ▁বা +- ▁kindred +- лым +- ▁interference +- ▁heaps +- пера +- ▁brachte +- ▁hesitating +- 相 +- ▁pitched +- ▁bounded +- ▁usage +- ▁wizard +- ▁governess +- sinzi +- lernejo +- mbra +- ▁kinderen +- ▁deceased +- 등 +- ▁biggest +- ▁flaming +- ▁ammunition +- ▁Louis +- ▁pea +- ▁schemes +- 레 +- ▁ceremonies +- ▁petr +- ▁neutral +- rong +- ▁viva +- ▁فر +- ▁zaak +- jyana +- bearer +- ▁passar +- gwe +- ▁envoy +- garagara +- ▁Су +- ▁smoked +- ▁걔가 +- ▁laboris +- มาก +- 属下的一个 +- 되 +- ▁folds +- ▁elk +- 빠 +- ▁representing +- ד +- ▁sentinel +- ▁King +- લ +- ▁curled +- ▁Eki +- ез +- ▁catastrophe +- ▁왜냐면 +- tili +- ▁bamwe +- กัน +- หา +- ▁undertaken +- hmm +- ▁Ol +- лег +- 집 +- ▁indien +- ▁English +- ▁infernal +- ▁Kur +- sió +- ▁parade +- mobil +- amerika +- ▁matches +- gelegt +- ▁여 +- Ba +- ▁creu +- uwen +- ulated +- ▁hire +- ▁competi +- cco +- ▁boughs +- vies +- nywe +- 因 +- ▁rifles +- vera +- ▁heed +- ▁اور +- ▁aha +- tics +- ▁podía +- ▁lieber +- ▁یا +- тал +- ▁tram +- ▁junto +- ▁demi +- ▁wichtig +- ং +- ވެސް +- ▁camera +- ▁sheltered +- ▁jud +- ▁saß +- ▁massacre +- ▁fisher +- ttes +- ▁bijna +- лер +- нер +- ▁wand +- ▁troy +- ▁curs +- ▁yankee +- स् +- ▁далей +- ▁boarding +- ▁chocolate +- ▁antonio +- 적으로 +- гар +- mania +- 手 +- tsinda +- escola +- ▁strengthened +- ▁Jun +- ▁perceiving +- ▁necklace +- ▁furent +- من +- ▁underground +- ▁lavish +- аваў +- дык +- ▁nuestra +- ▁exceeding +- leh +- ▁evenings +- ▁nosotros +- mí +- ▁importante +- ▁represents +- ▁vieux +- ▁ohio +- цаў +- ▁rebuke +- ліва +- ▁Spa +- nous +- tuur +- unu +- ▁imbaraga +- ▁너는 +- 주고 +- мага +- ስ +- flower +- ▁wig +- еит +- ▁precipice +- rose +- ▁sunrise +- іс +- ▁inherited +- ▁ஒ +- ▁sogar +- worn +- ▁continuing +- ▁lungs +- সে +- წ +- 코 +- ▁perché +- ▁Schul +- ▁ferry +- ▁burial +- spann +- ▁rulers +- ▁relacion +- ▁gallop +- ▁promote +- говор +- ▁imana +- oest +- tanto +- аю +- ▁früher +- ▁plateau +- ▁gradual +- ▁정말 +- ▁stool +- kamer +- ▁adieu +- ▁pots +- szt +- ▁melt +- ▁amigo +- ษ +- ▁του +- ▁слова +- іць +- ▁expressive +- ▁pir +- ▁mall +- stick +- ▁zon +- выя +- ать +- ύ +- ▁나가 +- ▁charter +- اً +- ▁cass +- ké +- ambul +- ▁speculation +- lug +- ▁occasioned +- ्य +- cible +- ifi +- ▁trait +- ▁burns +- ▁alteration +- ▁dagen +- ▁taylor +- ிற +- ▁passive +- ▁план +- ▁inflict +- ▁apology +- ▁portal +- ▁peel +- kozi +- ciu +- univers +- ▁갑자기 +- ruḥ +- ▁plump +- ▁kennedy +- ▁wert +- ši +- ▁rapture +- 台 +- ▁judging +- ▁donné +- ም +- uzuye +- ▁habitation +- ▁bower +- ▁vein +- ▁wunder +- ிருந்த +- ▁snug +- ▁Sein +- ▁revived +- ▁handen +- ▁garde +- hali +- ▁whistling +- ▁sobbed +- guye +- ▁nemen +- ▁copies +- ▁banner +- esser +- மி +- ▁critics +- ▁Hall +- люб +- ڈ +- ▁perplexed +- 청 +- ▁mechanism +- ▁mio +- ▁nuts +- ▁turkish +- سه +- ships +- ▁lingering +- shore +- ▁що +- ▁gufata 
+- ▁barre +- bino +- raf +- ▁solar +- ▁prostrate +- ▁contrari +- ▁outrage +- ▁enclosed +- ▁күр +- யே +- ▁guidance +- ▁irr +- ▁suggestions +- lyn +- ▁vond +- ▁nostrils +- mico +- ▁cielo +- ▁nowadays +- бір +- ▁dezen +- ார்கள் +- اک +- moc +- ▁perquè +- кана +- ண்டு +- ▁эс +- ▁launch +- ▁multi +- Argent +- ▁кеше +- ▁saul +- tile +- ▁Flor +- ▁Tim +- ▁Parlament +- นา +- ▁répond +- quo +- zicht +- boro +- ▁shorter +- akt +- herr +- ▁marian +- 연 +- 모 +- ▁están +- ▁draws +- ▁theater +- ▁traveled +- oti +- ▁nobleman +- ▁karl +- ▁arriba +- iam +- fili +- ▁misfortunes +- خوا +- ▁besonders +- ▁Ibyo +- мак +- itza +- ▁shots +- ▁coroner +- зам +- ▁harriet +- ছিল +- ▁bachelor +- Co +- ▁ĉu +- ▁muchos +- 見 +- ▁worte +- پە +- की +- naga +- füg +- ▁carl +- زار +- ▁sancho +- ▁prophecy +- rance +- ▁supp +- eft +- 속 +- ▁States +- dded +- ▁없는 +- ▁stages +- ▁umuryango +- ▁vigor +- ▁abaturage +- ▁stammered +- ues +- ▁lou +- ழு +- ະ +- इ +- હ +- 可以 +- ▁considerations +- ▁pillars +- ovitch +- بت +- ▁tus +- ▁tense +- ▁nekk +- ▁hamilton +- பெ +- ebwa +- ▁escaping +- iel +- ▁penetrated +- ▁першы +- ▁boca +- anne +- ▁Amerika +- ▁incense +- ▁luft +- ▁zik +- ▁Bet +- സ +- ▁grond +- ▁glücklich +- ▁plaster +- ▁fork +- ▁elkander +- iber +- ▁varying +- cess +- ▁repetition +- organisation +- ▁pode +- ▁ware +- ▁oyo +- με +- って +- ▁impart +- ▁전에 +- lif +- هن +- ▁dialect +- fici +- ▁días +- fellow +- ▁foar +- fait +- ▁uur +- roth +- ▁arg +- ▁dimensions +- ▁ssente +- дым +- ▁costa +- ▁ведаю +- ▁jupiter +- ▁sculpture +- ▁leva +- மான +- ▁gad +- ▁yoke +- ▁bavuga +- afrika +- ▁buen +- قۇ +- مو +- action +- ▁zone +- szcz +- վ +- แต่ +- ރި +- ▁cue +- เล +- ▁verily +- ▁pigs +- ىدۇ +- ס +- ▁democracy +- loopen +- ▁entrar +- ▁rats +- holm +- ▁erg +- ▁spears +- ▁cracked +- ▁타 +- ▁Tag +- tato +- ▁Det +- ▁forlorn +- voor +- sein +- ▁என்ற +- ら +- ruhande +- ▁minder +- ▁cavalier +- ▁proceeds +- ▁houden +- ▁substances +- 全 +- ▁slap +- ▁traditional +- ▁trou +- ▁mooi +- ▁revis +- ▁passionately +- alde +- dressed +- ▁acquaintances +- ▁politely +- änd +- ▁año +- 這 +- operate +- ▁decoration +- ▁pines +- boko +- ник +- ▁oscar +- ▁elf +- ▁செ +- ▁myth +- ury +- fati +- ▁unver +- дум +- ▁lighting +- ▁manos +- lange +- ▁fulfil +- торы +- ▁Pot +- ▁sewing +- ▁shilling +- က +- ▁encouraging +- 현 +- ▁Uru +- ▁rescued +- ssar +- ▁scrub +- ▁Hab +- ▁sulla +- altro +- bad +- enco +- mmed +- ▁ieder +- ▁pela +- ▁남 +- ▁zegt +- ▁delusion +- kup +- ▁pourquoi +- ▁amas +- ▁fuera +- ▁mournful +- tait +- ▁afflicted +- гэ +- '000' +- ▁Sus +- ▁lifetime +- ▁refreshment +- ▁melody +- ▁Africa +- ▁regulate +- ▁hive +- ▁Twa +- ▁diseases +- trud +- ixe +- control +- ▁oog +- ▁resolute +- ҵ +- ергә +- ▁meta +- ▁Fou +- ▁morn +- ▁espa +- ▁hosts +- ▁puts +- ▁oblige +- ▁chor +- ▁نه +- heer +- ▁exempl +- ▁glee +- ▁intens +- ▁cousins +- ად +- ▁Yagize +- ▁umugore +- ▁August +- ▁elders +- ប +- ▁problema +- gö +- ▁hijos +- kleed +- ▁irgend +- த்தி +- ▁Cada +- ▁percy +- marin +- ▁moist +- ▁Fort +- ▁veteran +- ▁englishmen +- ▁Why +- ▁carriages +- ▁compliments +- ▁força +- ▁lado +- ▁supporting +- ▁obstant +- ▁beseech +- ▁antagonist +- ▁tipus +- ▁Ми +- tê +- ▁robber +- grün +- ▁mistakes +- ▁petersburg +- ▁countless +- дня +- 야지 +- ނެ +- ș +- ▁혼자 +- acha +- rito +- ▁weitere +- ▁Hy +- ▁leo +- vir +- rud +- ▁eccentric +- ▁expose +- ▁valid +- ▁cran +- беҙ +- ▁nyinshi +- డ +- ▁loi +- ▁unaware +- kota +- ▁honoured +- ▁quitted +- ukira +- ρα +- льны +- élé +- hä +- ▁neɣ +- ▁conseil +- ŝa +- مون +- غۇ +- ▁propriety +- ▁screaming +- eq +- ▁regretted +- tira +- ▁brighter +- bec +- clos +- ffy +- ప +- 
▁chalk +- ifica +- ▁journalist +- lieu +- ▁conti +- honneur +- سب +- sio +- ▁handled +- ▁jaro +- ▁amaze +- ▁monotonous +- क् +- ▁covenant +- avons +- thra +- ▁그거는 +- ▁adds +- abili +- ▁suits +- brand +- ▁excellency +- ▁herzen +- ే +- ▁luxurious +- gabo +- ува +- ▁건데 +- lada +- ▁professed +- ມ +- 거나 +- ▁decorated +- cultura +- ksa +- ▁fade +- teg +- shwa +- ▁famine +- ேன் +- serva +- vez +- ณ +- ▁geoffrey +- ▁bail +- ▁resp +- rinda +- ▁Gemeinde +- heb +- ▁lili +- rf +- ▁adhere +- ▁swarm +- Sa +- ▁retreated +- ▁coats +- ം +- ում +- ables +- ▁blot +- think +- fli +- ▁morte +- ▁relaxed +- ▁eugene +- льныя +- ▁Haus +- hinga +- ▁parc +- ▁nehmen +- ▁parle +- ޑ +- ▁assumption +- 처럼 +- ▁candidate +- ▁waarin +- ը +- ▁carro +- ▁snuff +- ▁headed +- ▁fraud +- ▁objective +- ▁piercing +- ▁mantel +- ▁confine +- ▁matron +- ances +- ▁bleiben +- ▁Club +- ▁од +- ▁restrained +- ▁Tak +- ément +- 内 +- ▁grieve +- ▁mills +- ▁tries +- ▁inconvenience +- xu +- uck +- ▁caballero +- ёр +- funa +- rás +- وه +- 눈 +- ▁celestial +- ▁fli +- kiko +- ▁eût +- harmoni +- ▁admirer +- ▁괜찮 +- ▁weib +- ▁homage +- ▁exposure +- reḍ +- lingen +- ▁obstacles +- ▁Het +- ▁janet +- дай +- нар +- ▁게임 +- ▁dodge +- كَ +- ▁afge +- ▁bloß +- ▁despatch +- raient +- ▁messages +- ▁hermano +- enberg +- ▁agency +- ▁bajo +- loch +- ▁humiliation +- ▁dés +- ▁lastly +- ▁perse +- ▁Sport +- ▁descendants +- ▁Ме +- ▁statues +- ▁lizzie +- self +- ▁skirts +- ▁peered +- ▁nascuda +- ▁suburb +- ▁apollo +- eṭṭ +- ▁manier +- нуць +- afi +- ▁describes +- ▁solicit +- weis +- ▁сёння +- клі +- ▁ink +- ▁bubble +- ▁በ +- ▁izan +- ▁gato +- һын +- ã +- ▁ntiba +- ▁export +- ▁beaver +- ▁alikuwa +- ▁implement +- ▁wad +- wald +- ▁gebe +- ৈ +- ▁Bell +- sabye +- луч +- ▁egy +- өй +- llu +- ▁sends +- oid +- wyd +- ▁umwami +- ▁senza +- ▁sistem +- стр +- ▁manly +- ▁scr +- anomena +- ▁Gri +- ▁shun +- ▁schuld +- ▁homo +- 马 +- ije +- ▁precaution +- ocht +- stamm +- ▁vase +- ▁blut +- ▁Mer +- Or +- ▁blushed +- ▁працы +- nischen +- ніка +- یر +- ▁appreciated +- ▁withered +- duk +- done +- ▁är +- ▁threaten +- ▁exclusively +- ▁هر +- ém +- ▁uitge +- ▁disadvantage +- 책 +- 通 +- ▁blau +- ▁politeness +- ▁gutanga +- idde +- шан +- ▁여행 +- ▁imaginary +- thon +- wani +- ▁Molt +- нал +- ▁banished +- ▁dames +- ▁proverb +- гру +- ▁draft +- dix +- ▁commun +- خر +- ▁varieties +- ▁Ph +- ▁emerald +- าน +- ▁nziza +- ▁neatly +- лов +- ▁mildred +- taking +- ▁needless +- லா +- ▁wholesome +- ▁sig +- ▁jej +- ▁fable +- ▁اینجا +- ▁chemin +- ▁monastery +- ▁legislation +- ▁fein +- ▁topic +- shimira +- ▁заста +- enc +- ▁orchestra +- cast +- ▁brisk +- ▁turf +- ▁leonard +- fried +- ▁poe +- ▁заўсёды +- fallen +- ▁eigenen +- ▁replace +- prä +- lius +- ▁kibazo +- poj +- lek +- batu +- чны +- лес +- ▁inclu +- cci +- ▁statesman +- ▁supposition +- ▁succeeding +- ▁때는 +- она +- ▁în +- ҳәа +- ▁okazis +- ▁займа +- ▁resembled +- ▁reared +- glan +- ▁maidens +- ▁joyful +- ciò +- ▁instru +- ለ +- чо +- ▁беларускай +- vallen +- ▁comforted +- ▁neer +- zone +- ▁pirate +- baum +- ▁바로 +- ▁grans +- ▁membres +- tunga +- ▁sounding +- gha +- aci +- ڕێ +- ▁tempt +- ▁밥 +- ▁destroying +- влак +- ▁rewarded +- ▁그럴 +- stände +- bwo +- ▁defect +- ▁되지 +- ▁huts +- ▁models +- ▁clumsy +- ▁rusty +- indirimbo +- ▁hü +- ▁형 +- äß +- อง +- ▁anticipation +- ▁eigene +- ▁மு +- gewe +- stral +- ▁tess +- ▁Vull +- ▁grie +- ▁jungen +- ▁Бу +- ▁Studi +- feh +- лод +- ▁carlyle +- ▁frowned +- ▁restoration +- ▁pretext +- ▁sali +- ▁nile +- ▁sorrowful +- anna +- ▁cuanto +- eros +- maro +- usu +- ▁constituted +- ▁Will +- ▁geest +- ތަ +- જ +- ▁insignificant +- ▁아예 +- 
▁subsist +- ▁nuevo +- ތ +- ▁prospects +- አ +- గ +- ▁contribute +- richten +- ပ +- ▁بعد +- wend +- ▁Both +- aje +- ▁proportions +- jal +- ▁brushed +- subiza +- ▁Мне +- schule +- ▁horrors +- ▁henri +- upe +- ▁santo +- ▁knives +- ▁گفت +- ▁issues +- ▁patrol +- ▁rad +- فر +- oga +- ▁sid +- ▁Where +- ▁freude +- ▁weeds +- ▁vielen +- ▁rogue +- ▁associations +- ତ +- 토 +- ▁worthless +- ▁sponge +- ▁vegetation +- чыцца +- ▁solve +- ▁glas +- ▁oedd +- lap +- ▁сабе +- ▁threats +- maß +- ▁confronted +- লি +- mü +- бор +- ▁менш +- ▁repeatedly +- teye +- jf +- ▁hap +- ▁dazzling +- ▁endurance +- ▁conservative +- ▁pudding +- metre +- trekken +- át +- ▁би +- ട +- ▁konstruaĵo +- brook +- ▁beauties +- ▁posted +- ▁fray +- ▁heated +- ▁moustache +- ▁asylum +- кен +- ▁Усё +- ▁gebracht +- ▁trovi +- ते +- ĝu +- ▁integr +- ▁Union +- eti +- ▁cyrus +- ου +- ▁வெ +- ▁biba +- ivity +- ▁babo +- ▁somme +- ▁Welt +- ▁accidents +- enburg +- ▁teba +- ▁posterior +- ▁nchini +- ڕا +- ić +- ▁tennis +- 설 +- ▁canvi +- зем +- ufa +- ▁murray +- sighted +- ▁клас +- ▁jeff +- sozi +- ▁crops +- sters +- nius +- jä +- ▁ilin +- ▁judicial +- ▁일본 +- rump +- kl +- ▁yon +- teria +- ▁Cam +- entre +- mates +- ει +- ్ +- ▁Раз +- ▁gaul +- ▁license +- ghi +- ரை +- 지만 +- ▁conscientious +- ▁facile +- yongera +- ▁Fall +- ▁hawa +- dessus +- ▁howard +- ises +- ▁humbly +- ▁ĉiam +- ▁complica +- kundi +- ▁enlightened +- green +- tracht +- ▁westminster +- ▁jugador +- rije +- ▁Aka +- ▁exaggerated +- ▁pulpit +- ▁reverend +- ▁hack +- ▁Ky +- चा +- ▁turks +- ▁ик +- ▁camin +- wort +- ▁fascinating +- ▁academy +- ▁strait +- ▁nell +- ▁cye +- ▁format +- folk +- ▁publicly +- ▁kubwa +- ▁kiam +- ▁Beni +- zag +- ôn +- ▁indulged +- ▁pretending +- ▁treacherous +- Z +- ▁rapp +- ozi +- ▁valet +- wol +- ▁Max +- ▁이케 +- ▁choses +- kanye +- ını +- ▁dispers +- limo +- ▁kumi +- jong +- ▁fantas +- ள் +- ▁fits +- gano +- ▁batteries +- ▁xa +- ▁ĉefa +- ▁dingen +- ▁secondary +- ▁pilgrim +- ត +- fier +- ▁calamity +- ▁thrice +- ▁slipping +- ▁lain +- ▁sowie +- تان +- ▁attentive +- жыць +- ▁fem +- ▁surf +- ġ +- ▁elapsed +- ▁보면 +- ▁umunsi +- ▁costat +- เก +- erf +- ements +- ▁Form +- 학년 +- iyor +- ▁northwest +- ▁scratch +- در +- wig +- ▁wot +- ▁moan +- ▁Yella +- ▁Roc +- ▁eindelijk +- ozo +- гляд +- ▁vices +- ▁actors +- ▁contemporary +- lhe +- byaha +- ▁posts +- чэ +- ▁frantic +- ▁irritated +- ▁rouse +- uva +- mist +- ▁prolong +- ▁Он +- ɗ +- ▁landlady +- ▁insolent +- ▁Был +- origen +- нес +- ▁brazil +- ▁vragen +- tako +- ▁moll +- ▁skilful +- rako +- tub +- ▁gait +- ▁undertook +- ▁vigour +- لىرى +- macht +- ▁assuming +- 려고 +- ▁warren +- ப்பு +- associa +- ▁epoch +- نده +- ▁peep +- ifer +- бач +- ▁puzzle +- ▁compa +- ▁말고 +- ▁Cham +- ▁nail +- ▁불 +- ▁sales +- ▁detailed +- رب +- ▁Miss +- luck +- ▁disappearance +- pack +- tü +- ▁popularity +- ێر +- ەر +- ▁Esto +- ït +- ▁servei +- kunde +- ▁espe +- ▁respectful +- ▁texas +- ▁vincent +- ▁naples +- ▁tutor +- ▁unos +- storm +- ވާ +- lg +- ▁diverse +- ▁fenster +- ckle +- цыю +- 五 +- σε +- ▁termed +- یل +- ▁condescend +- ▁miraculous +- ▁parler +- mě +- ▁berth +- mour +- fact +- зар +- ▁Zo +- ▁Hand +- ▁poetic +- ▁chestnut +- ▁richly +- ▁Sse +- ▁historic +- fru +- ▁leer +- irira +- ▁discovering +- kwam +- raum +- dak +- ▁aimed +- лены +- ▁receives +- rani +- ▁indica +- ▁contracted +- گه +- ▁claws +- ▁peaks +- ▁salut +- ▁됐 +- ▁cultivation +- ▁ecclesiastical +- ▁모르 +- ▁sieht +- tjes +- schloss +- ▁seulement +- 이라 +- ▁foreigners +- ▁ridden +- เรา +- ▁pom +- rade +- ▁mocking +- ▁تع +- reichen +- liza +- tip +- tsch +- ▁accuracy +- ▁Yes +- ಗ +- ▁austrian +- 表 +- 
▁csar +- wanga +- yas +- grown +- ▁Tam +- hí +- ▁sieben +- rood +- ▁faltered +- ▁dates +- imp +- ん +- ▁champagne +- ▁corona +- ▁tartar +- ières +- ▁entr +- ▁See +- dolf +- ▁devient +- ாத +- ях +- ▁lodged +- üg +- ո +- ▁언니 +- ▁glauben +- ▁majoria +- ▁missouri +- état +- ▁daddy +- ▁linger +- ▁dins +- ▁generals +- ▁truck +- ▁scias +- ▁bust +- ▁sophia +- unque +- ▁herinner +- St +- ▁scriptures +- chem +- ▁umutima +- ▁consol +- ▁absolut +- даць +- ▁Ihre +- ganya +- ▁enters +- том +- lach +- ▁diminished +- ▁bilan +- ▁vinden +- ▁நீ +- ▁sciences +- tangaza +- ▁sinister +- ▁ogni +- ▁그것도 +- ް +- ▁cuerpo +- ▁apparition +- ▁nelson +- ▁bobby +- က် +- nten +- ką +- wun +- geboren +- ▁drake +- ▁allied +- ▁izina +- ▁Ses +- ▁fidelity +- ▁sample +- cional +- こ +- ▁bestimmt +- tata +- ▁wrap +- rangi +- ▁aided +- ▁offspring +- ▁rhyme +- ▁Süd +- େ +- ையும் +- ▁whirled +- buza +- ▁генә +- ▁Island +- ▁Aber +- ผู้ +- hama +- anoj +- ▁herd +- นั้น +- ▁hoo +- ▁labours +- ▁lebens +- ▁phone +- ▁compete +- trieb +- ▁profitable +- ▁restrict +- ▁singularly +- ▁sweetly +- ரிய +- ван +- ▁helena +- ▁სა +- ▁practise +- ▁brake +- ▁아마 +- ▁plen +- dez +- ĝoj +- ▁shivering +- ▁sigui +- bwiza +- ▁Canada +- ▁succ +- ▁tedious +- laid +- ▁있다 +- pod +- muziki +- ▁morals +- ▁joining +- ▁differently +- ▁customers +- ▁cement +- ▁چند +- ▁preliminary +- انە +- ▁strolled +- ▁zose +- кат +- ovna +- ▁сказаць +- чай +- ▁Kristo +- ▁rustle +- barra +- aḍ +- ▁primo +- ▁روز +- ▁courteous +- льным +- ою +- teɣ +- ▁moyen +- obra +- ӡ +- ▁stellen +- 종 +- ▁sweetest +- ▁Uyu +- riɣ +- ▁barge +- ▁concession +- जा +- ▁regiments +- ▁Torre +- ▁hag +- 阿 +- ▁carolina +- maak +- hnen +- ▁Это +- ých +- iez +- ganda +- laufen +- ış +- ▁cantonada +- ▁Red +- κα +- œuvre +- ਜ +- observ +- deɣ +- стар +- ▁huo +- ▁acceptance +- ▁могуць +- ligen +- ▁힘들 +- ▁вам +- 공 +- ▁komp +- ▁municipi +- ▁environ +- ▁bertram +- بل +- ardi +- ▁hoorde +- ల +- ierung +- ▁foundations +- ▁languid +- 라는 +- covered +- ▁gentil +- ▁sor +- ▁каля +- ▁юҡ +- بار +- মি +- ▁privately +- ▁backed +- gezogen +- ▁애들이 +- udu +- ▁Menschen +- respect +- ▁gum +- ▁loom +- ▁heath +- date +- ▁ensued +- dura +- lose +- ▁swayed +- ▁quinze +- ▁يې +- ▁tended +- әк +- ▁prejudices +- ▁pros +- huit +- ▁lull +- ่อ +- tria +- ▁unreasonable +- ▁removing +- ▁equipped +- ▁framed +- ▁vanish +- nish +- ▁crawl +- ▁rustic +- ▁behavior +- ▁lloyd +- ▁attire +- ▁settling +- pita +- girl +- ▁sotto +- 喺 +- ▁fragrant +- ▁Ebi +- ktion +- arc +- ▁모르겠어 +- ▁bicycle +- ▁coachman +- ▁Shi +- ▁адно +- ▁grub +- nico +- found +- thy +- ▁sturdy +- ▁Rw +- ▁такое +- dec +- ▁abans +- نت +- walt +- ització +- ▁Frau +- ▁gedanken +- லி +- ▁roc +- klar +- ▁comic +- ская +- ▁agricultural +- esque +- ▁lesser +- lè +- ▁zy +- ▁corporation +- ▁hannah +- ▁Tett +- 长 +- ▁commencement +- ▁klas +- ▁yelled +- ▁exertion +- ssé +- μα +- час +- ▁historio +- ▁viewed +- ▁ideals +- ▁England +- ય +- ▁devout +- ▁rut +- ▁brim +- ▁kio +- evo +- ▁instruis +- ▁نو +- ▁laboratory +- burgo +- ▁primero +- ▁بیا +- ▁gelaat +- ▁Niba +- ppo +- ▁blackness +- ▁extinguished +- ▁liar +- ▁цяжка +- gion +- ▁liefde +- ▁milli +- gambi +- ▁links +- ▁crater +- ▁pennsylvania +- viel +- ▁porthos +- ▁augustine +- bira +- ▁routine +- чык +- ▁haul +- luk +- paper +- ▁gelukkig +- ಸ +- альна +- ▁әле +- ▁pai +- ▁Unu +- еү +- 对 +- ▁cecil +- ▁sweetheart +- ▁peering +- sema +- ▁Scha +- asent +- ▁bandage +- kunze +- اج +- tay +- koloni +- kers +- ▁burglar +- ▁tengo +- ень +- nato +- ▁ehe +- ▁repos +- ▁bertha +- etan +- ▁verkis +- ▁okuba +- ▁clenched +- öhn +- fusion +- ivi +- ▁Ei +- ssu +- ▁ئىش +- 
кае +- ▁sagt +- ▁simultaneously +- ▁perch +- များ +- ன்ன +- อยู่ +- ▁dla +- ▁쪽 +- ▁orderly +- ▁spade +- ▁ancora +- ▁jesuit +- важ +- ▁raging +- ▁carrier +- ▁drifting +- ສ +- ▁flocks +- ▁boon +- ▁contribu +- ность +- ▁turi +- ixen +- маг +- gire +- ▁renounce +- kwata +- ▁Dans +- ▁displeasure +- ▁maids +- riga +- ▁vertical +- rongo +- ▁superficial +- ▁fling +- ▁detachment +- ▁Fri +- ▁deren +- abanya +- fica +- melo +- ▁nearby +- ▁생각을 +- tops +- ▁lingvoj +- ▁fascination +- رف +- 하잖아 +- bürger +- ▁industrious +- дат +- پتۇ +- ▁accounted +- ▁imitate +- ▁distracted +- froid +- magnet +- ▁gye +- ▁Frage +- ▁gotten +- ▁comedy +- bano +- ▁hailed +- ▁register +- ▁saluted +- эння +- ▁exit +- 따 +- cn +- / +- ▁actress +- okra +- һеҙ +- kunga +- ▁кошт +- '16' +- été +- سان +- gwi +- ▁weniger +- ▁gebruik +- über +- ▁Spi +- ▁якая +- ▁mains +- ▁recib +- ▁penn +- 난 +- aplica +- ▁dein +- ▁hinted +- '30' +- ▁camps +- miz +- cava +- goma +- ား +- ujo +- ງ +- ▁troublesome +- ť +- ▁był +- ▁열 +- media +- 만원 +- ▁precipitate +- ▁Sel +- ተ +- ▁accommodation +- ▁daisy +- ▁statute +- ▁cod +- ▁fugitive +- lige +- ▁wha +- ▁dolly +- dore +- ▁Bis +- саҡ +- ellement +- ▁gratified +- ▁geheim +- embla +- ▁stair +- ▁pies +- цкі +- kire +- ▁Recht +- ▁breeding +- ▁començar +- ▁gladness +- ink +- ▁ddim +- ▁patches +- ▁shameful +- teddu +- ▁Cro +- rufen +- corre +- ▁identified +- croft +- üß +- hagaze +- 번 +- 金 +- ää +- ▁precipe +- ▁bam +- ▁hija +- ▁weaker +- کرد +- ▁booth +- ▁unbroken +- 為 +- ▁clump +- same +- arle +- ▁shiver +- ndash +- аб +- ▁Frank +- llah +- about +- ▁Mr +- ▁Char +- kere +- ▁chaplain +- ▁enchanted +- ▁nodding +- timo +- служ +- ▁nachdem +- ▁companionship +- igeze +- weit +- дам +- ▁detached +- ▁grammar +- sized +- ▁uzata +- شان +- री +- tende +- ▁considera +- ▁fritz +- ▁corri +- ▁który +- 출 +- ganira +- dank +- ต้อง +- 女 +- ▁dripping +- fast +- ▁kuti +- 갈 +- Һ +- ▁richtig +- ▁securing +- ▁relish +- ▁curls +- ▁ĉar +- hili +- ด้วย +- ▁claude +- ▁lasting +- uzo +- ▁رفت +- ач +- หล +- ніч +- ▁muddy +- ▁Gil +- ▁educa +- ▁controversy +- zet +- ▁profits +- goro +- 순 +- ká +- تۇ +- schieden +- ranger +- ▁trouver +- ுவ +- ір +- sign +- ▁estaban +- 知 +- ▁crouched +- 글 +- مس +- ▁julian +- krib +- ▁streaming +- ▁athens +- zzle +- ▁tylko +- ▁instruct +- ▁Richard +- ▁Problem +- umuntu +- ▁ransom +- ▁오빠 +- ▁males +- ▁writ +- ▁stitch +- πο +- gezi +- ती +- ▁cowardly +- foli +- ▁lyon +- ▁syria +- ▁verle +- ▁discouraged +- ▁nommé +- әт +- feli +- ▁arro +- ▁yamu +- ▁metro +- fant +- اء +- ▁publi +- orde +- ގައި +- ▁przed +- ენ +- ▁அவர் +- trap +- 임 +- ▁feind +- umvise +- ▁whipped +- ▁exert +- ▁hoofs +- ▁생각해 +- ▁dungeon +- ▁என்ன +- праў +- ▁aqui +- ▁faris +- ĉi +- 줬 +- ▁그러면은 +- laba +- nça +- ▁alexandr +- cend +- ▁tortured +- ▁coneix +- ▁түгел +- ▁언제 +- ▁бала +- ähr +- ▁familio +- ▁memorable +- ▁kenn +- hagi +- ▁toilet +- ▁니가 +- ▁resource +- rane +- ▁molto +- ▁duration +- ▁туды +- पा +- ▁manor +- ตัว +- ▁construct +- craft +- ▁clamor +- ▁commend +- dah +- ensemble +- ▁palabras +- кру +- ▁comin +- 無 +- rain +- ▁deliverance +- Н +- 새 +- makers +- ▁banc +- borg +- ▁perdi +- ▁entertaining +- ▁surrendered +- ▁urging +- iĝi +- ▁তার +- ▁arises +- ▁athenian +- ▁moisture +- ▁noodig +- geno +- னர் +- ▁amar +- ▁chickens +- verein +- ▁footing +- lı +- ▁Roma +- adores +- rila +- ▁persuasion +- ▁stag +- ▁eri +- ▁howling +- ▁prit +- ▁Tar +- cata +- ▁ascertained +- کۆ +- ރަ +- чка +- eared +- isant +- ▁uttering +- ▁همه +- ▁invest +- ಮ +- асць +- rill +- ▁gros +- hwe +- tanze +- ▁spontaneous +- ▁consultation +- teeken +- ▁Kra +- ctic +- 엔 +- 
ancien +- ▁derive +- imento +- лад +- ø +- ▁jaren +- ٍ +- ▁electricity +- ▁biz +- ▁husbands +- ▁Hill +- ▁sorte +- ▁rive +- ▁undergo +- ല +- ▁sparrow +- nage +- ▁걔네 +- хи +- kirche +- ▁boundary +- ӧр +- ▁snare +- ubuzima +- ▁oppression +- ▁camels +- ▁паказ +- wain +- ▁Bat +- ▁뭐지 +- ▁birch +- شى +- نَ +- ▁ubwoba +- ▁historian +- ▁pati +- änder +- ਮ +- ▁façana +- ่น +- ▁nuestro +- bow +- 身 +- ▁집에 +- பி +- ▁magician +- ▁되고 +- ▁Universitat +- ▁pillar +- iah +- 先 +- urus +- ▁roy +- ała +- ▁reel +- ▁glaube +- 하기 +- ▁bury +- ▁chairman +- ▁heroine +- ือ +- ▁anew +- ▁rook +- leben +- ▁prodigious +- ▁rho +- ▁canoes +- ▁зноў +- politik +- wana +- ▁Eb +- сен +- subira +- rauch +- rę +- ▁outlaw +- ▁allerdings +- فا +- ▁forcing +- ▁prick +- ლი +- ▁남자 +- eilt +- ▁mechanic +- ▁desolation +- ▁imposing +- ▁Mur +- ▁regi +- ▁canton +- ▁yahweh +- ▁mazi +- home +- ▁cours +- یەک +- ▁describing +- ыла +- mog +- 灣 +- vine +- ▁define +- ▁nicely +- ▁poisoned +- idée +- aï +- ދަ +- ▁এই +- phan +- schaften +- ▁seeks +- ▁females +- ▁merrily +- вяз +- activitat +- ▁poz +- ▁menshi +- ▁gelang +- kling +- ▁accepting +- ▁thrilling +- ▁ihres +- ▁execute +- ▁ebenso +- there +- تو +- ▁Und +- ▁movie +- ▁degli +- agala +- ▁clasp +- ▁dernière +- ▁heavier +- aliment +- ▁listener +- tagon +- ▁inevitably +- ▁correspondent +- мент +- ▁gleamed +- ▁reluctantly +- ▁баш +- ▁retra +- ▁pronto +- poko +- ugi +- ▁адчу +- kutu +- ▁afore +- ▁hurl +- ▁esteemed +- кія +- ія +- ▁perchance +- ▁decade +- ▁regain +- pera +- ▁complaints +- ▁morton +- turm +- ▁agafar +- ▁Tt +- ▁celebra +- ▁herein +- ▁Gas +- schließen +- ▁sums +- ▁collecting +- dier +- wert +- ▁clement +- ▁lucien +- ▁Ana +- ▁bustle +- estat +- xon +- fau +- ▁hände +- ▁Ober +- ው +- ▁resembling +- ผ +- ▁dispense +- ▁organic +- ▁volgen +- ▁resisted +- 原 +- ▁stationed +- ▁venne +- ▁believes +- 했던 +- ▁aristocratic +- ▁région +- ▁intending +- ટ +- 삼 +- ▁mike +- ▁paste +- نم +- ▁California +- shoboye +- விட +- ▁milit +- ▁pedro +- ▁ecstasy +- ▁wears +- cine +- ▁oben +- ▁Bla +- ▁revolutionary +- '!..' 
+- pher +- liwa +- ▁тө +- ▁australia +- ▁usurp +- èn +- ▁salle +- ▁sorely +- rette +- тру +- ▁requisite +- ▁sneak +- rta +- ▁compensation +- ▁worshipped +- vura +- ▁semble +- ▁lois +- ▁despairing +- ▁surround +- ▁alcohol +- ▁Did +- ▁awaken +- gesprochen +- ▁perplexity +- ▁colli +- ▁shu +- ▁abyss +- ▁jehová +- ▁portuguese +- 란 +- wachsen +- هم +- ▁notorious +- lê +- ▁roche +- lulu +- hwa +- нды +- ▁cooper +- ▁howl +- ▁aramis +- ▁associate +- جا +- ▁kentucky +- ▁settlements +- efk +- ▁headache +- ▁último +- ▁egyptians +- ▁prim +- ▁luggage +- 比 +- ▁palms +- impu +- ద +- ▁epistle +- interes +- ▁ominous +- ырға +- ▁nav +- ▁discreet +- 基 +- ▁shivered +- ▁muffled +- ▁morto +- τι +- ним +- ▁ciel +- ▁detained +- сцю +- ▁fisherman +- ٽ +- ▁demons +- 주는 +- staat +- ▁particles +- ▁dedicated +- ▁quedar +- авана +- ▁serie +- ် +- graduate +- ▁musing +- ▁зрабіць +- дуць +- ▁starving +- ▁publisher +- nneɣ +- ▁grocer +- ▁жыццё +- ▁thereafter +- 起 +- ▁disgusted +- ▁trading +- здо +- шты +- ▁двух +- ▁ferdinand +- zda +- ▁exceed +- 친 +- ▁circulation +- ▁kunt +- ência +- ▁아니고 +- อะไร +- ▁한번 +- treffen +- afu +- gefallen +- ▁Super +- ▁involuntarily +- ▁thirsty +- вать +- ará +- ▁talks +- ән +- situ +- ▁Durch +- ▁hardened +- bí +- ▁cork +- spread +- ▁canst +- emente +- ▁British +- ▁unequal +- ▁flourish +- erta +- ▁эр +- ▁sujet +- ▁봤어 +- ымі +- ▁fama +- schal +- ▁ahantu +- ▁joue +- ▁hals +- cere +- ▁prompted +- uï +- ޓ +- kio +- ▁situa +- ▁muchas +- ▁memorial +- ▁Albert +- ▁balina +- ▁costs +- ▁direkt +- ▁empress +- меш +- xton +- ▁straat +- ▁depressed +- лат +- уд +- ков +- ▁racing +- ▁cherry +- ▁corazón +- ▁crag +- Pa +- った +- gte +- ▁erschien +- плы +- ▁labors +- ▁bedside +- mio +- ວ +- ▁wiggily +- ▁siete +- ▁meek +- ▁vigorously +- erton +- ▁sleeves +- ئے +- ▁икән +- ▁jacques +- ijuru +- radi +- ▁төш +- ▁Mad +- ▁China +- ▁باز +- ▁burke +- ▁horizontal +- пыт +- ▁briskly +- rial +- abri +- чнай +- ້ +- ▁primarily +- ▁chuckled +- ▁curved +- сьці +- ার +- '15' +- baw +- tätig +- ▁genus +- ▁sinners +- ▁choir +- asu +- ့ +- į +- ▁trabajo +- ▁batter +- ▁luminous +- ▁darker +- ▁moore +- ▁giants +- ▁motions +- ▁hateful +- ō +- ħ +- mers +- نش +- koro +- ▁hears +- rush +- ▁sekva +- visi +- ▁glove +- ▁voluntary +- ▁losses +- وار +- ▁Von +- ▁mechanically +- ève +- ▁prova +- ▁Ken +- nika +- ▁Ĉe +- ▁energies +- ▁veni +- ▁technique +- ▁fellowship +- ▁escrit +- ▁miß +- faransa +- ▁edmund +- 겠 +- ώ +- half +- ▁riddle +- avez +- ▁나왔 +- mäßig +- роз +- ▁ziet +- 렸 +- péri +- ▁blunt +- ▁titles +- ▁williams +- usse +- ▁recovering +- ▁Ntu +- ▁само +- է +- ▁rolls +- шын +- ▁pail +- ▁founder +- ▁ripple +- halen +- ▁marcus +- ▁rwanda +- ▁excepting +- ▁civilisation +- geschlossen +- angiza +- ▁була +- quill +- dealer +- rigo +- ▁schwarz +- ▁dickens +- ▁erhielt +- ligt +- tega +- ▁snowy +- ▁그서 +- ▁pronounce +- tré +- ▁entgegen +- нія +- maz +- ▁salad +- ▁já +- лей +- ▁moth +- gaba +- ▁feuer +- ▁அந்த +- خە +- зм +- ваўся +- ?! 
+- ▁picnic +- esc +- world +- ▁traduk +- ▁constantinople +- ▁així +- ▁archie +- нава +- ▁clown +- маш +- ▁ooit +- gesteld +- ▁Яго +- поль +- ▁charmed +- йшла +- ▁Madrid +- чныя +- ▁peru +- ▁opponent +- wahr +- ▁locality +- ▁steed +- ▁sobs +- мый +- ▁maso +- ами +- بە +- ▁owen +- ▁हो +- рға +- gena +- ▁superstitious +- ▁altri +- ▁Fla +- விட்ட +- ▁Pour +- ▁infidel +- ▁kilo +- ▁felix +- ▁respondió +- ▁flattered +- loca +- ▁mining +- ацца +- ▁stored +- estructura +- ▁dazed +- erei +- oze +- ▁nieuwe +- ▁aaron +- யாக +- ▁anecdote +- ▁diary +- ▁가면 +- ▁Let +- ▁humorous +- ▁syllable +- miento +- pfu +- gown +- ▁shovel +- tamente +- lob +- зь +- ▁teaches +- މާ +- ▁tart +- ▁administered +- ▁ignored +- ▁balcony +- ош +- ▁rubbish +- ▁State +- ▁finishing +- тем +- ▁dissolved +- ẓẓ +- ▁criminals +- ▁nza +- 布 +- ▁cambridge +- koni +- ▁Kor +- gib +- част +- ión +- ▁contemplated +- ▁hubiera +- ▁Tes +- ▁swelling +- ▁tons +- ▁út +- ▁Quer +- ▁duncan +- ▁strap +- ifuza +- ▁headlong +- ▁seele +- ▁possessing +- aille +- ▁dormi +- éra +- вуч +- 常 +- ▁injure +- ▁neder +- ▁orthodox +- ▁chemist +- ▁thickly +- ▁worms +- ▁delante +- ▁parrot +- ▁emotional +- ð +- ▁expectations +- ▁beans +- litz +- іх +- ▁esqui +- ▁Lake +- ▁trent +- ▁ties +- hak +- ყ +- һе +- ▁tips +- ▁Pat +- inshuti +- ▁futur +- ▁свой +- 애 +- eerd +- anze +- ▁declara +- '00' +- ▁resemble +- ▁داشت +- әлә +- ▁Jan +- валіся +- ▁bridegroom +- ▁많아 +- ▁bowing +- ▁vuba +- ▁شا +- Ho +- iano +- ދު +- ▁principally +- rsi +- ▁curate +- ▁plentiful +- ▁clemens +- ▁Wasser +- ваю +- ride +- ▁fawn +- ▁iminsi +- ▁imirimo +- óż +- দি +- nach +- ▁embroidered +- wende +- ▁inquiring +- ▁canyon +- ▁effectually +- ▁holt +- gung +- ▁glowed +- ▁secrecy +- ▁Thomas +- ▁nueva +- жэ +- wał +- ▁lac +- servant +- εί +- shyize +- ▁blamed +- ▁Jam +- ▁jego +- بێ +- annon +- ▁physicians +- ▁barbarous +- ▁shifted +- ахь +- ▁vienna +- ▁accidental +- ▁explica +- ▁participate +- ptic +- 该 +- ▁mortis +- ▁overcoat +- ▁miser +- ĥ +- ▁starve +- ▁anzu +- bih +- lida +- ▁expand +- ographic +- grid +- ▁eliminat +- бер +- ezza +- ▁scanty +- jk +- ▁caballo +- யும் +- rega +- ▁parola +- bry +- ▁procedure +- ▁trug +- kaa +- ▁ئۆ +- ▁Jag +- ▁persistent +- ▁Arab +- ▁representa +- ▁кожны +- ▁Kirche +- ▁arising +- plaats +- ▁alarming +- ▁solchen +- ront +- ▁struggles +- ▁steadfast +- ▁martyr +- пал +- hoff +- ▁Dw +- ▁Of +- ▁parker +- дө +- edged +- kiem +- ρι +- tana +- ▁suoi +- ▁peal +- িত +- bore +- ▁baronet +- ธ +- ەن +- ับ +- bilitat +- quent +- ▁Posteriorment +- ▁conversa +- ▁joven +- ▁reine +- ▁fing +- ▁운동 +- 格 +- baga +- ▁continental +- vyo +- เท +- ▁goats +- org +- ▁catholics +- ▁declares +- ▁quint +- ▁devices +- ▁camí +- ▁Berg +- big +- ▁signify +- ▁composure +- parent +- ▁stew +- ▁insight +- ▁Obu +- ▁क +- Y +- ям +- æ +- ▁yacu +- imodoka +- ▁Ĝia +- στ +- ▁dus +- simu +- ▁arisen +- بین +- ير +- пат +- گى +- ▁dismiss +- чыта +- ▁marches +- ▁beak +- ▁آنها +- dige +- ▁alleged +- ष +- ஜ +- ▁internacional +- ▁peach +- ▁wedi +- rine +- свят +- ▁competent +- کاری +- ▁Internet +- ▁moltes +- cushion +- اند +- ▁compose +- ▁Verein +- waardig +- enia +- ▁profoundly +- ▁paso +- ▁spotted +- ▁needful +- ▁gehe +- نس +- ▁ruddy +- adors +- ▁pilgrimage +- ▁elektr +- ▁soften +- ▁loveliness +- ckel +- ▁hooren +- ▁crane +- である +- ▁mówi +- ▁trusting +- ▁übrigen +- க்கும் +- builder +- profund +- mpaka +- ▁kneel +- ▁gewiß +- ைக் +- ▁hai +- bild +- ила +- ▁Their +- buka +- éis +- ▁autour +- ▁censure +- 플 +- پا +- ▁Dio +- wanda +- ▁haya +- һи +- ▁overlooked +- ▁novelty +- iad +- ▁honored +- ▁kugenda +- vl +- ▁Plu +- ▁Sil +- 
شر +- wechsel +- ▁français +- ▁chime +- ▁doubled +- ▁polar +- ▁wanderer +- ել +- ▁inviting +- undo +- ▁Kal +- ▁messengers +- ▁funciona +- ▁bato +- ▁handling +- ▁darin +- бай +- ތު +- ▁kila +- ▁propri +- ப்பட +- ▁watches +- ▁mash +- ▁petites +- urira +- ▁feudal +- esten +- ▁magnitude +- ▁turtle +- кс +- ▁enforce +- ▁devait +- гер +- વા +- ▁reuni +- تۇر +- àtic +- ruta +- ▁emptied +- ▁insensible +- mino +- யை +- ▁Jahren +- ▁launched +- lex +- ▁membro +- xx +- ত্র +- ▁праца +- бал +- ▁humming +- fres +- ydd +- 花 +- ▁scowl +- yam +- ▁teams +- ▁manière +- ▁ginger +- nyije +- 可 +- ▁hello +- issement +- ▁uproar +- ▁ocup +- ▁bibli +- yó +- tempo +- 建 +- kili +- ▁lacked +- mack +- gelegenheit +- ▁console +- tora +- gestalt +- стая +- ▁Pau +- 써 +- ▁spacious +- ood +- ▁youngster +- ▁involve +- bbling +- ▁clutched +- yak +- 或 +- ▁patriotism +- іўся +- ți +- ▁гэтых +- ▁sanctuary +- ▁premises +- يد +- affe +- ҙең +- 期 +- ▁tremulous +- 业 +- ▁fuerza +- sson +- ▁рух +- ▁attic +- ▁сум +- fana +- ▁robust +- ▁larry +- ▁Entre +- ▁livre +- ▁nursing +- lauf +- ▁podia +- йә +- ▁refinement +- ▁mfite +- ▁overboard +- зні +- ĉo +- ▁mouvement +- ▁dispara +- loos +- angi +- hong +- اری +- ▁bait +- isson +- ▁dump +- ▁portraits +- ▁Ар +- ▁Igi +- 민 +- រ +- ▁hören +- ▁noticing +- ▁okazas +- ▁appoint +- ▁situat +- ▁алды +- ▁interrupt +- fir +- ▁discomfort +- ▁horribly +- axe +- empi +- landa +- cuba +- ▁disliked +- ভা +- ▁nero +- ▁shrieked +- ▁ingenuity +- േ +- ▁spun +- aide +- ▁sollen +- ▁vegeta +- ▁fals +- ¿ +- ▁gertrude +- ▁potential +- mutwe +- ▁binding +- teller +- clad +- ▁favoured +- għ +- ▁scales +- ▁onto +- ▁laborious +- ▁scope +- ▁zealous +- уе +- ▁calf +- ▁basti +- ▁suppress +- ွ +- ▁hazel +- ▁promo +- ▁muti +- ▁magistrates +- ▁vijf +- ▁cheered +- нняў +- ▁antiquity +- ▁ungrateful +- ▁думаю +- ▁сваю +- atumye +- ити +- ▁trobar +- kiba +- umunya +- ▁operator +- ▁harimo +- ড +- kama +- 저 +- برا +- ▁confront +- ▁bully +- shell +- ▁fascinated +- ▁rapide +- ▁ascent +- ▁conjunt +- ▁Dort +- ▁Hä +- ordnung +- zana +- ▁разам +- simila +- bib +- ajja +- ▁favored +- ▁memor +- ▁seria +- ▁jin +- ▁Stu +- ▁sow +- ▁fühlte +- ▁churchyard +- تَ +- ▁vary +- rium +- ▁noblest +- 军 +- ▁처음 +- ▁contend +- ▁weekly +- ▁odour +- lieb +- schutz +- arma +- 化 +- ▁grandson +- ▁Kenya +- ▁voulu +- ▁sortir +- ▁Ти +- ▁feller +- уюць +- ▁consternation +- 兰 +- ▁umwanya +- 봤 +- ێن +- ▁prezent +- hampton +- nec +- ejoj +- miti +- ент +- agrada +- ierten +- ▁dispatch +- ▁repay +- тың +- ▁marvelous +- شه +- ▁nasty +- ▁auszu +- ▁ocho +- জন +- ▁ports +- uno +- ▁damals +- ▁disturbing +- ▁nave +- 罗 +- ▁earning +- ▁naho +- ź +- قد +- staff +- ▁Mitglied +- ▁tym +- ▁fails +- ▁hillside +- ▁immensely +- ▁antony +- smith +- knecht +- Ä +- hra +- holders +- stä +- Mo +- kirwa +- schrieb +- ▁مد +- ▁Michael +- ▁différent +- ▁unfit +- gesch +- ▁sorg +- ▁blij +- ▁nyingi +- ▁sag +- ▁دەر +- ▁فکر +- ▁sidney +- ▁blushing +- ely +- ▁sly +- тон +- کس +- вага +- ▁buzz +- ▁Ende +- ▁daresay +- alta +- Ж +- 군 +- wch +- ▁navigation +- тая +- ▁Real +- ▁healing +- senga +- куп +- ▁Би +- ▁warlike +- ▁implied +- მი +- рҙы +- ▁pesca +- ▁woodland +- dà +- нын +- uganda +- ▁italien +- ▁swaying +- زم +- ▁brink +- 다는 +- ▁compi +- يو +- ▁praises +- rirwa +- ▁welches +- sloten +- ▁série +- բ +- lom +- toire +- cret +- ▁pau +- ▁correctly +- ▁pitcher +- する +- tuig +- crib +- ▁sentit +- ▁dispersed +- дук +- ▁votes +- ▁bridges +- ▁elector +- eiro +- য +- чен +- ەکانی +- taal +- ▁combine +- ▁ўсіх +- ▁mien +- ▁annex +- mort +- ▁wearily +- telefon +- ▁composer +- brü +- ▁cruelly +- čči +- ▁Pala 
+- dire +- mg +- ▁dije +- дзей +- ▁propaga +- ▁teritorio +- ▁Green +- kant +- ▁gentleness +- richtung +- buga +- zuri +- ▁attacking +- fera +- ▁valued +- ▁ebenfalls +- ▁performing +- ترین +- ▁fixing +- zono +- pí +- ▁Luc +- oci +- ▁бок +- န +- Re +- ▁ell +- ▁inland +- ▁adjacent +- ▁malicious +- ▁studi +- page +- زە +- ▁salv +- ▁richest +- اط +- ▁abate +- ▁certe +- ▁barri +- ▁fluttering +- 開 +- nacht +- abagore +- ▁dough +- räum +- ▁beads +- ▁powerless +- ▁dashing +- ின +- reke +- ▁сям +- ▁electro +- ▁hoy +- bind +- ▁disputa +- ▁brin +- ▁volcano +- sprache +- ▁providing +- woord +- ▁mounting +- ▁nothin +- ▁massachusetts +- mpo +- ▁whirling +- ទ +- ▁preskaŭ +- ▁daybreak +- ▁twinkling +- ▁evelyn +- nahm +- ті +- banza +- bola +- ခ +- ▁politique +- rano +- ▁перад +- ▁illustrate +- мал +- 큰 +- 표 +- ▁cabbage +- ▁beatrice +- ▁glaring +- basha +- няў +- دي +- ▁чалавека +- ▁moviment +- ٌ +- کات +- ▁gewo +- ▁dès +- ▁nzu +- ▁vibra +- ▁mentally +- ▁Cat +- version +- こと +- ▁Ker +- ▁вядома +- 然 +- ▁sufferer +- ałem +- ொ +- ▁warmed +- ▁zoon +- cham +- tate +- ▁Rock +- ▁cheat +- ▁glided +- ▁futile +- ffle +- bic +- ▁ashobora +- empresa +- rata +- ▁cultural +- ▁veux +- ▁kuma +- habit +- ▁swollen +- ▁blinded +- ▁legte +- ▁située +- وق +- ▁сталі +- ▁Person +- mado +- ▁zuster +- ▁Aha +- ▁història +- ზ +- zor +- ▁unsuccessful +- ▁mientras +- ▁gratification +- ▁такая +- shye +- эм +- స +- books +- schijn +- ▁tossing +- ▁leisurely +- ▁transition +- egna +- 했었 +- ланы +- vro +- sichtig +- ▁orbit +- ▁gern +- ква +- dite +- ▁achieve +- ▁indignantly +- ▁pistols +- ▁wowe +- 後 +- کان +- льна +- dagi +- ▁Vous +- middel +- ▁часам +- ▁entreat +- 지는 +- ▁இருந்த +- ▁turk +- ▁hernam +- ▁diet +- 自己 +- ▁sûr +- ▁forts +- ▁여자 +- ▁dorm +- ▁langue +- thal +- mbye +- ▁emerge +- ▁Rei +- bikora +- ▁nurses +- ங்கு +- ▁promotion +- ▁darrer +- нап +- ▁manufacturer +- ▁ribs +- ▁surpass +- ▁deputy +- ▁kuna +- ▁Laŭ +- ▁territori +- ▁новы +- ▁commented +- ▁joshua +- 口 +- ▁deliberation +- рма +- ▁coins +- ukura +- ▁morbid +- ▁reflecting +- ▁dochter +- Ubu +- 잡 +- 左 +- ▁pensée +- ▁imagin +- ubw +- ▁pell +- nki +- ▁retro +- besitz +- ▁muscle +- ▁Met +- ▁Mittel +- abakozi +- ைப் +- ▁guides +- nę +- ▁لێ +- ▁Now +- كان +- ▁spinning +- ▁Pli +- '11' +- ▁bathed +- ▁tackle +- ▁guise +- ▁geste +- ▁crusade +- ▁faz +- ▁audible +- خۆ +- ▁burrow +- ▁qualified +- ▁drown +- ▁Ul +- ▁whisky +- ▁imply +- ▁ligne +- حا +- ▁beethoven +- ▁thompson +- ▁sentir +- ▁Gegen +- bba +- ▁grandi +- 今 +- ▁момант +- umbu +- جه +- ▁illustrated +- ▁tails +- ർ +- ▁कर +- яз +- ▁Band +- ހި +- ghe +- ало +- ▁drooping +- zucht +- umwe +- ▁crook +- puso +- জা +- ▁surveyed +- ▁grasping +- turi +- ▁slack +- lista +- ▁lachen +- gaben +- ▁commissioners +- 很 +- ▁wearied +- feri +- ▁Mel +- 반 +- ▁veiled +- сад +- ▁murmuring +- ணி +- 工 +- ▁liverpool +- 等 +- ▁Icyo +- ▁Aquests +- iddwa +- mous +- ▁samo +- brac +- hold +- ▁comparative +- brach +- ▁hallo +- ▁бара +- ▁بن +- ▁similarly +- taki +- ▁transcend +- ▁tangled +- kunft +- ▁tidet +- twenty +- ▁status +- ҡан +- ▁peck +- ▁incessant +- ▁davy +- ▁galloped +- ▁stature +- street +- ▁eclipse +- ▁titre +- ša +- udde +- ▁yal +- ▁fatto +- ▁schä +- kultur +- ▁honours +- леш +- ▁Bw +- ▁railing +- альны +- ▁vieille +- ▁Ке +- ▁Ні +- ▁squad +- ▁sulphur +- ▁retir +- кую +- รา +- ▁transformation +- rabi +- ▁harris +- ▁Usono +- oa +- خان +- ▁цікава +- ▁sticking +- ▁Kigali +- ▁fringe +- abba +- ▁doet +- уға +- шай +- آ +- ▁farms +- ▁gehad +- adora +- ▁mariner +- ▁zahlreiche +- torio +- ▁uncon +- ▁progressive +- ніцы +- ▁aisle +- ยา +- ▁lebt +- ▁ĉefurbo +- 
▁உள்ள +- ucht +- ▁franz +- ▁dominant +- troph +- ▁geluk +- алы +- け +- ▁eyelids +- ▁portugal +- скім +- ▁grind +- ▁distr +- ▁malalt +- яцца +- ▁accompanying +- ife +- ▁spice +- lira +- ▁cama +- entrada +- مِ +- chon +- ▁empor +- Ha +- ▁godfrey +- bbing +- ▁Cho +- ▁orienta +- ▁conferred +- conc +- ▁quiver +- ном +- hafte +- ▁얼마 +- teka +- ▁unlucky +- ▁neniam +- ▁spies +- xel +- ▁такія +- ▁homem +- '18' +- ▁suffrage +- lessness +- ▁wensch +- lux +- ▁sav +- ▁crawley +- ▁recom +- ▁random +- ▁concrete +- ▁odds +- eries +- ▁싫어 +- agradaria +- ▁offense +- pond +- vari +- gge +- reif +- arà +- ▁choosing +- erken +- ▁ache +- ▁przez +- ▁treating +- ▁immortality +- ▁contro +- ▁vines +- ▁lectures +- ià +- ▁barred +- 世界 +- ▁replies +- ▁welsh +- gant +- ▁glimmer +- ▁Hay +- ▁gaiety +- ▁happiest +- ▁deity +- nyanja +- volution +- ▁atari +- กา +- ▁resident +- नि +- ▁maple +- ▁heaped +- ▁easter +- ▁праект +- ற்று +- naweza +- ▁прад +- ني +- ச்ச +- ▁truths +- prop +- аты +- ▁discontent +- ▁déli +- Ch +- many +- ▁있으면 +- cutter +- ▁With +- ixa +- urar +- graph +- ▁weave +- рос +- ▁quiere +- ▁nought +- ▁było +- друг +- ▁wires +- ্ব +- ▁suppli +- ល +- ▁indebted +- ▁chevalier +- ▁Guerra +- нг +- ▁natura +- bana +- ອ +- ▁Japan +- ▁endeavored +- ▁germana +- ▁suggestive +- ▁repar +- ▁deutlich +- ▁sant +- ▁recommendation +- ▁appealing +- ୁ +- ▁russell +- ▁nightmare +- ▁perched +- nois +- ▁ingingo +- А +- ▁Pres +- ▁bitten +- üm +- ▁chaos +- ▁문제 +- 겠어 +- gula +- inka +- tuk +- ▁Mü +- ▁dahin +- okuwa +- 中国 +- ▁quién +- рады +- як +- ▁vibration +- vole +- ▁zones +- ▁permane +- byara +- ▁stran +- ▁dai +- ▁mubi +- vollen +- uil +- ▁arriva +- ସ +- ▁whereof +- ▁presenting +- ▁selfishness +- mächtig +- tumia +- ▁aufzu +- últim +- ▁curios +- ▁jener +- meri +- gez +- ▁scores +- ▁seguir +- homa +- ▁invested +- ▁só +- dala +- ▁communities +- ▁bakora +- ത്ത +- ▁올라 +- jad +- ▁맞어 +- dependence +- ▁sita +- ▁lounge +- ▁slippers +- бә +- ▁channels +- ފ +- ପ +- ▁ministre +- ▁jemand +- ▁suf +- izer +- ▁evolu +- ванне +- ▁pred +- ლ +- ▁rains +- valu +- oye +- ▁aki +- ▁satisf +- ыште +- asta +- ަށް +- 팔 +- ▁delights +- ▁dirige +- ліч +- ацыі +- ▁кит +- indre +- moedig +- ▁усе +- ▁edinburgh +- ▁associates +- ही +- ▁craig +- ▁cheque +- dozen +- ▁ballad +- hrung +- kro +- quarters +- ▁ici +- ▁tema +- ▁friar +- ▁Who +- ▁먹을 +- ▁classical +- 했지 +- ▁maw +- тән +- ▁automatic +- ▁Nti +- ▁laquelle +- ▁fulfill +- arbeiten +- 중 +- ышты +- ▁pierce +- гарад +- ▁Vin +- ט +- ▁glimlach +- ▁mischievous +- сте +- ▁bunk +- ipe +- pto +- ▁belly +- ▁montra +- ▁idleness +- ▁pork +- асці +- 평 +- ▁agaciro +- ▁yearning +- uvi +- ▁прэ +- ▁bitllet +- 列 +- zeichne +- ού +- life +- ḥemmel +- argo +- ▁coo +- ▁Ok +- ▁darn +- èl +- 點 +- 맛 +- 했잖아 +- mati +- ދި +- ▁schlug +- witt +- ıyor +- 술 +- ▁twins +- boj +- պ +- ทาง +- ▁obligations +- ▁attentively +- чаць +- 들은 +- ▁agita +- ▁abominable +- ▁bila +- ▁missionaries +- ▁justification +- ▁candy +- ▁jerked +- ▁població +- ▁montre +- చ +- रि +- ▁Mill +- ג +- embra +- ▁clearer +- ▁impact +- ▁interna +- ▁benson +- ▁mediterranean +- domo +- ▁trad +- ▁alluded +- onian +- Ro +- ▁prosecution +- ром +- ▁grands +- ▁Му +- ▁despatched +- ▁elles +- fähr +- ▁diplomat +- ▁muttering +- ombre +- ▁hav +- 仔 +- 式 +- ▁kui +- ▁bijzonder +- ▁akvo +- ▁amelia +- ▁madrid +- fassen +- ▁testify +- ▁paz +- ído +- ▁seigneur +- ▁месяц +- ี่ +- ▁магу +- ▁modified +- jat +- ▁smash +- ▁pasa +- spoken +- 做 +- ▁rhine +- ▁network +- ▁affecting +- ▁herbs +- ル +- ވި +- ▁participa +- ▁jelly +- ▁exempt +- ▁wanton +- ▁herum +- ▁arimo +- ▁vexation +- ário +- ▁disait 
+- ▁fret +- frei +- нула +- ▁lippen +- езд +- ▁taxi +- església +- 백 +- 动 +- ▁galli +- ▁hiram +- ▁tack +- پى +- ▁கொ +- ▁meteor +- عَ +- ▁kwamba +- ಡ +- 결 +- лося +- ▁chased +- ▁эти +- komst +- ▁pausing +- ▁나한테 +- пай +- ▁seamen +- ▁erano +- ▁marguerite +- ▁benefits +- ▁cuenta +- ▁sacrifi +- ▁rushes +- тя +- ▁parce +- ▁precedent +- ugar +- ▁나중에 +- ▁French +- ▁natal +- 기도 +- ▁stables +- ▁Són +- ▁infancy +- 한다 +- ▁swelled +- interess +- ▁Го +- gim +- ▁reli +- ▁vaig +- جان +- ▁requirements +- ▁buff +- willig +- ▁infer +- grav +- န် +- 机 +- ▁Kla +- ▁sherman +- ▁suspend +- ▁awakening +- ▁operated +- ▁proving +- ▁dogma +- ик +- ▁honors +- ▁flags +- ል +- ▁гэтую +- 메 +- ▁yuko +- yungu +- ▁holidays +- ▁ejaculated +- 省 +- ▁patriarch +- ▁consumption +- ▁Kommission +- ▁magi +- ▁zee +- ▁whirl +- ▁universally +- ▁самі +- այ +- ▁agomba +- ▁әйт +- ▁defy +- ▁musician +- root +- ይ +- bbie +- ▁necessities +- ▁pieds +- ▁colonists +- .” +- ▁descon +- ▁curly +- мар +- ستی +- ▁chaste +- ិ +- ▁أَ +- mvu +- respon +- erekana +- ҙәр +- nyuma +- hild +- ▁һин +- скую +- ▁entrusted +- ▁näher +- ▁ducks +- dev +- ▁attribute +- ▁thrilled +- ବ +- ▁fogg +- gezo +- ▁respectfully +- ▁filo +- ▁Gro +- ovi +- buye +- ▁außer +- icyo +- ▁transportation +- ▁огыл +- ▁confided +- ▁detain +- ▁pleases +- ▁Sim +- ▁overheard +- empe +- muzi +- ▁theology +- ▁laurel +- gru +- เอ +- 的一个 +- ▁darum +- ▁mei +- ▁그거를 +- ffa +- ▁broom +- für +- obe +- stones +- مش +- ▁tienen +- ▁магчыма +- 平 +- omi +- ▁opponents +- jut +- ழி +- ▁patriotic +- ▁Blu +- ▁exceptional +- 間 +- ▁wenyine +- ▁Frei +- ▁coincidence +- ▁Amek +- ▁bindle +- ▁marco +- щ +- ▁okuva +- wide +- 正 +- ▁ascending +- ▁newton +- ▁vais +- ▁mused +- ulf +- ▁gestures +- bbs +- ▁planets +- ▁natuurlijk +- ▁matilda +- lep +- ▁forte +- ▁mice +- issen +- ▁disguised +- сво +- ▁juda +- ▁Eŭropo +- ▁hie +- ▁plaza +- ▁nadie +- ▁echoes +- ▁claus +- eeuw +- ▁zunächst +- ▁expos +- ▁printing +- ▁scottish +- kapa +- ▁mord +- ▁clap +- huh +- ▁tej +- ▁erreicht +- ▁systematic +- ificació +- ▁malone +- hia +- ▁دارید +- են +- ▁한국 +- ▁douce +- ▁ruhig +- ▁горада +- mper +- zetten +- ▁хутка +- setzung +- ▁eliza +- ▁waarop +- 经 +- ▁tipo +- baren +- lega +- ▁yagize +- ▁traversed +- ▁outcome +- ▁richer +- ▁cameron +- eilig +- ▁capitaine +- ▁verra +- ▁voted +- ▁colle +- ▁wharf +- ▁gegenüber +- ▁girdle +- ▁brooding +- ▁solved +- ▁noises +- buze +- ▁commissioner +- gerageza +- ▁connexion +- ▁strenuous +- ▁clash +- ▁cane +- ▁nächsten +- ▁lors +- ட்டி +- ▁свае +- ধা +- ▁które +- ▁mpamvu +- 电 +- ед +- ▁television +- ▁conde +- cover +- ▁kindled +- ▁llegar +- ▁antique +- ▁ignore +- ▁baptist +- nais +- ▁sina +- ▁palaces +- ▁rites +- ▁construcció +- hla +- ▁whistled +- ▁chil +- aggi +- ▁мас +- ▁rabbits +- ▁clutch +- ▁governments +- ols +- ¡ +- 九 +- 员 +- 过 +- эг +- ▁component +- ansi +- ▁crosses +- ▁potter +- ▁siguiente +- boek +- ▁amagambo +- ▁Atlant +- ▁schoolmaster +- ▁rustling +- ӹн +- ▁width +- ▁coeur +- ဆ +- ▁bayonet +- лған +- ▁publish +- ▁clerks +- ▁сер +- ▁següent +- ▁생각이 +- zimmer +- ▁pobl +- ▁fetched +- ▁Пер +- ▁begrijp +- dust +- ▁Kri +- ▁kumwe +- ugen +- grado +- ▁adel +- ▁іншыя +- سىز +- ▁atom +- tud +- ▁llega +- ▁diversion +- ڑ +- ▁нашай +- τε +- ▁besten +- ▁Пад +- ▁referring +- ▁gaunt +- ▁veranda +- ▁obscurity +- quant +- schlagen +- mik +- ▁benevolent +- ▁starts +- रे +- ▁interroga +- лән +- ▁fewer +- ▁навук +- ілі +- ście +- ▁burl +- ▁finely +- ▁fragrance +- ებ +- ▁parecía +- holes +- chine +- อน +- lò +- ▁lachte +- 目 +- ▁빨리 +- ▁barricade +- coj +- rlo +- ▁consta +- eien +- ▁ordained +- ▁osborne +- kud +- 
salt +- andika +- ▁whiskers +- ▁뭐야 +- ▁fariĝis +- ক্ষ +- далі +- ▁starved +- cross +- ▁scamp +- ▁aristocracy +- iş +- ▁newman +- ▁ھە +- ▁moat +- ట +- plica +- ▁intelligible +- ▁butterfly +- tı +- cake +- dział +- pate +- wenden +- cioj +- ▁demande +- ▁emir +- ▁impulses +- ▁treatise +- namo +- ▁boyish +- geri +- wheel +- ▁outstretched +- ▁stony +- Ц +- ▁numbered +- ▁voorbij +- ▁mbalimbali +- époque +- ▁Christ +- дае +- ▁treu +- scheid +- vla +- ▁thad +- ▁surtout +- ছিলেন +- Institut +- ▁periodo +- ▁thickness +- ▁мал +- ▁Ndi +- ект +- ▁Sami +- وش +- 保 +- kwiriye +- ▁reconciled +- cento +- ▁бү +- ▁dialogue +- ▁priscilla +- ▁regulations +- nji +- burger +- ▁rendering +- ▁hopelessly +- ▁doings +- psy +- 격 +- ▁standards +- ич +- ▁그러니까 +- saw +- ▁Sun +- ▁көн +- tando +- ▁그러고 +- ▁decía +- entendre +- ▁folgte +- 취 +- ▁appalling +- ▁destitute +- ▁diligence +- ▁disastrous +- ▁mathematics +- ▁columbus +- koreshwa +- рож +- affaire +- ▁roe +- ▁trouva +- ▁hanno +- ▁retirement +- ▁mend +- ▁shark +- ▁хат +- vara +- coop +- ▁hostility +- ▁oan +- ▁амаль +- ▁reptile +- ▁wipe +- ▁aloof +- amor +- ина +- duce +- operative +- ▁tissue +- ▁shines +- دید +- ▁hubert +- ▁hablar +- ▁wheeled +- 间 +- ▁amakuru +- ستان +- lac +- ffin +- üz +- พูด +- saka +- ▁alight +- ▁Durante +- ▁explanations +- zogen +- pada +- imari +- rrington +- ▁іншых +- ▁Nk +- три +- objet +- авалі +- ▁shocking +- rink +- ▁modes +- ▁Шу +- ▁sector +- ލ +- ▁radiance +- ▁totes +- ▁livres +- doni +- ленне +- 겨 +- ळ +- ▁algunes +- stelling +- ▁inquisition +- ▁monseigneur +- ▁tort +- ▁یہ +- ▁rocking +- ▁hilda +- ▁pairs +- ▁displeased +- ▁convincing +- ▁않을까 +- ▁billow +- саб +- ▁ау +- ▁improvements +- یکی +- چی +- дэн +- から +- ▁Així +- ▁spoedig +- balanced +- 流 +- rava +- ▁taller +- inus +- கிற +- huis +- ▁Trans +- ▁rocket +- әм +- ▁torna +- ▁wenigstens +- ▁injuries +- ». 
+- ▁scalp +- amp +- bord +- ▁socialism +- ▁vermo +- 重 +- ▁Mala +- arth +- ▁delegate +- ▁brazo +- ▁enlarged +- 쳐 +- leri +- ▁kuyi +- sanze +- नी +- ին +- ▁projects +- ▁piles +- ▁jude +- bish +- ▁speedy +- bres +- ▁cemetery +- ▁modification +- ▁pouvoir +- ieux +- nab +- euses +- ▁quicker +- ▁haze +- ნა +- ▁Familie +- ▁ruling +- ▁firmness +- ▁spill +- noma +- ▁Tura +- ▁reasonably +- намі +- atte +- landes +- ▁écrit +- нё +- ▁voce +- 一个 +- ▁uneasily +- ▁clapped +- wszy +- ▁achievement +- ▁Kir +- ▁sonne +- ▁packing +- ▁perpetually +- ▁compar +- льнай +- ▁destructive +- ацыя +- уч +- rating +- imper +- ▁mortals +- ▁irritation +- ▁Можа +- dler +- ▁może +- ▁straightway +- ▁handwriting +- gestion +- ىدى +- 她 +- ▁Natur +- ▁palabra +- umubiri +- ▁humid +- ▁cuff +- acca +- arden +- ▁Namen +- ▁chuck +- ▁quench +- dita +- ▁shifting +- ▁flavor +- ▁Roger +- ▁expenditure +- ▁peninsula +- pari +- ▁هستم +- oroshye +- ▁tension +- stimmung +- း +- ▁besoin +- eleg +- ▁quebec +- dication +- ▁są +- ▁Ча +- ▁jurisdiction +- vizi +- ▁dismounted +- ▁devoured +- ▁baja +- культур +- ▁shaded +- مَا +- cote +- huza +- ▁anglo +- ▁nahe +- ▁sings +- ▁wrinkled +- ▁insulted +- ▁өсөн +- ziehung +- 담 +- ▁crushing +- weisen +- apfuye +- ▁burton +- ▁miniature +- 옷 +- ▁jersey +- نج +- imos +- ▁passé +- ▁hereditary +- ▁allgemein +- ▁һәм +- ▁திரு +- fine +- tangaje +- ▁concili +- ▁hätten +- ▁boulevard +- жана +- ▁liaj +- ▁carter +- ▁brent +- ▁longtemps +- ▁penetrating +- ▁egli +- artig +- ▁Terra +- ▁закон +- шат +- ▁meva +- ▁taunt +- penny +- ▁나와 +- zibu +- ▁soutenir +- aggio +- ṛuḥ +- oubli +- دۇ +- ▁muerto +- ое +- ▁suggests +- ▁Tod +- ▁plaisir +- 얘 +- ▁Roedd +- ▁aucun +- ▁interpreter +- ▁Premi +- ▁hinzu +- ▁netta +- ▁Bericht +- ▁retiring +- attend +- ▁똑같 +- ▁trout +- ▁retard +- च्या +- ▁skilled +- ▁Kam +- alia +- ▁ніхто +- present +- ্র +- ▁propre +- 이지 +- ▁inhabit +- ▁konnten +- اح +- ▁fry +- үз +- ▁extravagance +- ▁boulder +- ނަ +- 城 +- ế +- 손 +- ▁cristian +- ▁omitted +- ▁Krieg +- دې +- ▁parlament +- ▁verlassen +- ▁Belg +- ያ +- ▁연락 +- ▁jokes +- ▁deprive +- ▁vows +- ▁debe +- ▁variations +- ▁dances +- ▁நட +- üste +- انه +- tuye +- ▁triumphantly +- école +- lga +- ▁kwanza +- ▁magnificence +- ▁sydney +- avuye +- ulous +- ▁lleg +- ▁пыта +- ▁impartial +- ▁Bob +- misch +- étais +- ▁grapes +- bte +- რი +- kamu +- ▁fluttered +- ▁чынам +- ▁liep +- ▁religi +- İ +- ▁baltimore +- ▁mañana +- ▁grievance +- itari +- ▁grains +- posit +- ▁slate +- ▁hoor +- ▁caf +- ▁emblem +- 십 +- borne +- ▁growl +- ▁concur +- dist +- ▁stooping +- ▁gripped +- ▁creative +- ufer +- män +- ▁acabar +- ▁largo +- ▁dominions +- 육 +- ▁Dal +- ▁judith +- ▁skipper +- ▁шул +- ▁facility +- ▁projet +- ▁sexes +- ▁Ell +- serve +- ▁exertions +- ▁lupin +- ▁efficiency +- ▁hypothesis +- ▁кӱ +- ▁Regi +- ▁silken +- flug +- ▁squat +- মে +- tegeko +- ▁communications +- ▁reformation +- ▁vapour +- ▁grunt +- ▁captains +- ▁maji +- յ +- ▁könne +- ▁captivity +- ▁tuj +- plate +- verkehr +- lito +- ▁Smith +- ▁trojan +- ▁kwiga +- sibil +- ▁کسی +- ▁waki +- autant +- ▁volver +- ▁devised +- ▁või +- ▁longest +- ▁stuffed +- '0' +- ▁sew +- ▁ravag +- ▁accurately +- ▁khan +- iera +- ▁scrambled +- ▁crowns +- өт +- ▁tbe +- 일이 +- ▁busca +- lait +- ▁directing +- chard +- ▁handy +- ▁sym +- Ö +- слуха +- ▁mathematical +- ▁நான் +- ▁cancel +- ▁bleak +- njo +- ▁quello +- ▁situé +- cencia +- ▁arabs +- ▁halls +- ▁में +- ▁odious +- ▁solemnity +- шэ +- ▁traits +- ▁پار +- ▁psychic +- ▁싶은 +- الت +- ▁videbla +- guzi +- ŝi +- illy +- ▁coals +- oul +- ▁Après +- ▁mesa +- ▁Ды +- ▁beset +- ▁stoop +- bide +- کا +- чыла +- 
illi +- ▁алып +- ▁receiver +- kijken +- nker +- ▁기억 +- éré +- ▁fuller +- ▁verso +- ▁kerk +- cloud +- ▁instinctive +- ▁가야 +- 망 +- ▁outburst +- ▁überhaupt +- ▁Rat +- ▁dusky +- คร +- father +- разуме +- ▁Clàudia +- 참 +- ▁частка +- nale +- ▁collector +- ▁трох +- ஞ்ச +- 機 +- ▁boyhood +- ▁cherish +- ▁flattering +- тычна +- uyu +- ▁graces +- ▁Ня +- ▁sixpence +- ▁flute +- oper +- ▁prakti +- stream +- ▁leapt +- ▁politische +- ▁cultivate +- ▁keiner +- ▁millionaire +- ▁spaces +- ப்பட்ட +- ົ +- ▁stunned +- ▁збіра +- ▁peuvent +- ▁resultat +- handlung +- ▁algunas +- naw +- indo +- ▁flutter +- imy +- party +- ▁zweite +- ▁actua +- ▁inzira +- rän +- ▁mauvais +- ▁precision +- muha +- gevolg +- itzar +- ▁scant +- ព +- ▁Dolça +- ▁valentine +- ▁tarzan +- édé +- ▁antwort +- ▁pater +- laire +- ਵ +- ▁resto +- naire +- pse +- jyanye +- ▁Gold +- rui +- obscur +- ▁spat +- ▁другі +- damente +- ▁cruz +- ▁ramp +- ▁ئەوە +- ▁comercial +- ▁confidently +- kaka +- ▁silvery +- 기가 +- ▁hitch +- operation +- ▁impetuous +- ▁arranging +- ▁juge +- ▁advances +- cza +- ▁cheering +- ĵ +- ▁strokes +- лем +- ▁manche +- atta +- meḍ +- 적인 +- ▁snakes +- оу +- શ +- ▁certificate +- ト +- ▁Phil +- ▁niño +- ▁Llan +- cera +- ▁tuvo +- lico +- ▁venait +- ▁enclosure +- ▁pluraj +- მო +- ▁anon +- شون +- рі +- ▁schönen +- ▁offen +- jah +- ▁hacía +- ▁fortified +- fahrt +- ▁garret +- uan +- ▁mercury +- ▁hospitals +- ▁integrity +- نظر +- ▁campbell +- amahoro +- 이가 +- ના +- rouw +- deg +- ▁এক +- ▁Усе +- ное +- ▁pigeon +- ▁columbia +- ▁pamphlet +- 想 +- ▁defensive +- ▁jog +- ▁ідзе +- ▁nader +- ▁patrick +- aĝo +- ▁baş +- elek +- ▁fainted +- ▁procura +- Abanyarwanda +- ▁campan +- ▁prins +- 붙 +- ▁posterity +- ▁پیش +- castle +- buri +- gesetz +- ▁lwe +- ▁utiliz +- ▁okaz +- อร์ +- ▁hangs +- ▁acceptable +- яд +- añ +- ▁pena +- ிட +- umupira +- ▁compartment +- ▁illuminated +- ▁courageous +- ▁investigate +- ووە +- ▁julien +- hinda +- ▁maintenance +- ▁sinful +- lée +- وس +- klä +- uca +- ▁leak +- ▁taifa +- lado +- riba +- ▁ordinarily +- 셨 +- ▁спр +- ▁cierto +- ▁India +- clar +- пле +- come +- ▁baffled +- ▁Have +- ▁bom +- غى +- cente +- ▁contemplate +- ийн +- cem +- ▁hardships +- ▁repel +- 当 +- nigh +- yumva +- تے +- রি +- gehalten +- ေ +- нет +- ▁nigger +- һең +- ▁toys +- ▁confinement +- ▁relatively +- یش +- ▁fari +- ੱ +- 尼 +- ▁chagrin +- ▁falcon +- ilian +- шко +- ▁geese +- ▁wherewith +- leḍ +- ▁capo +- ▁bako +- ▁feder +- ▁garland +- nisse +- ▁adrian +- orden +- ▁wis +- ▁societies +- kę +- abira +- ▁sehemu +- ▁akora +- ▁haz +- ▁regió +- ▁kapit +- ▁luce +- yenna +- ▁aspects +- ▁ултыр +- ▁nina +- ια +- ▁billet +- ▁Sir +- ▁indication +- ▁hurricane +- ▁oxygen +- ▁shelves +- cino +- ▁pian +- ▁akked +- ▁franca +- ▁dipped +- тө +- ▁negre +- ▁locke +- 적이 +- ▁muscular +- fahr +- რა +- ▁clover +- posto +- niu +- ій +- ват +- ▁guinea +- ▁comprehension +- ใช้ +- ▁haggard +- ▁gambling +- ▁patty +- ▁Franc +- ▁marshall +- lir +- kuvu +- ▁bravery +- ▁tests +- ▁requi +- ▁souvenir +- liśmy +- ▁thump +- ▁franco +- drift +- ▁advent +- ▁Tele +- ગ +- ▁grâce +- ▁katharine +- ▁imperative +- ▁southwest +- ▁gladstone +- vě +- zamu +- ▁бик +- അ +- ▁restaur +- ▁coolness +- कि +- wife +- ▁intuition +- ▁музыка +- тел +- ▁irony +- zentr +- ▁rattled +- றை +- ▁mesure +- ▁winged +- ▁verko +- ▁rake +- ▁bruder +- ▁District +- 죽 +- ▁familiarity +- رَ +- ندا +- çon +- ▁muzzle +- ▁enduring +- ▁transmit +- izen +- ▁hohen +- ▁joyce +- hed +- ▁désir +- ▁Musik +- xer +- bido +- 打 +- ▁beetle +- größte +- ▁해도 +- ungu +- ▁ifite +- ية +- 했다 +- tala +- ▁ruffian +- ▁walker +- ▁visual +- ▁naughty +- ▁nubwo +- 
+- … (model vocabulary truncated: several thousand additional multilingual SentencePiece subword tokens, spanning Latin, Cyrillic, Greek, Arabic, CJK, Hangul, Thai, Tamil, and other scripts, omitted here for brevity)
▁Zweiten +- hava +- ▁jammer +- ಟ +- ដ +- ▁auprès +- 김 +- 뽑 +- ▁richelieu +- ▁cheveux +- ▁agencies +- چه +- ▁liggen +- ▁nomination +- 째 +- ▁dienen +- ▁schul +- ▁insert +- ▁huwa +- ▁працуе +- agombaga +- ▁banana +- ▁قوي +- ▁foreboding +- ▁bequeath +- ▁défend +- үй +- кин +- ▁complied +- работ +- ▁ئىدى +- ▁obeying +- rā +- partei +- ▁bragg +- ▁quilt +- मि +- ▁weakly +- ▁profusion +- ▁siendo +- ▁terence +- ไม่มี +- ▁suivante +- تىپ +- ഷ +- bbel +- ▁preso +- saxon +- ▁contemptible +- ▁managing +- ▁اگر +- нутр +- schwer +- ื +- िक +- ▁independently +- тычны +- ▁Regel +- ▁darkest +- ▁Blo +- cene +- ▁kwaad +- äus +- ▁analogy +- ▁дзяўчын +- ▁epithet +- jährige +- ▁إلى +- ▁meagre +- ▁대해서 +- ▁Gib +- ▁garanti +- zol +- ▁perpetua +- ▁neuro +- öyle +- jugu +- ▁peggotty +- ▁nötig +- ▁poultry +- ▁teufel +- bility +- ▁mummy +- ▁farthing +- freiheit +- களில் +- ▁wahrheit +- ▁simp +- ▁stroked +- ▁spied +- ▁colleague +- iriwe +- wî +- Alb +- américain +- പ്പ +- 总 +- ▁sleeper +- ▁township +- ▁schwa +- ▁splendidly +- ▁schoolboy +- ▁wangen +- ▁stoic +- ▁okwo +- ▁begreep +- 演 +- ▁bailiff +- ▁동생 +- ▁ukora +- ▁Kü +- ▁zest +- ĵo +- ▁finestres +- ▁certo +- лыя +- ▁Кон +- formation +- ▁attired +- ▁рыб +- ▁humphrey +- 广 +- ▁segunda +- ▁conquering +- لك +- ▁direcció +- ского +- ▁personnel +- ▁esposa +- ▁Лі +- ▁தன் +- ▁транс +- ▁comunica +- ▁Winter +- ▁소리 +- ▁acquit +- ▁dirk +- ▁subterranean +- ▁فقط +- ▁forefinger +- 还 +- ▁behalve +- balo +- ▁unfolded +- ▁zealand +- віта +- vreemd +- stuhl +- ހ +- ▁While +- ▁Sança +- ▁Brit +- ൻ +- ▁vindictive +- ▁encampment +- ▁све +- 伊 +- ▁smitten +- ▁approving +- कार +- ▁senyor +- ▁праблема +- ultima +- ▁wireless +- ▁angeles +- ▁norton +- 达 +- ▁누가 +- ▁evangelist +- ствен +- біць +- णा +- слав +- ▁앞에 +- ▁Cependant +- ▁anonymous +- 青 +- ▁composé +- خواست +- ▁personnage +- ▁supre +- aventura +- ític +- ▁plait +- ▁indifferently +- ▁docteur +- ▁händen +- tokea +- нулі +- 탈 +- cause +- ▁físic +- pang +- venta +- ▁kreeg +- ▁acquaint +- ▁considerat +- ▁compensate +- ▁flav +- ▁அமை +- ▁disgraced +- хан +- ▁mace +- imiryango +- ▁imprudent +- ▁compressed +- ▁гадзін +- ▁persevere +- ▁enrich +- ▁ishati +- ▁cooled +- ▁snip +- ▁Bak +- цоў +- ▁frying +- clé +- ▁memorandum +- ▁turbulent +- ▁cierta +- ▁deputies +- ▁resorted +- ર્ +- tuli +- ▁bagenda +- ▁crashed +- ağı +- ▁Dda +- qualifi +- ñu +- shake +- reisen +- เป +- ჩ +- ▁intrusion +- ▁بسیار +- ▁sonnet +- ▁Nyuma +- ▁howled +- ▁intrude +- ذر +- kā +- ▁generously +- ▁adaptation +- ▁magambo +- ▁வாழ் +- alcalde +- ▁geduld +- ▁repented +- neḍ +- ▁loco +- рэй +- wahi +- èle +- hush +- ▁мове +- ▁amsterdam +- ▁secluded +- ▁hindrance +- ▁speculate +- ▁йӱ +- ▁helfen +- fifth +- ▁manĝas +- ▁libroj +- пак +- ▁discarded +- 件 +- ▁apartenis +- пен +- ▁napkin +- gry +- ▁hoof +- ▁thé +- ▁pública +- rillo +- kow +- 는지 +- ▁impudence +- trifft +- ▁важны +- ▁grazing +- ▁subito +- ▁Lord +- ▁elekti +- ބު +- ▁fece +- กว่า +- ▁багат +- ото +- ▁entwickelt +- ▁eenvoudig +- ▁philistine +- ▁submissive +- ▁embarrassing +- ▁ئۈ +- ▁중국 +- ▁befinden +- ▁snapping +- lith +- çar +- ▁eigenlijk +- آور +- пис +- ▁bulwark +- 광 +- ▁versailles +- ▁árbol +- ▁yehova +- ▁nennen +- ▁شهر +- jú +- ▁outraged +- ▁moose +- ▁crutch +- ▁descendant +- urupfu +- katu +- abaga +- вон +- ▁European +- rzył +- ▁diferentes +- ▁arbres +- ▁index +- ▁fertil +- ▁manchmal +- '10' +- ▁paard +- ▁sinne +- ▁йәш +- ▁teresa +- ገ +- cznie +- ▁없잖아 +- ▁grenz +- ац +- ▁تک +- ▁posar +- ▁مرا +- анд +- ▁Entwicklung +- ▁comanda +- ▁Іх +- ▁infirm +- ▁cited +- ▁ھەم +- zaza +- ▁Jones +- metric +- ▁communicating +- ▁canopy +- ▁flit 
+- ▁Civil +- чат +- ▁beobacht +- кого +- ▁کنند +- ▁vestido +- ▁வரு +- ряд +- kufa +- ▁volstrekt +- ▁aperture +- ▁amasezerano +- ▁retrace +- ilidad +- ▁있었는데 +- ▁nyir +- ▁Tiene +- ▁ogu +- ಯ +- ▁bikaba +- ▁lejos +- ▁innocently +- ▁rapidement +- kür +- ▁dint +- ▁effectively +- гры +- ▁Temas +- ▁calci +- や +- ▁surmounted +- ▁goblin +- ▁پرو +- ▁echa +- өлө +- tesi +- alimenta +- ▁daytime +- reliance +- ▁stonden +- ▁byabo +- ganza +- ฝ +- ▁thrift +- ▁gyda +- ▁요새 +- ▁Ambaŭ +- ▁loveliest +- ▁uphold +- université +- temperatur +- lander +- ▁constit +- ▁steered +- brother +- ▁croire +- ▁regularity +- 더 +- ▁погляд +- ▁comporta +- ▁inaccessible +- ▁unquestionably +- κε +- ▁leopard +- 我們 +- ▁갔는데 +- ▁straf +- ▁cheaper +- ▁overlooking +- 쪽 +- batera +- ▁cuya +- ▁halfway +- ▁Iri +- ▁worten +- ▁schouder +- ▁dejected +- dito +- ▁와서 +- lehrer +- ibuka +- ▁filial +- pida +- ▁critique +- 었거든 +- ▁civility +- ▁ambao +- amini +- isce +- ▁wretchedness +- issimo +- ɛedda +- ▁jolt +- 았어 +- ▁publicity +- ▁verdure +- ▁postpone +- ▁herrlich +- ▁wokulski +- ▁glossy +- ▁dennis +- ▁Bü +- ▁bennett +- ▁cyari +- ▁mixing +- ланд +- ▁blink +- ▁compla +- ▁poplar +- ▁frederic +- kozesa +- ▁Wahl +- ▁якой +- ىڭ +- 원이 +- ▁fräulein +- ▁meditative +- àlbum +- ▁cocoa +- ▁Beispiel +- жив +- ▁geheimnis +- ▁puppy +- mbuzi +- ▁Sho +- ▁prohibited +- entrée +- ▁cripple +- bwiriza +- ▁piú +- ▁debía +- ຈ +- ▁spout +- agaragara +- pyr +- ▁broadway +- ▁credited +- ▁devilish +- ▁mirada +- ▁Ай +- kyali +- ▁ambos +- 리가 +- ▁고등학교 +- ▁buffet +- ▁என்ப +- ола +- ▁Quine +- мам +- shiriki +- ▁preciso +- ▁ôl +- lohn +- ▁lurch +- ▁있었어 +- sonnen +- pura +- ▁titus +- ▁obulungi +- ▁mohawk +- ▁atonement +- ▁smite +- ▁stewart +- ▁creaking +- ων +- гол +- ▁invece +- wissenschaft +- ▁havi +- żył +- ่าย +- жаны +- ▁가자 +- ifique +- ▁килә +- accés +- ▁botiga +- ▁fetters +- ▁begonnen +- ▁bakaba +- ▁brett +- ▁competitor +- ague +- нікам +- стве +- ▁yawned +- imiz +- ▁fastening +- ▁goeden +- ҙәре +- ▁specialist +- ▁staid +- ▁burying +- ז +- ▁Gavumenti +- ▁confederacy +- ▁prescription +- ▁unscrupulous +- တွင် +- ▁дакладна +- ▁somerset +- ▁furono +- ҵә +- ▁toilette +- ▁assim +- ▁सं +- mə +- ▁Luft +- graben +- ▁dispatched +- ▁voltant +- ▁graze +- ▁rowing +- ▁için +- ▁zwak +- ▁Home +- aigües +- ջ +- ▁momentarily +- авы +- ▁steeple +- ▁Helen +- szę +- ▁taɣ +- ▁முடிய +- isée +- ▁만들 +- ▁Court +- ально +- ▁compassionate +- ▁цікав +- ▁efface +- ▁officially +- ▁suspecting +- ▁antecedent +- ▁peligro +- 業 +- ▁gibson +- mbwe +- ▁pastime +- ▁contrasted +- йшло +- ▁emphatic +- няць +- ாமல் +- ennemi +- ▁beamed +- ▁pastoral +- skinned +- ራ +- ▁harlowe +- ▁obtenir +- ▁rezult +- даг +- ڕی +- ▁kabila +- ▁ozma +- ▁jonas +- krat +- ▁okuloj +- سي +- ▁artful +- 义 +- ▁avui +- ทําให้ +- ▁vermin +- ▁commença +- ▁volgde +- loving +- ▁slang +- yisi +- twintig +- ▁geleden +- ▁aviat +- ▁vigilant +- மர +- ▁cosmo +- ▁Uwo +- ▁immovable +- ▁핸드폰 +- ▁enumerate +- ▁arque +- ▁mince +- ▁итеп +- astre +- ▁Egi +- ▁bosque +- ▁wären +- boom +- റ്റ +- ▁astounding +- ▁grasshopper +- ▁sagacity +- ▁결혼 +- ▁dimension +- ▁residu +- ▁pseudo +- hängen +- cieux +- ▁divan +- 변 +- ▁Conserva +- ▁மற்றும் +- ▁значыць +- ▁principales +- дры +- ▁moglie +- ▁Centre +- ▁اول +- izado +- fc +- ▁کیا +- 하시 +- ▁electoral +- ▁mocked +- ibikoresho +- ▁bikomeye +- ▁sympathis +- ▁avrebbe +- ▁Serra +- oxy +- ▁forêt +- ▁hoss +- ▁quindi +- ▁azure +- ▁jade +- gekomen +- ▁Pakistan +- ▁является +- 움 +- ശ +- كُ +- ▁único +- ▁estrella +- อด +- ▁thaw +- ▁gratifying +- ▁کمی +- ▁pouco +- ▁privileged +- зму +- ▁człowiek +- ▁terrestrial +- ▁zwölf +- จํา 
+- ▁저번에 +- ची +- ▁saracen +- ▁prestige +- hamagara +- ▁stave +- ▁efect +- ▁labouring +- ▁다음에 +- ▁diverge +- ▁beastly +- icios +- ▁swirl +- ▁gestalte +- ▁expedi +- ▁voel +- ປ +- ላ +- Igihugu +- ▁subscription +- ▁surviving +- ▁automatically +- ▁presbyterian +- ▁imaginable +- prez +- ▁Гр +- ▁아닌 +- ▁versió +- ԥш +- ▁dû +- 邊 +- ▁savez +- ▁simpson +- ▁менен +- لىپ +- ▁participe +- ▁transit +- ▁trompe +- fuzi +- schrijven +- ▁hyacinth +- 居 +- ▁fidget +- ▁Blau +- bok +- ▁incurred +- shami +- ▁dandy +- ▁goût +- ▁necessaries +- ▁trough +- ivugwa +- ▁trok +- wow +- ▁Team +- ▁filth +- ித்த +- ▁кеүек +- фу +- ▁traurig +- ▁terry +- ▁verzeker +- 夫 +- ▁helplessness +- ър +- ▁controlling +- ிக்க +- ▁деп +- ▁bitewe +- οι +- rurimi +- ▁Schu +- ▁partida +- ▁writhing +- ▁prowess +- ▁Before +- ▁expound +- ▁preguntó +- ▁creditors +- ▁bijyanye +- мор +- ▁filed +- richter +- ignor +- ▁mourned +- built +- eût +- ▁Festival +- ▁oats +- gida +- нулася +- льную +- ▁muzeo +- ▁unmarried +- flügel +- Espanya +- ▁هل +- ▁Out +- ▁голо +- ▁scratching +- ▁Nzi +- ▁sahen +- ▁monitor +- رین +- ▁pecuniary +- 東 +- ▁mazarin +- ▁mitchell +- ▁película +- ▁skillful +- ▁کردی +- ▁shiny +- ▁thornton +- ▁Name +- ▁unimportant +- ▁17 +- spieler +- urugo +- ▁reginald +- ▁павінен +- ▁pedestal +- 命 +- ▁ehre +- ▁чакае +- funkci +- ▁runaway +- ▁creix +- ▁уйла +- ৱ +- ▁unbelief +- ▁fleming +- yise +- ▁surgery +- ▁downright +- ▁redeemed +- ▁jeremy +- ▁dominated +- ▁extinguish +- ों +- ▁гэтае +- ▁consoled +- ▁кеч +- ▁durst +- һө +- ▁рука +- ▁Kuba +- ▁televisió +- 反 +- rutse +- ▁Puig +- engage +- ▁golpe +- miş +- gizwe +- ▁blanka +- ▁genteel +- ▁impulsive +- ماي +- ▁exploring +- ގޮ +- 党 +- ▁deduction +- ▁gypsy +- ோடு +- ▁prohibit +- ▁얼마나 +- ▁jedno +- ▁hyper +- ▁ruim +- ▁waked +- ▁Jane +- ▁brewster +- ▁servitude +- ▁admonish +- ▁rijtuig +- tiye +- ▁deuxième +- ▁기억이 +- برد +- ▁conversed +- ▁suerte +- يق +- ▁seduce +- ಸ್ +- ▁تمام +- نگی +- ுடன் +- 꼭 +- hindi +- ▁stimulus +- گۆ +- ▁Hong +- voeg +- годна +- achtet +- middag +- ▁starry +- öö +- ▁podria +- ▁imitated +- ▁véritable +- 땐 +- 波 +- ▁supplication +- ރީ +- йҙар +- ▁troch +- ▁grimace +- entreprise +- ▁좋아하는 +- otti +- kritik +- ▁высокі +- ▁produc +- ▁adjustment +- ▁deluge +- ▁tarry +- lomb +- ติ +- church +- تار +- знаём +- ▁montreal +- ▁gesamte +- ▁supérieur +- ▁ehren +- ▁germain +- ▁Jackson +- ▁Text +- ▁پول +- platte +- nyira +- ulla +- ▁lobby +- ▁choke +- ▁impede +- ▁hindered +- ӓт +- ▁хотел +- ▁пункт +- ▁Cela +- treiben +- ▁palpita +- ▁syr +- ▁nibyo +- âtre +- ▁distinguishing +- ▁rotha +- һында +- ɣel +- tendant +- म् +- ▁якіх +- ▁ஆனால் +- ▁easiest +- ▁vollständig +- ▁아는 +- ▁лӱ +- ▁uppermost +- выр +- ▁Twi +- ▁Fund +- ▁Town +- ▁industria +- ภาพ +- круг +- 드라 +- ▁annoying +- gema +- issaient +- ັ +- ubugingo +- ïda +- ▁ironical +- ▁politiki +- ▁scharf +- crum +- ▁halo +- ▁inspira +- guruka +- ▁сваё +- ▁batatu +- әһе +- ▁stupendous +- ▁serikali +- kuk +- ▁setmana +- ▁حالا +- girwa +- гавары +- ▁abstracted +- drome +- ▁mohammed +- ▁blockade +- ▁gehören +- چھ +- ▁interchange +- ааи +- ▁patrino +- ктор +- bäume +- ▁soldaten +- ▁sedert +- rimmed +- ▁درست +- ▁Sergi +- ох +- 府 +- ▁найбольш +- ▁natasha +- 型 +- ▁assisting +- ٹی +- ▁mistrust +- ▁يەر +- ▁friction +- ▁barrister +- charge +- ▁benefici +- ▁булыр +- 们 +- ▁iroquois +- ▁troubling +- hinde +- ▁pali +- ▁squint +- ία +- ▁traves +- يات +- плат +- ▁barton +- qual +- ▁planter +- そ +- ▁frequency +- ▁forefathers +- ▁unhappily +- ▁hubiese +- ▁convoy +- ন্দ +- ▁seguito +- ▁Rosa +- ▁starboard +- ▁durham +- floor +- ▁calculate +- ▁dauphin +- ▁jednak +- 
▁offenbar +- 이면 +- inici +- ▁vehemently +- casse +- нән +- ્ય +- Umwami +- 死 +- ▁пачатку +- ▁crumble +- ▁inflamed +- ▁manifestly +- ▁marianne +- کەم +- ▁displaying +- ▁deciding +- vance +- ▁Spring +- ▁Iran +- ▁estudis +- ▁johann +- ▁кок +- ▁Beau +- ggu +- ▁proximity +- ▁circling +- ▁screech +- ▁vouchsafe +- ▁먼저 +- 的一个市镇 +- ▁filosof +- ▁dennoch +- ▁schwach +- ▁quedó +- ▁endeavoring +- ▁şey +- garra +- bewußt +- ▁florentine +- ▁reinforcements +- 밤 +- ▁уже +- ▁strayed +- ▁proprio +- ء +- dringen +- ▁publishing +- ▁ferocity +- 母 +- ▁avarice +- ▁winked +- ▁pompous +- ▁nyamara +- strijd +- ▁뭐가 +- ▁крок +- ▁наступны +- ▁repulsed +- ▁돈을 +- นิ +- сія +- ▁gusoma +- ▁appro +- ҙар +- ޝ +- ể +- 取 +- ▁fanciful +- 地区 +- mbira +- ್ಯ +- бле +- ▁spielt +- ▁voltaire +- ▁gable +- ▁dros +- รี +- 野 +- ▁inoltre +- ▁pagar +- chino +- kolebwa +- ▁봤을 +- morning +- ▁promen +- ▁diferencia +- eña +- minister +- টে +- ▁développe +- ▁Costa +- ുന്ന +- ▁frantically +- schläge +- ▁adelante +- tinya +- ▁pounding +- ▁desist +- ifika +- чыя +- ▁undertone +- ▁hostilities +- เมื่อ +- ታ +- ▁sceptre +- ক্ত +- ▁Hund +- ▁음식 +- ▁adroit +- ▁verstehen +- ▁patsy +- рҭ +- ▁tilted +- ▁Bwe +- ޔަ +- ▁instituted +- ական +- ɣal +- ▁engineering +- ▁albany +- ▁flamme +- ▁getroffen +- ▁materially +- bani +- ја +- ▁manuel +- さ +- ▁stamping +- ▁conocido +- mbaraga +- ▁Patr +- ▁chisel +- 스트 +- лоп +- ▁southeast +- ▁swarming +- omwana +- grup +- ▁Ramon +- somesa +- ▁Gü +- ▁sluggish +- ▁sampson +- ▁overpowered +- ▁невялікі +- ▁Spanish +- ▁symptom +- ▁indolent +- ▁булһа +- ▁deane +- ując +- ستا +- ภา +- vět +- ▁από +- ▁occupies +- ению +- ▁terrier +- ħa +- ▁grated +- ▁pussy +- ▁watoto +- ▁insistent +- ▁armchair +- reihe +- ▁medden +- ตา +- ▁freak +- ▁eternally +- வர்கள +- ▁spécial +- ▁дакумент +- ഗ +- ▁précis +- ▁mesos +- لغان +- लि +- ▁веле +- ▁вочы +- ▁확실히 +- 께 +- ▁mozart +- 岛 +- ительно +- ▁tão +- ▁다르 +- ▁lakini +- ▁Nella +- ▁oído +- льная +- ▁iawn +- ▁stepan +- ▁chew +- ▁cleverness +- ▁maladie +- ▁Йо +- ▁wailed +- ør +- ▁recited +- ▁propietat +- ދާ +- ▁walikuwa +- ▁heureuse +- ▁intimated +- ▁depicted +- ▁plug +- ▁suppression +- risch +- ӓн +- ▁cuales +- ▁éclat +- ▁Tial +- ▁ekzist +- рана +- ▁reverent +- ▁metaphysic +- ajuda +- ▁detested +- អ +- ▁découvert +- ▁schicksal +- ▁لەگەڵ +- 샀 +- ▁tampoc +- ▁yelli +- ▁lazily +- ▁breit +- ▁relic +- ▁usher +- ▁अस +- dür +- ▁Motor +- ▁downcast +- ▁estão +- ▁inseparable +- ▁loĝantaro +- ▁oregon +- ▁adela +- جد +- ▁Mach +- ийг +- saki +- ▁dufite +- ▁beschäftigt +- 局 +- ▁сабой +- ▁schwierig +- ▁cray +- ления +- reiten +- ibihugu +- чин +- ▁pamp +- วา +- ▁appris +- ▁awkwardly +- 社 +- événement +- ▁இல்லை +- ▁aggravate +- ▁zwarte +- ▁surveying +- Qua +- ▁relat +- major +- ▁répét +- latina +- ▁discerned +- betrieb +- лик +- 높 +- ▁boisterous +- ▁unchanged +- ▁meditated +- лых +- ▁tush +- ފު +- ▁Wohn +- ▁disordered +- hib +- ▁слово +- ▁impotent +- لای +- ▁pratique +- ▁plural +- ▁portar +- ▁fervor +- ▁Julià +- kuuma +- ▁둘이 +- ▁betraying +- ▁Präsident +- ▁terstond +- மைய +- ▁diferenc +- ▁kurwanya +- ▁snort +- ▁나오는 +- ▁geological +- stemming +- ahise +- ▁upande +- ▁clerical +- ▁따로 +- ▁taxation +- ença +- ▁arabella +- ىنىڭ +- ▁Kenn +- ▁solcher +- umukinnyi +- ▁вышэй +- સ્ +- ▁rumbling +- ▁beweging +- ▁obligat +- levé +- ▁langzaam +- ▁antioch +- ▁nacia +- ▁acquiring +- ▁erfolgreich +- ހާ +- ▁niveau +- episodi +- spitze +- ▁stepmother +- broch +- mentioned +- ستم +- ▁Бына +- ▁ratio +- mpton +- ுக்க +- ▁Kunst +- uwd +- fior +- åg +- ▁Wirtschaft +- mugoroba +- ▁меня +- ▁nshya +- ▁evitar +- écoute +- ▁που +- ▁цар +- 
▁supporters +- mitima +- ▁höchsten +- ▁congratulated +- ▁nourri +- ิง +- schriften +- ▁conceded +- ている +- หนึ่ง +- ▁sydd +- ▁Farners +- ▁wrapping +- ▁gym +- ▁bram +- ▁grandpa +- ▁sophist +- ▁hydro +- 증 +- ▁fotografi +- ▁giro +- ▁rencontr +- قدر +- ▁sozial +- ▁niña +- ▁saisi +- ▁miembro +- ▁kunywa +- glasses +- lisi +- ור +- сыл +- ▁tumbler +- คํา +- ॉ +- ▁prerogative +- рес +- 끼리 +- ▁persistently +- ▁umutungo +- ▁ostr +- awake +- ▁chlor +- ▁pferde +- iéndo +- ▁mended +- ▁Беҙ +- futa +- ને +- ▁clapping +- ▁statisti +- ▁stanza +- бере +- ▁objeto +- ▁watchman +- ▁sparing +- ▁ecclesiastic +- ▁foaming +- exdem +- ▁marchand +- ▁gevonden +- ▁strategy +- 片 +- ▁douleur +- ▁اتاق +- gendere +- ▁Тым +- ▁charleston +- chow +- ▁huck +- ashyizwe +- ▁asiatic +- ▁cael +- ▁கூ +- ▁studious +- ▁licked +- ▁minerva +- ▁deplorable +- wärtig +- ▁bedeutet +- ▁persönlich +- ▁müßte +- މުން +- ▁fenwick +- ▁sickening +- werfen +- ▁baldaŭ +- ▁joueur +- ▁unwise +- ▁tariff +- ▁abhor +- َعْ +- ▁süd +- ▁feina +- ನ್ನು +- ▁unaccustomed +- ▁fireside +- ▁legacy +- ▁assassination +- ▁austere +- ▁tentative +- ▁wote +- ▁выкарыстоўва +- ▁presta +- ▁bruno +- ▁schudde +- ▁гана +- vable +- 끊 +- ▁alvenis +- ▁pricked +- ▁tearful +- ▁huron +- neil +- ihariye +- ▁그런데 +- ▁königin +- നി +- gereza +- ▁precarious +- 於 +- ▁plotseling +- ▁adjutant +- ▁Reich +- ▁masked +- ▁ignoring +- ▁professeur +- idir +- ▁scud +- ូ +- ▁aeroplane +- ▁pedagog +- 造 +- ▁département +- ▁Reserva +- ▁hawkins +- 휴 +- ▁здаецца +- ▁provocation +- ▁بده +- stube +- grenze +- Ş +- 곳 +- 村 +- ▁längst +- ▁Waliwo +- ▁pourrait +- ▁Länder +- ▁사고 +- ▁corro +- ▁coche +- ▁impostor +- ▁carroll +- urubyiruko +- ▁waxed +- ▁reproduction +- ▁vollkommen +- occupa +- ્ર +- ▁postponed +- umweru +- ▁contraire +- ▁dritte +- ▁bloodshed +- worthy +- judg +- shinja +- ▁humiliating +- capacit +- ▁rallied +- ▁крыху +- lungi +- ▁moslem +- ▁галін +- ▁Virginia +- ▁versetzte +- ▁teori +- ▁wandte +- ▁fiscal +- ▁employing +- 습 +- ▁luxuriant +- ▁unsatisfactory +- ▁ياخشى +- ېرى +- ▁constancy +- ▁glor +- ność +- ▁penetr +- iteguye +- ▁forgetfulness +- คล +- ▁kugura +- ▁availed +- zida +- 第一 +- ▁jose +- தாக +- ▁presumably +- ▁grumbling +- ▁싶은데 +- ▁kep +- ▁beguile +- ▁handsomely +- ▁unglücklich +- ▁colin +- ▁specie +- ▁Filip +- ▁задач +- ▁cuthbert +- ▁biashara +- ▁sleek +- ▁cristal +- gebung +- ▁acquitted +- ▁Northern +- leɛ +- ▁oppress +- duizend +- ▁distract +- ▁От +- сістэм +- ника +- َى +- ▁kangaroo +- ▁synagogue +- ▁прынцып +- ▁emerging +- ▁hübsch +- ▁Mireia +- vloed +- byeyi +- ▁맞는 +- ▁dades +- ▁괜찮은 +- command +- ▁puss +- ▁gma +- ▁கொண்டு +- ▁foolishly +- ▁juego +- bitekerezo +- ▁quack +- วง +- ▁Doch +- urugendo +- viennent +- ▁이러고 +- ▁christina +- ▁Tür +- முடி +- னும் +- மும் +- 叶 +- ▁sullenly +- ▁이러면서 +- ▁cossack +- ▁penalties +- ▁élevé +- ▁Anne +- ▁tightened +- ▁campagne +- ▁bwabo +- ▁vulture +- shouldered +- ▁virgil +- رخ +- வள +- сэн +- เบ +- ▁Turn +- ӓш +- ishijwe +- ▁antics +- ▁boudoir +- กําลัง +- ▁abreast +- ▁모르겠는데 +- ▁sababu +- tandukany +- elijkheid +- ▁monatoj +- lustra +- yemeza +- assum +- ▁горадзе +- ▁dorrit +- ▁nettle +- iker +- чную +- ▁Fan +- ▁claro +- чем +- ▁participar +- ▁Netta +- ކީ +- ığı +- tambula +- ▁همین +- ▁gewisse +- ▁foolishness +- ▁aparece +- ฮ +- 폰 +- ▁barked +- ▁tumultuous +- жда +- ▁johnston +- 国家 +- balala +- ▁dawson +- ▁ئاي +- віць +- ▁вуліцы +- ▁bothered +- ԥс +- ▁llum +- cheza +- праграм +- ▁reprend +- ề +- ▁inconceivable +- ▁vosotros +- ▁얼굴 +- ▁mislead +- ▁thrive +- ▁cheated +- ▁mightily +- ▁meses +- qat +- ▁fatigued +- ▁caspar +- جى +- ▁verandah +- ▁algemeen +- 
ёў +- ▁sceptic +- گشت +- ▁Congo +- ▁principalmente +- هد +- ▁venera +- ▁cuyo +- ▁criado +- стру +- ▁wszystko +- 멀 +- ▁théâtre +- ▁waterfall +- رُ +- kı +- ▁recede +- qqel +- color +- ▁squall +- bieten +- ▁Mehr +- ▁eugenia +- ▁erfahren +- ▁embroidery +- ▁cleverly +- ▁Dopo +- pter +- ▁sinclair +- ▁footprints +- ▁trägt +- ওয়া +- ▁outright +- ▁squeal +- lector +- ▁Rang +- ▁самае +- ▁тых +- ▁пур +- ▁exaltation +- ▁ҡалды +- ição +- ähnlich +- ▁serena +- ▁Sprache +- мір +- hri +- ▁pequeña +- ീ +- ▁humbug +- 之一 +- ▁праблемы +- ▁Zahl +- ▁нашых +- sammelt +- ▁Kara +- ▁каманд +- ▁vestit +- ▁valle +- ▁Route +- ▁quickness +- ▁couvert +- ▁Kial +- ▁आणि +- ▁observant +- ▁chère +- interno +- ▁axis +- ▁confessor +- ▁gayly +- ▁diagram +- ▁vornehm +- ستە +- ▁continuously +- vich +- ▁arrivé +- ▁eaves +- ▁hulp +- 능 +- alda +- ▁disconcerted +- ▁juncture +- ▁incarnation +- глу +- فل +- sthetic +- யம் +- ▁laughingly +- article +- ▁nailed +- ▁Buri +- ▁Мі +- ▁assemblage +- ▁ceasing +- ▁ręk +- ▁Eigen +- ▁Social +- ناس +- ▁그쪽 +- ▁прыклад +- ▁differing +- ▁corrupted +- ▁hassan +- ▁blickte +- ▁resolv +- ▁Мар +- ▁incoherent +- ▁straggling +- ▁mosaic +- ▁policia +- ▁고민 +- ▁Aql +- ▁навуча +- 너 +- cji +- ▁assyria +- ▁auction +- ▁laurence +- ▁yiḍ +- ▁dangling +- ▁Plej +- ▁plumb +- वर +- ▁Aŭ +- ▁Guillem +- ▁pinnacle +- ▁sanctity +- ▁withheld +- ▁finestra +- ▁갔어 +- ▁classifica +- ▁Four +- ہوں +- ▁comune +- ▁resisting +- ▁نق +- ▁flicker +- mér +- ▁беларус +- ▁badger +- ▁nkaho +- ▁glint +- ▁potomac +- 퍼 +- ▁disabled +- ▁présence +- ▁quitting +- この +- ▁wrest +- ▁legisla +- US +- ▁بزن +- ▁guile +- ▁semaine +- ▁tugend +- ҿ +- semblant +- ▁deinem +- ▁sterling +- ▁reformed +- ▁Жы +- ▁criticise +- ▁dinero +- дов +- ▁característic +- ▁carretera +- ▁flattened +- 드라고 +- ▁boswell +- ▁exclusion +- ▁devoir +- цо +- vî +- mugenzi +- ▁compelling +- ▁incidentally +- teganya +- ▁agatha +- ▁Gel +- ▁schwarzen +- ▁новыя +- ▁Zum +- ▁facilitate +- ▁tenido +- ▁fitness +- hivern +- ▁droll +- ▁niche +- unze +- Ü +- letto +- ▁encroach +- ஐ +- ▁쪼금 +- ▁flexible +- ▁foretold +- ▁частцы +- ▁kwizera +- ▁voeten +- ▁fitful +- gurira +- ▁уке +- ▁simile +- ▁хәл +- ▁kennel +- ▁applauded +- möglich +- ▁graphic +- fleisch +- نَا +- ▁Пу +- ▁semana +- вали +- arzt +- ▁anxieties +- ▁fluctua +- ▁olhos +- ▁steak +- ▁wintry +- த்தின் +- ▁pecho +- ▁snail +- umbre +- ▁vreemde +- ▁venga +- ▁사는 +- ത് +- ▁rigorous +- ▁chatting +- ▁боло +- 쉽 +- ▁немагчыма +- 便 +- ▁wiederholt +- kadde +- ▁gereed +- பின் +- èria +- স্ট +- ▁strolling +- ▁comical +- 밥 +- ▁Test +- ▁okw +- 라면 +- ▁Brand +- ▁rodolphe +- ▁бяс +- ▁Tin +- ცი +- élection +- zika +- icamente +- mette +- łę +- ▁ekzemple +- ubucuruzi +- ▁intoxicated +- ▁مثل +- ▁foresaw +- ▁Produkt +- ▁malgrandaj +- ▁safeguard +- ▁канца +- głos +- isait +- ▁llevaba +- ▁bewildering +- ▁huguenot +- ▁unwonted +- akati +- 배 +- ▁shaved +- ▁razor +- ▁befell +- ▁philosophic +- ▁ntashobora +- ▁omission +- ▁retaining +- khu +- ▁Бул +- ▁lionel +- ▁sufiĉe +- ▁compañero +- ▁leopold +- cello +- abaturage +- ▁Нават +- ▁binne +- 생활 +- atac +- ▁дух +- ої +- ▁Nieder +- grä +- ▁rivalry +- ▁hyde +- метр +- weight +- èche +- organise +- ພ +- dokument +- แบบ +- 源 +- ▁contingent +- ▁galilee +- 可能 +- ▁прыма +- ▁cyumweru +- ▁Jetzt +- uchte +- ▁Hano +- ▁пайшоў +- ▁заўва +- ▁acerca +- arrel +- ▁ignora +- სა +- ▁housemaid +- ▁contending +- gift +- ▁meditating +- ▁provoking +- 션 +- ▁아닌데 +- ▁Вер +- '500' +- ▁savait +- ▁specifically +- hindura +- zamuka +- ▁verließ +- யோ +- iɛ +- кӧ +- hook +- ▁martian +- ▁Акрамя +- ▁architectural +- begotten +- ▁mantelpiece +- ஞ +- 
▁diente +- ▁frightfully +- ▁마시 +- ött +- ▁Wil +- ้ง +- ▁amply +- sprung +- ▁weiteren +- важа +- ▁кары +- ▁puzzling +- ▁kerak +- ▁ministro +- ▁pensamiento +- unterricht +- ▁shaving +- นี่ +- ▁mohammedan +- ▁옆에 +- ี้ +- discrimin +- ▁derecho +- თა +- ▁redoubled +- ▁Déu +- ssant +- ▁sabia +- بط +- ใ +- яўляюцца +- ▁Yessefk +- ▁mahogany +- ▁schweigen +- ▁swedish +- ▁proporciona +- няя +- ών +- ▁retira +- rigg +- ▁separating +- ▁monopoli +- ەکەم +- ▁சில +- ▁사이 +- ▁деч +- 氏 +- çant +- ası +- ▁mão +- ▁alban +- effa +- ▁Leben +- cele +- ▁pluto +- ▁astounded +- ▁unlawful +- édition +- ▁kečč +- kenewe +- зла +- ▁crafty +- ▁correspon +- powiedz +- ▁cherub +- ▁выраб +- Š +- ▁хачу +- ▁bullock +- ▁leider +- ▁então +- ▁protesting +- eẓra +- ▁elias +- ▁Valley +- ▁tendre +- น่า +- larning +- ▁நாடக +- 극 +- ▁Card +- 游 +- kämpfe +- ▁suivant +- giriye +- ▁stoutly +- насцю +- ▁fremden +- ▁demeure +- ▁boasting +- ababa +- ▁operates +- ▁بازی +- ▁schle +- ▁그랬어 +- วั +- ▁mkuu +- өм +- ▁трыма +- ़ +- ▁audacious +- ▁covetous +- ▁statistics +- ▁dismayed +- lynn +- ▁suspiciously +- ▁conveyance +- brä +- amant +- nię +- өрө +- ▁асноўным +- ▁lilac +- ▁quantitat +- ▁oczy +- ▁franchise +- selmad +- ▁있어서 +- ▁matern +- ▁unbearable +- ▁jeffrey +- ▁Wakati +- ες +- нуцца +- ▁Рэ +- wyth +- ▁kehrte +- ohereza +- ▁harassed +- genzi +- ▁fervently +- ▁compon +- ▁끝나 +- ▁acest +- ▁commodore +- ▁implacable +- ▁percentage +- ▁پیدا +- ию +- ▁olma +- ikiriza +- errant +- ▁lluita +- ▁좋아해 +- ▁دەپ +- ҿы +- варот +- ▁يوق +- ▁terrifying +- ▁premio +- ▁panama +- ▁obstruction +- ଦ +- ▁charakter +- forgetful +- wurf +- 했거든 +- ▁imperfection +- เด +- ▁Italian +- ▁swarmed +- ▁uneven +- ▁муж +- ▁barney +- wold +- ىشى +- chiq +- ށ +- ▁econòmic +- ▁Беларус +- ▁gaarne +- ▁ehemalige +- educated +- ▁mesura +- ▁Kö +- ▁Stelle +- ▁jesse +- ภ +- ▁indefinitely +- ▁organize +- ▁Schwa +- чку +- ibanze +- іна +- kulembez +- objectiu +- ▁fácil +- ▁музыкі +- kanwa +- ▁drone +- ▁atomic +- वे +- ▁obuta +- sòl +- huka +- ▁defender +- verwandt +- 잔 +- ▁blurred +- Opera +- ▁Look +- ▁тудо +- ęż +- beshya +- ▁leeren +- ▁bombi +- ович +- ▁puffing +- ▁кня +- ▁pestilence +- ▁unintelligible +- wasser +- ▁toestand +- ▁analogous +- ▁carnal +- ▁herzog +- ▁під +- ▁tying +- ▁mogen +- ▁becky +- ▁llista +- ▁réuni +- ▁Ин +- ▁windsor +- ▁eenigszins +- ▁intrinsic +- ▁knuckle +- ▁satisfactorily +- 落 +- ▁elijah +- ▁değil +- ▁scold +- ▁presided +- шаш +- ▁داره +- ▁cinque +- ▁Isi +- dług +- ▁انجام +- nyubako +- พวกเขา +- 客 +- warte +- ▁кажуць +- ىتى +- dollar +- ▁chink +- ▁reprove +- ▁nähe +- ▁borst +- থা +- loof +- され +- ▁sard +- ▁vivacity +- ▁сусед +- ▁respondis +- ▁brutus +- ▁moody +- oiseau +- ▁ferma +- ▁fleeing +- ▁frans +- ▁outwardly +- keza +- ▁científic +- ▁unwelcome +- ▁trustworthy +- ▁París +- ▁улар +- ▁шулай +- sacrificing +- ніку +- ▁Good +- ▁goldsmith +- eetaaga +- ▁будучы +- ▁midway +- ▁gravy +- ▁harrington +- ▁tercera +- ▁Imir +- დე +- ▁moh +- ▁draußen +- ҕ +- ▁kathleen +- 育 +- ▁georg +- ▁rajah +- ▁voyez +- ▁Titel +- ▁einiger +- 보다는 +- first +- century +- ሰ +- 숙 +- ▁capricious +- 呢 +- ▁aandacht +- ӓл +- ▁timidity +- гле +- သော +- ▁caval +- logique +- ▁messieurs +- ▁Johnson +- ▁povo +- ▁rosalind +- ▁doorstep +- ▁hieß +- 돼 +- λο +- ▁terrain +- riez +- ĉambro +- ▁wesentlich +- fisch +- ▁bradley +- ▁الان +- ▁calico +- ▁aerial +- ▁nathan +- connor +- ▁parasite +- ตาม +- ▁swiftness +- lje +- ▁periodista +- ▁Avenue +- ▁disitulikiti +- 走 +- ▁diplomacy +- ρε +- juring +- ström +- ▁restoring +- ▁Parizo +- ▁еще +- ▁faccia +- ρί +- ▁scolded +- ▁scolding +- ▁Over +- ▁willingness +- 
günstig +- ▁шинча +- 승 +- ▁stammt +- фра +- ▁roller +- ▁profuse +- ▁Ferran +- ▁debtor +- ▁foresight +- ▁كې +- ▁McC +- َنْ +- ▁kumw +- 들을 +- ▁barefoot +- 봉 +- ▁zufrieden +- ▁stratagem +- ▁synonym +- ▁ballet +- ▁compliance +- ▁bezonas +- گەر +- ▁fasado +- ▁fala +- ▁propia +- ▁forthcoming +- ຍ +- ▁rudder +- يۇ +- estiu +- patikana +- approche +- ▁pista +- ουν +- ▁caer +- yec +- schafft +- zimu +- ▁Яшчэ +- 每 +- ▁reflex +- ▁preparatory +- ▁якасці +- лот +- ▁embodied +- exécut +- ▁быть +- ▁چەند +- ▁mkubwa +- 몰 +- vith +- huru +- oppo +- ệ +- ▁overhanging +- 政府 +- ▁aesthetic +- ▁pageant +- ètica +- ▁lagoon +- ▁comple +- satisfied +- த்திர +- ▁hurd +- ▁funció +- ▁battlefield +- ▁grati +- ▁koloro +- ▁Komp +- جَ +- babwira +- ▁Academy +- ▁aproxima +- ▁inexorable +- ▁невядома +- 짐 +- schluß +- 华 +- ▁pooh +- ▁Melmi +- wiɣ +- ▁miedo +- rizo +- ▁стары +- ngwe +- ▁sebastian +- ▁wrench +- 류 +- ▁friendliness +- verbindung +- ▁저기 +- ashize +- ibiti +- ▁doleful +- ḍan +- ▁kutokana +- digd +- ▁праўда +- ▁venas +- ▁applaud +- ▁краін +- ▁gewone +- ▁Forma +- 인이 +- 떠 +- zerstör +- ▁bobbsey +- garagaje +- ▁може +- ▁없다 +- ▁liberality +- ▁overthrown +- ▁казаць +- ▁israelites +- ábamos +- ▁arbust +- ▁barrow +- ▁brewer +- ▁personatge +- ▁çi +- дчас +- ▁raving +- elimin +- ▁posi +- ▁pisa +- 촌 +- ▁дапамог +- indege +- ▁courteously +- ▁llança +- ▁Latin +- fika +- ▁heiligen +- ▁Йә +- ▁unanimous +- 請 +- ▁formació +- முத +- ▁дазвол +- ▁schrecklich +- ▁commended +- ▁inexperienced +- ▁caza +- ações +- ▁Grau +- ▁pregunt +- ▁ствара +- дү +- ▁애가 +- demonstr +- шып +- ▁geneva +- ▁charlemagne +- ▁shrivel +- ▁tränen +- ▁cascade +- ▁tyrann +- ▁pianist +- ▁cleek +- ▁없는데 +- ▁ўдзел +- رض +- ▁scorned +- ▁unmöglich +- 當 +- ▁wharton +- ▁lobster +- ▁verhaal +- ▁réussi +- ▁tidak +- ▁presencia +- ▁drawback +- Т +- ▁bacu +- ▁Кар +- ▁elaine +- ▁pleine +- flow +- ▁broil +- ▁hullo +- plaça +- ਇ +- ▁connecticut +- ▁impediment +- ▁overturned +- ▁ancestral +- ▁oblique +- ▁dived +- мәгән +- ▁lessened +- ▁godmother +- ▁inspected +- ▁typically +- ▁construit +- ▁Senat +- ▁moje +- ▁niagara +- ल्या +- wią +- ▁beneficent +- ▁cipher +- leiten +- ▁maritime +- shyiraho +- ரும் +- ທີ່ +- ▁adherents +- ▁obertures +- ▁flounder +- ▁civilised +- Sch +- ני +- ▁idées +- ▁growling +- лась +- 끄 +- 識 +- ▁sinjoro +- ḥi +- вядзе +- steuer +- ▁tipped +- ▁prag +- ▁surname +- ▁diversity +- ▁aperas +- ɣeḍ +- ▁financier +- ▁Insel +- ື +- ▁Bedeutung +- ▁dissipated +- ▁gikorwa +- ▁antiguo +- accueil +- верс +- ▁selves +- ▁Totes +- ▁unglück +- ▁Zug +- ▁conformity +- গা +- ▁publique +- ▁blev +- ▁einzelnen +- idge +- ىگە +- ▁рэгіён +- 착 +- ▁irritable +- તા +- ▁priceless +- ▁ڕۆژ +- ▁biddy +- ங்களை +- ▁gaston +- ▁glimmering +- шок +- ▁urboj +- ▁އޭނާ +- ▁antelope +- ▁grooten +- ▁occult +- ▁loathsome +- habitude +- ▁randolph +- ▁mournfully +- ▁calais +- ḍeɣ +- ▁restlessness +- ▁fleece +- ਅ +- ทุก +- ▁물론 +- ▁nuovo +- bottle +- ▁razón +- ▁lachend +- ▁socket +- ंत +- ▁건가 +- ▁abolish +- ▁refl +- yanyu +- ▁caleb +- ▁kunlabor +- અ +- ▁توانید +- ▁froh +- ▁navarre +- ▁detall +- ▁konsiderata +- ڵی +- ▁чтобы +- ț +- anunci +- ▁crank +- போல +- foil +- ▁nyingine +- ▁danish +- ▁imatge +- ೊ +- ▁particulier +- ▁suspension +- ▁encompass +- ▁хвілін +- ▁zoeken +- ▁carrington +- تاب +- ▁лічы +- خۆی +- ▁cobert +- ▁Encara +- ssait +- ▁directeur +- zahlen +- advocat +- ▁voraus +- ▁بىز +- ▁일을 +- ▁livelihood +- ▁dispar +- ▁뭐냐 +- ▁zorn +- guka +- État +- саў +- ▁beauté +- தம் +- ▁perturb +- ▁sabía +- thier +- ишет +- ▁tutlayt +- ▁저녁 +- Hospital +- ื่น +- ▁erzählen +- bavubuka +- аваныя +- genossen +- 
existence +- ▁newgate +- integra +- ▁composing +- stimul +- ▁traje +- efecte +- ▁jerome +- ▁whaka +- өҙ +- kład +- ▁பூ +- ▁Public +- gregor +- ▁mutually +- لُ +- extrême +- ▁Zanzibar +- ▁Mundial +- ▁재미 +- 風 +- ▁того +- ▁brenn +- favor +- ▁scheu +- ▁بودم +- ▁caràcter +- ▁vacancy +- ▁savi +- шьа +- ▁demostra +- ▁volgenden +- ▁아니면은 +- ▁blithe +- вымі +- ▁copious +- ▁konstrui +- 服 +- யான +- ▁iedereen +- entrer +- ▁glob +- bilder +- ▁momentous +- blooded +- defined +- ▁Ли +- ▁mutiny +- ▁devouring +- ggett +- ▁blinking +- ▁selecting +- ▁الع +- ▁digestion +- cini +- rów +- ▁өз +- meaning +- ▁yambaye +- ▁kigo +- ▁При +- ямі +- ▁안에 +- fabrik +- ▁unjustly +- िया +- _ +- ▁مادر +- ▁قبل +- ที่จะ +- ▁royalist +- ▁amplia +- ▁rigor +- bidde +- varen +- ▁беларуска +- ▁drunkenness +- 뜨 +- 베 +- 计 +- ▁gresham +- ▁вызнача +- strecke +- ▁destinies +- ▁маё +- ▁가면은 +- ▁tiro +- ▁친구가 +- дадзены +- ▁Maybe +- ensimbi +- remy +- ▁ryose +- morf +- ▁prostitute +- ▁zwanzig +- hàbit +- đ +- 具 +- ▁چقدر +- ▁shameless +- ▁defective +- ▁budget +- zijn +- ▁melhor +- ▁larmes +- ▁Icya +- ▁Dé +- ▁markham +- คา +- ڏ +- ፣ +- ▁unbekannt +- ▁혹시 +- ▁dilemma +- ▁spitz +- ologische +- ▁орган +- ▁pounce +- ▁legislator +- ▁Праз +- لۇق +- ▁Francis +- ▁lingua +- ología +- ▁translate +- freund +- ▁드라마 +- ▁coughed +- ▁mitjança +- ▁infinity +- ▁στο +- ▁instructive +- ▁robbing +- ▁jardí +- ▁cleanliness +- saidia +- ▁älter +- ísima +- ▁occup +- ▁Emil +- ▁nächste +- Oce +- ド +- ▁applicable +- ▁rheumatism +- ▁territoire +- 萨 +- 从 +- ▁größere +- ▁தலை +- رة +- ร์ +- ▁Ehe +- هایی +- abbé +- völker +- ृ +- дзяржаў +- ▁thermometer +- ლებ +- ▁Kind +- ▁gemüt +- ▁widened +- ▁discours +- اصل +- 存 +- ▁چۈش +- adura +- ▁muller +- reaching +- ▁bashobora +- ▁parched +- ▁giggle +- ɣar +- ▁exposing +- بوون +- geordnet +- ▁eblas +- ผล +- abanyeshuri +- ▁lebhaft +- ▁stiffened +- ский +- adresse +- ▁okufuna +- تور +- czego +- ▁subjective +- pasi +- ▁Since +- ▁Our +- ▁gefahr +- 빨 +- ▁dificult +- ▁buddhist +- 電 +- ▁analyze +- ▁licence +- ▁Geld +- ▁furchtbar +- ▁appropriated +- schade +- ▁부분 +- ӣ +- 북 +- ▁тэатр +- بات +- ▁ensuing +- ▁collo +- ▁Thi +- ▁groped +- ▁spurred +- ▁அவர +- ▁Klein +- accord +- ▁Hast +- прос +- ▁bryan +- ▁splashed +- тэры +- umwana +- ▁gemakkelijk +- ▁proksime +- કે +- ▁crackling +- ▁veneration +- ▁metaphysical +- ▁Passa +- ▁얘기하 +- amore +- ▁yahise +- sätze +- ▁refrained +- weḍ +- ▁Wolf +- ғо +- ▁Бар +- ▁Fest +- stoßen +- ▁arrogance +- ▁odysseus +- ▁crumbling +- ▁caius +- хвал +- avocat +- λα +- ۇرۇ +- ▁ponder +- лығы +- Wei +- ▁알았 +- ઓ +- ▁intrusted +- ▁tangible +- 应 +- ▁bombard +- આ +- ▁mallonga +- ▁asteroid +- ▁renewal +- kontakt +- ▁woollen +- ுவது +- ▁qualitat +- ▁froth +- ḥeɣ +- ilgan +- gelegd +- ൾ +- ▁Anschließend +- ▁hetzelfde +- ▁infallible +- ろ +- 奥 +- ▁erinnert +- ▁umher +- geraho +- driver +- ▁seguro +- streich +- ▁při +- ▁teraz +- ▁같고 +- ▁revista +- ۆر +- ▁brutality +- ▁supra +- ▁terminate +- ▁sonata +- ގު +- ▁сэнс +- унда +- spoon +- み +- 講 +- сол +- ▁frugal +- هِ +- żon +- ▁Как +- ▁gevoelde +- ▁Tab +- ▁crema +- رات +- ▁supple +- ▁sembra +- ▁ursprünglich +- ▁verborgen +- 才 +- ▁cambio +- lishi +- ▁clinch +- ▁Hotel +- ▁spon +- ær +- ▁violated +- ▁forecastle +- లో +- ▁approximately +- ▁legally +- ▁tableau +- ifier +- ରେ +- erekeza +- ▁матч +- ▁espada +- ▁Nicht +- ଲ +- ▁República +- ▁pregnant +- ▁shrubbery +- ▁unarmed +- ▁journée +- ুল +- partij +- ▁yenye +- tegeeza +- evolució +- ▁ڕوو +- ▁voorkomen +- вши +- ▁Jana +- isola +- ▁Brook +- ribb +- ▁übrig +- consciousness +- ▁meditate +- alliber +- ▁Kolo +- 說 +- ▁Tek +- ▁grootste +- ▁الب 
+- ▁povus +- eggwanga +- ▁wrestling +- λι +- buja +- ▁Creek +- ▁coveted +- ▁sourire +- ▁oblong +- ▁알지 +- ▁starb +- ্ৰ +- chev +- 樣 +- 土 +- ▁따라 +- ▁сезон +- ▁Yiwen +- ▁работы +- ▁kruroj +- ▁Vallès +- ▁entreaty +- ▁Manuel +- ▁danach +- ▁chinaman +- ▁recog +- ▁என்பது +- жӹ +- icyubahiro +- ▁anarchy +- ▁trend +- peta +- ▁maul +- ▁mwezi +- ▁popolo +- avance +- ▁gallantly +- ސ +- ▁resounded +- ▁acemma +- ▁schnee +- ▁repaid +- ▁klug +- ▁завод +- ▁sequel +- ▁feliz +- ▁bunting +- иться +- ▁فرا +- ▁scissors +- hartig +- ▁harvard +- ிருக்க +- ▁dyed +- ▁nominated +- ▁advert +- دانی +- ▁icumi +- ▁servile +- ▁occupant +- ▁reducing +- ▁cron +- länge +- вр +- ▁wavered +- épi +- ▁accede +- ▁Kata +- ▁semejante +- ▁mujyi +- ▁паста +- ไหม +- ▁candlestick +- ▁voyant +- ▁heightened +- ▁juliol +- ▁twitter +- ungura +- tuufu +- ▁evermore +- rasse +- ▁hört +- ▁scramble +- ▁zili +- ▁fastidious +- ▁cannibal +- ▁infection +- ▁anarchist +- ներ +- ▁neef +- জি +- ▁sueño +- finita +- ▁admiralty +- ಕ್ +- ▁respectability +- maan +- ţi +- ▁disparu +- படுத்த +- 级 +- ▁laissé +- ▁краіна +- ически +- educació +- ▁embryo +- 하자 +- ▁아니지 +- ▁Bou +- ▁wierp +- ▁Mutter +- ▁sanders +- ▁pisos +- 活 +- Τ +- ▁peregrine +- ▁усіх +- ▁thrush +- ▁scipio +- ▁droeg +- ▁clustered +- ▁unused +- 번에 +- ▁fianc +- ▁viento +- spezi +- panda +- ▁Ekzistas +- ▁gloucester +- ▁rehearsal +- ▁зямлі +- ▁considéré +- ▁hermosa +- ▁drudge +- ▁bête +- ▁withdrawing +- ▁paltry +- ▁supervision +- ▁widespread +- ▁stowed +- ▁ascetic +- മാ +- ▁кас +- Plat +- ▁casket +- โร +- trokken +- lako +- ▁brat +- ▁brûl +- 與 +- ކާ +- ▁betook +- ▁roemische +- ▁старо +- ▁Board +- ▁numberless +- ▁buggy +- gène +- ▁jakie +- аводле +- leuchtet +- why +- ▁ensign +- ▁havoc +- ▁mwene +- ▁stimulate +- ▁Fol +- ▁Kuna +- qbayli +- ▁무조건 +- ով +- முறை +- ▁яҙ +- ▁간다 +- ▁Scott +- ▁blinked +- ▁joyously +- ▁airship +- लो +- füllt +- ▁greu +- ▁authoritative +- ▁bungalow +- ▁deficiency +- ▁indictment +- 置 +- ▁누나 +- ▁witwa +- petu +- voller +- ▁쓰는 +- ▁Pac +- ▁bramble +- ▁germà +- exprim +- альных +- ▁treurig +- ▁cabal +- ▁Krom +- ▁blumen +- ▁Off +- ▁preside +- ▁Магчыма +- ▁unpopular +- ▁atrocious +- ▁маленьк +- czne +- havia +- られ +- ▁flapping +- 하는데 +- ▁Sí +- হি +- gifite +- ▁comprar +- дән +- ▁clem +- ▁survival +- ▁насельніцтва +- ▁jennings +- 始 +- ▁vorhanden +- ▁movimiento +- ▁mynheer +- ▁segueix +- ijnen +- ▁scraped +- ▁Reka +- ელი +- ▁dóna +- ▁nimmer +- ▁somethin +- ▁enriched +- ▁firmament +- 딩 +- ▁website +- ње +- ▁philosophie +- rozum +- ▁magnus +- ▁barbarian +- umutima +- ▁sharpened +- oportun +- ކ +- ەرە +- ▁loaves +- ▁zuvor +- லே +- broken +- ▁dismount +- ։ +- ڊ +- ▁països +- އާ +- 堂 +- ither +- ▁olivia +- ▁könnten +- ▁landmark +- owski +- solut +- ▁ferguson +- livyo +- ▁pondering +- ▁دڵ +- ▁தீ +- егеҙ +- ▁jarojn +- ropa +- ▁dissipation +- ▁Junior +- ләй +- ▁emigrants +- ▁esperanza +- ▁coordina +- acak +- ▁мене +- ök +- ▁kultura +- rwana +- ibita +- ▁Пакуль +- 곤 +- 牛 +- ▁infamy +- woon +- ▁flushing +- ▁gestern +- ▁mortality +- ▁consell +- ▁nellie +- ҙө +- ▁geweld +- ▁portico +- ▁obstinately +- ▁kubanga +- ▁аўта +- ▁snub +- ▁besuchte +- original +- небудзь +- ▁tapestry +- ▁хутчэй +- ▁innovation +- ▁restlessly +- ▁runaka +- ▁رسید +- crust +- ▁quarrelling +- ▁kufanya +- ▁blotted +- ára +- ▁lamentable +- ▁zeven +- ▁Omuntu +- Eulàlia +- ▁jufvrouw +- ▁drenched +- ▁límit +- ▁embers +- kenntnis +- ▁miteinander +- ▁bowels +- ahandi +- ▁originality +- ▁теле +- ▁oscil +- ▁reunion +- ▁spartan +- legio +- ▁exhibiting +- ែ +- ▁disconsolate +- ▁жывёл +- ▁insurgent +- ▁unbounded +- မှ +- ▁equilibrium +- 
▁Programm +- ▁jamás +- ▁Тэ +- ▁vertrouwen +- ▁okubeera +- ▁verdient +- qel +- Liber +- brique +- mbuga +- ▁exhortation +- ▁roh +- ▁politie +- ▁tribal +- ▁deceiving +- ▁gunpowder +- 研究 +- ▁detestable +- hê +- ▁besuch +- ▁fazer +- ▁airport +- ▁vinger +- σα +- ▁leafy +- ecaj +- ▁vertoon +- energia +- ▁rusa +- ▁Rais +- ĵoj +- ण्या +- ▁calamities +- ▁caterpillar +- ▁swagger +- ▁renkont +- ▁isaiah +- ▁딱히 +- ▁Volks +- 자리 +- ▁verlangt +- slavery +- ығыҙ +- ▁ruffled +- һына +- ▁fluent +- ▁idolatry +- ▁practising +- 根 +- 만큼 +- ▁tito +- weka +- ▁clase +- ▁aferoj +- ▁deplore +- ▁faisant +- window +- ▁interesse +- біт +- ▁midsummer +- ▁remporte +- ▁Manchester +- ▁sybil +- ▁framework +- ▁legde +- ▁thun +- ▁shrub +- ั่ง +- ▁ulls +- ▁Henderson +- த்திற்கு +- ▁strak +- ▁vicomte +- ది +- ▁feia +- ▁recipe +- ▁Eble +- ▁inferred +- ▁wengine +- دَ +- ರು +- samkeit +- ▁рэчы +- зіць +- ▁urwo +- весці +- rending +- ચ +- ▁compañía +- ▁몰랐 +- ▁smoky +- ▁Frauen +- afashwe +- ▁plurajn +- ▁underlying +- ▁ijwi +- pełni +- ▁Йы +- ▁parut +- ▁advertised +- ▁locust +- ▁llena +- trzyma +- ▁necessari +- hti +- ▁giraffe +- ▁locate +- örd +- туу +- ▁Mike +- ▁localities +- ▁clod +- ና +- ክ +- ▁protagonist +- ▁Guerre +- ▁convulsive +- ▁ведаў +- '90' +- erkannt +- ▁poche +- ▁copperfield +- ಿಸ +- ▁heilige +- ▁cleave +- نِ +- ▁enthusiast +- ணை +- лось +- Pro +- ▁chile +- ▁blair +- ными +- ▁tranquille +- ggins +- ▁ہم +- ▁senzill +- лівы +- ▁Dorf +- mushinga +- selig +- ባ +- ▁anatomy +- 편 +- ▁menjar +- мом +- ▁verbeeld +- ▁músic +- ▁trank +- ▁puisque +- ộ +- ▁disapproval +- ▁disclosure +- ▁persuasive +- ▁unobserved +- 武 +- މެ +- ▁Questo +- дең +- ▁jingle +- ▁yabonye +- ▁motiu +- anstalt +- ▁calves +- ▁Tier +- ▁hemisphere +- ▁сюды +- ▁гатовы +- ▁frisk +- ▁زمین +- ▁benannt +- ▁Зараз +- ▁Jud +- ▁spotless +- ▁якую +- ▁corse +- ▁purport +- ▁drip +- ▁cinder +- 県 +- ▁panorama +- ▁wriggle +- ▁employees +- ▁моцны +- ▁forenoon +- bonera +- ▁kleur +- رۆ +- ▁negli +- ▁mpan +- ▁byonna +- tzea +- ▁llac +- ▁gloss +- தில் +- drang +- ▁embellish +- ▁arduous +- julikana +- talk +- ▁repeal +- ▁jagged +- ▁만들어 +- geschw +- ▁mêl +- ▁ravish +- ▁Baldaŭe +- ▁основ +- гәы +- ▁impracticable +- ▁invaluable +- ਸ਼ +- цам +- ▁transmission +- бей +- ▁querido +- ▁combien +- ▁ukuntu +- ▁rosto +- аграфі +- өк +- wett +- distinguishable +- crates +- ▁pintura +- étranger +- ျ +- ▁unearthly +- ▁vaixell +- ▁louisiana +- setzte +- ▁reproof +- দেশ +- ▁identific +- ▁Fiona +- ▁ritter +- vuka +- ▁ئوي +- kundig +- مانی +- ▁넘어 +- ável +- ▁arroj +- ▁тый +- ▁Schiff +- బ +- ▁abomination +- ▁stalwart +- fitanye +- ▁decayed +- ▁deteriora +- 었잖아 +- ▁murugo +- ▁jules +- ▁serai +- ▁wär +- abayeho +- ▁implant +- ▁krui +- ▁clambered +- ▁matrimony +- 습니다 +- 像 +- ▁Hung +- ▁stirrup +- ▁dispensation +- träger +- ▁corpus +- جو +- ▁vanishing +- ▁recol +- ▁okulaba +- ęci +- mvugo +- ಾಗ +- ▁physique +- ▁Wor +- ▁Сёння +- 示 +- ▁stealthy +- ▁reeled +- ▁iceberg +- ▁pantry +- ▁Czy +- ▁evergreen +- ▁impunity +- ŵ +- ▁가끔 +- ▁inanimate +- ▁erwartet +- ▁paarden +- aŭri +- లు +- тыкі +- ▁موقع +- ▁intrepid +- тлумач +- ▁exercising +- ▁akamaro +- ▁plupart +- ▁reverently +- birwa +- ▁pulp +- ▁successively +- ▁crypt +- ҳо +- ไทย +- ▁kensington +- ▁constitue +- rindi +- ▁dentist +- vallig +- ▁incorporated +- ▁retail +- ▁каманды +- yemera +- ▁cumpli +- ▁utiliza +- ▁Мә +- ▁stability +- ▁novaj +- ุม +- kiriye +- கம் +- ▁Catholic +- ▁прафесі +- ▁accusing +- ▁telkens +- ▁Vari +- ▁sussex +- ság +- ▁hauteur +- ▁identification +- க்கா +- ▁Mazal +- ▁zauber +- ▁relaxation +- ▁unuaj +- Europa +- gefährlich +- ▁batch +- 얼 +- 
Association +- ▁devonshire +- ▁voluntad +- 无 +- ▁ваҡыт +- ▁knapp +- geschlecht +- ▁strangled +- ▁vieillard +- ▁diving +- kolera +- uğu +- ▁grotto +- wiesen +- kwano +- iquement +- ▁salió +- ▁Kama +- ▁assemblies +- ▁côte +- gord +- ▁souhait +- ▁خواهد +- ▁쓰고 +- ▁stationary +- ้อ +- emerewe +- ▁regelmäßig +- hughuli +- ▁albeit +- ▁angenehm +- ▁edad +- lə +- ▁كۈن +- ▁мая +- ▁yelp +- ▁juny +- ▁yiman +- 놓 +- ▁tulip +- ▁vun +- ▁uninterrupted +- ▁بچه +- ▁contar +- ▁mühe +- ▁aimé +- ▁bevond +- ▁départ +- ▁imprint +- ▁zonderling +- รถ +- ▁Antonio +- ▁categories +- ▁circumference +- ▁Service +- ദ +- ▁dombey +- ▁Major +- ▁handicap +- ▁saucepan +- ▁fickle +- ▁sprout +- ▁literaturo +- ▁Young +- ▁mammy +- ▁Middle +- ▁gukina +- ▁wisse +- ▁tracing +- 에서는 +- ▁craven +- agricultura +- களும் +- gebäude +- ▁crítica +- ▁preoccupied +- ▁travaux +- ▁варыянт +- ▁ممکن +- ৌ +- ▁назад +- ▁enliven +- ▁дек +- ▁һыу +- ▁flakes +- ▁novice +- ▁zweck +- ▁pumpkin +- ▁wrenched +- ▁bohemian +- ▁Song +- treppe +- ▁жаль +- ຄ +- ယ +- ▁dexterity +- ▁irritating +- ▁wünscht +- ▁бацькі +- シ +- структур +- 纳 +- ▁witchcraft +- abati +- ▁slammed +- ündig +- ▁permitting +- ▁submerged +- ▁roadway +- ▁разумею +- robed +- ▁veía +- ▁discut +- ೀ +- ▁prominence +- 商 +- ▁اگه +- ▁lucius +- ridi +- ▁strawberries +- ▁மன +- continent +- ▁raporo +- ▁тул +- ahoze +- ▁kiedy +- gevangen +- träge +- Ғ +- ▁susie +- ▁repas +- ▁estuvo +- ▁investigated +- ▁йәшә +- ▁woodpecker +- ▁vestibule +- ▁stifling +- ▁bumble +- ▁contradictory +- ▁consistency +- drücken +- ماق +- ▁шога +- ▁louvre +- พิ +- ▁gènere +- ▁reparti +- lju +- ▁extensively +- ▁populous +- 部分 +- ▁confidant +- ▁overgrown +- ▁Mateu +- tafel +- ঙ্গ +- ▁transgress +- ▁impertinence +- umujyi +- phar +- ▁여러 +- ของฉัน +- ▁Gö +- ▁shoemaker +- ában +- бри +- ▁Aufgabe +- ▁میکرد +- 택 +- 맥 +- ▁την +- ▁Japanese +- ކަން +- kord +- ▁парк +- aktion +- บ้าน +- ▁Кор +- ရှိ +- 놨 +- 店 +- writing +- ▁pedant +- ifié +- ிடம் +- ▁feigned +- ▁паверх +- ▁nenion +- ▁Western +- ▁ensue +- شتی +- கொள் +- ▁echoing +- ແ +- ▁jaune +- ▁vivian +- ▁griechische +- هُمْ +- ▁churchill +- ▁palmerston +- ▁Blanca +- ▁вайны +- 장이 +- ▁erzählt +- ُوا +- ▁café +- ▁bloc +- ▁школы +- ▁કર +- ▁ningún +- ▁perdido +- geber +- ƙ +- ▁emptiness +- ▁Kultur +- ▁دنیا +- ▁Mannschaft +- ▁tattered +- ▁extinction +- ▁repressed +- ▁situacio +- ▁신기 +- ▁stork +- ▁grog +- ந்தது +- ுக் +- ▁Water +- ▁dismissal +- ▁disobedience +- ▁responsable +- 群 +- 器 +- ▁ignoble +- ▁bazaar +- ▁hoist +- larini +- ވ +- ▁nationality +- isierung +- ▁Beide +- ▁enjoined +- ▁poteva +- ▁Umuhungu +- ▁chwili +- ▁whipping +- ▁folle +- ▁peso +- arrivée +- ▁guardar +- lender +- ▁marvelled +- ▁refresh +- ▁Group +- ▁vivir +- ▁ballot +- twaro +- ▁کمک +- ▁керт +- ▁sandwiches +- ایت +- ▁libraries +- ▁pernicious +- ▁галоўны +- 雷 +- 门 +- ▁depressing +- ▁rispose +- ▁españa +- στη +- gäste +- ▁sioux +- मे +- ▁heretics +- ▁remonstrated +- ▁jazz +- ▁eingesetzt +- ଏ +- ▁recipient +- ▁conceited +- ▁uncouth +- ▁encircled +- ▁Alexander +- ▁өҫ +- ▁propped +- ▁jessie +- ▁fouquet +- ▁goblet +- ▁Lola +- ▁biography +- ▁Earl +- ▁ruido +- style +- ▁utilitzar +- ೂ +- ▁exchanging +- ▁ineffectual +- ▁mpuzamahanga +- ▁quijote +- ▁sanctified +- ეთ +- ▁troll +- 校 +- ىدىغان +- ▁Гу +- گان +- ▁көт +- ▁знаходзі +- ▁stumble +- ▁nsonga +- ▁untrue +- ndiye +- ▁clime +- offizier +- ▁patria +- artiste +- kirch +- ▁römischen +- ▁adversaries +- ▁сітуацыя +- 附近 +- ▁initiated +- ▁Ĉinio +- ▁Society +- ▁mourir +- ▁거기에 +- ▁چون +- aîné +- fungwa +- логі +- رج +- ▁перамог +- ▁congrega +- 족 +- ▁irrational +- ▁penniless +- ▁imbabazi +- 
▁Cloè +- ▁lattice +- ▁denomination +- ▁diminishing +- ▁agonies +- ▁graag +- ▁distribute +- ▁occurring +- ▁Clo +- ▁organised +- épaule +- төн +- ▁harass +- ییە +- analyse +- 락 +- ▁incomparable +- 川 +- ▁gedeelte +- ▁Һин +- nniḍen +- ▁glut +- смотр +- ▁chargé +- тҡан +- ▁cramped +- ▁incite +- ▁prêtre +- imyaka +- ▁Ша +- ात +- قابل +- ▁Kreis +- ▁capella +- ▁breakers +- வில் +- ▁parasol +- ▁frey +- ▁ҡыҙы +- nyamuryango +- Ҡ +- ▁engraving +- ▁domingo +- ▁ludo +- ▁аңла +- ▁racket +- ▁shingle +- ▁داریم +- ӹнь +- значэнн +- ▁surged +- Imperi +- ▁augur +- ▁infrequent +- ▁jüngling +- teekeddwa +- дзейніча +- 务 +- ▁של +- ▁beverage +- ▁plundered +- ▁investigator +- тую +- réis +- ▁bezit +- ▁Inama +- ▁좋았 +- meeting +- ▁viure +- ▁flea +- mouthed +- 항 +- geworfen +- 논 +- ▁лёгка +- ▁ӱдыр +- ▁alpine +- olymp +- ▁bambaye +- َهُ +- weebwa +- 들도 +- ▁bataille +- ▁distasteful +- ▁advertise +- ▁matrimonio +- liad +- ▁According +- ▁indulging +- ▁melbourne +- ▁duplicate +- ▁Gau +- ▁alguma +- ▁greenland +- ▁desertion +- ▁confiding +- ▁trasport +- avond +- ▁һора +- ▁Де +- ▁buster +- irland +- fiɣ +- ơ +- ▁derision +- ▁crass +- ▁ruff +- ▁hustle +- troffen +- ▁Baba +- öv +- amezi +- ▁Stre +- ▁nourished +- ▁absicht +- ▁итә +- С +- ىڭىز +- ▁ئەمە +- akorwa +- 一個 +- ۽ +- ▁Toronto +- ▁سۆز +- 初 +- ▁onmogelijk +- ▁malkovr +- ▁들고 +- ▁ҡыҙ +- ▁அடி +- ▁scandalous +- ▁waarlijk +- ▁культуры +- ▁kirby +- ▁Web +- ▁chasse +- Geschäft +- ▁sauntered +- ▁بولۇپ +- sacrifice +- ▁snarled +- ▁Pop +- пуска +- ▁Nyamuneka +- 썼 +- ▁Energie +- 境 +- ▁registaro +- ▁sobretot +- ▁infected +- haranira +- ▁persistence +- ▁durf +- ▁fumes +- ▁باشی +- 돈 +- spagnol +- ย์ +- ▁contractor +- praat +- ▁име +- ▁faig +- ฉ +- 管 +- lowooza +- ▁condescension +- ▁exultant +- ▁fairfax +- ▁inexhaustible +- ▁كىشى +- disposed +- ▁praising +- جل +- ▁nervousness +- জে +- ▁elicit +- brug +- ▁loĝis +- ▁இத +- zmir +- ▁Yari +- 짝 +- যোগ +- ▁shirley +- ▁адрозніва +- ▁circulated +- ▁Ble +- ங் +- ▁adept +- сына +- ▁predict +- gumu +- 铁 +- ▁denunciation +- ▁그것 +- ▁Tiutempe +- ▁público +- ▁propaganda +- ▁sewed +- ▁résolu +- ecek +- ▁clink +- ▁может +- حل +- wanyi +- 팀 +- ▁Fernseh +- ▁лічу +- ▁daraus +- шьҭ +- ▁mı +- ▁wakened +- modul +- ▁cornelius +- ▁judici +- ▁matrimonial +- ▁홍대 +- 뒤 +- pewa +- ▁hugged +- ▁enorm +- ▁mostrar +- ▁힘들어 +- oscope +- motiv +- ritu +- ▁lungo +- ▁serviceable +- ▁religio +- ▁Leur +- ▁piedi +- ▁paroxysm +- ▁filino +- ▁bleed +- ▁гандл +- ▁알았어 +- வரும் +- ▁داشته +- ouvrit +- ▁kwirinda +- ▁similarity +- ▁rayon +- ▁scholarship +- cumbi +- ▁droop +- ▁Guarda +- ▁padron +- ▁Ви +- குறி +- ▁Kati +- ística +- ▁laddie +- ▁Deshalb +- чага +- ення +- ▁flitting +- ▁housekeeping +- ▁shule +- ถ้า +- ▁vomit +- ▁negativ +- ▁ferret +- flanke +- ▁yawn +- ▁lourd +- ച +- ▁apologies +- ▁fervour +- 希 +- ürzt +- ▁fergus +- ▁Township +- გი +- ▁wunderbar +- pressió +- اڵ +- ▁discount +- ▁estilo +- رق +- ▁greenish +- ▁Riu +- ។ +- ▁reiterated +- 工作 +- ▁sophy +- пісаў +- ▁wordsworth +- ▁recite +- களின் +- lford +- ▁blister +- ۇپ +- зван +- ▁chewing +- ▁starch +- ▁admonition +- ▁cleveland +- ▁fletcher +- ▁inscrutable +- ▁impersonal +- anglès +- 说 +- ▁comenzó +- 传 +- ▁exclamó +- ▁hopkins +- üş +- 개월 +- ▁placé +- ▁않았어 +- ▁सम +- ▁الح +- ▁category +- ▁uitdrukking +- ▁gravitation +- ▁будынка +- ող +- ▁рашэнне +- ▁extremities +- ▁vertellen +- ются +- ▁mountainous +- shoes +- ▁роўна +- ▁marrow +- blind +- เค +- ▁thinner +- цкай +- ▁uplift +- ▁hinab +- präsident +- ګ +- ▁enterprising +- ▁lancaster +- ▁sapphire +- ▁proffered +- 木 +- ▁της +- rugamba +- üll +- বার +- мәҫ +- ▁collins +- ▁callous +- 
stätte +- ▁mercies +- ▁revision +- ▁schat +- ▁presentiment +- ▁rempli +- geht +- lunaku +- 冇 +- эканомі +- 클 +- ▁inderdaad +- ▁légère +- ▁차라리 +- ▁honeymoon +- arribar +- ▁drought +- ▁sportsman +- ▁mongol +- ▁jeugd +- ▁impious +- ▁sallow +- ▁jamii +- ▁diffused +- ▁agreeably +- بینی +- ▁Colom +- flucht +- тәре +- шон +- ▁распа +- ации +- ຊ +- ஓ +- ဖ +- ▁commodities +- ▁impassable +- ▁неабходна +- ▁dissimul +- ылда +- ▁mantenir +- ambwiye +- bücher +- ▁җи +- ▁shyness +- 분이 +- zigera +- ▁articulate +- koreye +- 兩 +- ▁intoxication +- ▁futbolista +- ▁Хоць +- ▁texan +- ▁flitted +- ▁slapped +- ▁جوان +- ▁weasel +- particip +- acqu +- geslagen +- gabanya +- ▁embankment +- ▁가족 +- ▁سن +- ▁모르겠 +- крэт +- ▁briggs +- পে +- ▁Harry +- ▁bohemia +- கார +- ▁kulturo +- ▁ҡар +- ▁praten +- ▁iceland +- ▁удзел +- ▁Silva +- ▁scherz +- ട്ട +- ୋ +- ▁reservoir +- ▁cançó +- 限 +- 团 +- ▁respite +- ▁Vieno +- ▁schoolroom +- scape +- ▁schijnt +- safiri +- iéndole +- ▁digne +- beug +- ▁Verwaltung +- ▁caricature +- ▁diabolical +- ▁vocabulary +- ▁امروز +- ▁일어나 +- ▁verbunden +- ▁projection +- ▁entscheiden +- ▁bashful +- لاش +- ▁kupata +- ▁disdainful +- ▁bagenzi +- ▁amends +- umvira +- zoni +- 활 +- ▁interne +- ▁paralyzed +- ▁preocupa +- ▁그랬는데 +- ▁Jaume +- ▁nijoro +- ▁ئۆت +- rundi +- ▁Petro +- ▁shedding +- ▁거니까 +- ▁chronic +- ologic +- ৰ্ +- รุ +- сіў +- ível +- 瓦 +- 实 +- fluß +- ▁йӧр +- ▁coronation +- pół +- zzu +- ▁guteza +- ▁Хә +- ▁Unit +- ▁gdzie +- ▁scop +- ▁jambo +- ▁Ру +- 빼 +- 빌 +- ▁marlborough +- ▁interfering +- ▁chevaux +- ▁vince +- ▁дроб +- ▁indessen +- ▁cuadr +- ▁rolf +- экс +- ▁größer +- ▁bewegt +- ▁koud +- ଣ +- complacency +- ▁poignant +- ▁wolsey +- ▁کشور +- gaciro +- ▁sentido +- ▁pâle +- 景 +- ▁procuring +- روس +- ▁poop +- ▁Jugend +- кола +- гь +- ▁enquire +- ▁아빠가 +- ▁desenvolupament +- ▁postmaster +- ▁아침에 +- künstler +- ▁overpowering +- ▁вялікая +- κό +- ▁Einsatz +- ையை +- kandi +- ▁nikolaas +- ▁withdrawal +- enthusiastically +- ▁Carlos +- ▁immigration +- 即 +- 米 +- ▁chand +- ▁ecco +- ▁வழி +- ▁mobili +- ▁palacio +- ▁llegado +- ▁winkle +- ▁ingenuous +- ば +- 技 +- ▁groove +- ▁bavaria +- lî +- ▁goethe +- ▁paix +- خاطر +- ▁compromis +- bulu +- ▁merlin +- ▁firelight +- ▁geometry +- ▁އަހަރެން +- ▁ontvangen +- ▁энерг +- ލް +- ▁Такі +- ▁пеле +- ▁denounce +- ▁curtly +- ▁esperar +- ▁Тик +- ۋى +- ▁بچ +- ▁ўсход +- ▁eenmaal +- ▁garage +- huizen +- ▁continuació +- lome +- ▁buzzing +- ▁pillage +- 乐 +- ▁번도 +- ▁myinshi +- ▁syrup +- ▁houdt +- ▁Theater +- ▁маці +- lân +- ▁creat +- τά +- ▁menschliche +- ▁pràctica +- ▁armstrong +- ▁immigrants +- ▁kifo +- ▁lounging +- галас +- สอง +- ▁rotation +- ▁inducement +- ▁هەڵ +- ▁preferable +- ғанда +- ▁kidd +- gá +- ▁festivities +- ▁secession +- ޔާ +- iridde +- ▁discrimination +- ▁stalked +- ▁monastic +- ▁deepening +- ▁reconstruction +- ▁bizarre +- ▁ludzi +- ▁같다 +- нӑ +- ▁prospective +- ▁Wij +- blanc +- fruit +- މީހ +- genoot +- yini +- ▁marchmont +- ▁unaffected +- ▁nuclear +- ▁upturned +- 支 +- ▁alegre +- ▁هفت +- ▁Little +- ілася +- tsinze +- ▁malade +- zusehen +- ▁ئار +- печ +- yasi +- جار +- ▁vlak +- ▁Licht +- ლე +- umwihariko +- ▁Carolina +- ▁오히려 +- ▁adversity +- ▁lovelace +- ▁pecado +- ▁Она +- ▁merton +- ensonga +- ▁bosch +- ▁filmoj +- ▁tratta +- ▁deceitful +- ▁forecast +- ▁Beat +- оит +- ▁rousing +- 書 +- 故 +- ▁conspire +- ▁갔을 +- ▁utawala +- ▁eskimo +- ▁ukomeye +- wirt +- struction +- ▁Drei +- ▁marié +- ▁Gott +- ▁Monat +- escut +- ▁modify +- ▁inicial +- ▁ўвесь +- 環 +- ▁macedonia +- ▁Bamwe +- ▁ugliness +- ▁boatswain +- second +- ▁фасад +- ▁utilise +- ▁maxwell +- ित +- ▁trotting +- 的人 +- ▁fàcil +- 
▁Ḥemmleɣ +- ▁તે +- ▁Vilanova +- ▁forbearance +- ▁trimming +- ▁indruk +- ▁boog +- О +- ▁Це +- ▁cypress +- block +- зон +- 늘 +- ▁bedding +- Jacques +- ▁reindeer +- reißen +- ▁имеет +- ▁adorable +- ▁있다고 +- bright +- ▁rivier +- ▁savor +- ▁krä +- eastern +- ▁silhouette +- ▁venía +- ▁послед +- zanye +- тҡа +- ▁arriv +- fight +- খন +- ayagala +- ▁Metro +- ▁ئۇلار +- ுகிற +- ▁senat +- Κ +- ဖြစ် +- ▁mumbled +- ▁veronica +- 시켜 +- laki +- ▁enviar +- paß +- ▁tinkle +- աց +- ▁popola +- ▁devint +- ▁shave +- ▁krijg +- ौ +- ▁clennam +- 料 +- èvre +- ▁даўно +- occupe +- รัก +- ▁지나 +- ▁Пе +- ▁கால +- ਦਾ +- ▁álbum +- 낫 +- ▁constellation +- тэх +- ▁walpole +- ▁grapple +- ▁Burundi +- ড়া +- ▁piteously +- ▁pijn +- ▁marina +- gelöst +- чнымі +- واقع +- ▁dritten +- imprim +- 쿠 +- exèrcit +- љ +- 며 +- ▁errichtet +- เม +- ▁Потым +- ▁chaud +- ▁خونه +- ▁lurid +- ▁Staats +- oeil +- ildi +- ▁Пі +- ▁Original +- ètic +- ▁любо +- ▁alcuni +- ▁husky +- ▁umusaruro +- ▁студэнт +- ▁scanned +- நிலை +- leiter +- ▁bahari +- ▁equipage +- ▁nocturn +- ▁purified +- mesi +- що +- coated +- tsje +- ▁powie +- eignet +- ▁жыцці +- ခဲ့ +- ▁schlafen +- ▁эксп +- ▁muhimu +- ngula +- ▁zinc +- ▁cessation +- ▁Italio +- ▁trow +- ▁wohin +- ылды +- ▁dutiful +- ▁antigua +- ▁Bernat +- ▁Flo +- ▁shutter +- ▁draped +- offre +- ▁tripped +- ▁naskiĝ +- گری +- ૂ +- ▁título +- ▁Projekt +- ▁mahomet +- ▁eŭro +- ▁magnify +- kute +- ▁exceptionally +- نجا +- ▁justicia +- ubahiriza +- ▁laboratori +- ருக்கு +- ▁marcella +- ▁teasing +- িয়ে +- ▁hampshire +- ▁incompatible +- ▁prettily +- ފާ +- ▁podría +- ocell +- ▁malay +- ▁cramp +- muth +- глядзе +- utilizza +- ▁хү +- ▁actuellement +- ▁prettier +- џь +- ▁ntekereza +- ▁succour +- ▁gerust +- ▁außerdem +- ▁Japó +- ▁нібы +- ▁fortikaĵo +- ▁knotted +- ▁Deux +- ▁тине +- 었지 +- білі +- indiqu +- ▁durfte +- ▁Dienst +- ການ +- ▁bethlehem +- ▁precipitous +- ▁igitabo +- ▁gauge +- ▁celebrity +- றிய +- ▁서로 +- ▁quoique +- ukwezi +- blatt +- ▁longitude +- vrouw +- ρά +- ▁südlich +- ▁provisional +- ▁خط +- ▁arcade +- 하거든 +- ▁anointed +- ▁патрэбны +- 毛 +- ▁күңел +- ทั้ง +- 否 +- ▁فیلم +- ▁visitation +- ▁navega +- ▁gamble +- گذار +- X +- ▁rhai +- ▁reduc +- ▁lavora +- ▁projekto +- ▁estudia +- ▁vortoj +- возмож +- ޕ +- ▁öffnete +- kräftig +- ▁카페 +- 半 +- 越 +- ▁zwijg +- ▁swerve +- ▁actividad +- ▁mammals +- ouvrir +- зал +- ▁rind +- ▁gaire +- ҩы +- comers +- ▁komuni +- ▁contended +- ▁футбол +- ▁пачынае +- ▁살아 +- ▁blauw +- ▁chopin +- तो +- ▁qualité +- ണ്ട +- ορ +- ▁contour +- ▁palate +- ▁tinker +- rijden +- ▁completing +- тең +- ▁poursuiv +- ▁Rest +- ▁годзе +- ▁пачала +- ▁erfüllt +- ▁konservi +- ▁surging +- ▁apache +- ▁پیر +- ហ +- ▁jeremiah +- ▁avalanche +- ەتی +- ކޮ +- ▁bunker +- ▁enchanting +- ▁fête +- ▁slanting +- ▁meaningless +- чэнне +- ▁mornin +- শ্ +- ▁самым +- œil +- cruci +- ▁scherp +- ymmetri +- ▁Show +- ▁olga +- ▁rudeness +- ▁libertad +- ▁mediaeval +- ▁eligible +- ▁텐데 +- 史 +- ▁varnish +- writer +- ▁Quirze +- ▁sprinkle +- ▁armament +- ▁rubanda +- ▁sloth +- ▁playmate +- ▁nachbar +- ▁articul +- ▁lokoj +- seminar +- ▁Chile +- bodied +- ▁stilte +- タ +- ▁тэрыторыі +- 進 +- ▁delirious +- ▁pixie +- ▁мусіць +- ▁drukte +- leitung +- ▁해봤 +- ▁வந்து +- או +- important +- arrêta +- 찾 +- ▁famiglia +- рэшце +- ▁revolucio +- schränkt +- ▁lugha +- ▁scull +- ▁wolken +- ўрад +- ▁گوش +- 악 +- ▁principalement +- ▁complète +- ▁imperceptible +- ▁dowager +- ▁Үҙе +- ▁Professor +- ▁melville +- ური +- ▁pří +- rzę +- võ +- ▁Polisi +- யில +- ▁peevish +- 必 +- injiye +- ▁malheureux +- ▁seraph +- gewiesen +- ▁пакры +- ▁Ple +- ▁transient +- ▁alderman +- ▁먹으면 +- الله +- 
▁analog +- esdeveniment +- ▁belangrijk +- ▁некаторыя +- ▁교수님 +- ▁당연히 +- ▁Ontario +- ▁wohnung +- ▁ineffable +- ▁schwieg +- ▁hefyd +- ▁workmanship +- ▁froze +- ▁forgave +- ▁civic +- ▁twese +- stehenden +- acteur +- ชา +- ▁selecció +- 거리 +- ▁sigue +- ▁merci +- дон +- ▁prosecutor +- ▁마지막 +- ▁porcelain +- ▁근까 +- ある +- ▁cravat +- ▁அவர்கள் +- ▁пяць +- ▁பேச +- ▁atheist +- prés +- äger +- ▁swindle +- آمد +- elaar +- ▁eunuch +- ▁пытанні +- gebrochen +- ▁genesis +- contra +- ▁comprendre +- ▁skilfully +- risti +- ಿಯ +- righteous +- ಲ್ಲಿ +- ▁tablets +- ▁kemp +- ▁njia +- ▁dinar +- ގެން +- cucu +- ▁capitán +- ▁irrelevant +- ▁tècnic +- ▁southampton +- ▁mulberry +- เวลา +- ▁skunk +- ▁выконва +- ▁выраз +- ▁mugore +- ▁bluntly +- ლის +- ▁Game +- ▁электр +- เว +- ▁gutwara +- kamba +- бярэ +- ▁fend +- ▁feign +- ▁Hauri +- ▁кашт +- ৰি +- 쁜 +- jord +- ▁brooklyn +- ▁gukura +- ▁Freund +- ▁میشه +- ▁drugi +- ▁panie +- handle +- ▁reag +- ▁пераклад +- ▁وەک +- ▁membership +- ۈش +- ▁پشت +- ▁tolerated +- buuza +- ▁partoprenis +- ▁uwiteka +- އްޖެ +- ▁huwelijk +- teekateeka +- ▁атрымалася +- ▁amabwiriza +- ▁بگیر +- ▁развіва +- ▁karibu +- ▁astonish +- ▁abandonment +- ▁ferris +- ▁veinte +- عِ +- ▁sensibili +- ▁Azio +- ▁sphinx +- ddwaliro +- ▁corporeal +- ▁элемент +- ӹш +- ▁bepaald +- akundaga +- attaque +- ▁cordiality +- 형이 +- ▁kitu +- ▁ebony +- ▁llamado +- ▁Tren +- ▁பார்த்த +- Mfite +- ▁champlain +- ▁dorothea +- ▁phoenix +- ▁connaissance +- ▁période +- 议 +- ▁prelude +- ▁rambling +- ▁espacio +- getreten +- ▁Bona +- ▁شام +- ▁cię +- fated +- ▁madison +- ▁ventana +- ▁پسر +- ҩа +- ▁цікавы +- ▁Bruna +- оўцы +- ޔ +- ▁Thaís +- ▁blasphemy +- ▁intercession +- ▁puppet +- ▁exerci +- ▁diminution +- ▁tegenover +- ▁revenis +- ▁uretse +- спорт +- ቀ +- nyigisho +- ▁freckles +- ност +- ▁هاو +- ▁beispiel +- ▁사람은 +- thür +- content +- uvieron +- கல +- ▁Scotland +- ଗ +- ▁siegfried +- 味 +- ▁degrading +- ▁lavender +- arenze +- ▁calculating +- ▁eddie +- ▁sanft +- ubutaka +- ▁ibyiza +- ▁sulky +- ▁Fisch +- லு +- ▁Guard +- ▁transgression +- ▁obliterated +- ▁dunno +- ▁minnie +- ▁eventful +- ▁äußerst +- 語 +- ▁appellation +- цця +- ▁finalmente +- ▁vosto +- ▁hendrik +- ▁paese +- ▁boor +- baruwa +- ▁militant +- ▁spasm +- sichtlich +- ▁chiesa +- ▁redeemer +- के +- ▁bedstead +- ▁shrouded +- mäh +- ဘ +- ▁gikomeye +- ▁impassioned +- ▁monasteries +- 냈 +- quatorze +- ▁algebra +- komera +- प् +- ▁nuntempe +- ▁Meister +- ▁enlightenment +- ▁faithless +- amato +- ▁resultado +- ▁Altres +- ▁ideo +- ލި +- usuari +- ▁teoria +- ▁Down +- ▁gitaramo +- ▁Algunes +- ▁unfriendly +- ▁invariable +- ▁тэкст +- ▁myrtle +- ▁cheerily +- ▁totalment +- ціў +- giza +- ▁keerde +- guza +- diplom +- arangije +- ▁parental +- where +- ▁hampstead +- ▁incumbent +- ▁replenish +- ▁لەسەر +- 选 +- ▁lorry +- ▁இர +- ▁tolerate +- ▁spelen +- ▁schulter +- ▁பழ +- слуг +- ▁emplea +- ിക്ക +- ▁retirar +- ▁іншым +- ▁compulsion +- ▁மனித +- ▁thunderbolt +- ▁illumined +- เกิด +- ▁erwarten +- bereich +- ▁hinunter +- ▁depict +- ▁axiom +- ▁праблем +- ▁ogw +- basomesa +- ының +- ▁réveil +- stitució +- ▁counteract +- သည်။ +- ജ +- ▁oblivious +- ▁societat +- ▁свои +- 视 +- ▁underwent +- ▁Mountain +- пэўн +- ตก +- ▁objectionable +- ▁connaître +- ▁corrent +- 茶 +- 歌 +- 则 +- ▁dîner +- uriye +- ▁мастак +- ▁sahib +- ▁nettie +- として +- ▁lokal +- herrschaft +- ▁verslag +- dream +- алагічн +- ▁gelten +- ▁impassive +- ▁palpable +- ▁அவள் +- ▁wearisome +- レ +- 亦 +- ▁vizaĝo +- нення +- ▁Under +- ▁Адзін +- ▁کي +- ▁bocca +- ▁funnel +- шел +- macher +- ▁patting +- ▁davie +- ந் +- ▁rhythmic +- ▁unutterable +- ▁ĝenerale +- ▁અને +- ▁대학교 +- 
▁heinrich +- ▁normalment +- शा +- ▁جهان +- ▁shatter +- ▁самых +- ibigo +- ▁Pobla +- ▁stink +- ▁ilustr +- ▁좋지 +- ▁ҡур +- ▁représente +- ▁retinue +- ▁symphony +- ▁mukobwa +- ▁сустрака +- 零 +- 望 +- ▁voldria +- 塞 +- 图 +- ▁высока +- ▁étudie +- ▁götter +- ▁fissure +- ▁аҙ +- ▁هذا +- ▁دەبێت +- ▁yasabye +- miterere +- ▁mermaid +- ▁daarbij +- ▁сфер +- ▁kleider +- mique +- ▁thir +- ▁nazareth +- ▁norwegian +- ▁superfície +- ▁extricate +- އީ +- gebleven +- ပ် +- ▁musketeer +- ▁sexton +- ▁Einige +- ▁gunboat +- ▁ruĝa +- ▁Kubera +- ▁Mariona +- ▁Augen +- 其他 +- ▁voorstel +- àrea +- ццё +- ▁Itsinda +- ▁Democrat +- ▁evinced +- ▁irresolute +- ▁unsettled +- ▁antagonism +- ▁agility +- ▁ہی +- ▁acquiescence +- ▁généralement +- ▁востра +- ▁Wild +- ▁intervened +- ▁справе +- ▁boscos +- feḍ +- ▁argi +- ▁taula +- hiver +- влӓ +- ▁mcclellan +- ▁navigator +- ▁зразумела +- 莱 +- 밀 +- 专 +- ▁endowment +- ▁Rosó +- 세요 +- ▁Stock +- izeye +- ▁navire +- ▁запіс +- ▁merged +- ▁ausdruck +- ▁realism +- ▁rufus +- ▁گیا +- ▁sucked +- ▁달에 +- ਆ +- ណ +- sufficiency +- ▁نزدیک +- ധ +- 彼 +- ▁mishap +- ▁reputed +- ▁grumble +- verband +- ▁Consta +- riff +- topped +- ▁йом +- ▁sorrowfully +- ▁gregg +- ▁тавар +- ▁besluit +- umani +- vaardig +- ▁pheasant +- జ +- ▁ṣṣ +- ▁urubanza +- ▁대한 +- ▁darkening +- schot +- ▁hitting +- رود +- ▁volum +- phil +- ▁تێ +- ▁abashed +- ▁regionoj +- ▁bedenk +- leger +- லம் +- bisha +- 익 +- ਉ +- ▁waarschijnlijk +- 쩌 +- އ +- babaza +- ▁Такім +- ▁shirk +- ларға +- ▁rimark +- vité +- litten +- ▁carlton +- ▁arabic +- ▁tronc +- ▁mathematician +- ▁ecstatic +- ▁seymour +- それ +- ▁moindre +- ▁ekonomia +- ▁matri +- ▁булған +- ▁Alice +- ▁மக்கள +- ▁없었 +- ▁Ужо +- ▁informal +- ▁tourne +- ▁sociolog +- letten +- кою +- ▁décide +- ੈ +- ▁absorption +- ▁dissatisfaction +- ▁moccasin +- ▁fabulous +- ▁آهي +- ▁unanimously +- ▁рабі +- ▁superficie +- ▁drummond +- 정이 +- ▁produeix +- ▁ҡала +- onyine +- fizik +- ৃত +- ▁houding +- ▁giovanni +- ▁pretense +- ▁idrimen +- ▁deafening +- ▁tofauti +- ను +- ▁inquiringly +- aşı +- babaje +- bụ +- ▁masonry +- ▁korto +- ▁vizier +- ▁fakte +- taub +- ▁உலக +- ▁slipper +- вілі +- ▁cwiṭ +- ▁яҡшы +- ▁furcht +- ledig +- ாவது +- àcia +- ▁dehors +- ▁educate +- 응 +- ▁substanti +- runde +- цамі +- ություն +- ▁botigues +- ▁glasgow +- ▁réponse +- ▁genutzt +- ▁repudiate +- ▁languor +- ▁інтэр +- ▁primrose +- ▁wrth +- ▁Patrick +- ▁Zoè +- ▁цэлы +- ▁benediction +- ▁Partei +- yiɣ +- kül +- ▁warrington +- ▁unconsciousness +- ▁شدن +- ▁Lwak +- ▁contempla +- això +- ▁príncipe +- ▁Наций +- ▁gobierno +- ▁klimato +- ▁combatants +- ▁kabi +- caire +- ▁viaj +- ▁가기 +- ▁finit +- ▁вечар +- ▁malaria +- ▁forfeited +- ▁harri +- ። +- ټ +- ▁chastity +- ▁باشد +- ▁thresh +- ▁literari +- ▁mistaking +- ▁nafasi +- λε +- ▁fuori +- ▁erreichen +- ▁regime +- ▁tottering +- ▁optimist +- ▁Umuyobozi +- ▁brigadier +- ▁tawny +- ▁ماشین +- ▁sauvage +- ▁billion +- ▁clog +- ▁erscheinen +- ▁Album +- ▁Handel +- غیر +- ▁Cuba +- gurisha +- ▁stupor +- ▁сара +- ▁puddle +- ▁monmouth +- 버리 +- exclam +- ibyishimo +- ▁Antônio +- ▁Объединенных +- ▁campionat +- ▁tudert +- yambere +- ▁étrange +- basajja +- ▁anatomi +- simbi +- ▁стал +- ▁مخ +- ▁gutekereza +- ▁quartz +- ▁noiseless +- ▁kvazaŭ +- ▁겨울 +- ▁deputation +- ▁revuo +- ▁مهم +- ▁ububasha +- ▁месцамі +- zustellen +- ▁presidential +- ▁departament +- ▁hallway +- श् +- ikoranabuhanga +- ▁grandchildren +- ▁mountjoy +- ▁diumenge +- angled +- ▁griffith +- ▁wawal +- ▁klaar +- فهم +- ▁hashize +- ▁renom +- ▁hanover +- arranged +- ▁نشان +- уеит +- ▁demselben +- заходні +- ▁yabwiye +- ▁cetera +- ▁واقعا +- ▁گل +- qal +- ▁proche +- 
amazzi +- ▁падыход +- ▁بولسا +- ▁birimo +- ▁comporte +- ▁condensed +- ▁renaissance +- ▁мастацтва +- 龙 +- 火 +- 结 +- ▁спо +- ▁décor +- ▁banter +- ните +- ▁arizona +- ▁hobson +- verhältnisse +- ுதல் +- maschine +- ▁glean +- ▁reĝo +- ļ +- ီး +- ▁Ukrain +- ▁primavera +- ▁cairn +- ▁yabaye +- ▁Быў +- ▁mortified +- રી +- ▁Ву +- ่อน +- ambwira +- ▁gully +- ▁derrota +- 进 +- ▁cochrane +- 书 +- ▁candour +- ▁apparten +- ▁Segons +- ▁Баш +- ▁thankfulness +- ▁coisa +- bica +- Vir +- ▁blankaj +- ▁Davis +- ▁swimmer +- ▁адбылося +- առ +- ▁immobile +- ▁کتاب +- rhein +- ▁pilate +- ٿ +- ▁recor +- ▁slouch +- ▁аднак +- гәндә +- ▁treaties +- ▁hawthorn +- ▁gibi +- разу +- ▁йӱк +- Amèrica +- ตั้ง +- ▁consecutive +- ▁rippling +- ▁précipit +- 塔 +- ▁слабы +- ▁reizen +- ▁tradesmen +- ▁Egypt +- wissenschaftlich +- ▁verified +- ▁marqué +- Ministr +- ▁ouvi +- ▁вада +- szą +- ▁babona +- ▁ntebe +- ൂ +- ▁besitzt +- ầ +- ▁efficacy +- ▁Chinese +- ▁poliisi +- ന്ന +- ▁خبر +- ▁않은 +- economia +- ▁loot +- ▁respekt +- ▁maniac +- ▁lebendig +- ▁blackbird +- ▁lucha +- ▁misled +- раст +- ▁squander +- 「 +- 찌 +- ▁affinity +- ▁phoebe +- ▁ŝatas +- ▁baadhi +- klasse +- ▁بېرى +- ijver +- ▁här +- fatanye +- ▁маю +- ▁aggie +- ▁umubare +- ຂ +- ଟ +- ▁moeilijk +- ▁залежыць +- ಜ +- ▁coaxing +- 失 +- ্ম +- ▁apostolic +- тычныя +- ▁stau +- ▁Front +- ▁ilaq +- bireba +- ▁linton +- ▁tebɣ +- ▁discontinu +- ▁meinung +- ▁glorify +- ائی +- ▁suburban +- ▁lágrimas +- ▁nyandiko +- 幾 +- ▁sensibly +- ▁einzelne +- ▁transact +- ▁whist +- ▁bustling +- ▁barter +- 也是 +- abandon +- ξε +- ▁colère +- lecció +- ▁sandstone +- მის +- obéi +- ▁quelqu +- ▁Jersey +- ▁haughtily +- ▁Вельмі +- ▁collier +- ▁osiris +- ▁arbor +- νο +- ▁activa +- ▁roderick +- ▁remis +- ▁świat +- François +- ▁conflagration +- ▁dissension +- ▁취업 +- ▁impulsively +- acya +- ▁gaudy +- akarere +- авых +- ▁discouragement +- 보니까 +- ▁ліку +- бө +- ▁giorni +- ങ +- ▁kwishyura +- ▁faltering +- өгө +- escalier +- ▁Betrieb +- ▁selben +- ▁Work +- ▁canviar +- ▁драўлян +- ส่วน +- ▁bristling +- ▁cavity +- ▁netherlands +- 빵 +- 쌤 +- 画 +- ▁걱정 +- ▁staunch +- ▁مورد +- ▁zitandukanye +- ▁verkeerd +- ▁solitari +- နေ +- ▁banishment +- ▁protective +- ruption +- ▁хацеў +- ▁drummer +- bivuze +- ▁생각보다 +- ที +- ▁raphael +- 求 +- ▁dasselbe +- 包 +- ▁دختر +- ކި +- ▁specified +- टा +- ▁princi +- ▁Tochter +- ▁ҡына +- 마다 +- ▁heerlijk +- добр +- ▁griffin +- ▁vertrouw +- ▁тағы +- توانی +- ▁galloway +- ▁melodious +- ▁kirekire +- ▁worries +- 功 +- ▁bifite +- ▁appropriation +- ▁mwenye +- ▁uttermost +- ▁Abagabo +- ْتَ +- attendre +- ▁flickered +- ▁snorted +- ggling +- oxid +- ▁chivalrous +- ▁rapturous +- ▁unprepared +- 段 +- ▁membrane +- রী +- ▁газ +- ächtig +- ▁можам +- ▁фіз +- ậ +- ▁Canadian +- ▁assiduous +- ▁hoeveel +- ▁beinahe +- müde +- developed +- chukua +- pě +- mbyi +- ▁reconstruct +- ▁Saison +- umuyaga +- ▁trapped +- claim +- ultimo +- ▁ajudar +- springen +- ڤ +- ▁résultat +- 」 +- bě +- ▁protracted +- ▁물어보 +- ▁шырока +- ތީ +- ością +- ▁Mount +- велі +- disciplin +- ії +- ▁shapeless +- ຮ +- ▁сказаў +- ▁indomitable +- 別 +- ▁randall +- ▁unerring +- ▁antwerp +- ▁annul +- ▁noemen +- ▁municipality +- ▁hazy +- सि +- ▁Stone +- ▁nocturnal +- ▁자주 +- স্থা +- ▁Greek +- ▁alrededor +- ▁triangular +- ▁Santiago +- ਿਆ +- ▁onderwerp +- ▁crec +- ▁füßen +- ▁Llop +- amateeka +- ▁genoa +- க்கப்பட்ட +- ▁себе +- ▁новых +- achtung +- ▁graveyard +- ĥo +- ▁lightened +- ▁sediment +- digger +- ▁fez +- ▁ebiri +- ▁Wissen +- Umuryango +- ▁абавязк +- ▁Gabriel +- ▁läuft +- ▁болгон +- ▁retrospect +- ▁Walter +- ▁discordant +- ▁portland +- ▁بهتر +- ▁Nick +- illumina +- 
▁nausea +- ▁clergymen +- umuhondo +- ▁fraternity +- フ +- 線 +- vermögen +- لارنى +- ৰে +- ▁지원 +- ▁symbolic +- roux +- ▁briar +- ▁bosc +- ▁hamburg +- staw +- épreuve +- ▁intuitive +- ▁omstandigheden +- ▁peremptory +- ▁знайсці +- ▁ਹੈ +- ▁Rauric +- ▁lèvres +- ▁lorenzo +- ▁venison +- ▁aptitude +- ▁stroking +- ▁resposta +- ▁undoubted +- patia +- ▁nzego +- ▁lunar +- өйө +- ▁mwy +- 往 +- ▁cobweb +- ▁관심 +- ▁consegui +- ▁entdeckt +- ▁jeunesse +- ▁veille +- ▁diventa +- ▁rakonto +- льнік +- ▁sommige +- ▁Telm +- ▁كۆز +- ▁morrison +- scrive +- ▁croix +- ▁меў +- ▁башла +- ▁ruinous +- ▁Sicherheit +- ▁Afegeix +- 把 +- Olymp +- ▁prowling +- ▁galerie +- ிலிருந்து +- ехаў +- ▁화장 +- żał +- рэз +- ▁defiantly +- ▁repute +- ▁refute +- schicht +- ▁dalton +- ▁bwite +- ▁sundown +- ometr +- flüster +- ▁equation +- ▁lazarus +- ▁puny +- ▁kapitein +- ється +- ▁athlete +- ṭṭef +- ների +- ▁бөтә +- ▁reassure +- ▁جای +- ▁hazardous +- క్ +- ▁tagoj +- квет +- ▁нимә +- ▁dringend +- ▁enamel +- ▁bedeuten +- ▁gervaise +- ▁knapsack +- 司 +- ▁예전에 +- ▁Хәҙер +- ▁victoire +- ▁싶었 +- ތައް +- ▁animate +- ▁لل +- ▁moshi +- ▁religió +- آخ +- בי +- ్య +- ▁tracy +- ▁surrey +- ▁складзе +- গে +- ▁Straßen +- ক্র +- مەي +- ▁Harris +- ▁brock +- ▁nucleus +- 딸 +- kurikije +- ▁자꾸 +- ▁dervish +- ▁michigan +- ▁загад +- ▁хво +- ▁parlement +- interès +- ланды +- ехаць +- ▁España +- ▁ئىككى +- 園 +- ▁외국 +- 份 +- ▁sponsor +- ▁julio +- ▁nectar +- ▁내용 +- ▁lynch +- ▁chaperon +- ▁summed +- ▁süße +- ▁아닌가 +- ▁gigante +- стэ +- ▁fracture +- ▁lengthened +- valuation +- ೋ +- ▁damascus +- ▁muganga +- rzuc +- ▁تص +- ▁posteriorment +- ▁ehrlich +- абара +- ▁эше +- وەر +- ▁пасад +- gewaltig +- ▁igitekerezo +- ▁shimmering +- ▁utensils +- ▁ўвогуле +- 飯 +- ▁therefrom +- หลัง +- ▁incarnate +- gå +- ▁kubw +- ▁Гэтыя +- спява +- দ্ধ +- க்கம் +- ▁نہ +- ▁boeken +- yetta +- ▁Prze +- ▁endorse +- ਚ +- ▁reconnaissance +- ▁vídeo +- abahungu +- ▁Joel +- ▁rupture +- apercev +- ▁reproachfully +- ▁eliot +- ▁yiruka +- humour +- ▁ningú +- ▁يە +- ▁sterven +- ▁gioca +- italia +- ▁Korea +- siedel +- ▁plotting +- ▁unavoidable +- 딴 +- ▁괜찮아 +- ▁свята +- ▁seminary +- ▁shopkeeper +- ▁austral +- ▁nomiĝas +- ▁castillo +- ▁Mario +- approcha +- ▁hovel +- fitted +- ▁boulogne +- ▁ignominious +- ▁dibuix +- 查 +- ▁bandera +- ▁clinton +- ▁музычн +- ▁repelled +- ▁hanaud +- ▁citizenship +- ▁Bord +- ▁waffen +- wijl +- ▁Ohio +- ▁classified +- ▁voldoen +- 쪽에 +- ئەت +- لەم +- 關 +- 头 +- 目前 +- ▁urugero +- ▁roving +- ▁brooch +- ▁барып +- ▁banjye +- ▁Sibil +- ▁konstruita +- ▁работа +- ▁practica +- speaking +- مەس +- 됐 +- ▁yavutse +- ▁taddart +- ▁titular +- 黑 +- ▁Aṭas +- ▁відаць +- ▁flinch +- ہر +- ▁дзеці +- ▁bubbling +- ▁incurable +- ▁далёка +- vò +- ▁soaring +- ▁Kurz +- 었다 +- ▁bashaka +- ▁concede +- އިން +- ▁corso +- ▁glänzend +- 門 +- ▁deborah +- ▁palavra +- ▁بیرون +- ▁infirmities +- ▁проект +- ▁margot +- ▁иде +- ▁ärger +- ▁Institute +- ▁تق +- ▁kadar +- ▁cress +- 억 +- ▁ئێمە +- 域 +- ▁ricardo +- މަށް +- ▁parlant +- адносін +- ▁continuation +- һенә +- ▁sendepend +- ▁mínim +- ▁divisió +- ▁orchid +- ▁défini +- brengen +- entorn +- ekebwa +- ▁выпадку +- ▁میکند +- ▁vuelta +- ▁nördlich +- ▁conmigo +- ▁petrol +- ▁felipe +- ▁Olympic +- ▁situe +- ▁counterpart +- ▁Кол +- ▁troost +- ▁purify +- ▁armenian +- ▁gukorera +- ൽ +- edició +- ▁всех +- ▁personaje +- ▁entender +- ▁contraction +- 범 +- ▁Richtung +- ▁upbraid +- 号 +- ▁infirmity +- ▁beetje +- kette +- ▁Francia +- ▁거기는 +- ɛem +- ▁estaven +- ▁мясцова +- ▁alessandro +- ▁beauteous +- ▁nuptial +- じ +- 勒 +- ▁назіра +- ▁школе +- ▁такую +- ্রি +- ▁Modern +- ▁blijft +- стаць +- ▁беларускія +- 
▁babble +- ▁politiko +- wifuza +- ▁tiaj +- ▁பெயர +- था +- ▁dolphin +- ஹ +- ெல்லாம் +- ▁adjective +- ▁только +- 宗 +- ▁Fluss +- いた +- ▁даследава +- ▁runners +- ▁Мен +- ▁cyanjye +- ▁génie +- ▁Georg +- рыста +- லில் +- ▁airplane +- ▁milliner +- ▁johannes +- ▁bertie +- ▁preferring +- 런 +- ▁parsley +- linder +- ޖ +- 深 +- ယ် +- ▁accosted +- 整 +- ▁broadside +- 城市 +- ▁blissful +- àstic +- ñó +- ▁rejection +- accions +- скры +- ▁fauna +- 만한 +- bibwa +- ჯ +- ▁ejército +- ▁problème +- 씬 +- ▁filozofi +- ▁phyllis +- ▁дрэва +- 苏 +- ▁Garbí +- gusta +- pacient +- ▁pollard +- ▁йән +- ାର +- ▁absolv +- wulira +- ▁vraiment +- mereye +- னம் +- ▁amorous +- ▁apologise +- ▁pantomime +- ▁паўночна +- пита +- ▁francès +- ▁scarcity +- συ +- ▁amakosa +- rechnet +- ▁Шмат +- ▁haalde +- ▁зрабіў +- ▁chrys +- ▁бында +- ▁clew +- ▁ajili +- cepció +- ▁fracas +- ▁strategic +- ▁coloca +- ▁вуліц +- ▁adlis +- sidér +- ▁ancestry +- 算 +- ▁peintre +- akawuka +- ▁kwibuka +- ▁strategi +- ▁paraules +- ҙер +- atinze +- ▁impure +- amabuye +- ▁белә +- 돼서 +- ntambwe +- ັນ +- ▁newfoundland +- ▁паўднёва +- ▁upheld +- なる +- ▁பற்றி +- ▁Fatima +- ▁extens +- лёт +- ▁discret +- monger +- ▁jewelry +- ▁Ninde +- ▁sukceso +- ▁잘못 +- ▁daŭre +- ▁mythology +- ▁tiber +- ▁burundu +- ▁angular +- ென்று +- வாக +- ▁acusa +- ▁mategeko +- Office +- investigació +- ▁implicitly +- urukiko +- thwaite +- җ +- สิบ +- ▁sagacious +- ▁prophesied +- ลูก +- ▁multipli +- communicati +- ▁ҡайҙа +- ость +- ▁verlor +- ɣem +- волі +- ▁formality +- ▁archway +- аҩ +- Π +- ▁tennyson +- 容 +- ▁meridian +- ▁antikva +- yewwi +- ▁peterkin +- ▁pluvas +- ▁제가 +- ▁chteau +- creatures +- ▁Begriff +- зеи +- ▁першым +- ขอ +- daughter +- kitongole +- ▁repugnance +- ▁vielmehr +- 實 +- ▁тауыш +- ▁gideon +- ▁lettuce +- ▁intensified +- ▁раёнах +- ▁குழ +- لىقى +- ▁лиеш +- ▁storehouse +- ▁шчыра +- ▁détail +- ול +- ▁positi +- عاد +- ▁عن +- بُ +- سلام +- ਈ +- ▁disbelieve +- ▁veröffentlicht +- ബ +- ▁mmoja +- ▁deportment +- ▁cançons +- ▁sonderbar +- ▁definitiva +- гляда +- ▁schönheit +- ▁neigh +- ▁inutile +- ▁دنبال +- ▁شروع +- agerageje +- лічыць +- ▁reclaim +- ▁risa +- ▁rabble +- ▁언니가 +- ▁неяк +- lusion +- glück +- vunaanyizibwa +- ▁phosphor +- ▁ethereal +- ▁প্রতি +- 不同 +- عام +- umunani +- ▁Ekde +- 就是 +- ▁venez +- ▁Из +- ▁seront +- ▁haciendo +- ▁hauptsächlich +- ▁magnanimous +- ▁vacuum +- ▁aladdin +- வல +- ▁pencroft +- ▁Sinzi +- haguruk +- ▁morceau +- ▁bombardment +- ▁copp +- ▁recul +- ▁lernen +- ▁convulsively +- ▁Vietnam +- ▁Llavors +- ▁negligence +- ▁meredith +- ένα +- ▁capturing +- ချ +- ▁sombrero +- ▁kawaida +- ▁مست +- ▁알려 +- ▁Kopf +- ража +- ▁refugi +- کردنی +- ▁Вас +- ▁projectile +- ▁мець +- ▁glace +- source +- ۈرۈ +- ▁dexterous +- ▁sukcesis +- ▁пазней +- မှု +- ▁portsmouth +- ▁lḥal +- ▁perusal +- étroit +- ▁poise +- ▁actuated +- ▁magnetism +- ▁일단은 +- ▁mittag +- shooter +- ▁oculta +- র্থ +- ▁Ду +- ▁cercle +- ▁hablaba +- interpret +- ▁Mwaka +- ▁Фа +- atelier +- ไว้ +- ▁Schrift +- ▁escuela +- 中心 +- ▁kuburyo +- ▁입고 +- bî +- ▁korpo +- ▁meekness +- ▁komuna +- ▁arrêté +- ▁papist +- ▁larva +- ▁Ergebnis +- ▁inflexible +- ▁twitching +- ▁бліжэй +- ▁پێویست +- ▁ssomero +- ▁Аўтар +- ▁ŝtata +- ▁rosamond +- ▁lleno +- ▁pronuncia +- ▁footstep +- ▁grouse +- ▁wicket +- ▁entró +- ▁portier +- ▁dissuade +- ▁prophecies +- ▁meṛṛa +- gląda +- ▁plor +- ▁особ +- ▁brawl +- চে +- ▁astronomi +- ▁twari +- ▁Elizabeth +- ▁célèbre +- ▁fictitious +- ▁sorcerer +- 拿 +- ▁stafford +- roofed +- ▁Ошо +- ▁спя +- ▁qualify +- ▁bedeckt +- ▁Núria +- ▁receptacle +- ಣ +- ▁cuckoo +- ▁viscount +- ▁Fernando +- ▁commissary +- 单 +- étude +- ▁stieß 
+- ▁манеш +- ▁peces +- ▁knox +- ▁radar +- ▁savoy +- ▁cashier +- ▁expres +- ▁unconcerned +- விட்டு +- stecken +- ▁آپ +- ढ +- ▁quelquefois +- ▁продолж +- ▁pitiable +- ▁mataifa +- ▁provost +- ▁다섯 +- ▁магчымасць +- ▁Student +- ▁அறிவ +- ▁Clark +- ▁voleva +- ▁그렇고 +- catcher +- ▁twirl +- ▁inhabitant +- ▁overwhelm +- ▁Bernard +- ▁fung +- ▁municipo +- 應 +- 汉 +- ސާ +- ▁ҙур +- iferous +- igiti +- '35' +- ayı +- ▁turismo +- ▁bingi +- ▁movi +- ▁narration +- ▁chercha +- بەت +- gestanden +- ▁viking +- ▁província +- 吃 +- ண்டி +- ▁concret +- ▁stallion +- ▁reprodukta +- لة +- ▁харч +- ahisemo +- ▁pedestrian +- ▁очень +- ▁درباره +- ▁puissance +- ▁없으니까 +- ▁chciał +- ▁fiŝ +- ▁comparable +- appeti +- ▁лийын +- ▁terreny +- ▁blitz +- ▁پێش +- ▁fidel +- ▁galop +- ▁bibazo +- ▁ماه +- ફ +- ครั้ง +- ▁nevada +- ▁oswald +- ▁kabisa +- يار +- ▁racial +- ▁wuz +- ▁liberated +- ▁tooneel +- ▁hansom +- водзіць +- ится +- ▁kuliko +- ▁erkannte +- Austràlia +- ಷ +- ▁ejemplo +- ▁exchequer +- ▁incredulity +- ▁memphis +- ▁unmittelbar +- ▁Forschung +- ▁unravel +- 沒有 +- 학과 +- ▁unholy +- ▁Serie +- ▁кум +- ▁bedaard +- ▁argaz +- ▁signifi +- ҥга +- ▁methodical +- beleid +- ▁tresses +- ాల +- ▁Bour +- カ +- sleeve +- ▁пӱ +- ▁Kä +- amaanyi +- wendig +- gemerkt +- effray +- ▁beryl +- ▁zaś +- ▁Bush +- рова +- ▁Columbia +- ▁возера +- ជា +- ▁wollt +- amafaranga +- ▁اومد +- ▁قور +- ▁narrated +- menyesha +- anjou +- ▁پہ +- ლა +- 他的 +- ▁devolve +- ▁памят +- ▁parecia +- ▁benefice +- ▁voile +- রো +- ジ +- ћ +- ▁Massachusetts +- ▁cimetière +- ▁animosity +- ▁систем +- ▁henriette +- ▁bryce +- ▁беларускага +- ▁plaid +- ედ +- wallow +- ▁hartelijk +- ▁gibraltar +- ▁appalled +- ▁línea +- ▁pamela +- ▁патрэбна +- ਦੀ +- ▁lanyard +- famed +- presión +- ▁rebuilt +- เพ +- ▁graaf +- ▁levelled +- втор +- ▁خوبی +- வரை +- алда +- ▁bewail +- 带 +- ▁кӱлеш +- ▁verbergen +- ▁அரச +- ▁storage +- nęła +- ▁سۇ +- sluiten +- ▁rojo +- لارنىڭ +- ости +- ▁porridge +- czny +- ▁unhealthy +- ▁voluptuous +- ▁доўгі +- ▁likelihood +- ▁incorrect +- ▁saturated +- ▁yelhan +- ▁drapery +- duid +- ▁Arthur +- ▁devenas +- ছা +- fluas +- ▁bookseller +- ▁schande +- ▁Station +- magezi +- ยุ +- मान +- ▁christened +- ▁moonlit +- típic +- ▁அதை +- ▁bankrupt +- ▁необходимо +- ▁irresponsible +- ▁kaminuza +- ▁আৰু +- ▁jenoside +- ▁suzanne +- sababisha +- ▁popped +- ▁relativament +- Alemanya +- ▁equity +- मी +- badili +- ▁tliat +- ▁dynamite +- ▁Information +- abanje +- ▁있나 +- nscrit +- ▁flux +- ских +- ▁anniversary +- ▁magnanimity +- ▁некаторых +- ▁hemlock +- ▁tevreden +- ▁mój +- ▁여름 +- ▁girlhood +- ▁шунда +- льнікі +- ulph +- ▁kernel +- ▁hungary +- ibile +- ▁voters +- ▁heimat +- ▁costumbre +- ▁apathy +- lösen +- ▁옛날 +- ▁gauw +- ▁área +- possessed +- ▁prétend +- ▁laurie +- actual +- ▁wunsch +- kwemera +- ▁snore +- basaba +- ▁절대 +- ▁flick +- ▁stockade +- ▁রয়েছে +- ▁выкарыста +- ▁Ṛebbi +- ▁glistened +- ▁posséd +- ▁bengal +- 것 +- یاری +- ▁englische +- ً +- ▁freddy +- ▁Dazu +- échappe +- ம்பி +- ▁tallow +- ▁chasseur +- ▁proportionate +- نوع +- starred +- ູ +- ▁practitioner +- ▁stupefied +- ▁Zealand +- ▁negotiate +- 案 +- הי +- ▁Hinter +- ▁Gemma +- ನ್ +- ▁ŝanĝi +- ବା +- ові +- ▁platter +- fusió +- ▁килде +- ▁venomous +- ▁darcy +- テ +- ކޮށް +- ▁acknowledging +- ๊ +- 念 +- ▁màxim +- ▁வாழ்க்கை +- ▁주말 +- కు +- ▁bordeaux +- ▁бачыў +- ▁bluster +- ▁persuading +- ก่อน +- ▁knitted +- готов +- ▁römische +- ▁hammond +- ▁خۆش +- ▁সম্ +- পি +- bikoze +- drukt +- utilitzen +- ▁plebeian +- ▁enquiry +- ▁javelin +- ▁нисек +- ▁saunders +- ▁scorched +- ▁башҡа +- મા +- ▁ўсяго +- ▁entièrement +- ▁fwy +- шука +- ▁plaything +- ত্ত +- 
▁multitud +- ▁voegde +- ▁perpetrated +- ▁vergnügen +- ▁epidemic +- ▁insomuch +- ▁proksima +- ▁deferred +- ▁alligator +- ▁Nigeria +- ▁सु +- ▁modera +- ▁strato +- umusore +- ▁strapped +- ▁maîtresse +- ▁continuity +- ӑн +- വി +- ▁rubies +- ▁levity +- ▁piace +- ▁bolton +- ▁மற்ற +- priva +- ▁zorgen +- ਖ +- ▁Although +- ▁addicted +- ▁conservé +- ▁responsive +- ▁buoyant +- ▁signification +- hawk +- ▁virtual +- علم +- 듣 +- ▁skiff +- ▁ziren +- ▁durable +- ▁الك +- ሪ +- ▁inarticulate +- ▁makumyabiri +- ▁일주일 +- nsgesamt +- ▁dinosaur +- ▁responsabili +- ▁recursos +- ▁harangue +- ▁superhuman +- ▁부산 +- ▁شاید +- Straße +- ▁vibrating +- aglia +- Arc +- ▁табы +- ิด +- ▁Thus +- ้ม +- aceous +- ▁jurist +- ការ +- สําหรับ +- ▁Hodiaŭ +- ▁blizzard +- ▁obnoxious +- ▁santiago +- 排 +- ingofero +- ▁stimulating +- ▁heaviest +- ▁mutilated +- mukazi +- ▁Despi +- ▁finch +- ▁сцен +- dessous +- ▁oordeel +- ▁hobby +- ▁necesita +- ▁Grab +- ▁fermé +- hagarariye +- ▁proyecto +- ▁purchasing +- ▁undecided +- ▁uruhushya +- ▁хәҙер +- ▁tiberius +- migabane +- ándome +- ▁cologne +- 大家 +- ▁sophie +- ▁gdyby +- تەر +- ▁gracie +- ▁acaso +- ▁Че +- mahon +- ▁championship +- ▁pending +- झ +- ▁prophesy +- 센 +- ▁flossie +- ▁bɣiɣ +- kakiiko +- ▁cosí +- ▁zoolang +- овую +- दी +- ▁guhindura +- ▁frigid +- ▁penetration +- ▁Consell +- ▁koncert +- ▁Estados +- ▁Nubwo +- ▁международно +- ▁kenshi +- ▁Reihe +- ҟә +- rashaka +- پرسی +- ▁Publi +- ▁folgende +- ▁yaliyo +- ▁гісторыя +- ▁halifax +- ▁nauwelijks +- ▁свабод +- Uhoraho +- ▁Daher +- ▁huntsman +- ▁пыр +- ▁spesso +- ▁faudra +- ▁primeiro +- ▁trembl +- ▁primeval +- ▁glacial +- ▁hillock +- ▁hervatte +- ▁poul +- faß +- ▁tegenwoordig +- ောက် +- ▁shortened +- ▁psic +- ځ +- ై +- ▁bewitched +- ▁uninteresting +- ▁актыўна +- ▁verwonderd +- 問 +- 班 +- ▁retrieve +- 速 +- ▁vergangen +- größe +- ▁რომ +- ▁modifi +- ifuzaga +- مین +- verfahren +- ▁escucha +- ▁아프 +- ▁vogue +- ▁morose +- ▁parsonage +- 보자 +- ியது +- ▁gazette +- ▁immaculate +- 議 +- 告 +- ▁betreft +- ▁standstill +- ▁partiality +- ▁தூ +- И +- ▁commentary +- ▁Truca +- ▁träume +- ▁atmospheric +- 탄 +- اللَّ +- ▁hulk +- ▁hungarian +- ▁kalif +- ▁gewählt +- ▁barbarism +- biseera +- ▁fosc +- weichen +- attache +- ▁trampling +- ▁Oxford +- ▁vicksburg +- vugizi +- Àfrica +- ▁kkooti +- ▁scrawl +- 领 +- deutschland +- ▁blackmail +- ▁Gaël +- zaliwa +- ▁Wilson +- ▁qilish +- ილ +- läg +- ▁Techni +- чки +- speise +- sistance +- ▁cloche +- ▁finster +- À +- маған +- َةِ +- ▁استفاده +- 油 +- ▁adquiri +- ▁astronomical +- ▁inclouen +- ▁chandler +- μο +- ▁variance +- nyola +- ▁springfield +- ▁və +- attention +- ▁Llobregat +- ખ +- ሉ +- 資 +- 洛 +- ▁possono +- ▁tattoo +- ▁கேட்ட +- ▁reddened +- ▁physiological +- ▁vijand +- ▁innerhalb +- ▁сдела +- רו +- ▁afflict +- ▁شیر +- ▁tassel +- खा +- ▁archipelago +- ▁nekhludoff +- ▁scrambling +- ▁exulting +- ▁Txell +- ▁tambien +- basket +- ▁deposed +- ▁củ +- '21' +- ▁яңы +- ▁brilla +- ▁않고 +- ▁geometr +- ساز +- ▁вакол +- ▁Unió +- ▁retorn +- ▁indiscretion +- ▁diocese +- 象 +- 該 +- ▁Ireland +- villiers +- ▁pivot +- ▁Holz +- shimisha +- ▁Ikindi +- ▁Было +- 할려고 +- carriage +- ▁aŭtoro +- ▁ауыл +- ▁résist +- ▁көс +- ନ୍ +- ھى +- ▁lorraine +- ▁아무래도 +- ▁hicieron +- 印 +- ▁forgery +- ▁penrod +- adequa +- ▁circo +- ▁implication +- พระ +- ▁процесс +- ▁irresistibly +- ▁nautilus +- ▁redoubt +- ▁admittance +- ▁결국 +- ▁unsteady +- ▁aubrey +- scripció +- spreken +- čí +- lund +- ▁historique +- ▁frustra +- กิน +- vacu +- ▁bagize +- ായി +- ▁calcutta +- ▁stagione +- 嘢 +- ▁شرکت +- ▁insinuate +- ▁الق +- ▁Shar +- ▁saucy +- ubukungu +- ▁fuente +- ▁libér +- ▁domination +- 
▁Ҡо +- particulièrement +- ▁sawdust +- ▁denkbeeld +- ▁Hungario +- quête +- ▁된다고 +- ▁карыста +- ▁journalism +- ▁সম +- চিত +- ▁cherche +- ▁claudius +- ▁excavation +- ▁магчымы +- ▁падабаецца +- ▁alegría +- ▁imitating +- ▁dmitri +- ▁atteint +- 重要 +- บอก +- ▁않나 +- ▁obgleich +- ▁alcoholic +- ▁succor +- eleza +- ▁wreckage +- bazadde +- ▁것들 +- ayotgan +- เรียน +- ▁kemm +- 剧 +- ▁எண்ண +- ▁Jacob +- ▁Cambridge +- ▁кнігі +- ▁этап +- ▁사람들은 +- ▁modoka +- ▁rebelled +- dimensional +- ▁ailment +- ▁ўваход +- garri +- ▁humph +- ▁фарма +- ▁человек +- ▁demasiado +- ▁mètode +- ▁shuffling +- 嗎 +- ▁Tradition +- ▁Primera +- brücke +- mutungo +- енән +- kurikiza +- اوی +- ▁працягу +- ▁Cross +- ▁expel +- नु +- ▁piccadilly +- 팅 +- ▁әллә +- 引 +- alfabet +- ▁карцін +- ▁سفر +- ▁profesor +- ▁hackney +- mettait +- ▁theodora +- ▁liberté +- மண +- ▁prosaic +- ▁geschah +- ▁homesick +- ▁cumber +- gelesen +- rwanya +- ▁procede +- ▁blackguard +- ▁verschillende +- ▁waarmede +- ▁presidency +- ▁ballast +- ▁Finanz +- ▁நல்ல +- ▁семын +- பட்ட +- stift +- ▁topmost +- ▁shanty +- ▁deftig +- ▁prowl +- 야겠다 +- sieur +- ▁nestled +- Ο +- ▁ephraim +- ▁Некаторыя +- ▁އޮ +- ▁ggwanga +- ▁miejsc +- ▁듣고 +- ▁muhanda +- ▁enhavas +- ▁enseña +- ▁Sinshobora +- альная +- ▁иаа +- ▁composició +- ▁gewinnen +- ▁усім +- ▁stigma +- ▁gelooven +- ▁victuals +- 캐 +- ▁indolence +- ▁بولىدۇ +- getrokken +- ▁amphib +- ▁protestations +- ▁algú +- ▁selva +- рван +- ▁말하는 +- ▁mercer +- ▁opzicht +- ▁아무것도 +- ▁잠깐 +- ▁প্ৰ +- 统 +- ▁ljewin +- ▁iḍelli +- ▁میکنی +- ▁Bridge +- wirken +- طب +- Serv +- iadau +- ▁Sydney +- ▁coneguda +- ▁suivre +- 留 +- 导 +- ▁хочацца +- ▁Historic +- ▁cherries +- ▁niedrig +- әрә +- ▁reflexion +- skew +- ▁venido +- ▁utah +- பார் +- ในการ +- পু +- रू +- لەرنى +- ▁aggregate +- ▁symmetry +- 康 +- ▁descobri +- 易 +- ▁ninguno +- その +- aracter +- ▁Airport +- ▁uniformity +- ▁windmill +- ▁accumulate +- ▁없어서 +- ▁shuttle +- ▁bitaro +- ▁gladys +- ▁ketch +- ▁plenum +- ▁Kampala +- 찬 +- َأَ +- ▁традыцый +- ▁giovane +- трымлівае +- abayizi +- ▁palisade +- ▁monica +- ▁Sign +- ▁அதிக +- ละ +- ▁kreskas +- خواهی +- σει +- عُ +- ddagala +- ம்ம +- ▁aanzien +- ▁weldra +- ▁Nàdia +- ▁begegnet +- ▁roosevelt +- ▁technology +- ▁время +- 핑 +- ▁watalii +- ▁maddened +- ▁maddening +- ▁Lakini +- ▁arsenal +- ▁һәр +- Ш +- ▁detach +- ▁Questa +- ▁жок +- gunstig +- ▁jammed +- ▁cadena +- ▁Ре +- ▁هێ +- ▁கண்ட +- ▁diplomo +- ▁vetur +- ▁Taylor +- ▁vecchio +- ▁enabling +- ใคร +- ▁talisman +- 术 +- ▁новага +- zinnig +- ▁coś +- ▁Unidos +- ▁schlich +- ▁wicker +- czek +- нуты +- ардын +- ▁думае +- ▁زیر +- ▁Aquell +- рэнне +- ▁arabia +- 칠 +- หมาย +- ▁아무튼 +- 节 +- ▁recommence +- ▁rosemary +- ड़ +- ފި +- ▁kwandika +- ▁corporate +- ▁tomatoes +- ▁Abakulembez +- дзець +- ▁ähnliche +- ▁килгән +- یەکی +- arriver +- aspetta +- ткан +- ▁Gleich +- treize +- ▁frenzied +- ▁penitence +- ▁чатыры +- ประเทศ +- 助 +- abahanzi +- ▁sociable +- ▁Malgrat +- ▁prachtig +- ▁wulf +- ▁скл +- રા +- atanzwe +- ▁Typ +- ▁auspicious +- ▁icyizere +- ▁swarthy +- ▁grizzly +- 區 +- コ +- ▁вялікай +- ವು +- ▁utopia +- ▁Earth +- ▁bianca +- ▁diablo +- ডি +- ▁күтәр +- ▁uɣerbaz +- ▁perplexing +- ▁부모님 +- ▁최근에 +- 층 +- ▁гаварыць +- ▁Rand +- jointed +- います +- ▁basketball +- ▁якое +- ▁sympathetically +- ▁clifford +- ▁цел +- ▁форма +- ▁abagize +- ▁ҡарап +- َمْ +- ▁detrui +- ▁wrathful +- gerissen +- ળ +- บาง +- ጠ +- ▁begleitet +- ▁diminutive +- Е +- ▁ruffle +- ▁fertility +- concentr +- ▁Privat +- ▁respiration +- komunik +- ▁lovable +- ▁rumina +- ▁dreamily +- 닐 +- ▁northumberland +- ▁பெண் +- ▁ஒன்று +- ▁ndashaka +- ▁barbaric +- ▁àwọn +- anlage +- ▁vertelde +- 
▁많고 +- ▁Faint +- ▁ôf +- ▁dynamic +- ▁erinnerung +- ▁Һеҙ +- ▁marchioness +- ▁Frankfurt +- 约 +- ▁دەکات +- ▁espècie +- ▁salutary +- ahitamo +- ▁curving +- ▁bulge +- ▁торған +- ▁soared +- habitatge +- cursion +- ▁templ +- ڪي +- ੂ +- 떨 +- 뛰 +- 埃 +- ▁entsprechend +- چار +- ▁працаваў +- ▁sonorous +- ▁whitney +- projekt +- சொல் +- บน +- ▁dissabte +- ▁और +- 왕 +- ▁réalisé +- 候 +- ▁কৰি +- ▁відэа +- ▁voĉo +- کشید +- علي +- ▁چشم +- ▁grandpapa +- 프로 +- ▁venturing +- αρ +- hitaji +- ▁ведаюць +- ▁годым +- gefahren +- ▁모르겠다 +- ▁أنا +- ▁الج +- ikki +- ▁Mexico +- ▁gukemura +- прыгожа +- ▁alyosha +- ▁armitage +- ▁ongeluk +- ▁tribulation +- ▁складаецца +- слоў +- ▁idiom +- wurk +- ▁interred +- 레이 +- ▁chatted +- rühm +- ▁lutte +- himba +- дагы +- ▁murphy +- ▁securities +- ▁ئێستا +- ▁crammed +- ▁slackened +- ▁virtud +- Honor +- ▁газет +- աս +- ŝtono +- ▁fraai +- ▁طور +- ▁fundador +- ▁Ғә +- বো +- ▁Imperio +- انداز +- ▁már +- 第二 +- ▁Otto +- ▁incongruous +- ▁sullivan +- 搵 +- 聽 +- 密 +- ▁кожнага +- ▁criança +- ▁comparti +- ▁китә +- populated +- ▁Уның +- асобных +- ▁breng +- ▁অব +- ▁іншы +- ▁tyrannical +- цёр +- ▁dilapidated +- เหมือน +- ▁défense +- ▁forsooth +- 運 +- acompanya +- ▁olivier +- ▁zenith +- ▁축구 +- ▁ляжа +- ▁désert +- ▁uhuru +- ▁разоў +- ▁visionary +- ▁пэўны +- wirtschaftliche +- ▁disagreement +- ▁añadió +- ▁brittany +- ▁circumstantial +- ▁indefatigable +- ▁predicament +- ▁scrooge +- ▁থাকে +- impanuka +- ▁tayoga +- ▁untimely +- мяні +- ▁ئوقۇ +- نامه +- ▁مص +- خیر +- ▁pittsburg +- ▁jarred +- ▁Broad +- ▁Andrew +- ▁Regional +- کەوت +- ▁obsolete +- ആ +- ▁portmanteau +- ▁behaving +- 哥 +- 处 +- ▁pontiff +- ▁dinsmore +- ▁José +- ▁ursache +- killer +- ▁Graf +- ▁bemerkt +- ▁ripped +- ларҙың +- шей +- ▁reinforced +- ▁puerto +- àmbit +- bishaka +- ▁Quintí +- स्त +- geleid +- सी +- ▁analys +- minota +- ▁ئەوەی +- ▁concoct +- ▁démon +- umubyeyi +- ▁নির্ +- ▁stör +- கிறான் +- sanyu +- ▁ئور +- ▁خواب +- ▁دەتوان +- kapu +- ▁agamemnon +- ▁intermittent +- ▁overjoyed +- ▁jamaica +- ▁Guinea +- ▁domestique +- ▁possum +- ▁fraught +- ▁Golf +- ▁너랑 +- ▁좋을 +- ▁snowball +- ▁récit +- ▁dépend +- ▁afscheid +- ካ +- ▁carthaginian +- ▁нацыянальна +- ▁француз +- ▁sprawling +- 依 +- gewohnt +- ▁Организации +- ▁spijt +- کێش +- ▁consejo +- ▁Fru +- ▁hapless +- ▁nannte +- ▁vouch +- ▁paquet +- чэння +- instrument +- ▁Hü +- гийн +- ▁llamaba +- ▁plicht +- ▁casino +- ▁discourage +- ▁kipindi +- ▁Urashobora +- ▁Literatur +- ▁muntanya +- ஷ் +- ▁Хаця +- ▁commendation +- ▁weiterhin +- ▁Franz +- ӗн +- phyl +- IS +- ▁bathroom +- ▁antoinette +- ▁circonstance +- ▁صحبت +- ▁자체가 +- ▁Friedrich +- ▁Stephen +- ▁jünger +- ▁Мала +- ▁만났 +- ▁prueba +- ▁일찍 +- ▁policies +- ▁diploma +- ▁Lewis +- ▁greenwich +- เอา +- ▁circa +- ▁Mö +- ▁velho +- 집이 +- ิก +- ▁Archi +- ભ +- 资 +- ▁glamour +- ಮಾ +- aveu +- ирован +- ▁quartermaster +- feier +- ▁strenge +- ▁가게 +- byumva +- наход +- ▁Zude +- ▁prescribe +- ▁됐어 +- viertel +- ▁হিসেবে +- ▁residing +- થી +- ▁linio +- ▁cubic +- piskop +- ▁재밌어 +- rzecz +- ▁mlima +- ▁presumptuous +- 員 +- 运 +- ▁taciturn +- ▁영상 +- ▁агульна +- ▁retract +- ▁왔어 +- ▁одно +- gosto +- ▁übernahm +- ▁Mlima +- rupp +- لەپ +- ▁nuova +- wählt +- ▁туман +- 줘야 +- እ +- ▁brougham +- ▁igikorwa +- ▁biographer +- ▁ukuboko +- ▁أي +- ▁zilizo +- ▁arbeitet +- ▁paddy +- ەیەکی +- keeping +- ▁zweimal +- ډ +- ▁twitched +- ▁reclining +- ▁για +- เดิน +- ▁tradesman +- ▁solicitous +- fuata +- ▁descrit +- wundi +- ▁chuckling +- ▁bedacht +- ▁balsam +- ubuntu +- ▁zanjye +- ▁knack +- ▁anselm +- ớ +- ▁agrikulturo +- enseignement +- ▁berkeley +- ▁scabbard +- ▁magnified +- 肉 +- ▁상황 +- Australie +- 
▁poussa +- ▁indecent +- ▁frankfort +- ▁pollute +- ▁signifo +- ▁Сә +- ▁solamente +- ▁industrio +- ডা +- ▁Júlia +- ▁despicable +- ▁굉장히 +- ▁persevering +- ▁irksome +- ▁сообщ +- ▁resuming +- ▁merkwürdig +- ▁portentous +- ▁clemency +- ▁pantalon +- ▁esperantistoj +- ▁падобны +- ▁Dadurch +- ▁adultery +- ▁Hungara +- nnyini +- ▁artístic +- napatikana +- स्थ +- ▁tolerant +- มีความ +- ▁motley +- ӹр +- ▁شتێک +- тельно +- បាន +- ▁cinnamon +- ▁fröhlich +- ▁scribble +- ▁абсалютна +- 퇴 +- ▁mtoto +- evitch +- ▁habría +- scuttle +- ▁школа +- ▁помнік +- ificado +- ▁Maur +- ▁그날 +- ீர் +- ▁cragg +- ▁unprecedented +- ▁τον +- ▁insieme +- ▁gryce +- ▁그래가지구 +- examen +- ▁insistence +- خوان +- ▁aŭdi +- ▁accomp +- ▁pareció +- ▁fretful +- ▁licking +- ▁مردم +- ▁ejaculation +- ▁Кожны +- attività +- ▁maurits +- ▁vegetarian +- ▁Шулай +- ▁verkauft +- ოვ +- ▁winced +- ▁ліст +- ▁cyumba +- ▁subtlety +- ▁untold +- ▁Ofte +- ización +- ▁цела +- ▁размешчаны +- ▁jumble +- ▁detriment +- ต่าง +- ▁Rusio +- feito +- ▁venom +- ▁Früh +- ▁뒤에 +- ▁fréquent +- ▁opportune +- 配 +- ▁pudiera +- 鱼 +- ▁invading +- ভি +- liegenden +- ▁يول +- ▁ziemi +- gebroken +- ▁wrapper +- ▁festive +- ▁asunto +- ▁stessa +- ▁Ça +- міністра +- ▁promontory +- ▁unacquainted +- ▁undesirable +- ロ +- 步 +- ▁spectral +- ▁мэта +- авыя +- ▁сістэма +- ▁simultaneous +- ▁complimentary +- leuchten +- adulte +- ట్ +- ▁totdat +- لىرىنى +- ▁gupfa +- ӗр +- ▁dilute +- honnête +- ▁disloyal +- ▁gingerbread +- ▁نبود +- 周 +- ▁default +- ▁traddles +- fanyika +- ▁verbreitet +- ▁greedily +- ▁striding +- ▁памыл +- ▁proverbial +- ახ +- ▁meteorite +- ▁ubufasha +- ▁existeix +- ▁kinsmen +- ▁Nadal +- ▁wabivuze +- ▁tradició +- ▁dronk +- gymnasium +- ▁bismarck +- ▁complacently +- ▁undaunted +- ▁ниндәй +- 존 +- ▁vertrauen +- ▁ralston +- ▁surpriz +- ▁comunicació +- wakoze +- ▁linguist +- occasion +- ▁манын +- pflege +- ▁notebook +- ▁हैं +- ▁stelde +- emperador +- ▁deluded +- સા +- ▁channing +- ▁راست +- ▁redakt +- '70' +- кавыя +- ▁negotiation +- ضا +- behalten +- ▁inexpressible +- ▁журналіст +- ▁ندارم +- ▁بگیرم +- ▁epigram +- ▁cartwright +- ▁дажджы +- ▁Disney +- では +- ▁Georgia +- பதி +- ▁Hollywood +- ▁jacqueline +- ▁relieving +- ▁unmistakably +- ▁balustrade +- 咗 +- ▁spasmodic +- umukunzi +- 投 +- ▁planchet +- mateeka +- ▁coût +- ▁anthea +- ubukwe +- ▁toledo +- ▁reconstru +- ▁cornice +- цярп +- ▁Farb +- ช่วย +- 結 +- ▁Прычым +- ▁whirr +- ouille +- ▁Welche +- balwadde +- ▁деле +- ினார் +- isirayeli +- ញ +- ጥ +- ▁Salvador +- ▁усилия +- 幫 +- ▁überzeugt +- 시키 +- getrieben +- ▁labios +- ▁geografi +- ▁refine +- ▁রাজ +- ▁걔도 +- direktor +- ▁infano +- ▁glaubt +- จริง +- ▁abhorrence +- ▁disengaged +- ▁наогул +- 裡 +- ▁эмес +- ▁цэнтры +- ▁diagonal +- ▁dejection +- существ +- ▁knaap +- ▁receding +- ของเขา +- ِينَ +- ▁semblable +- ▁voorbeeld +- ▁compens +- ▁virusi +- stappen +- درس +- ▁sichtbar +- wigeze +- ▁líder +- ▁similaj +- gelli +- ▁ausgezeichnet +- ▁aŭskult +- ▁brilliance +- ▁néanmoins +- ▁undergrowth +- 睇 +- 博 +- ▁capacities +- ▁writhed +- ▁detroit +- ▁다녀 +- த்தான் +- ▁huld +- ▁dankbaar +- ரோ +- ▁roubles +- shakisha +- ▁verliet +- ▁рукі +- ▁München +- ▁retribution +- ▁umusatsi +- yanditswe +- ▁relevant +- ▁crusoe +- byokurya +- inyamaswa +- ভাবে +- ▁بدهید +- ▁arundel +- '22' +- ▁magpie +- ▁neutrality +- ▁Ruh +- corp +- ންނަ +- ▁gulden +- government +- ▁européen +- ▁Möglichkeit +- ▁birmingham +- ▁freqüent +- ▁hickory +- 雞 +- เงิน +- ▁melodrama +- ▁merveille +- ▁depraved +- ▁glazed +- ▁Ayɣer +- ▁ofschoon +- ▁seaweed +- ехалі +- ▁wielki +- ▁munich +- wände +- ئان +- ▁Bevölkerung +- ▁bugufi +- ▁rappresenta +- 
▁Omwana +- ไหน +- ▁Друг +- ▁gemoed +- ▁sheldon +- ▁еҥ +- itangazamakuru +- ▁choc +- เสียง +- ▁vessant +- ▁occupé +- ▁بگو +- ▁churn +- ▁density +- gewinn +- ▁elevate +- ▁رنگ +- ▁그다음에 +- 립 +- 派 +- ▁negligent +- ▁brocade +- ▁Italy +- ▁нейкія +- icaye +- ▁stereo +- ▁Kugira +- ▁wakeful +- 蛛 +- ৎ +- ▁Lincoln +- ▁Venezuela +- ▁evaporate +- ▁eerbied +- ▁henderson +- ▁evacuate +- ▁jesús +- ▁bauern +- ▁беларусаў +- глыб +- ымаш +- fumbi +- صف +- ▁heimlich +- strafe +- ড় +- 접 +- ▁இவ்வ +- ▁beckoning +- ธรรม +- ▁cholera +- ▁embedded +- ▁பாட +- 委 +- ▁살짝 +- ▁snoring +- ▁Lleida +- ▁irving +- ▁тигән +- ານ +- ▁få +- ▁ranald +- ▁aviator +- 튼 +- myitwarire +- ሳ +- ▁appreciative +- ▁confiscated +- شخص +- プ +- ▁pharmac +- 儿 +- ▁rameses +- ▁camarade +- λά +- ▁burgess +- ▁Selbst +- ▁сін +- ▁assimilate +- ▁vlucht +- быз +- ▁толын +- ▁aspiration +- 이까 +- ▁makabila +- ▁neptune +- ▁pinocchio +- ▁unfailing +- ▁Science +- Itàlia +- 千 +- 举 +- 曲 +- ▁delacour +- ▁incentive +- ▁cuisine +- จัด +- ▁Ausbildung +- প্রা +- ▁가봤 +- ▁அரு +- sexual +- ▁bedeutende +- homicid +- ▁betsey +- ▁Neniu +- ▁Southern +- geschoss +- ▁올해 +- ▁famoso +- ▁کنه +- ▁Hifadhi +- ▁affluent +- ▁پاکستان +- 갑 +- ▁nitrogen +- ราย +- ▁재수 +- appartement +- பொருள் +- loque +- ▁voyageurs +- ▁எழுத +- ▁Vertrag +- นั่น +- ▁Seiten +- ▁ئالدى +- ▁reparation +- ▁georgie +- ubushakashatsi +- ▁регион +- ▁грошай +- 馬 +- 吉 +- ▁어쩔 +- ▁tebɣiḍ +- ▁gripping +- ▁presuppos +- ໍ +- atandika +- ▁resentful +- টো +- स्ट +- ▁vreemdeling +- Ĝ +- ತ್ತ +- ▁Künstler +- ▁requisition +- ▁миңә +- ▁volonté +- ▁பெரிய +- ▁omtrent +- ▁운전 +- ▁suffolk +- 時間 +- mitglied +- ▁vrijheid +- ▁muchacho +- ▁Commission +- ▁appena +- ▁беҙҙе +- ▁haastig +- לי +- ▁raconte +- ଆ +- ▁toutefois +- ▁Museveni +- ▁doivent +- ▁consonant +- ▁irgendeine +- ▁fumbling +- ▁astute +- ผม +- ▁bombay +- ▁diminu +- ▁gratuitous +- ewweḍ +- ▁mumkin +- ▁excelled +- kimara +- ▁musée +- length +- ▁valise +- ▁тэма +- ուն +- بیر +- ▁balancing +- ▁brunswick +- ▁immensity +- ▁insensibly +- ▁molasses +- ▁porcupine +- ▁vagrant +- 렌 +- ▁Benjamin +- ▁Saɛid +- ▁dediĉi +- ▁Gründe +- ▁polizei +- ▁verdriet +- ▁snob +- ▁ruefully +- ▁craze +- ▁Pedro +- ▁vlug +- ▁suprema +- သည့် +- ▁Google +- ▁disillusion +- ▁expulsion +- ▁hallucination +- ▁rivulet +- ▁coquette +- ▁zigzag +- ▁révolution +- ▁podobn +- ▁stimulant +- ிருப்ப +- ▁demeura +- ▁dormant +- ▁interrogation +- ▁subtil +- ▁башын +- ▁мур +- Î +- ▁Football +- ▁anticipating +- ▁resplendent +- ▁tributary +- ▁unoccupied +- ▁প্রথম +- ▁gayety +- 气 +- ្ល +- ▁portret +- ▁veranderd +- escenari +- อาจ +- ▁umugisha +- بور +- byaye +- gemeinschaft +- ստ +- вёў +- ▁Habita +- ▁mignon +- ▁religiosa +- ုံး +- मध्ये +- ▁despondency +- ▁imprudence +- ▁miliyoni +- 考 +- ఉ +- ▁Sometimes +- ▁utalii +- 构 +- ▁kapitän +- posizione +- ▁Meinung +- ▁보러 +- ▁много +- ▁pertany +- szczę +- ▁forsook +- ishusho +- ▁recrea +- ▁drückte +- ▁cuore +- ▁клуба +- ▁confuse +- ▁senyal +- ▁cincinnati +- ▁medelijden +- ▁midwinter +- 듯 +- 皮 +- ▁늦게 +- ▁эксперт +- ೆಯ +- opération +- ▁bonacieux +- ▁quoting +- ▁Scottish +- ▁прыйшоў +- ▁docile +- َرَ +- ▁tothom +- ▁implicit +- ▁Einzel +- ▁charred +- ▁страшна +- ▁stagger +- ▁compétition +- ▁eensklaps +- ▁inveterate +- ▁savannah +- ▁Daneben +- ▁якога +- ▁تغییر +- ▁moitié +- 記 +- ▁noisily +- ▁simeon +- ▁Magí +- ▁acquis +- ொரு +- เคย +- ▁geraldine +- ▁Corona +- 홍 +- dzę +- ▁күрә +- évêque +- ▁fumbled +- ▁humiliated +- ▁ocasión +- ▁repugnant +- ▁sacrilege +- ▁архітэктур +- ▁паўднёвы +- ควร +- ▁schüttelte +- ▁tomahawk +- katholisch +- ▁Schweiz +- ▁gleichzeitig +- ▁presiding +- '39' +- 
▁constructive +- บุ +- เจ้า +- ▁subconscious +- ▁fecha +- груз +- ▁químic +- ▁Judit +- ▁கட்ட +- ಶ +- ▁tameṭṭut +- Illinois +- ▁Jewish +- ▁laundry +- ▁면접 +- 威 +- ▁combining +- ▁maneuver +- ▁reprezenta +- ▁같긴 +- ▁Profes +- ▁thrall +- ▁Ҡу +- ▁Проста +- ▁electronic +- ூர் +- แม +- Ҳ +- ଜ +- ▁Michigan +- ▁tragedies +- ▁Конференции +- ▁будынак +- ▁decorum +- ▁dressmaker +- ▁curiosities +- ▁emigration +- ▁risc +- ▁Чо +- 같 +- ganwa +- ▁기분 +- ▁junio +- ▁갔었 +- ဝ +- ở +- ▁demeanor +- ▁onslaught +- ▁sutherland +- ▁unforeseen +- ▁сэрца +- ▁geschiedenis +- ▁ئەگەر +- ▁impetus +- ▁हे +- ▁Carlota +- ▁crainte +- änderung +- ▁병원 +- ▁Among +- ▁pennies +- ▁gutsinda +- ταν +- ▁kirere +- ▁régime +- ▁الش +- araufhin +- ▁Gesellschaft +- ▁indústria +- ▁kaskazini +- ▁Мәҙинә +- 他们 +- ▁uchumi +- kitiibwa +- ▁ایران +- ▁минең +- ▁casualties +- ▁guzti +- ▁تنها +- ▁белән +- ▁істот +- ▁turc +- wandiko +- TA +- technic +- ▁connaît +- ▁executor +- शि +- နှင့် +- ▁persecutor +- փ +- ▁Microsoft +- язджа +- ▁pouvez +- ▁escapar +- heggi +- ▁overhaul +- ▁yanayo +- ▁ripened +- examination +- ▁skulk +- ▁طرف +- ▁créé +- throat +- 报 +- ▁incredibly +- 탕 +- ▁zärtlich +- ▁bellowing +- ▁orinoco +- ▁precedence +- ▁coloniz +- ▁zocht +- kukozesa +- முன் +- ଯ +- expérience +- shidikanya +- ▁insbesondere +- ▁recuerdo +- ▁uncompromising +- ▁كېيىن +- ▁descriptive +- ▁elfreda +- ▁insidious +- ▁مشکل +- ▁nombrosos +- ▁pennington +- เซ +- ▁가격 +- ▁coronel +- agaragaye +- ต์ +- ▁riveted +- ▁його +- ▁poppy +- assegura +- ▁Japon +- leistung +- ▁revenir +- ▁рэч +- алтыш +- держа +- ▁disreputable +- ▁mercantile +- 짓 +- ▁insinuation +- 知道 +- લા +- ▁Schloss +- esquerra +- misión +- ▁verdadera +- ▁Chemin +- ▁gurgle +- ▁애들은 +- ปี +- ▁Italien +- ίν +- ▁лёс +- ▁Pennsylvania +- ▁nutmeg +- ▁ostentatious +- 龍 +- ▁sorprend +- 超 +- 男 +- ▁гульні +- ▁ядерно +- ▁imputed +- ▁Feuer +- ▁paraissait +- ▁எழு +- নো +- ▁revue +- ▁трав +- ▁miranda +- рәт +- ▁ئۆي +- uvchi +- ▁buddhism +- ▁erklären +- ▁mémoire +- ▁wananchi +- ữ +- ▁미국 +- ▁oppressor +- ▁үҫ +- ▁propeller +- ▁Presenta +- '13' +- ▁pêche +- wrought +- 하겠 +- òleg +- ▁tombstone +- ებული +- mukunda +- ▁많았 +- ுகின்ற +- ▁Allerdings +- ▁discomfiture +- ూ +- ▁egotism +- န်း +- ▁scythe +- ▁ekspozici +- ▁maarufu +- musango +- ▁gauntlet +- ▁cunningham +- ▁보기 +- ▁unfavourable +- ▁احساس +- ▁ਵਿੱਚ +- 菜 +- 网 +- knüp +- ذلك +- ▁Girona +- ▁observatory +- ້າ +- espoir +- ▁lopez +- ▁teilweise +- ▁sparta +- ▁kaŝ +- ਼ +- ▁désormais +- ▁робіцца +- ▁Vinyet +- ▁برنامه +- 成为 +- ▁fretted +- ▁birthplace +- ▁caça +- ▁çok +- ▁ئەر +- ▁squalid +- 氣 +- ▁tenacious +- 眼 +- 黄 +- 条 +- ▁bracket +- ▁sociedad +- ▁amatwi +- ▁necesidad +- ▁fiesta +- ▁كىر +- چوو +- ▁fanned +- tausch +- yigira +- bɣi +- ▁Plusieurs +- 香 +- 效 +- ▁April +- ▁mundial +- ▁Karume +- 할까 +- ▁Nueva +- berri +- ▁vreeselijk +- ▁hierauf +- ▁aileen +- ▁قىز +- ҥгы +- ▁خون +- ձ +- ٻ +- ▁arkansas +- ▁করতে +- 阳 +- 如果 +- ▁Тудо +- әсәй +- ▁разумее +- ▁comarca +- ሆ +- ▁Cafodd +- ▁mercenary +- ▁дагэтуль +- ▁фота +- 決 +- 息 +- ▁friedrich +- ▁வைத்த +- די +- installation +- actiu +- ▁perturbation +- zaamheid +- ▁الص +- ▁Industrie +- 気 +- ▁galileo +- ▁plejparte +- 町 +- ▁unguarded +- ▁пӧрт +- ▁scorching +- 代表 +- ▁크게 +- ија +- ▁shunned +- ▁emitted +- ▁دۆ +- residencia +- ▁corredor +- ▁Amsterdam +- ▁anstataŭ +- ▁commodity +- ▁despondent +- ▁prostitution +- ▁Однако +- ▁অনু +- ▁እና +- ▁тиклем +- ▁pianoforte +- ▁daisies +- ▁recibió +- ▁먹었어 +- ▁referència +- ▁cadence +- ▁كېلى +- ▁trafik +- examine +- tariki +- ibagirwa +- küste +- ▁Fakte +- ▁unavailing +- ▁unprotected +- ▁незалежн +- 뉴 +- ชอบ +- ▁vandaag +- 房 
+- ▁sneeze +- ിച്ച +- ▁kwinjira +- ▁torturing +- děl +- ▁erlaubt +- ▁coração +- ▁confiance +- ▁hummed +- ▁Alicio +- ▁왔다 +- ▁ferai +- ▁выхад +- ▁unendlich +- ▁걸어 +- ಚ +- ▁credulity +- ▁guillotine +- ▁중학교 +- ▁commencing +- ▁Pendant +- 论 +- ▁archives +- plasm +- mahirwe +- ▁kobiet +- ▁fehlen +- алтын +- против +- bafasha +- ▁garibaldi +- ▁rodriguez +- ▁josiah +- myitozo +- ▁kipps +- ▁Fahrzeug +- ▁deffir +- ▁Darüber +- ▁Ĉar +- ▁championnat +- ▁آدم +- ▁decorative +- huriye +- ▁ontario +- ▁urchin +- ҳои +- ▁Шә +- ▁consecration +- ధ +- ▁ўсю +- 致 +- ▁aylmer +- ▁símbol +- ▁формы +- ્યા +- ▁طول +- ▁gawaine +- ▁foreground +- ▁debout +- ▁kredas +- מו +- μπ +- ▁кид +- Escola +- ▁convulsed +- べ +- 标 +- 园 +- ▁Charlotte +- ▁motiva +- ▁danube +- ▁evangelical +- ▁가져 +- ές +- fric +- には +- ▁revoir +- ubwami +- 해봐 +- ▁hojas +- 축 +- ▁расійскі +- লী +- ▁immemorial +- ▁прамавугольн +- กล่าว +- ▁seizure +- ▁ointment +- ▁sultry +- ▁اینکه +- Alf +- ▁deftly +- ▁كېتى +- जे +- архі +- пові +- ujju +- ▁Howard +- เมือง +- ▁제주도 +- ▁Demokrat +- ▁귀찮 +- 論 +- ޓް +- inigeze +- ▁нәмә +- ▁cosmic +- ๆ +- ▁barcelona +- ▁binyuze +- ▁można +- 낮 +- ▁inestimable +- ▁lucifer +- absor +- ▁dundee +- ▁bovendien +- なく +- ▁encima +- ▁keizer +- ▁savior +- ▁repulse +- ერი +- ޗ +- ▁uncontrollable +- ▁песня +- 魚 +- 托 +- 照 +- ▁чакаць +- ▁mitjans +- ▁جدید +- förmig +- ▁lemonade +- वी +- onekana +- ▁promoting +- sicherheit +- ▁magistrat +- могай +- ▁سنگ +- ▁rebuild +- ▁كۆپ +- ជ +- ▁Städte +- ▁miscellaneous +- émotion +- ▁bietet +- sistir +- ▁있어야 +- arquitectura +- ▁Entscheidung +- ▁unbeliever +- ▁cucumber +- ▁znowu +- ▁untidy +- ▁exterminate +- биз +- mufasha +- ▁вылуча +- ▁granddaughter +- ▁ҡатын +- scribing +- ▁Kreuz +- Б +- ▁공부를 +- ▁exeter +- ್ರ +- ▁composite +- ɛeddi +- ▁scepticism +- چاق +- پەر +- addled +- креп +- ▁pueda +- ▁pessimist +- ▁propietari +- ▁sprinkling +- ▁ambiguous +- 登 +- ▁jsou +- ▁얘가 +- ▁좋다 +- isumbuye +- ▁hybrid +- kompani +- ▁kidney +- ässig +- czyć +- ▁podido +- ▁Point +- ▁Jung +- confidence +- ▁خودش +- ▁chapeau +- ▁leidenschaft +- สิ่ง +- ▁Менавіта +- эканамічн +- ▁annihilation +- ▁concierge +- ▁consummation +- ▁комплекс +- 곱 +- 體 +- 修 +- ▁numerical +- ▁gangway +- ▁전공 +- aeroport +- കാ +- stukken +- ▁Austria +- ▁бачу +- рҙар +- ▁reverted +- ▁exaggerate +- ั้น +- ▁impoverish +- Μ +- ▁trudged +- ▁Samuel +- ▁darrell +- ▁volition +- ▁priestess +- océan +- ▁아침 +- ▁blyth +- ▁whiff +- بخش +- ▁disciplina +- நாட்ட +- ួ +- ▁Tanganyika +- ▁thenceforth +- ▁behauptet +- 학점 +- ▁pekee +- выкарыстоўваецца +- ୍ୟ +- citizens +- ▁Pacific +- Empire +- emploi +- higgins +- ▁кенә +- ▁Republican +- ▁opaque +- ・ +- ഡ +- ứ +- ▁gepatroj +- ▁unprofitable +- 湖 +- ▁satirical +- ▁decisió +- ▁Liberal +- itorero +- verhältnis +- ▁банк +- incorpor +- ▁flauw +- গুলি +- ▁chauvelin +- ▁concurrence +- ▁onderzoek +- ▁delamere +- ▁음악 +- ալ +- ▁بولغان +- ▁다닐 +- ▁голас +- ▁macbeth +- ▁ordentlich +- kontroll +- ্ট +- solvent +- ▁flaunt +- قەت +- ▁이상한 +- ▁distribució +- ▁pietro +- రి +- ▁Sarah +- バ +- tamaduni +- ڳ +- ▁gekregen +- 粉 +- 切 +- 模 +- ▁Guerau +- ไฟ +- ▁vuol +- ▁справы +- ▁römer +- ▁enthalten +- ▁glutton +- ▁bonfire +- ▁vicarage +- ▁awning +- ▁lucian +- ▁Trump +- ▁Chap +- ▁pebble +- ▁Дарэчы +- 궁 +- ▁gobernador +- ▁verbaasd +- ▁دېگەن +- せ +- ▁cicely +- ▁modèle +- ▁apologetically +- ▁dobbin +- ▁skipped +- ▁adalah +- ogràfic +- ▁Britio +- ▁budding +- ▁горш +- ▁gloat +- ρό +- มือ +- ▁klage +- њ +- verständig +- ▁empresari +- schlüssel +- ▁bountiful +- ▁providential +- ▁Elektro +- ▁егет +- 索 +- ▁archibald +- ▁trophies +- ▁bluish +- ▁다니는 +- 였는데 +- ▁historiador 
+- ▁여기서 +- ▁Bwana +- qeɣ +- ▁poète +- ▁Mbere +- splitting +- พล +- δε +- ▁rooster +- ▁Book +- ໃນ +- ▁fuzzy +- ▁senhomiĝis +- ▁літаратуры +- ▁sustenance +- 台湾 +- ▁чыгун +- ▁হয়ে +- ▁pronunciation +- ▁greyhound +- ோர் +- ▁envelop +- ▁План +- práv +- eneral +- ▁discouraging +- ▁magnifique +- ▁decipher +- ▁imposible +- ▁verzocht +- ▁perverted +- ▁пісаць +- ▁chloe +- ▁ivanovitch +- cyclist +- ▁trolley +- ▁работу +- போய் +- ▁truppen +- ▁hindoo +- ▁bicaye +- ▁stärker +- ▁embarc +- haufen +- ▁Messi +- ▁раман +- ▁edziĝ +- 여서 +- ▁glorie +- ▁endémique +- ▁insupportable +- ▁unterstützt +- ubutabera +- ▁Quatre +- ▁ساخت +- ▁نفر +- ▁இரண்டு +- ▁gladiator +- beherrsch +- ▁ottoman +- ▁fakto +- ▁gedanke +- ▁کلی +- ński +- unternehmen +- característiques +- ▁поддержк +- ▁marjory +- ▁unterschiedlich +- ▁غذا +- ▁arbeid +- ▁housing +- ▁influencia +- ▁contemplative +- ▁동네 +- ▁молод +- ▁overhear +- kandidat +- ่าง +- ▁Шул +- ▁satisfac +- ▁blazon +- قانداق +- ▁britànic +- ▁dispelled +- ▁unseemly +- 견 +- autrefois +- iyemeje +- ாட்சி +- กลับ +- 社会 +- ▁bereikt +- ▁inverted +- ▁olympia +- ▁diffuse +- handkerchief +- ▁narrator +- казан +- ▁dilluns +- ▁gedurende +- ▁hubbard +- ▁unmolested +- ▁worcester +- 랐 +- ▁Karriere +- ▁futility +- ▁ĝardeno +- ▁mathilde +- ▁faubourg +- ကျ +- ▁condicions +- ▁தெரிய +- ▁alabama +- ▁behandelt +- ▁ibimenyetso +- իր +- ▁toleration +- ▁bemerken +- вания +- plätze +- ▁khaki +- ವಾ +- ▁enclose +- ubaha +- ▁osob +- ▁replacing +- 乡 +- 晚 +- ▁ddunit +- ▁alcove +- ▁évolue +- ▁quadrat +- ▁بله +- нюю +- ၏ +- Journal +- ፍ +- ▁Following +- ▁indiscreet +- ▁Можна +- vögel +- aleshores +- 室 +- актив +- ▁Kraft +- ▁foothold +- ▁가장 +- ▁malheureuse +- жение +- ▁kühn +- esmenta +- зілі +- ▁Fußball +- ▁более +- ▁advising +- ▁нізкі +- ▁ràpidament +- ▁soupçon +- ▁egiten +- amitié +- ▁Biblio +- ▁Ɣef +- ▁керек +- әсәк +- ▁метад +- ▁вуз +- ရာ +- stufe +- ▁bishoboka +- ▁Facebook +- ▁Funktion +- ▁depravity +- 李 +- 甲 +- ▁schrecken +- ▁petrified +- ▁gaudi +- ▁bulging +- gâ +- ▁parfum +- ▁ончал +- ▁Congress +- ▁foretell +- ▁economist +- ramutse +- ▁Renat +- ថ +- ▁acclamation +- สร้าง +- umuhanzi +- ▁gräfin +- ▁novembre +- 病 +- 风 +- ▁gibbon +- ▁nombroses +- ंड +- ▁obwohl +- ▁இருப்ப +- ▁europäischen +- ▁crucial +- ▁tristan +- 些 +- ▁presidi +- ▁күрен +- ▁чэмпіянат +- ▁aggression +- ▁compatible +- ▁immaterial +- ▁licentious +- ▁intusschen +- 权 +- ▁Alfred +- ▁mansoul +- ▁gewoonlijk +- écouta +- ▁нашага +- stärke +- ▁Baldaŭ +- ▁комп +- ▁suicid +- zugehen +- ္ +- ▁imbecile +- ▁многіх +- ▁спецыяліст +- ▁adolescent +- ▁dergleichen +- ▁despotic +- entrenador +- วด +- ▁gebrek +- ژە +- ▁Gemein +- ▁група +- ▁flughaveno +- ingénieur +- ใหญ่ +- ▁fabrizio +- ▁Stück +- ▁hortense +- ވީ +- ▁execu +- ▁throbbed +- ▁수능 +- 現在 +- ▁hinüber +- ▁fanaticism +- এ +- ▁Strecke +- ▁Karere +- ▁haraka +- квіт +- າຍ +- หัว +- ▁abstinence +- ▁inequality +- ▁многа +- 祖 +- ▁crevasse +- ▁Bulgari +- ▁Soviet +- ▁lavinia +- ▁mwalimu +- ▁strawberry +- ▁tinkling +- ▁Those +- ▁жыве +- десят +- sasula +- ▁hakurikijwe +- ▁будаўніцтва +- ▁상관없 +- 屋 +- ▁wafted +- ▁chakula +- ▁leeward +- ▁mascot +- ▁faßte +- ▁carácter +- ▁میشود +- ▁Думаю +- ▁undulating +- ▁wigwam +- ▁placard +- ▁fruktoj +- 没有 +- ▁yolks +- ▁туҡта +- ▁모든 +- ▁разгляда +- ▁mistook +- ▁innkeeper +- ▁башланы +- ▁mitigate +- போல் +- َعَ +- ▁montaña +- ▁habrá +- ▁flirtation +- ▁எனக்கு +- ቅ +- ▁embodiment +- ▁impregnable +- ▁thimble +- 徑 +- ▁verändert +- מי +- ▁imperio +- ▁prisonnier +- ▁належ +- ▁Japanio +- قص +- ▁antipathy +- 꼈 +- เครื่อง +- ▁empirical +- ▁sigurd +- ▁gukorwa +- ▁guhabwa +- ▁Antwort +- ▁suele 
+- ▁zinazo +- ▁tayri +- ▁mépris +- ▁보면서 +- ▁conosce +- ▁ofereix +- алізава +- ▁avanza +- beddel +- anyuze +- เสีย +- drž +- ▁incalculable +- ▁infatuation +- ▁reticence +- ▁genoemd +- ▁Taiwan +- ▁petulant +- लाई +- ្ត +- નો +- ▁emigr +- technisch +- 졌어 +- ▁avesse +- ▁Omusajj +- ▁stumm +- ▁Petr +- versammlung +- ▁contempl +- ြ +- lícules +- makimbirane +- ▁hacienda +- ▁irrepressible +- ▁mafaranga +- ▁obsequious +- ▁Ніхто +- 永 +- 計 +- 森 +- ▁spectrum +- ▁animaux +- ▁maanden +- ▁Pollando +- ▁gatanu +- ouverture +- ▁organisé +- ▁aumenta +- ▁princo +- ▁காட்ட +- ▁член +- ▁beliebt +- ▁malsano +- нню +- Anglaterra +- মধ্যে +- ▁unparalleled +- ▁дрэнна +- ▁galahad +- جێ +- ▁moistened +- ▁hallowed +- ▁даведа +- алогіі +- ▁conservatory +- ▁있긴 +- стэр +- ▁Luego +- ▁climatic +- ▁herzlich +- ▁eerlijk +- sinnig +- vuelve +- өнө +- ▁reçoit +- ഭ +- 㗎 +- ▁imputation +- ▁Hispanio +- 欧 +- gedragen +- ▁hispana +- hänge +- ▁Brücke +- апярэ +- ▁carreus +- ▁suceso +- ▁brittle +- πά +- ▁چهار +- Claude +- ▁religioso +- ଅ +- ▁dimecres +- பொழுது +- ▁seguretat +- ▁unheeded +- 低 +- ▁turnbull +- 担任 +- ▁draperies +- ▁хацелася +- ▁stürzte +- dzić +- Europe +- ึก +- wigisha +- ィ +- ខ +- ▁chipmunk +- ▁phronsie +- ▁sensuous +- ▁힘든 +- ▁ప్ర +- ▁espalda +- ▁erscheint +- ▁hungaroj +- ▁হয় +- ▁вӱд +- ▁놀러 +- prowad +- ▁steamship +- ▁entomb +- ▁officiel +- gratul +- ▁канцы +- знаў +- გან +- ▁ٿي +- yereka +- 힘 +- ▁gratuit +- ▁versteht +- ▁لباس +- ▁cudgel +- ▁barometer +- ▁inglese +- ނަށް +- ▁acclaim +- ▁optimism +- ▁چیست +- ▁bourgeoisie +- செய் +- twixt +- kulima +- ▁fortify +- ▁দল +- ম্প +- ▁Petit +- ▁gnome +- ပါသည်။ +- ▁Abatutsi +- ▁experiencing +- ▁omnipotent +- ▁існуе +- ▁стаіць +- ▁byanjye +- ▁buhoro +- ▁나올 +- нструкц +- ▁substitution +- ▁administrator +- ▁explode +- ▁außerordentlich +- ▁controversial +- ▁rouletabille +- ▁desselben +- 底 +- ▁malmulte +- ▁हु +- ▁stagnant +- ▁Außen +- ▁نگاه +- ▁València +- ▁выглядзе +- oficina +- ▁asocio +- ffordd +- ▁evasion +- ihuta +- ▁nachricht +- ульт +- ▁aufmerksam +- マ +- ▁aangenaam +- ▁appliances +- ṛeɣ +- ▁smeek +- ▁Sinshaka +- ▁verklaar +- organització +- 해가지구 +- Α +- ዎች +- ▁regeneration +- 빡 +- ▁Thank +- ▁debajo +- ต้องการ +- 唔好 +- ▁beaufort +- ▁carrots +- ▁sacrist +- ▁squatted +- ▁premiojn +- ▁frappa +- ▁авто +- accepta +- mererwa +- ໍາ +- profesoro +- ಆ +- ▁Mashariki +- ▁cavendish +- ▁divendres +- ▁Puerto +- 堡 +- ▁delvile +- ▁pungent +- sickness +- ▁facetious +- ▁brightening +- ▁młod +- ▁harcourt +- exerce +- ▁தோன்ற +- ساڭ +- ம்பு +- چین +- American +- ▁monaĥejo +- ▁expiration +- ▁quadruped +- 츠 +- ▁Lösung +- 律 +- 私 +- ▁prolific +- ▁shefford +- ▁effusion +- ▁três +- ▁tamurt +- ▁besaß +- ▁scarred +- ▁сыҡты +- adigan +- ▁ordain +- ▁சீ +- ikirere +- espérance +- ୀ +- ự +- ▁Acuɣeṛ +- ▁sorpresa +- ▁vicissitudes +- ▁entweder +- ▁alchemy +- 师 +- ▁lillian +- ▁athwart +- ▁typewriter +- ▁Yavuze +- 所有 +- ▁movable +- ▁hombro +- ▁établi +- ▁bereft +- ▁spook +- ▁Quart +- esclave +- ောင်း +- ▁தர +- rühren +- চি +- ▁ejerc +- 科学 +- ▁distemper +- byiringiro +- undzwanzig +- ▁Memorial +- ▁efficacious +- ▁equanimity +- ▁unsuspected +- verwaltung +- ▁Quixot +- ▁katolika +- ▁worauf +- ▁varietat +- ▁amherst +- ▁unbecoming +- ▁хлеб +- ▁hamsini +- ▁그걸로 +- ▁Olw +- ్ర +- ▁wechselte +- hirika +- ▁stifle +- ބާ +- europäische +- 혼 +- ▁morocco +- ▁protruding +- muwendo +- ▁lisbon +- สิ่งที่ +- ▁بدون +- ▁sleet +- ▁сімвал +- ▁filtr +- ▁один +- ޢ +- Índia +- ਨੂੰ +- ▁recapture +- ▁حو +- ▁establir +- 住喺 +- ▁overtuigd +- ▁figurative +- ▁heißen +- kkiriza +- ▁revint +- rócił +- insectes +- ▁Hamilton +- ▁Zwischen +- 
▁preoccupation +- ▁rousseau +- ▁thackeray +- 쇼 +- ▁carnival +- ▁dimarts +- icyumweru +- ▁наведва +- ข้อ +- shyushy +- scritto +- noonye +- 젠 +- większ +- ▁pouvaient +- ▁unfavorable +- 观 +- ▁சோ +- دۇر +- ▁converge +- stärkt +- ходзіў +- ാണ് +- 준다 +- 깔 +- ▁nshingano +- ▁secrétaire +- ▁wszystkie +- ▁είναι +- ▁할머니 +- ▁pembroke +- ▁카톡 +- 맘 +- ▁يەنە +- läufig +- дзясят +- ▁vengono +- สาม +- ▁poesia +- ▁dowry +- wärme +- ոն +- ▁Бел +- ველ +- უნ +- ▁glaci +- ህ +- ▁clumsily +- ▁compunction +- ▁romianoj +- ▁гульня +- ▁lecoq +- 张 +- ▁헤어 +- Д +- 港 +- 將 +- 争 +- 似 +- 许 +- 쪼 +- ଶ +- ഇ +- ム +- 足 +- 링 +- 麻 +- 帝 +- ਡ +- 质 +- 隆 +- ӯ +- 불 +- ផ +- ភ +- 값 +- 種 +- 沒 +- ષ +- 贝 +- ဒ +- ഹ +- 职 +- 놔 +- 泰 +- 亲 +- 题 +- 탁 +- 覺 +- 牙 +- 염 +- 哈 +- 농 +- 防 +- ຸ +- 见 +- 推 +- 철 +- 松 +- ဂ +- 友 +- 版 +- 예 +- ず +- 讓 +- ዳ +- 렵 +- 播 +- 叫 +- 皇 +- 량 +- 침 +- 春 +- 尾 +- チ +- 蛋 +- ଡ +- 飲 +- 兵 +- П +- ଥ +- 组 +- 갖 +- ڙ +- 介 +- 诺 +- 样 +- 富 +- 护 +- 픈 +- ď +- 票 +- 蒙 +- 參 +- 别 +- 텐 +- 錢 +- 깝 +- 쓴 +- 척 +- パ +- В +- 宣 +- 꿈 +- 雨 +- 洋 +- ష +- 跟 +- 암 +- 连 +- 達 +- 旅 +- 섭 +- 鲁 +- 界 +- 除 +- 项 +- ắ +- 万 +- 历 +- 늦 +- ψ +- '&' +- ओ +- ಧ +- 辖 +- 察 +- 奇 +- 精 +- 久 +- ຖ +- 状 +- ờ +- 닭 +- 蕨 +- 규 +- 겁 +- Ա +- 餐 +- 련 +- 実 +- 记 +- 消 +- 嚟 +- Σ +- 황 +- 麵 +- 킹 +- ـ +- 対 +- グ +- 郡 +- ᱩ +- ޙ +- 쳤 +- 嘉 +- 授 +- '%' +- 務 +- 伦 +- 复 +- ஃ +- 率 +- 边 +- ઈ +- 良 +- 남 +- Ṛ +- ಇ +- ጋ +- 澳 +- 兴 +- ዋ +- 警 +- 咩 +- 設 +- 곡 +- 爱 +- ಎ +- 變 +- 麼 +- 효 +- 架 +- ニ +- 找 +- 蟹 +- 卫 +- У +- 航 +- 独 +- 声 +- 纪 +- എ +- శ +- デ +- ဲ +- ሚ +- 素 +- 强 +- 豆 +- 액 +- 완 +- Ӱ +- 梅 +- 톡 +- ഴ +- 껴 +- 먼 +- ໂ +- 內 +- 竹 +- 報 +- ャ +- 奖 +- 击 +- 愛 +- ଷ +- 朗 +- ଧ +- 创 +- 쎄 +- 例 +- 塘 +- 難 +- 훈 +- 施 +- 닌 +- 認 +- 企 +- 跑 +- 姆 +- ਧ +- 寺 +- ቶ +- 济 +- ធ +- ஈ +- 停 +- 装 +- ほ +- ໄ +- ウ +- 널 +- メ +- 另 +- ӳ +- ך +- 问 +- 덜 +- 給 +- ଭ +- 莫 +- 营 +- 믿 +- 強 +- 返 +- 隻 +- 决 +- 俄 +- ഉ +- ၊ +- 恩 +- 깜 +- 瑞 +- ռ +- 迪 +- 注 +- ಥ +- 連 +- 办 +- 덕 +- 턴 +- 笑 +- 库 +- 롤 +- 典 +- ਫ +- ઇ +- 姐 +- 흥 +- 낼 +- 假 +- 떡 +- ẽ +- 升 +- 紅 +- 총 +- 伯 +- 離 +- ኛ +- ޤ +- 曼 +- ៉ +- 꿀 +- 佛 +- ဟ +- ュ +- 较 +- ճ +- 닥 +- 뿌 +- ឹ +- 害 +- 席 +- 艺 +- キ +- 從 +- 热 +- Ž +- ణ +- 習 +- 酒 +- 农 +- ŷ +- ಭ +- 亞 +- 媽 +- 엄 +- ោ +- ặ +- 꿔 +- 红 +- ঙ +- 끔 +- ښ +- 待 +- 짤 +- ሮ +- 쌓 +- 웬 +- 試 +- 丁 +- 選 +- ᱟ +- ఆ +- ଳ +- 唐 +- 뻐 +- 鬼 +- 啊 +- ű +- ొ +- 術 +- 興 +- 잤 +- 承 +- ఇ +- 벽 +- 끌 +- 컨 +- 喜 +- 낸 +- 득 +- 背 +- Ĉ +- 엽 +- ږ +- 耳 +- భ +- ൊ +- 督 +- 控 +- 准 +- 셋 +- 數 +- 止 +- オ +- 찰 +- 열 +- ồ +- 렇 +- ブ +- ፈ +- 믄 +- 條 +- 롱 +- 什 +- 差 +- 述 +- 圍 +- 夜 +- 顿 +- 並 +- 戏 +- 쌍 +- 句 +- 温 +- 范 +- છ +- 探 +- サ +- 守 +- 拍 +- 跌 +- ຢ +- ਭ +- 善 +- 据 +- ઉ +- ጣ +- 树 +- 쟁 +- 녀 +- 람 +- ഫ +- ຽ +- 帶 +- 令 +- 馆 +- 雪 +- 賣 +- ୟ +- 测 +- ሊ +- 狗 +- 副 +- 备 +- 给 +- ჰ +- 즐 +- 突 +- օ +- 盟 +- Ε +- ‍ +- 橫 +- 혜 +- 統 +- 医 +- Ṣ +- 뜻 +- 协 +- 层 +- 玩 +- ţ +- 막 +- 乌 +- 话 +- 孩 +- 丹 +- 射 +- 离 +- 녹 +- 陆 +- 试 +- þ +- ൈ +- 튀 +- 庭 +- 浪 +- 뻔 +- ଚ +- 組 +- 銀 +- へ +- 宁 +- ည +- 켜 +- 樂 +- 創 +- ՝ +- 處 +- ժ +- 终 +- 且 +- 坐 +- 臺 +- 헤 +- ሎ +- 双 +- 팬 +- 졸 +- 랜 +- 午 +- ビ +- 写 +- 短 +- 证 +- Í +- 념 +- 萬 +- 让 +- 涌 +- 單 +- 飞 +- ੍ +- ゃ +- 沃 +- 季 +- 싱 +- 쭉 +- ợ +- 几 +- ჭ +- 楼 +- 植 +- 빈 +- 耶 +- 志 +- 极 +- 깐 +- 转 +- ஸ +- 橋 +- 弟 +- 响 +- ଇ +- 觀 +- 移 +- 封 +- 夏 +- 划 +- 瓜 +- 費 +- 켓 +- 均 +- የ +- 轉 +- 藏 +- 君 +- 솔 +- 싼 +- హ +- ഥ +- 観 +- ါ +- 怎 +- 船 +- 随 +- 푸 +- 云 +- 랬 +- 島 +- ừ +- 章 +- 微 +- 맡 +- 權 +- 聲 +- 环 +- 풍 +- 養 +- Ú +- ଉ +- Đ +- 骨 +- ଛ +- 端 +- ں +- 털 +- ៃ +- 既 +- 爸 +- 虫 +- 录 +- 겹 +- 뀌 +- 롯 +- 親 +- 干 +- 评 +- 仍 +- ਥ +- 뿐 +- 確 +- 婆 +- 잉 +- 師 +- 約 +- エ +- 邦 +- 웨 +- ೈ +- ॅ +- 炸 +- 義 +- 埔 +- ໃ +- 送 +- ђ +- 规 +- 폭 +- 洪 +- 終 +- 빙 +- 錯 +- 桥 +- 户 +- ฤ +- 休 +- 革 +- Η +- 奶 +- 屯 +- ๋ +- 땡 +- 苦 +- 含 +- 调 +- ሩ +- 華 +- ょ +- 質 +- Ọ +- 散 +- ڌ +- Γ +- 核 +- 冷 +- Ә +- 蒂 +- 칼 +- 辦 +- 急 +- ổ +- 냉 +- 银 +- 
免 +- 発 +- 续 +- 吸 +- 類 +- 롭 +- 值 +- 冠 +- 习 +- 炒 +- 众 +- 么 +- 款 +- 遗 +- ሁ +- 細 +- 费 +- 唯 +- 旗 +- 若 +- 哋 +- 胡 +- 멘 +- 張 +- 坦 +- 슷 +- 슈 +- 엑 +- ぎ +- 埋 +- 宋 +- 降 +- 킨 +- ޚ +- ᱮ +- 調 +- 視 +- 婚 +- 럴 +- 编 +- 右 +- 帕 +- 增 +- 탔 +- 홀 +- 產 +- 互 +- 넓 +- 킬 +- 抗 +- む +- ݙ +- 샤 +- 摩 +- ຶ +- څ +- 螺 +- 尚 +- 刻 +- 盛 +- ଁ +- ኖ +- 떼 +- 죄 +- 透 +- 围 +- ੜ +- ቱ +- 網 +- 冰 +- 呼 +- आ +- Ò +- ঝ +- 징 +- 聞 +- 館 +- 黎 +- 許 +- 予 +- 细 +- ナ +- 雄 +- 概 +- ៅ +- 備 +- 飛 +- 肯 +- 糕 +- 젤 +- ご +- 搭 +- ၀ +- 咪 +- ダ +- 規 +- 흔 +- ݨ +- ङ +- 岸 +- আ +- 玉 +- 股 +- 율 +- 樓 +- ዝ +- 謝 +- 雲 +- 굴 +- 畫 +- 礼 +- 똑 +- 즌 +- ね +- 延 +- 继 +- 陈 +- Δ +- 健 +- 熱 +- 序 +- 显 +- 較 +- 酸 +- 救 +- ဉ +- 緊 +- 读 +- 弗 +- 兒 +- 團 +- ̍ +- 눠 +- 냥 +- ズ +- 準 +- 盾 +- ቃ +- び +- 숨 +- 콩 +- Г +- 认 +- ቢ +- 衣 +- 侧 +- 앉 +- ̣ +- 破 +- 딜 +- 號 +- 꽃 +- 흐 +- 鐘 +- 址 +- 쓰 +- 泊 +- 菊 +- 夠 +- 退 +- 责 +- 옆 +- 乜 +- 仁 +- ŋ +- ٔ +- 番 +- 隊 +- 닝 +- 軍 +- 词 +- 컴 +- 毒 +- ሲ +- 슨 +- ऊ +- 極 +- 職 +- 렀 +- ェ +- 轻 +- ຜ +- 紫 +- 燒 +- 滿 +- 宫 +- 媒 +- 앙 +- 컬 +- 適 +- 追 +- 异 +- げ +- 씀 +- 策 +- 谷 +- ョ +- ฆ +- 洗 +- 判 +- 攻 +- ዊ +- 韩 +- 仲 +- 采 +- ឡ +- + +- 標 +- 廣 +- 雅 +- 负 +- 蝶 +- 吧 +- 末 +- 練 +- 營 +- 板 +- 佬 +- Ó +- 付 +- 될 +- 醫 +- 庆 +- 确 +- ќ +- 远 +- ઝ +- 際 +- 遊 +- 盖 +- ዘ +- 赤 +- 泽 +- 纽 +- 归 +- ポ +- 콜 +- 籍 +- 享 +- 則 +- 델 +- 압 +- 콘 +- 署 +- 矛 +- 舰 +- ଂ +- 亡 +- 材 +- 杀 +- င +- 宝 +- 遇 +- 旺 +- 童 +- 茨 +- 똥 +- 뚫 +- 쌀 +- 乾 +- 黃 +- 몬 +- 씻 +- 映 +- 赫 +- 态 +- ଖ +- 걘 +- 벤 +- 춤 +- 申 +- ጊ +- 俾 +- 供 +- 批 +- 征 +- 奈 +- ಒ +- ᱱ +- 앤 +- 們 +- 益 +- எ +- 령 +- 遠 +- 寫 +- ఎ +- 汽 +- 왠 +- 폴 +- Ҫ +- 忙 +- ミ +- 菲 +- 낀 +- 哲 +- ः +- 랄 +- ઘ +- 优 +- ঞ +- 專 +- 荃 +- 럭 +- 캠 +- 維 +- 률 +- 봄 +- 웹 +- 폐 +- 届 +- 舅 +- 隔 +- 럼 +- ķ +- 깊 +- 덴 +- Մ +- ឺ +- 墓 +- 溪 +- ネ +- 舞 +- ఫ +- 努 +- ツ +- 뮤 +- 痛 +- 驻 +- 杨 +- ử +- 옥 +- Ĵ +- 秘 +- 續 +- 刺 +- 仙 +- ঢ +- 엘 +- 객 +- 岩 +- 쁠 +- 艾 +- Ż +- థ +- 볶 +- Հ +- 怕 +- 答 +- 池 +- ベ +- 藤 +- 눌 +- 葵 +- $ +- 랩 +- セ +- 뷰 +- 智 +-  +- 짱 +- ڀ +- 爆 +- 符 +- 룸 +- 윤 +- 템 +- 혈 +- 充 +- 积 +- अ +- 咀 +- 땅 +- 벨 +- 泡 +- ᱢ +- 켰 +- 血 +- 丽 +- ዲ +- 柏 +- 삭 +- ٺ +- 捷 +- 卷 +- 댓 +- 姓 +- 딘 +- 餅 +- 聯 +- 激 +- 齊 +- 杜 +- 変 +- 慢 +- 討 +- 헐 +- 默 +- ケ +- 兜 +- 泳 +- 眾 +- 冬 +- 宜 +- ฎ +- 剛 +- 津 +- 聚 +- 멍 +- 唱 +- ۍ +- 벗 +- 셀 +- ះ +- 峰 +- 홉 +- 卢 +- 韦 +- 巨 +- ০ +- ጉ +- 尖 +- 咖 +- ؛ +- 릿 +- ੌ +- 節 +- 妳 +- 컵 +- ዜ +- 傳 +- 乎 +- 某 +- Ё +- ऱ +- 텔 +- ሞ +- ڄ +- 놈 +- 엠 +- 刘 +- 優 +- ソ +- 穆 +- 僕 +- ऑ +- Ś +- 雀 +- 坑 +- 潮 +- 轨 +- 拜 +- 满 +- 验 +- 쟤 +- 픽 +- 载 +- 杰 +- ఒ +- 嶺 +- ᱚ +- 낭 +- 荷 +- 貢 +- 迎 +- 谢 +- 暗 +- 삶 +- 總 +- 幕 +- 价 +- 吞 +- Έ +- ၁ +- 趣 +- ଼ +- ỏ +- 밍 +- 벅 +- 借 +- 牌 +- 릉 +- ሌ +- 甘 +- 鐵 +- 튜 +- 價 +- 翼 +- 哪 +- Ս +- ័ +- 댄 +- 圆 +- ቦ +- 倒 +- 歡 +- 援 +- 珠 +- 豬 +- ぐ +- 堅 +- 균 +- 沿 +- 糖 +- 避 +- 摄 +- 알 +- 関 +- ハ +- ኑ +- 펜 +- ಫ +- 桃 +- 丰 +- 輕 +- 灯 +- 滑 +- 헌 +- 妹 +- 描 +- 贵 +- 腳 +- 協 +- 暴 +- 毕 +- ዛ +- 井 +- 積 +- 춘 +- 秀 +- 难 +- 杯 +- 灵 +- ボ +- 葡 +- 钱 +- ピ +- 엉 +- 穿 +- 途 +- ಉ +- 負 +- Ն +- 잃 +- ᱠ +- 断 +- 震 +- 闻 +- 拥 +- இ +- 紀 +- 豪 +- 삐 +- 枝 +- 鸟 +- 홈 +- 孙 +- ף +- 춥 +- 睡 +- 旋 +- 態 +- 药 +- 擔 +- 舊 +- 罪 +- ủ +- 챙 +- ਯ +- 柱 +- 漫 +- 麦 +- 胜 +- ဗ +- ģ +- ჟ +- 섞 +- ằ +- 薄 +- 旁 +- 严 +- 静 +- ៊ +- 빛 +- 導 +- 畔 +- 幸 +- 朱 +- ሽ +- 況 +- 걷 +- 싹 +- 흘 +- 疑 +- 余 +- 藓 +- 缺 +- 预 +- ኮ +- ガ +- 频 +- 虎 +- 桶 +- 錄 +- 題 +- 乘 +- 掉 +- 操 +- 靠 +- 监 +- 構 +- ៍ +- ฬ +- ぶ +- 萄 +- ਏ +- 껄 +- 닫 +- 뒷 +- 占 +- 딱 +- ዚ +- 陵 +- 좌 +- 略 +- 殿 +- 须 +- 皆 +- 햄 +- 례 +- 养 +- ጀ +- ᱫ +- ഒ +- 翠 +- 誰 +- 蘭 +- 찜 +- 喇 +- 阶 +- 芬 +- 圖 +- 쏘 +- 촬 +- 柳 +- 둥 +- 힌 +- 役 +- 旧 +- Ì +- 恐 +- 染 +- 析 +- 检 +- ዓ +- ዙ +- 束 +- 兹 +- 楚 +- 鄉 +- ঃ +- 밝 +- ฑ +- 預 +- 単 +- ៀ +- ỉ +- 곧 +- 昂 +- 孔 +- 洞 +- 厂 +- 软 +- 圾 +- 垃 +- Қ +- 腹 +- 盘 +- 弹 +- 섬 +- 滨 +- 宇 +- ペ +- 齐 +- 甚 +- 灰 +- ။ +- ޞ +- 齋 +- 묻 +- 틱 +- 佐 +- 盤 +- 御 +- 讲 +- ୂ +- 寄 +- 彩 +- 證 +- 餸 +- 醒 +- 큐 +- 聖 +- 危 +- 菌 +- அ +- ঐ +- 낳 +- Җ +- ళ +- ひ +- 
モ +- 泛 +- 请 +- 힐 +- Ł +- 粒 +- 顺 +- 辑 +- 鼠 +- 덩 +- 辛 +- Э +- ฐ +- ዎ +- 項 +- 넷 +- 첨 +- 팩 +- 露 +- 吴 +- ́ +- 庙 +- 臣 +- 郎 +- 症 +- 混 +- Β +- ሜ +- 桑 +- ಖ +- 搞 +- 꿨 +- 須 +- 敦 +- 歴 +- 願 +- 隶 +- 腊 +- ᱾ +- 탑 +- 撞 +- 简 +- 밴 +- ጎ +- 舉 +- 廷 +- 觉 +- 佳 +- 鹿 +- ឆ +- ワ +- 흰 +- 淡 +- 執 +- 幼 +- 迫 +- 顶 +- 仅 +- 磡 +- 泉 +- « +- ዶ +- ፊ +- 꽂 +- 떴 +- 툰 +- 羅 +- 昌 +- 坡 +- 帮 +- ሃ +- ቡ +- 칭 +- 妻 +- 获 +- 꼐 +- 珍 +- ٠ +- 驗 +- 슴 +- 仕 +- 吓 +- 乙 +- 董 +- 땠 +- 몽 +- 엇 +- 困 +- 佩 +- 艇 +- 違 +- 順 +- 锦 +- Ν +- 劳 +- 晋 +- 핀 +- 戈 +- 課 +- 괴 +- 둔 +- ֆ +- ഞ +- 码 +- 势 +- 横 +- 週 +- ১ +- 랙 +- 握 +- 乳 +- 斗 +- 炮 +- Х +- 唇 +- 끗 +- 램 +- ഏ +- 盡 +- 秋 +- 络 +- 緩 +- 칸 +- 鮮 +- 渡 +- 崎 +- 套 +- 败 +- 梁 +- 납 +- 휘 +- 浙 +- 荣 +- 兄 +- 熙 +- 簡 +- 壁 +- 農 +- 压 +- 々 +- 異 +- 鞦 +- 틴 +- 厅 +- 融 +- 霍 +- 翻 +- 낙 +- 韆 +- 꼴 +- 컸 +- 핫 +- 斷 +- ホ +- 况 +- 轮 +- 识 +- 뜬 +- ° +- 湯 +- Ṭ +- 톤 +- 浮 +- 産 +- 汗 +- 临 +- 兼 +- ၍ +- 렁 +- 忍 +- 犯 +- 迷 +- З +- ૃ +- 협 +- 尊 +- 甜 +- 또 +- 銅 +- 꼼 +- 측 +- 탐 +- 怪 +- 折 +- 畀 +- 跳 +- 響 +- 働 +- 残 +- 玛 +- Ф +- 換 +- ڇ +- 喎 +- 貴 +- 팡 +- 퀴 +- 咸 +- 頂 +- 逃 +- 附 +- 按 +- 뼈 +- 찐 +- 缽 +- 宿 +- 勝 +- 粗 +- 깃 +- 盪 +- 領 +- 魏 +- 纲 +- 섯 +- 護 +- 亮 +- 需 +- 깎 +- ಈ +- ቻ +- 讀 +- 驚 +- 抽 +- 鑼 +- 熟 +- 输 +- 啡 +- 썰 +- 傷 +- 訊 +- 陣 +- 紙 +- 忘 +- 伴 +- 呈 +- 执 +- 蛇 +- 没 +- 凤 +- ዕ +- Ե +- ਘ +- ᱤ +- 捕 +- 奏 +- 际 +- 勢 +- 惠 +- Щ +- ᱧ +- 禮 +- ᱛ +- 뒀 +- 庄 +- 爵 +- ୱ +- 晏 +- ఈ +- ኒ +- 喝 +- 낄 +- 혔 +- 瑟 +- „ +- 앨 +- 暖 +- 遭 +- 距 +- 囉 +- 弱 +- 换 +- ቤ +- 꽁 +- 팠 +- 놀 +- 혹 +- 扎 +- ጅ +- 亂 +- ऐ +- ዱ +- 샵 +- ቁ +- ゲ +- 닮 +- 液 +- Ј +- 裝 +- 説 +- ዩ +- 컷 +- 祥 +- 床 +- 袋 +- 雖 +- 圈 +- 琴 +- 贡 +- ノ +- 끓 +- 階 +- 蝦 +- 鲜 +- Բ +- 렴 +- 嘴 +- 짬 +- 冲 +- 긍 +- ቆ +- 턱 +- 庫 +- 復 +- 芒 +- 財 +- 徒 +- 冈 +- ギ +- 杉 +- 刀 +- Õ +- 偏 +- 릇 +- 钟 +- 製 +- 掌 +- ଠ +- 啱 +- 陽 +- 핸 +- '|' +- ଫ +- 덤 +- 쿨 +- 蓝 +- 售 +- 楽 +- 納 +- 빤 +- 밑 +- 級 +- 낚 +- 吕 +- 夢 +- 戲 +- 针 +- ቹ +- ኪ +- 劃 +- ゆ +- ヤ +- 练 +- 献 +- 겸 +- 뚱 +- ざ +- ៏ +- 寶 +- ฒ +- 厄 +- 柴 +- 脉 +- 廳 +- Є +- ਝ +- 끈 +- 얀 +- 樹 +- 腦 +- ፋ +- 쿄 +- ぬ +- Ԑ +- 磅 +- 앴 +- 據 +- 補 +- 烈 +- 続 +- 固 +- 讯 +- 枪 +- 硬 +- ဏ +- ᱭ +- 갚 +- 斑 +- 腐 +- 胞 +- ဇ +- 펴 +- 昨 +- 晒 +- 載 +- 脑 +- 篇 +- 尤 +- 苔 +- Կ +- 멜 +- 碼 +- 戴 +- 丝 +- 招 +- 险 +- 奉 +- 羽 +- 侯 +- ቋ +- 戰 +- 흡 +- 沉 +- ቲ +- 뚜 +- 恒 +- 湾 +- 堆 +- 逐 +- 绝 +- 잘 +- ሱ +- 쩍 +- ጨ +- 姑 +- 隨 +- 勤 +- 袭 +- 繁 +- ァ +- 乔 +- 찔 +- 函 +- 涅 +- 吹 +- 赖 +- 悪 +- 弄 +- 殖 +- 研 +- 侵 +- ሬ +- 岡 +- 葉 +- ၂ +- ẫ +- 責 +- 덮 +- 粥 +- 歩 +- 燈 +- 挑 +- உ +- 잖 +- ڃ +- 뽀 +- 혁 +- Я +- 闆 +- 覚 +- 갠 +- 뺐 +- 얇 +- 測 +- 梯 +- 诸 +- অ +- 잊 +- 貨 +- 厚 +- 欢 +- 补 +- ザ +- 갤 +- 棒 +- 爭 +- 绿 +- 雙 +- 裁 +- 臨 +- ਛ +- 患 +- 禁 +- 閉 +- 迁 +- 错 +- 抵 +- ኤ +- 葬 +- • +- 評 +- 땄 +- 벼 +- 哭 +- 漢 +- 坊 +- 适 +- 犹 +- 蜜 +- 宾 +- 够 +- 镜 +- ѓ +- 棋 +- 趕 +- 減 +- 诗 +- 《 +- 疫 +- 밖 +- 苣 +- 靖 +- Ê +- ഈ +- 阪 +- 涉 +- ፡ +- ० +- 왼 +- ञ +- 藥 +- 》 +- 寒 +- 怀 +- 랫 +- ٰ +- 닦 +- 卒 +- 邻 +- 莲 +- Ӹ +- ഖ +- 振 +- 歷 +- 凯 +- 杂 +- 牧 +- 禾 +- 椒 +- ഓ +- 팝 +- 坪 +- 汁 +- 肚 +- 齿 +- 释 +- 鳞 +- ኔ +- 곰 +- 뚝 +- 띠 +- 씹 +- 잇 +- ృ +- ሙ +- 貼 +- 熊 +- 甸 +- 豐 +- 莎 +- ፕ +- 屈 +- 聊 +- 泥 +- ١ +- ୃ +- 広 +- 댔 +- 빅 +- 짠 +- Ř +- ሀ +- ゴ +- 顧 +- 麥 +- 坚 +- 欲 +- ሻ +- Ҙ +- 濟 +- 訴 +- 担 +- 羊 +- 売 +- 経 +- 障 +- 묘 +- 替 +- ଓ +- ൺ +- ሶ +- ‟ +- 븐 +- 릭 +- 巧 +- 様 +- 累 +- 绍 +- 裏 +- 迹 +- 块 +- 听 +- 跨 +- 盐 +- 茂 +- 叙 +- 勸 +- ৷ +- 岁 +- 묵 +- 칙 +- 耐 +- 训 +- 扶 +- 脱 +- 培 +- 溶 +- 购 +- 徽 +- 겉 +- 敢 +- 欣 +- 顔 +- 买 +- 墨 +- 挥 +- 鳍 +- 획 +- 夺 +- 竟 +- 겪 +- 둬 +- 숭 +- 쫄 +- 뜯 +- 择 +- 샘 +- 愿 +- ൃ +- 租 +- 舍 +- 尽 +- 疗 +- Ό +- ፓ +- 煮 +- 뀐 +- 估 +- 紧 +- ኢ +- 묶 +- 캡 +- 檢 +- 醬 +- 繼 +- 翰 +- 巡 +- џ +- 価 +- 녕 +- 殺 +- 涼 +- 溫 +- 伤 +- 阻 +- Θ +- 꾼 +- 뽕 +- 碗 +- 読 +- 鼓 +- 勞 +- 逆 +- 毎 +- 絕 +- 뷔 +- ኝ +- 랭 +- 흑 +- Λ +- 桂 +- 徐 +- 竞 +- 敏 +- ሄ +- ỗ +- 땀 +- 寿 +- 扮 +- 穴 +- ଘ +- 랍 +- 붕 +- 諗 +- 搬 +- 畢 +- 筆 +- ఏ +- ฏ +- ぞ +- づ +- 贞 +- 昭 +- 崇 +- 径 +- 氧 +- Ĥ +- 犬 +- 귤 +- 썹 +- 尋 +- 促 
+- 尺 +- 証 +- 械 +- 墙 +- 郊 +- ໆ +- 層 +- 렉 +- 锡 +- 契 +- 験 +- 箱 +- 긋 +- ሕ +- Ի +- 芝 +- 絲 +- 宅 +- 阵 +- 逊 +- 춰 +- 쪘 +- 乱 +- 敬 +- 赵 +- 쭈 +- 疾 +- 頓 +- 笔 +- 矿 +- 込 +- 늙 +- 닿 +- 힙 +- 狀 +- 碑 +- 梦 +- 财 +- 辐 +- 렛 +- 촉 +- 팟 +- 潜 +- 伸 +- 陶 +- 箭 +- 措 +- ឋ +- 础 +- ̀ +- 蚊 +- 偶 +- 헷 +- 蚤 +- 郑 +- 勇 +- 奴 +- 召 +- 税 +- 眠 +- ễ +- 꽉 +- 냅 +- 펙 +- 荔 +- 輸 +- 叔 +- 覆 +- 宏 +- 歲 +- 浦 +- 焦 +- ቴ +- ዮ +- 앱 +- 좁 +- Դ +- 範 +- 採 +- ォ +- 闭 +- 絶 +- 稱 +- 炎 +- 藻 +- 욱 +- 剑 +- 慣 +- 漏 +- 帰 +- 裂 +- 衡 +- 涯 +- ፖ +- 傾 +- ဦ +- ጃ +- 衰 +- 쉴 +- 쩐 +- 灭 +- 燕 +- 薯 +- ץ +- 뇌 +- 辽 +- 孟 +- ጓ +- 쫙 +- 埗 +- 漸 +- ೃ +- 秦 +- ጫ +- 귄 +- 뺄 +- 웅 +- 쨌 +- 캔 +- 嘛 +- 鳥 +- 污 +- 卻 +- 幅 +- 插 +- 筒 +- 灾 +- 忠 +- 疆 +- 戦 +- 릎 +- ጭ +- ヒ +- 曬 +- 榮 +- 塊 +- 晶 +- 遍 +- 课 +- 宪 +- 仪 +- 嚴 +- 殊 +- 댕 +- 띄 +- 샐 +- 蔡 +- 废 +- 译 +- 赞 +- 顾 +- 鞋 +- 碧 +- ሥ +- 뺏 +- 쫓 +- 究 +- 魔 +- 隐 +- 颜 +- 慧 +- 멋 +- ǹ +- 긁 +- 룹 +- Χ +- 讨 +- 誉 +- 応 +- 賺 +- 鴨 +- 갓 +- Գ +- 戶 +- 側 +- 妇 +- 險 +- 듬 +- 靜 +- 苗 +- 邨 +- 織 +- '*' +- ኩ +- 맹 +- 썸 +- 쵸 +- 톱 +- 昆 +- 猪 +- 鼻 +- 陸 +- 鏡 +- 饰 +- 娘 +- 榜 +- 磁 +- 却 +- Ά +- Ӧ +- ሴ +- 썩 +- 溝 +- 衝 +- 壞 +- 屏 +- 销 +- ຣ +- ϊ +- 덥 +- 爾 +- 傅 +- 篮 +- 货 +- 汇 +- ૌ +- 폼 +- 廠 +- 缘 +- 曹 +- 獲 +- 仓 +- 颁 +- 雕 +- 臭 +- 審 +- 뱅 +- 沖 +- 亨 +- 触 +- 郭 +- 肥 +- ຟ +- 굽 +- 쌌 +- 藝 +- 丘 +- 狼 +- 赏 +- ஆ +- 灘 +- 叉 +- ૉ +- 靈 +- 껌 +- 흠 +- 绪 +- 샌 +- 剩 +- 蒸 +- 娜 +- 拔 +- ឱ +- 蹄 +- ઠ +- ௌ +- 劇 +- 苑 +- 룩 +- 뺀 +- 额 +- 丸 +- 贯 +- 阴 +- 団 +- ਐ +- 믹 +- ᱞ +- ᱨ +- 纹 +- 콤 +- 慮 +- 獨 +- 骑 +- 勋 +- 抱 +- 猫 +- 枚 +- ៈ +- 衛 +- '=' +- 茄 +- 겐 +- 껍 +- 慈 +- 淨 +- 訪 +- 详 +- 拟 +- 峡 +- ౌ +- Į +- 敗 +- 柯 +- 掂 +- 爬 +- 濕 +- 芳 +- 笼 +- ឈ +- ẩ +- 陕 +- 맙 +- 缅 +- ૈ +- 舒 +- 堪 +- 孤 +- 钢 +- 碘 +- 談 +- 吗 +- 寝 +- 粵 +- ২ +- ሸ +- 鳳 +- 찢 +- 逝 +- 屬 +- 蒲 +- 邀 +- 卵 +- 丈 +- 蚜 +- ፎ +- 裔 +- 뜩 +- Џ +- ቂ +- 밟 +- 잼 +- 쪄 +- 팍 +- 핵 +- 垂 +- 栖 +- 轴 +- 剂 +- 茜 +- 拆 +- ٬ +- ဓ +- 啫 +- 擇 +- 煩 +- 脚 +- 舌 +- 붓 +- 쇠 +- 綠 +- ఖ +- 侍 +- 麗 +- 勃 +- 寻 +- 卖 +- ဥ +- 멈 +- 緒 +- 裕 +- 쥐 +- 쫍 +- Ո +- 坏 +- 屎 +- 虾 +- 刑 +- 刷 +- 즘 +- 錦 +- 宙 +- ଞ +- 맵 +- 쌈 +- ೌ +- 엊 +- 喔 +- 婦 +- 昔 +- 針 +- 签 +- 诉 +- 刚 +- 晴 +- 妙 +- ៗ +- ᱜ +- Φ +- 値 +- 숫 +- 윗 +- 擊 +- 饭 +- 匹 +- 拖 +- 監 +- − +- 閒 +- 넉 +- 넨 +- Պ +- 腸 +- 启 +- 薪 +- 邮 +- 伐 +- ふ +- 倍 +- ၉ +- 勉 +- 돋 +- Ι +- ጽ +- 寧 +- 柔 +- 懷 +- 軟 +- 潘 +- 凡 +- 玻 +- 陷 +- 転 +- 購 +- 쾌 +- 鹽 +- 乗 +- 耀 +- ڍ +- 吖 +- 뤄 +- 玄 +- 飽 +- 迈 +- 肝 +- 蛾 +- 撒 +- 硕 +- 堇 +- 沈 +- 胸 +- 陳 +- 낯 +- 듀 +- 臉 +- 押 +- 棄 +- 凍 +- 访 +- 册 +- ኦ +- 療 +- 챔 +- 允 +- 纸 +- ኋ +- 풋 +- ٢ +- 胆 +- 貸 +- 뻥 +- 쭐 +- 伞 +- 壓 +- 伏 +- 貌 +- 颗 +- 烏 +- 쩨 +- 岐 +- 麟 +- 尿 +- 扒 +- 訓 +- 郁 +- 췄 +- ਠ +- 뭉 +- 쩡 +- 펌 +- 悲 +- 祝 +- 壳 +- 岭 +- 贸 +- 伟 +- 夷 +- 锋 +- 祭 +- १ +- ឬ +- 旦 +- 윈 +- Ñ +- 뻤 +- 쉐 +- 텍 +- 혐 +- 訂 +- 腩 +- 藍 +- 倫 +- 妈 +- 惡 +- 霸 +- 孝 +- 减 +- ¥ +- 歸 +- 額 +- 윙 +- 찝 +- 텀 +- 宮 +- 遺 +- 멤 +- 煲 +- 潭 +- 烧 +- 煙 +- 몇 +- 헬 +- 扁 +- 줌 +- 览 +- 滋 +- 怒 +- 搜 +- 碎 +- 複 +- 己 +- Տ +- ‽ +- 仆 +- 닉 +- 봇 +- 앗 +- Ġ +- 窗 +- 滕 +- 猛 +- 综 +- 珊 +- 误 +- Ɣ +- 柄 +- 젝 +- ၄ +- 鍾 +- 騙 +- 닷 +- 략 +- 엎 +- ૅ +- 虱 +- 銷 +- 黨 +- 添 +- 廉 +- 辆 +- È +- 穗 +- 탱 +- ਓ +- 縣 +- 宽 +- 坎 +- 亥 +- 币 +- 鬆 +- Ô +- ẻ +- 賽 +- 궈 +- 볍 +- 딪 +- ഘ +- 攪 +- 瓶 +- 炭 +- 阁 +- 扩 +- 串 +- 铜 +- Ӓ +- ޏ +- 梨 +- Å +- ၅ +- 讷 +- 룰 +- 割 +- 築 +- 翁 +- 啟 +- 遵 +- 刊 +- 璃 +- 翔 +- 驱 +- 伍 +- 幻 +- ヴ +- ৪ +- 辣 +- ヘ +- 圓 +- 梗 +- 댁 +- 셉 +- 셜 +- 쌩 +- 帽 +- 怖 +- 顯 +- 俗 +- 夕 +- 键 +- Ụ +- 孫 +- 꼽 +- 빴 +- 奔 +- 瓣 +- 藩 +- 拒 +- 恶 +- Ԥ +- ၈ +- ኘ +- 뱃 +- 칵 +- ጡ +- ፒ +- 偷 +- 踏 +- 桐 +- 憶 +- 靚 +- 嫌 +- 疏 +- 隧 +- 厦 +- 茅 +- 貝 +- 寨 +- 绕 +- ÿ +- ̃ +- 捨 +- 썬 +- 쿼 +- 툭 +- 푼 +- ൗ +- 渐 +- 鳴 +- 퉁 +- 脫 +- 癌 +- 腿 +- 愉 +- 哺 +- 檸 +- 拓 +- ぼ +- 荆 +- 狸 +- 뿔 +- Þ +- 哂 +- 浩 +- 爪 +- 辅 +- 岳 +- 濃 +- 荒 +- 陪 +- 姨 +- 鬧 +- 绘 +- 廟 +- 禄 +- 빔 +- 빽 +- 坛 +- 즉 +- 詞 +- 恭 +- 彈 +- 审 +- 催 +- ぱ +- ဤ +- 蝽 +- 뺑 +- ỹ +- 謂 +- 둑 +- 壮 +- 鸡 +- 魂 +- 純 +- 乃 +- 両 +- 樱 +- औ +- Վ +- 彻 +- 粟 
+- 갱 +- 쎈 +- 탠 +- 蓬 +- 獎 +- 袖 +- ጦ +- ৯ +- 廢 +- 껏 +- 쥬 +- ґ +- 絡 +- ಏ +- ᱴ +- 輪 +- 姊 +- 衫 +- 填 +- 恢 +- 燃 +- 辞 +- 斜 +- 円 +- 恋 +- 塑 +- 俺 +- 欠 +- 鞍 +- Ħ +- ౦ +- 掛 +- 깡 +- 탓 +- 谓 +- 龟 +- 屿 +- 谱 +- 瞬 +- 缩 +- 掃 +- 烟 +- 忆 +- 賀 +- 肺 +- 輩 +- 꼰 +- 헛 +- 盲 +- 蟲 +- 샷 +- ޠ +- Ρ +- 寂 +- 陰 +- 弁 +- 賢 +- 弘 +- 扬 +- 豹 +- 腺 +- ỳ +- € +- 엥 +- 撤 +- 箕 +- 仰 +- 従 +- 釋 +- 摇 +- 毁 +- 큼 +- ឃ +- 秒 +- 蒿 +- ጂ +- 嚇 +- 맻 +- Ք +- 攞 +- 껀 +- 뜰 +- Օ +- 勵 +- 蕾 +- 戻 +- 伝 +- 鰂 +- 窩 +- 惜 +- ᱥ +- 迅 +- 빗 +- 흉 +- Թ +- 订 +- 拳 +- 衔 +- 鼎 +- 猎 +- 蘿 +- 橙 +- 辰 +- 砂 +- 삿 +- ٩ +- 뉘 +- 햇 +- 煎 +- 碟 +- 損 +- 披 +- 衍 +- 爛 +- 萧 +- 札 +- І +- ় +- ឌ +- 饮 +- 拼 +- 遣 +- Լ +- 膜 +- 撐 +- 姿 +- 吻 +- 棠 +- 沟 +- 詢 +- 阜 +- 밭 +- 傻 +- 截 +- 曉 +- 爽 +- 쩜 +- 핏 +- 擁 +- 稻 +- 忽 +- 磨 +- 吾 +- 葱 +- 株 +- 蜂 +- 庞 +- ቪ +- ዞ +- 虹 +- ৫ +- ỷ +- 맣 +- 賞 +- 凝 +- 肩 +- 歧 +- Ɛ +- 帆 +- ԑ +- ၆ +- 앵 +- ۰ +- ၃ +- 뀔 +- 贏 +- 驾 +- 慶 +- 抓 +- 嫁 +- ஒ +- 脷 +- 廈 +- 悄 +- 増 +- 葛 +- Ҷ +- ቷ +- 뱀 +- ′ +- 悠 +- 莉 +- 筲 +- 暂 +- 丞 +- 陀 +- 掘 +- ぜ +- ኙ +- ⠀ +- 摸 +- 굿 +- 퀄 +- 훅 +- 匯 +- 崗 +- 妥 +- 祠 +- 満 +- 蛙 +- 豉 +- 頻 +- 芸 +- 湿 +- 誕 +- ৩ +- 懂 +- 隱 +- 卑 +- 郵 +- 編 +- 肃 +- 劣 +- 档 +- 瘤 +- 盆 +- 冒 +- 弃 +- 彭 +- 稅 +- 깥 +- ፅ +- 넥 +- 쇄 +- 쏟 +- 푹 +- 籌 +- 亭 +- 芹 +- 邓 +- 薇 +- ୌ +- 壽 +- 蜱 +- 凌 +- 膽 +- 륵 +- 扇 +- 멸 +- 歐 +- ಘ +- 喊 +- 赢 +- 雜 +- 珀 +- 礎 +- 撚 +- 恰 +- 藉 +- 驶 +- 못 +- 땜 +- 桌 +- 蒜 +- ሠ +- 섹 +- 図 +- 駛 +- 誠 +- 圭 +- 鍋 +- 勾 +- 纯 +- 谋 +- ւ +- 茛 +- ቸ +- 贺 +- ፉ +- 猴 +- 眉 +- 퀘 +- Չ +- 吊 +- 鱲 +- 瞓 +- 囊 +- 腰 +- 矶 +- 赶 +- 蚬 +- 妃 +- 揮 +- 융 +- 틈 +- ኃ +- 损 +- 뇨 +- 빚 +- 찡 +- 侄 +- 仿 +- 浅 +- 邵 +- 券 +- 忧 +- 润 +- 雍 +- 汤 +- 遮 +- 丧 +- ヨ +- 髮 +- 냄 +- 횟 +- Ő +- ጆ +- 淘 +- 귈 +- 므 +- Ֆ +- 櫃 +- 琳 +- 虚 +- 织 +- 阮 +- 循 +- 辨 +- 姻 +- 您 +- ຝ +- 棕 +- ኬ +- ‚ +- 崩 +- 暑 +- 飾 +- 깼 +- 칩 +- 펑 +- Ռ +- ᱵ +- 슐 +- 寞 +- 巷 +- 圳 +- 谈 +- 抜 +- 杭 +- 筋 +- 摘 +- 挪 +- 狄 +- 跃 +- 鹰 +- 汝 +- 맞 +- 般 +- 茎 +- Ə +- ਊ +- 咋 +- 泣 +- 丟 +- 翅 +- 蔥 +- 駅 +- 脏 +- 矩 +- 闽 +- 疲 +- 牡 +- 韓 +- 槭 +- 嗱 +- ڼ +- 暫 +- 淚 +- 蓋 +- 뎅 +- 뢰 +- 찼 +- 績 +- 誤 +- 瑚 +- 魯 +- 旨 +- 蛤 +- 쩔 +- 競 +- 紹 +- ৬ +- 狂 +- 냠 +- 쏠 +- 潛 +- 栗 +- 遲 +- 纵 +- 棉 +- 隋 +- 囲 +- 丛 +- Ō +- 喀 +- 曜 +- 棘 +- 냬 +- 奧 +- 脆 +- 逛 +- 젖 +- 録 +- 悬 +- 魷 +- 晨 +- 狮 +- 桔 +- 胎 +- 脊 +- 倉 +- 页 +- ໌ +- ፃ +- ỡ +- 帳 +- 팽 +- 嚕 +- ẳ +- 咕 +- 烤 +- 抚 +- 椅 +- 抑 +- 粹 +- ឿ +- 擦 +- 焼 +- 夾 +- 脂 +- ৮ +- ኞ +- ዴ +- 哩 +- 擺 +- 睛 +- 憲 +- 怡 +- 滩 +- 蕃 +- 矮 +- 卜 +- 惟 +- 賓 +- 稳 +- 敌 +- 蔷 +- 枢 +- Ζ +- 숲 +- Ї +- ઊ +- 굔 +- 띵 +- 뭣 +- 섰 +- 옴 +- 펼 +- 擅 +- 稀 +- 兽 +- 慎 +- 浓 +- 氟 +- 娱 +- 簽 +- 芋 +- 貓 +- ゼ +- 乖 +- 揀 +- 젓 +- 閃 +- 룬 +- 쉰 +- 処 +- 姜 +- 凱 +- 吐 +- 災 +- 逸 +- 頃 +- 樟 +- 籠 +- 覽 +- 販 +- 携 +- 卿 +- 艘 +- 滚 +- 奮 +- 欖 +- 遷 +- ޟ +- 楊 +- 淳 +- Ẹ +- 훔 +- 卓 +- 詐 +- 顆 +- 漁 +- 劫 +- 廊 +- 녔 +- 储 +- 廿 +- 굶 +- 喫 +- 뱉 +- 燥 +- 狭 +- 浴 +- 碳 +- 渠 +- 铃 +- 斐 +- 獅 +- 琉 +- 胶 +- 乏 +- 叛 +- 뭐 +- 冕 +- 帯 +- ଢ +- ਢ +- ၌ +- 咬 +- 惑 +- 貿 +- 赴 +- 괌 +- 멕 +- 츄 +- 횡 +- 稚 +- 蠻 +- 励 +- 捐 +- 祈 +- 遂 +- 杆 +- ங +- 荨 +- 拠 +- 滅 +- ᱷ +- ኳ +- 汰 +- 탭 +- 팁 +- 밸 +- 얄 +- 汀 +- 褐 +- 晩 +- 虛 +- 诚 +- 醉 +- 辉 +- 毫 +- 虑 +- 蒋 +- 稿 +- 冯 +- 検 +- 靴 +- 瑙 +- 蝠 +- 贾 +- Շ +- 寓 +- 糟 +- 뻘 +- 훌 +- 鎮 +- 幣 +- 违 +- 阔 +- 稍 +- 牵 +- 効 +- 鉴 +- 逻 +- 戟 +- 揚 +- 腔 +- à +- 繳 +- ĺ +- ٨ +- 폈 +- 콕 +- 聰 +- 丑 +- 岗 +- 舟 +- 愈 +- 狱 +- 罕 +- 绩 +- 笠 +- 甥 +- 収 +- 億 +- 傲 +- 壶 +- 蕉 +- Ӑ +- ‐ +- ٤ +- ፤ +- 렬 +- 및 +- 옹 +- 칫 +- 哀 +- 縮 +- 碌 +- 鎖 +- 鹹 +- 夥 +- 巫 +- 灌 +- 弥 +- 沼 +- 锁 +- Ā +- ʿ +- ઑ +- 緣 +- 랗 +- 숏 +- 肌 +- 啤 +- 頼 +- 慰 +- 虽 +- 崖 +- 溜 +- 拱 +- 諸 +- 氯 +- ユ +- 询 +- 倾 +- 喉 +- ៌ +- २ +- 깅 +- 꽈 +- 쑥 +- 옵 +- 잎 +- 袁 +- 澄 +- 酋 +- 募 +- 挂 +- 痕 +- 纤 +- 屠 +- 践 +- 榕 +- 逼 +- 辭 +- 膠 +- 蛱 +- ۂ +- ઢ +- 輝 +- 锥 +- 됩 +- 蘇 +- 蝴 +- 鹃 +- Է +- 娃 +- 嶼 +- 淋 +- 夸 +- 蓮 +- 蠢 +- 帐 +- 醇 +- 龄 +- 壬 +- 貫 +- 漂 +- 菇 +- 黐 +- ⁄ +- 梵 +- 돔 +- 킥 +- 텅 +- ଵ +- 狐 +- 戀 +- 託 +- 獻 +- 寸 +- 递 +- 丙 +- 儀 +- 龜 +- 萼 +- 凸 +- Ή +- ޘ 
+- ఓ +- 奪 +- 皿 +- 넛 +- 똘 +- 룡 +- 쁨 +- 왤 +- 짰 +- 툴 +- ፀ +- 佔 +- 昏 +- 쿵 +- ഠ +- 甫 +- 惹 +- 蘋 +- 撰 +- 撥 +- 磷 +- 纜 +- 尉 +- 鹅 +- 牆 +- 遥 +- 옛 +- Ø +- Ý +- ՛ +- 餓 +- 鸿 +- 첼 +- 퍽 +- 훠 +- 雑 +- 浸 +- 狠 +- 赐 +- 鋪 +- 扣 +- 诞 +- 乞 +- 赚 +- 禧 +- 堵 +- ሂ +- 堤 +- 昼 +- 鵝 +- 겄 +- 맑 +- 쓱 +- 얹 +- 옳 +- 첩 +- 匈 +- 秩 +- 夹 +- 鹤 +- 甩 +- 遅 +- 邪 +- 扑 +- 闪 +- 壤 +- 慌 +- 綜 +- 隙 +- 綫 +- 縄 +- 佑 +- 觸 +- ঈ +- 攰 +- 鹦 +- 욘 +- 軒 +- 阅 +- 깄 +- 꺾 +- 벚 +- 틸 +- 剪 +- 幽 +- 彦 +- 歇 +- 氢 +- 浜 +- 徵 +- 悉 +- 棚 +- 黒 +- 酷 +- 掠 +- 弓 +- 鞭 +- ឯ +- 兔 +- 洁 +- 漆 +- 餘 +- 갸 +- 녜 +- 맺 +- 텝 +- 퓨 +- 兆 +- 懶 +- 誌 +- 喙 +- 殼 +- 螢 +- 諾 +- 鞘 +- 徙 +- 鄰 +- 盒 +- 纺 +- 荐 +- 赠 +- 垒 +- 婴 +- 胺 +- ឧ +- ሏ +- 嘞 +- 怨 +- 颠 +- 뭇 +- 펀 +- 筑 +- 芽 +- 塗 +- 寮 +- 伪 +- 墩 +- 菠 +- 宴 +- 慕 +- 链 +- 敷 +- 盈 +- 戚 +- 柚 +- 輔 +- ٥ +- ၇ +- ሺ +- 廚 +- 糊 +- 蔓 +- 겟 +- 뛸 +- 벳 +- 숱 +- 찹 +- 媛 +- 淺 +- 漠 +- 玫 +- 皱 +- 蓉 +- 賭 +- 鳗 +- 卯 +- 坂 +- 慘 +- 穩 +- 瑰 +- 奎 +- 氮 +- 褲 +- 叠 +- 锤 +- '[' +- 爲 +- 恆 +- ̄ +- Զ +- ९ +- 喂 +- 渣 +- 萤 +- ጸ +- 搶 +- 蔗 +- 薩 +- 鑊 +- 铺 +- 늬 +- 솜 +- 爺 +- 贊 +- 頸 +- 宛 +- 硫 +- 贤 +- 耕 +- 贼 +- 嘈 +- 騎 +- 凭 +- 峽 +- 惯 +- 芥 +- ٣ +- 厭 +- 徳 +- 撃 +- 椰 +- 檔 +- 곽 +- 렙 +- 쭤 +- 펐 +- 艳 +- 跡 +- 骗 +- 戸 +- 縱 +- 姚 +- 邸 +- 幹 +- 悼 +- 誓 +- 伽 +- 않 +- ຼ +- 巾 +- 厘 +- 噏 +- 얌 +- Ċ +- Ɓ +- ఐ +- 仗 +- 営 +- 妖 +- 鳅 +- 벙 +- 숍 +- 얜 +- 욜 +- 읍 +- Ҩ +- 滷 +- 繞 +- 闲 +- 掩 +- 逢 +- 饼 +- 扭 +- 攝 +- 栋 +- 茹 +- 済 +- 雾 +- 沐 +- Խ +- ሐ +- ቫ +- 揸 +- 淮 +- 猜 +- 禅 +- 胃 +- 껑 +- 됨 +- 팸 +- 肖 +- 辺 +- 씌 +- 嬤 +- 屆 +- 旭 +- 铭 +- 僅 +- 聘 +- 俊 +- 鸣 +- 绳 +- 纷 +- 胁 +- 躍 +- 绣 +- 殘 +- 欄 +- 澤 +- 削 +- 阎 +- 찮 +- Ң +- ٦ +- 徹 +- 棟 +- 裸 +- 깠 +- 늫 +- 뻗 +- 셈 +- 柿 +- 煤 +- 矢 +- 庸 +- 酰 +- 滴 +- 詳 +- 噚 +- 芯 +- 魁 +- 弯 +- 崔 +- 圧 +- 犀 +- ̂ +- \ +- Ь +- ፌ +- 礁 +- 蘆 +- 钦 +- 괄 +- 렷 +- 샴 +- 셰 +- 抹 +- 牽 +- 妮 +- 嘗 +- 宰 +- 廖 +- 莊 +- 俘 +- 凉 +- 迦 +- 仇 +- 缓 +- 걱 +- 査 +- 儲 +- Ҭ +- Ջ +- 労 +- 毀 +- 糙 +- ৭ +- ಐ +- ឲ +- 冚 +- 厲 +- 疼 +- 偉 +- 儒 +- 牲 +- 讚 +- 呃 +- 碱 +- 萝 +- 逗 +- 彰 +- 砖 +- 肿 +- 訳 +- 僧 +- 咨 +- 挙 +- 溯 +- 瀑 +- 碍 +- 尝 +- 汪 +- 檬 +- 赌 +- 菁 +- ५ +- 抄 +- 斤 +- 襯 +- 닙 +- 쯔 +- 퀸 +- 텨 +- 蜗 +- 貪 +- 귓 +- 엮 +- ಞ +- 憎 +- 滇 +- 酥 +- 娶 +- 孕 +- 捉 +- 晓 +- 樽 +- 蜘 +- 閱 +- 雇 +- 筹 +- 癸 +- 酶 +- 儘 +- 睦 +- 膨 +- Œ +- 勁 +- 恨 +- 拎 +- 苞 +- 貧 +- 隣 +- 갇 +- 넜 +- 댈 +- 릏 +- ઞ +- 凹 +- 歉 +- 謠 +- 躲 +- ぽ +- 嗣 +- 滾 +- 汐 +- 喱 +- 贴 +- 権 +- 烷 +- 児 +- 菩 +- 辈 +- 騰 +- ୈ +- 肋 +- 킷 +- ఘ +- ᱡ +- 勘 +- 叻 +- 鍊 +- 퐁 +- ヌ +- 媳 +- 稣 +- 鲍 +- 僱 +- 榴 +- 芙 +- 悟 +- 拘 +- 瓷 +- 仑 +- 霖 +- 饱 +- 棺 +- 滤 +- 赋 +- 渊 +- 央 +- 蕊 +- 蹟 +- 搖 +- 秉 +- 껐 +- Ë +- 戒 +- 揭 +- 瞭 +- 趨 +- 軽 +- 깍 +- 륙 +- 뻑 +- 쌔 +- 콧 +- 픔 +- 蔭 +- 迴 +- 帅 +- 硝 +- 舱 +- 踢 +- 嘆 +- 忌 +- 渔 +- 伙 +- 盗 +- 蹈 +- 锐 +- 颇 +- 窮 +- 醜 +- 祯 +- 腕 +- 尹 +- £ +- 凰 +- 喪 +- 嬲 +- 潔 +- '~' +- ঋ +- 債 +- 芭 +- 薹 +- 짚 +- 펭 +- 毅 +- 汶 +- 糧 +- 蜡 +- 敵 +- 枫 +- 橡 +- 凶 +- 呆 +- 碰 +- 薛 +- 涵 +- 姬 +- 柬 +- 贫 +- 撮 +- 豊 +- 庶 +- 纬 +- Ș +- ሯ +- ᱣ +- 嬰 +- 孢 +- 潤 +- 酵 +- 閣 +- 굵 +- 덧 +- 짖 +- 쿡 +- 燭 +- 驼 +- 冊 +- 厥 +- 愁 +- 楠 +- 濑 +- 診 +- 谁 +- 铸 +- 頁 +- 岑 +- 晃 +- 瘦 +- 肾 +- 噶 +- 窑 +- 戌 +- 摆 +- 窓 +- 坟 +- 涂 +- 炉 +- 壊 +- 檗 +- 毋 +- 餵 +- 壇 +- 専 +- 脾 +- 霧 +- 鬚 +- ŕ +- ऋ +- 戊 +- 脈 +- Ҵ +- ۱ +- ጤ +- 廁 +- 烯 +- 蓼 +- 럿 +- 샹 +- 싯 +- 쬐 +- 펫 +- 헝 +- 흙 +- 墅 +- 賴 +- 喷 +- 榄 +- 佈 +- 咒 +- 扰 +- 豫 +- 罂 +- 硤 +- 継 +- 莓 +- 餃 +- ゥ +- 宠 +- 罐 +- ಓ +- ಠ +- ዥ +- 弦 +- 菓 +- 봅 +- 왓 +- 톨 +- Փ +- 滘 +- 瑜 +- 紐 +- 綿 +- 逮 +- 酮 +- 熄 +- 笛 +- 繫 +- 繰 +- 氛 +- 瀝 +- 陨 +- 歳 +- 殷 +- 奠 +- 奸 +- 酬 +- 杖 +- 钠 +- 浆 +- 狩 +- 좋 +- 険 +- 擬 +- Υ +- 椎 +- ፐ +- 瘟 +- 렐 +- 쭝 +- ڦ +- 旱 +- 衆 +- 삽 +- 摧 +- 罩 +- 鬥 +- 徴 +- 抬 +- 挖 +- 噴 +- 揺 +- 梭 +- 棍 +- 腾 +- 谊 +- 鲤 +- 侠 +- 枕 +- 耗 +- 많 +- 斛 +- 詩 +- 蔔 +- 亿 +- 椭 +- 盔 +- 胀 +- ८ +- ഐ +- ဧ +- 惊 +- 拾 +- 肢 +- 谥 +- 줍 +- 剥 +- 愚 +- 槍 +- 縫 +- 绵 +- 迟 +- 鋼 +- 颖 +- 눕 +- 딲 +- 멧 +- 숟 +- 쏴 +- 癮 +- 篤 +- 鳃 +- 鹉 +- 컹 +- 厳 +- 獄 +- 扫 +- 括 +- 槟 +- 焗 +- 賊 +- 钾 +- 坝 +- 払 +- 獵 +- 罹 
+- 墟 +- 绥 +- 雌 +- 沫 +- 魅 +- 梳 +- 抛 +- 榆 +- 潟 +- 脸 +- 悔 +- 杏 +- 랴 +- Æ +- Ƙ +- ۖ +- ३ +- 傍 +- 惧 +- 譯 +- 鄧 +- 霉 +- 鳄 +- 껜 +- 꿍 +- 둡 +- 륜 +- 뮬 +- 슝 +- 슥 +- 윽 +- 줏 +- 짼 +- 嫩 +- 栄 +- 炆 +- 庚 +- 颓 +- 巢 +- 幌 +- 荠 +- 撇 +- 苯 +- 暨 +- 肇 +- 鍵 +- 颂 +- 寡 +- ஏ +- ᱼ +- 琼 +- 寬 +- 挽 +- 襲 +- 坤 +- 橘 +- 洒 +- 贈 +- 钝 +- ဌ +- 虐 +- Û +- ᱦ +- ᱯ +- 惰 +- 擴 +- 昇 +- 蒐 +- 蠔 +- 겜 +- 뛴 +- 왁 +- 짭 +- 펄 +- ጻ +- 壯 +- 汕 +- 贪 +- 軌 +- 闸 +- 駆 +- 栽 +- 氨 +- 硅 +- 瑪 +- 栏 +- 豁 +- 槐 +- ଝ +- Ұ +- ६ +- 哇 +- 憂 +- 绒 +- 깬 +- 돗 +- 뵈 +- 삘 +- 쏙 +- 얗 +- 웰 +- 즙 +- 캉 +- 펠 +- ዌ +- 弊 +- 併 +- 噉 +- 冶 +- 綁 +- 刁 +- 磐 +- 鳩 +- 蜥 +- 卸 +- 歪 +- 芦 +- '}' +- Ũ +- Ŭ +- ǔ +- ᱰ +- 鮑 +- 륨 +- 쩰 +- ѐ +- ٧ +- ᱹ +- 勿 +- 孖 +- 悅 +- 扉 +- 朴 +- 榧 +- 橄 +- 竇 +- 苹 +- 낑 +- 늑 +- 돕 +- 뒹 +- 붐 +- 뿜 +- 엣 +- 첸 +- 팥 +- 乸 +- 馨 +- 麒 +- 妄 +- 趾 +- 毗 +- 衞 +- 沪 +- 勳 +- 膝 +- 辩 +- 奋 +- 拡 +- '@' +- 塩 +- 椿 +- 铠 +- 傑 +- 壺 +- 霞 +- ቬ +- 剖 +- 懐 +- 柑 +- 渴 +- ጁ +- 厨 +- 곈 +- 왈 +- ଃ +- 妾 +- 棱 +- 琦 +- 聪 +- 蚓 +- 锅 +- 鸦 +- 擾 +- 斥 +- 梓 +- 瘋 +- 绶 +- 荊 +- 荡 +- 霜 +- 恕 +- 抖 +- 殉 +- ~ +- 峭 +- 涛 +- 陌 +- 罢 +- 迄 +- 憧 +- 抢 +- 绑 +- 缴 +- 蝇 +- 秃 +- 떤 +- 鮭 +- 匙 +- 垣 +- ぷ +- 恥 +- 钩 +- Ū +- 庇 +- 痴 +- 눅 +- 뤘 +- 륭 +- 뾰 +- 샜 +- 쌰 +- 잣 +- Ξ +- Ӗ +- 茸 +- 髪 +- 庵 +- 礙 +- 荫 +- 侦 +- 贷 +- 颱 +- 憾 +- 択 +- 眺 +- 窄 +- 薬 +- 鑽 +- 詹 +- 趋 +- 녁 +- Ը +- 攀 +- 攸 +- 菱 +- 賤 +- Ύ +- Ω +- ۀ +- Ị +- ‒ +- 咳 +- 埠 +- 宵 +- 뜸 +- 렘 +- 롬 +- 젯 +- 쭘 +- 킵 +- ঔ +- 膳 +- 鴻 +- 扯 +- 沽 +- 羌 +- 踩 +- 猶 +- 帜 +- 畜 +- 驳 +- 嗲 +- 尻 +- 蝉 +- 誘 +- 粮 +- 阀 +- 飓 +- 窟 +- 総 +- 劑 +- 縁 +- 嗌 +- 湧 +- 飼 +- 慳 +- 鈴 +- 駐 +- '#' +- Ɗ +- Ҽ +- Ճ +- 吏 +- 屌 +- 枱 +- 爐 +- 謀 +- 鹏 +- 뚤 +- 텃 +- Յ +- 唷 +- 喳 +- 杞 +- 爹 +- 謹 +- ഔ +- 兀 +- 饶 +- 颈 +- 庐 +- 聴 +- 挡 +- 淆 +- 轰 +- 禽 +- 诱 +- 炼 +- ဿ +- 脅 +- 艦 +- 薑 +- 槲 +- ቨ +- 갯 +- 꾹 +- 붉 +- 캄 +- 쾅 +- Ĕ +- Ī +- Ҿ +- ሟ +- 峻 +- 諮 +- 贬 +- 鉄 +- 쟀 +- 釣 +- 懼 +- 撻 +- 牟 +- 肠 +- 昧 +- 鲈 +- 侨 +- 湘 +- 铎 +- 厢 +- 履 +- 债 +- 悦 +- 粤 +- ឥ +- 沸 +- 曝 +- 净 +- 厉 +- 搏 +- 浏 +- 淇 +- 獐 +- 禍 +- ఛ +- ቧ +- 弾 +- 欺 +- 蛄 +- 씁 +- 캘 +- ४ +- ሹ +- ቭ +- 呱 +- 啥 +- 嚿 +- 嬸 +- 洽 +- 蟻 +- 诵 +- 豚 +- 颅 +- 駕 +- 깁 +- 뻣 +- 핍 +- 倡 +- 擎 +- 檯 +- 唤 +- 惱 +- 憑 +- 罵 +- 譽 +- 悶 +- 涩 +- 邑 +- 襄 +- 翌 +- 犁 +- 踪 +- 酱 +- 帘 +- 淀 +- 侶 +- 渭 +- 蹤 +- 黙 +- ሾ +- ጪ +- 撈 +- 畅 +- 盧 +- 齢 +- 솥 +- 쨍 +- 쫀 +- 튕 +- Ļ +- Њ +- 眨 +- 籽 +- 肽 +- 舖 +- 钙 +- 벡 +- 쫑 +- 큘 +- 핬 +- 헨 +- 壹 +- 濾 +- 蔵 +- 冤 +- 啖 +- 繩 +- 衙 +- 詰 +- '{' +- 挺 +- 朵 +- 坞 +- 醋 +- 尘 +- 鄭 +- 挝 +- 陡 +- 劍 +- 淄 +- 缆 +- 罰 +- 曳 +- 隠 +- Ć +- Ӡ +- ७ +- ᱲ +- 噌 +- 揼 +- 矣 +- 룻 +- 앰 +- 횐 +- 兇 +- 燉 +- 錶 +- 攬 +- 笏 +- 肤 +- 剤 +- 嫡 +- 屡 +- 缀 +- 囚 +- 懿 +- 镁 +- 祀 +- 趁 +- 鸭 +- 叩 +- 讼 +- 颌 +- 叮 +- 髓 +- 艰 +- 贩 +- 鴛 +- 鴦 +- 框 +- 邱 +- 袂 +- 拯 +- 濤 +- 萍 +- 韭 +- 妨 +- 薈 +- ዬ +- 壩 +- 澎 +- 瞧 +- 繪 +- 羞 +- 胖 +- 谭 +- 鸢 +- 겔 +- 랠 +- 맸 +- 몫 +- 뵙 +- 삥 +- 쏭 +- 앚 +- 읏 +- 팜 +- ۲ +- ਔ +- 剿 +- 卧 +- 峯 +- 彎 +- 臂 +- 蓄 +- 跤 +- 驅 +- 뎌 +- ― +- 咏 +- 屑 +- 拋 +- 熔 +- 萊 +- 遙 +- 騒 +- 俱 +- 嗜 +- 緑 +- 缠 +- 螃 +- 谨 +- 喘 +- 趙 +- 霹 +- 哦 +- 莺 +- 邢 +- 憬 +- 氷 +- 酯 +- 滞 +- 黔 +- 噬 +- 俞 +- 濱 +- ޛ +- ፔ +- 暮 +- 밋 +- 뽈 +- Ў +- ጄ +- Ấ +- 侣 +- 唸 +- 桦 +- 泌 +- 稔 +- 莹 +- 钻 +- 갛 +- 꿋 +- 슛 +- 쑤 +- 얕 +- 엌 +- Ҟ +- ־ +- 傘 +- 呵 +- 妝 +- 嵩 +- 慾 +- 暈 +- 沧 +- 禹 +- 蝕 +- 흩 +- 嘩 +- 盞 +- 笨 +- 魄 +- 償 +- 垩 +- 駁 +- 嘎 +- 枯 +- 茫 +- 谦 +- 匿 +- 跋 +- 镰 +- ‧ +- 氰 +- 臘 +- 輯 +- ぴ +- 馳 +- Ă +- ͘ +- ঊ +- ቄ +- 瑶 +- 盼 +- 薦 +- 酿 +- ಃ +- ១ +- 冀 +- 茱 +- 阱 +- 랖 +- 섀 +- 얽 +- 엿 +- 윌 +- 잌 +- 찻 +- 챠 +- Ґ +- ẵ +- 婿 +- 擠 +- 曆 +- 鷹 +- 沛 +- 濫 +- 祿 +- 蜀 +- 釐 +- 闊 +- 沮 +- 濯 +- 拨 +- 訝 +- 柒 +- 珂 +- 蚁 +- 诊 +- 劈 +- 腓 +- 鄂 +- 劲 +- 拂 +- 砵 +- 溃 +- 摔 +- 肆 +- 蛳 +- ဍ +- 呂 +- 彙 +- 樑 +- 飘 +- 鴉 +- ‹ +- 暇 +- 芩 +- 鰻 +- 齡 +- Ĩ +- ΐ +- ѝ +- ۓ +- ቺ +- › +- 殃 +- 譜 +- 곁 +- 깰 +- 뗄 +- 잭 +- 촥 +- 캣 +- 펍 +-  +- ០ +- 嘲 +- 憐 +- 戇 +- 掙 +- ฌ +- 悩 +- 淒 +- 聂 +- 剌 +- 匝 +- 辯 +- 謊 +- 鉛 +- 鲷 +- 娄 +- 罚 +- 苋 +- 衷 +- 膀 +- 裴 +- 赣 +- 
縦 +- 僚 +- 卦 +- 坳 +- 鱿 +- 绎 +- 铅 +- 窝 +- 妓 +- 岔 +- 乒 +- 扱 +- 溢 +- 讽 +- 膚 +- 傣 +- 懒 +- 猿 +- 輋 +- 丫 +- 穎 +- 笪 +- 蓟 +- 倚 +- 匠 +- 匪 +- 砍 +- 莆 +- 莞 +- 诏 +- ː +- Ժ +- ሣ +- 螞 +- 釀 +- 갭 +- 눴 +- 닳 +- 띡 +- 맴 +- 숯 +- 쐈 +- 씽 +- 킴 +- ዣ +- 奕 +- 砌 +- 諒 +- 錫 +- 雰 +- 喻 +- 岷 +- 灑 +- 炳 +- 爍 +- 絵 +- 炊 +- 礦 +- 跆 +- 輿 +- 螯 +- 朔 +- 蔽 +- 唆 +- 塌 +- 馴 +- 纠 +- 螂 +- 铝 +- 懸 +- ゾ +- 猕 +- 稽 +- 姥 +- 吟 +- 廓 +- 肪 +- 攤 +- ፏ +- 哄 +- 巻 +- 猬 +- 藐 +- 闵 +- 캥 +- Ē +- ޣ +- 咽 +- 婷 +- 榔 +- 漬 +- 瓊 +- 窰 +- 鼩 +- 꿇 +- 넬 +- 됬 +- 떳 +- 릅 +- 봬 +- 뽐 +- 삑 +- 삔 +- 죙 +- 퀵 +- 텼 +- 홋 +- 휙 +- 揾 +- 斧 +- 獸 +- 膏 +- 诈 +- 郝 +- 핥 +- 훑 +- ഊ +- 堺 +- 懵 +- 擰 +- 烃 +- 瘾 +- 瞻 +- 舜 +- 萌 +- 蚌 +- 辟 +- 훗 +- 孚 +- 揉 +- 紛 +- 罷 +- 頑 +- 偽 +- 斩 +- 糸 +- 慨 +- 捲 +- 璧 +- 筷 +- 睿 +- 翡 +- 苷 +- 蝙 +- 冢 +- 佼 +- 岚 +- 胤 +- 讳 +- 佣 +- 邹 +- 乓 +- 钉 +- 寵 +- 帥 +- 榈 +- 尧 +- 槽 +- ఞ +- 唉 +- 牠 +- 茵 +- ۳ +- 乍 +- 幡 +- 撲 +- 樊 +- 绯 +- 銃 +- 꽝 +- 뗐 +- 썪 +- 앓 +- 컥 +- Ź +- Ҥ +- ሑ +- Ở +- 琪 +- 竖 +- 糯 +- 缕 +- 腫 +- 蛭 +- 酗 +- 顏 +- 걀 +- 괘 +- 밉 +- 빕 +- 웜 +- 쟈 +- 휠 +- 唧 +- 敘 +- 疊 +- 癲 +- 缝 +- 賦 +- 鄞 +- 冻 +- 絞 +- 寅 +- 懋 +- 濒 +- 烛 +- 睜 +- 蛮 +- 谕 +- 皺 +- 厌 +- 吵 +- 漲 +- 汚 +- 韵 +- 劉 +- 砲 +- ぺ +- 溲 +- 阐 +- 柜 +- 藨 +- 龐 +- 淹 +- 吼 +- 荚 +- 喬 +- 藜 +- 萸 +- Ձ +- ఊ +- 滯 +- 麽 +- 꿉 +- 잰 +- Ց +- ՞ +- ሷ +- ቾ +- 厕 +- 嘥 +- 娥 +- 悖 +- 斬 +- 曰 +- 棵 +- 畏 +- 畿 +- 肴 +- 腥 +- 酉 +- 깽 +- 낌 +- 뽂 +- 켄 +- 쿤 +- 헉 +- ڱ +- 峨 +- 朋 +- 枉 +- 皖 +- 駿 +- 斋 +- 熬 +- 纏 +- 襪 +- 趟 +- 釘 +- 陥 +- 骆 +- 喚 +- 羚 +- 俯 +- 兑 +- 歯 +- 渝 +- 彗 +- 抒 +- 鲇 +- 喵 +- 沁 +- 虔 +- 雳 +- 泪 +- 穷 +- 蔬 +- 拐 +- 摺 +- 舂 +- 帖 +- 铉 +- 憩 +- 咧 +- 譲 +- 嘟 +- 籲 +- 釜 +- 韌 +- Ď +- Ķ +- ۴ +- ۵ +- ۶ +- 倦 +- 偕 +- 塵 +- 掣 +- 暉 +- 瀨 +- 爷 +- 粋 +- 菸 +- 賠 +- 鶴 +- 곶 +- 끽 +- 둣 +- 떵 +- 맷 +- 셌 +- 솟 +- 츤 +- 푠 +- 呔 +- 嗽 +- 嘔 +- 拗 +- 泄 +- 煽 +- 챌 +- 捱 +- 浑 +- 鲸 +- 兮 +- 凑 +- 嚨 +- 幢 +- 栉 +- 楔 +- 萎 +- 荧 +- 跪 +- 锈 +- 畴 +- 譚 +- 镖 +- 斌 +- 吠 +- 篷 +- 醚 +- 钓 +- 闹 +- 饲 +- 晤 +- 蔚 +- 庁 +- 栓 +- 侃 +- 掷 +- 眶 +- 狙 +- 菝 +- 嬉 +- 尸 +- 註 +- 隍 +- 哉 +- 欽 +- 蜆 +- 雁 +- ಔ +- ɔ +- Ծ +- ዦ +- 崛 +- 揪 +- 盎 +- 箬 +- 艷 +- 鲅 +- 꺠 +- 낵 +- 냔 +- 눔 +- 뎀 +- 뎃 +- 몄 +- 뵀 +- 숑 +- 씸 +- 옇 +- 쨈 +- 탬 +- 俭 +- 疯 +- 竣 +- 締 +- 鹭 +- 壟 +- 嫂 +- 嵌 +- 檀 +- 筱 +- 簪 +- 谍 +- 骤 +- 谅 +- 騮 +- 騷 +- 娅 +- 汛 +- 軀 +- 侬 +- 哎 +- 挫 +- 皎 +- 侏 +- 繹 +- 镶 +- 荞 +- 嫔 +- 癡 +- 哮 +- 璋 +- Ḥ +- 좀 +- 葜 +- 闯 +- 旬 +- 泵 +- 窒 +- 絨 +- 髀 +- ሦ +- 壘 +- 汾 +- ቼ +- 啄 +- 琅 +- 窿 +- 葳 +- 맏 +- 빰 +- 촘 +- ̈ +- ጌ +- ឍ +- 惘 +- 掰 +- 柺 +- 洱 +- 矾 +- 磚 +- 缨 +- 腧 +- 訣 +- 誡 +- 둠 +- 맬 +- 샾 +- 썽 +- 젬 +- 쥴 +- 冗 +- 剃 +- 吳 +- 眞 +- 篆 +- 胚 +- 脹 +- 栩 +- 沢 +- 祕 +- 祟 +- 肅 +- 铀 +- 龚 +- 凳 +- 剝 +- 忒 +- 撕 +- 紥 +- 纱 +- 虞 +- 鏈 +- 駄 +- 簇 +- 鑑 +- 堰 +- 毓 +- 窃 +- 谬 +- 轩 +- 姉 +- 晰 +- 闇 +- 闘 +- 聋 +- 烹 +- 牺 +- 俳 +- 茴 +- 刃 +- 嘯 +- 窦 +- 㩒 +- 捧 +- 耆 +- 傢 +- 沂 +- 秆 +- 删 +- 柵 +- 梧 +- 蚕 +- 驸 +- 齒 +- ઔ +- ḿ +- 睏 +- 苟 +- 갬 +- 촛 +- Ņ +- ؔ +- ኣ +- ጧ +- ᱪ +- 偿 +- 匡 +- 叹 +- 峒 +- 栾 +- 棲 +- 脖 +- 赦 +- 鲡 +- 뗀 +- 뮌 +- 쉼 +- 쎘 +- 얏 +- 췌 +- 툼 +- 튠 +- ഛ +- 揦 +- 漳 +- 烘 +- 祺 +- 鳖 +- 刹 +- 韧 +- 飄 +- 轎 +- 『 +- 皂 +- 蝎 +- 骼 +- 劾 +- 拦 +- 榻 +- 靶 +- 斟 +- 渥 +- 笃 +- 廂 +- 穀 +- 诫 +- 賬 +- 揣 +- 匣 +- 猩 +- 』 +- 啸 +- 赎 +- 蝨 +- 縛 +- 焰 +- 舆 +- 鄣 +- 閂 +- 鬃 +- 哼 +- 嚼 +- 懲 +- 熠 +- 戎 +- 歹 +- 荛 +- 邁 +- 陂 +- 鳕 +- Ώ +- 偈 +- 囡 +- 趴 +- 閘 +- 텁 +- ሔ +- ኗ +- ፆ +- 嘍 +- 屁 +- 揈 +- 灼 +- 畸 +- 稠 +- 笙 +- 籮 +- 遴 +- 驟 +- 麝 +- 麪 +- 꺄 +- 낡 +- 늠 +- 듭 +- 몹 +- 믈 +- 밧 +- 밲 +- 뼛 +- 샛 +- 섣 +- 屜 +- 翊 +- 耍 +- 苇 +- 葫 +- 辜 +- 酌 +- 鉤 +- 霎 +- 頗 +- 끙 +- 룐 +- 弛 +- 怠 +- 珉 +- 戳 +- 紋 +- 谟 +- 豈 +- 辱 +- 鄙 +- 钥 +- 彬 +- 慷 +- 潑 +- 葶 +- 躺 +- 冥 +- 澜 +- 裘 +- 鯊 +- 怯 +- 袍 +- 裙 +- 䁪 +- 佃 +- 勧 +- 弧 +- 拚 +- 溴 +- 绸 +- 钧 +- 煌 +- 祷 +- 釉 +- 瓯 +- 堕 +- 鹑 +- ゅ +- 壆 +- 簕 +- 惨 +- 灶 +- 祉 +- 鹘 +- 剷 +- 擒 +- 梾 +- 蝗 +- 丼 +- 啞 +- 籃 +- 缶 +- 蟬 +- 遜 +- 錐 +- 훼 +- © +- ۚ +- ۸ +- ጩ +- ጾ +- 嗡 +- 捡 +- 歼 +- 氹 +- 洮 +- 穹 +- 籬 +- 醛 +- 闢 +- 鶯 +- 麓 +- 굘 +- 궜 +- 
낱 +- 룽 +- 뺨 +- 뽁 +- 싣 +- 쎌 +- 웁 +- 짙 +- 춧 +- 팎 +- 핼 +- ဩ +- 厝 +- 娟 +- 弼 +- 彌 +- 擋 +- 檳 +- 欸 +- 盜 +- 篙 +- 緬 +- 蘅 +- 蠟 +- 貂 +- 躬 +- 朕 +- 蚵 +- ២ +- 箍 +- 锻 +- 佗 +- 咲 +- 熏 +- 紗 +- 鲨 +- 敲 +- 舶 +- 銘 +- 怦 +- 恍 +- 撳 +- 劝 +- 贰 +- 髒 +- 彝 +- 淑 +- 艱 +- 伺 +- 绅 +- 谎 +- 翘 +- 瑾 +- 苍 +- 蟑 +- 瞄 +- 赁 +- 迭 +- 荸 +- 婪 +- 曖 +- 溉 +- 绛 +- ဠ +- 埼 +- 擲 +- 涧 +- 瀬 +- 稼 +- 訟 +- ఠ +- 啶 +- 柽 +- 泮 +- 泻 +- 渾 +- 滌 +- 纂 +- 羹 +- 臟 +- 鹌 +- 헹 +- ¬ +- ۃ +- ઍ +- ዑ +- ዢ +- ៧ +- 啩 +- 弈 +- 簿 +- 糰 +- 缸 +- 荼 +- 郓 +- 饋 +- 鹂 +- 곗 +- 궐 +- 딥 +- 떄 +- 봔 +- 슉 +- 슘 +- 쯧 +- 챈 +- 톰 +- ઃ +- ಊ +- ጮ +- 嘜 +- 夯 +- 屙 +- 徊 +- 徘 +- 晝 +- 洵 +- 漱 +- 诛 +- 빳 +- 샬 +- 刈 +- 巒 +- 羲 +- ᱬ +- 彪 +- 拌 +- 挟 +- 焉 +- 砸 +- 菅 +- 霄 +- 掖 +- 晟 +- 饒 +- 噪 +- 孵 +- 浣 +- 猟 +- 襟 +- 攋 +- 疣 +- 謢 +- 剣 +- 瞳 +- 苓 +- 吁 +- 愕 +- 烦 +- 瞒 +- 羟 +- 谣 +- 噹 +- 沾 +- 潞 +- 聆 +- 啼 +- 濂 +- 痒 +- 顕 +- 墜 +- 滝 +- 颶 +- 墮 +- 捽 +- 褒 +- 坠 +- 囝 +- 聳 +- 邏 +- 垫 +- 貯 +- 垢 +- 埕 +- 寰 +- 帚 +- 滙 +- 綺 +- 葎 +- 馅 +- 桤 +- 俚 +- 绰 +- 郷 +- ƴ +- ϋ +- ዷ +- 瑕 +- 腍 +- 셧 +- ʽ +- Ί +- ѕ +- ኚ +- ዐ +- 妍 +- 屉 +- 屍 +- 捞 +- 櫻 +- 焊 +- 牴 +- 珧 +- 璇 +- 脓 +- 蒺 +- 蓣 +- 蕪 +- 虧 +- 裤 +- 隅 +- 靡 +- 鲹 +- 鸞 +- 黯 +- 굼 +- 긱 +- 꾀 +- 댐 +- 딤 +- 뗘 +- 멩 +- 멱 +- 빢 +- 뿅 +- 웍 +- 윷 +- 잦 +- 쟨 +- 쫘 +- 폿 +- 햐 +- 헥 +- ಛ +- 伎 +- 咯 +- 栎 +- 眷 +- 镑 +- 唑 +- 唞 +- 畑 +- 鸻 +- 亏 +- 凿 +- 洼 +- 琢 +- 谏 +- 鎂 +- 崁 +- 脳 +- 漿 +- 炙 +- 祸 +- 轄 +- 顽 +- 奢 +- 妒 +- 涡 +- 涸 +- 钰 +- 鬣 +- 噓 +- 癱 +- 蚀 +- 贿 +- 寇 +- 粘 +- 倪 +- 箔 +- 摊 +- 睐 +- 娴 +- 渲 +- 谐 +- 橇 +- 脐 +- 鼹 +- 晾 +- 搣 +- 楝 +- 渚 +- 纖 +- 耽 +- 逹 +- 叭 +- 斉 +- 紳 +- 飩 +- Ľ +- 墾 +- 捍 +- 輾 +- ٗ +- ៖ +- ៣ +- ㄟ +- 乂 +- 侮 +- 咆 +- 崙 +- 帛 +- 懈 +- 擸 +- 晉 +- 暹 +- 檻 +- 綱 +- 羧 +- 菏 +- 衅 +- 輻 +- 钇 +- 鯖 +- 걜 +- 냑 +- 넙 +- 똔 +- 띨 +- 쉘 +- 쏜 +- 옅 +- 짹 +- 휩 +- Ț +- ᱝ +- 濛 +- 糾 +- 莪 +- 鈕 +- 볐 +- 얍 +- 좆 +- 凋 +- 宸 +- 彤 +- 燴 +- 缉 +- 袜 +- 酪 +- 鳐 +- 叨 +- 媲 +- 晕 +- 淫 +- 籤 +- 紮 +- 芮 +- 蟾 +- 覓 +- 踐 +- 轿 +- 鈔 +- 鳢 +- 睾 +- 纔 +- 腈 +- 谴 +- 隼 +- 黏 +- 檐 +- 蛟 +- 誇 +- 靼 +- 岖 +- 岱 +- 愤 +- 炀 +- 羁 +- 逞 +- 鞑 +- 鸠 +- 浔 +- 钛 +- 鲮 +- 岬 +- 焚 +- 鬱 +- 珅 +- 蕴 +- 咎 +- 戮 +- 酚 +- 鲳 +- 왜 +- 賃 +- 僭 +- 鮫 +- 寥 +- 绡 +- 匂 +- 嶄 +- 掀 +- 歎 +- 捋 +- 砚 +- 肘 +- 犠 +- 睹 +- 覧 +- 賛 +- 洸 +- 纾 +- 菀 +- 逾 +- 镧 +- 蕗 +- 邕 +- ̔ +- 叢 +- 彷 +- 恵 +- ^ +- × +- ʺ +- Љ +- ӊ +- Ӳ +- 仮 +- 佰 +- 噃 +- 噸 +- 圻 +- 奀 +- 扔 +- 挠 +- 搗 +- 浛 +- 滔 +- 炫 +- 耘 +- 膩 +- 臻 +- 苈 +- 螳 +- 蠅 +- 裹 +- 铨 +- 頒 +- 궤 +- 넹 +- 덱 +- 땔 +- 볕 +- 뵌 +- 뵐 +- 쉭 +- 쌋 +- 옌 +- 윳 +- 젼 +- 쫒 +- 챘 +- 첵 +- 틋 +- 휜 +- ۹ +- ഃ +- 恤 +- 揖 +- 樸 +- 茲 +- 詭 +- 赔 +- 遞 +- 鄢 +- 骏 +- 깻 +- 윰 +- ൌ +- 奄 +- 姍 +- 愧 +- 扼 +- 溺 +- 珙 +- 蕎 +- 阙 +- 涟 +- ぇ +- 兌 +- 吩 +- 咐 +- 憤 +- 渋 +- 瑛 +- 簷 +- 耸 +- 胰 +- 膊 +- 蘑 +- 辗 +- 酞 +- 霆 +- 鲱 +- 牢 +- 缔 +- 缚 +- 酔 +- 崴 +- 捩 +- 撫 +- 舄 +- 莼 +- 嘻 +- 塾 +- 桜 +- 涿 +- 苻 +- 泓 +- 盅 +- 蹦 +- 哨 +- 喧 +- 栅 +- 炕 +- 舎 +- 艙 +- 蕈 +- 誼 +- 謁 +- 饪 +- 搔 +- 拝 +- 塁 +- 浒 +- 驿 +- 謙 +- 褶 +- 捜 +- 砟 +- 膈 +- 煦 +- 遐 +- 捺 +- 蘊 +- 疮 +- 竜 +- 贱 +- 稃 +- 罟 +- 湳 +- 圏 +- 遏 +- 鯛 +- 嘘 +- 攜 +- 湊 +- 瑩 +- 臓 +- 芪 +- 雋 +- 偵 +- 巣 +- 惦 +- 桉 +- 渉 +- 祐 +- 驴 +- 骥 +- 魑 +- ޡ +- ଈ +- 剔 +- 暝 +- 梢 +- 甬 +- 禪 +- 竺 +- ŏ +- ؑ +- ዉ +- 佥 +- 俸 +- 凄 +- 娑 +- 惶 +- 槤 +- 涷 +- 漾 +- 炽 +- 烽 +- 珏 +- 痾 +- 筐 +- 糞 +- 罄 +- 羈 +- 耿 +- 腭 +- 萃 +- 蔻 +- 谳 +- 贻 +- 迩 +- 邾 +- 閩 +- 骠 +- 겅 +- 곪 +- 넴 +- 뇽 +- 딧 +- 똠 +- 묽 +- 쐬 +- 옐 +- 웩 +- 쨰 +- 캅 +- Ģ +- ቮ +- 刪 +- 啜 +- 泼 +- 琛 +- 稲 +- 腮 +- 芷 +- 萱 +- 蔣 +- 蠱 +- 鰲 +- 黧 +- 댑 +- 숄 +- 캬 +- 콥 +- 탤 +- ๅ +- 庾 +- 睫 +- 笋 +- 耧 +- 脇 +- 菰 +- 鑰 +- 隸 +- 雛 +- 顷 +- 骰 +- 呻 +- 墊 +- 惕 +- 膺 +- 曇 +- 湄 +- 錨 +- ヶ +- 臃 +- 俬 +- 啉 +- 瀚 +- 烙 +- 珈 +- 癒 +- 臀 +- 蕭 +- 锑 +- 閪 +- 孪 +- 躝 +- 拷 +- 燦 +- 陇 +- 骸 +- 魇 +- 瞩 +- 朧 +- 綴 +- 渺 +- 胥 +- 禛 +- 挤 +- 柝 +- 腋 +- 漩 +- 窯 +- 蒨 +- 郦 +- 铬 +- 矫 +- 倬 +- 烁 +- 輛 +- 酢 +- 膛 +- 艮 +- 琐 +- 穏 +- 侥 +- 拢 +- 撩 +- 翩 +- 镊 +- 雏 +- 搽 +- 锌 +- 弩 +- 漓 +- 蛹 +- 嘧 +- 炜 +- 箜 +- 篌 +- 鉱 +- 彿 +- 擘 +- 绢 +- 吽 +- 懇 +- 
訕 +- 攏 +- 桓 +- 縉 +- 魍 +- 魎 +- 鹼 +- ʾ +- Ւ +- ۗ +- ఢ +- ฯ +- ኡ +- ጹ +- ፑ +- 丶 +- 亢 +- 偃 +- 啰 +- 囌 +- 圩 +- 塚 +- 嫲 +- 恪 +- 昱 +- 棗 +- 瑧 +- 璟 +- 瓏 +- 畐 +- 痰 +- 痱 +- 笈 +- 粳 +- 紓 +- 胯 +- 腌 +- 腎 +- 臧 +- 菉 +- 蟠 +- 謄 +- 豂 +- 躉 +- 逅 +- 遁 +- 邂 +- 郫 +- 鋒 +- 鋸 +- 鑄 +- 闺 +- 雉 +- 頌 +- 鱷 +- 鲃 +- 鶏 +- 鼱 +- 귿 +- 꼿 +- 꿩 +- 듦 +- 딛 +- 띃 +- 숴 +- 짢 +- 쨋 +- 챕 +- 촤 +- 츰 +- 튈 +- 팰 +- 혓 +- § +- ଋ +- ዟ +- ጢ +- 僖 +- 尷 +- 愣 +- 杵 +- 楷 +- 潺 +- 潼 +- 祁 +- 縻 +- 翟 +- 驕 +- 냇 +- 닛 +- 샥 +- 쩝 +- 콰 +- ኅ +- 亜 +- 媚 +- 尬 +- 戍 +- 渤 +- 燙 +- 犧 +- 疹 +- 篩 +- 钜 +- 齁 +- 丕 +- 呤 +- 嗆 +- 嘌 +- 挨 +- 柃 +- 氫 +- 鏟 +- 韶 +- 廬 +- 敖 +- 湛 +- 澡 +- 碾 +- 藁 +- 鈍 +- 鲑 +- 嬌 +- 痺 +- 铵 +- 堉 +- 撓 +- 楞 +- 渗 +- 濠 +- 禺 +- 绞 +- 賈 +- 顛 +- 朦 +- 璨 +- 蝸 +- 鰭 +- 喃 +- 垦 +- 孀 +- 瞰 +- 銳 +- 歓 +- 闌 +- 坯 +- 夭 +- 杷 +- 枇 +- 渎 +- 撑 +- 梠 +- 駭 +- 蚂 +- 阕 +- 掲 +- 쫌 +- 醸 +- 茁 +- 邙 +- 糬 +- 勐 +- 垮 +- 噗 +- 蜻 +- 洩 +- 涙 +- 軸 +- 鍛 +- ဋ +- 霊 +- 囍 +- 恬 +- 淪 +- 癖 +- 舐 +- 蜃 +- 踎 +- 锯 +- 隘 +- 餛 +- 驰 +- 敝 +- 濁 +- 肛 +- 臼 +- 轸 +- 鑫 +- 陋 +- 쇽 +- "\x97" +- Ƴ +- ஔ +- 跣 +- 헀 +- Ќ +- ؓ +- ઋ +- Ả +- ぁ +- 刽 +- 塢 +- 墳 +- 姦 +- 徬 +- 扛 +- 抿 +- 摞 +- 摟 +- 昴 +- 暧 +- 桨 +- 桿 +- 楓 +- 殳 +- 氾 +- 潰 +- 烂 +- 瑭 +- 璜 +- 瘀 +- 瞌 +- 砰 +- 竅 +- 竿 +- 緻 +- 縈 +- 縝 +- 胪 +- 裾 +- 褪 +- 誅 +- 跖 +- 邈 +- 郿 +- 銜 +- 铰 +- 镌 +- 镓 +- 隷 +- 頬 +- 颍 +- 鳌 +- 鼐 +- 곯 +- 끕 +- 놉 +- 놋 +- 늄 +- 늪 +- 뗬 +- 랏 +- 앳 +- 읊 +- 잴 +- 캇 +- 켈 +- 켔 +- 튄 +- 튿 +- 푯 +- 훤 +- Ћ +- ဈ +- 朽 +- 杠 +- 秤 +- 舔 +- 蚶 +- 靂 +- 鱈 +- 鱔 +- 긌 +- ─ +- 勻 +- 廃 +- 歆 +- 甄 +- 祚 +- 簾 +- 绫 +- 踊 +- 闖 +- 驹 +- 鹛 +- ઐ +- ಢ +- 圃 +- 幔 +- 揩 +- 濡 +- 琶 +- 盏 +- 钴 +- 卉 +- 噜 +- 婺 +- 崧 +- 湍 +- 瀾 +- 竊 +- 翱 +- 苕 +- 蒼 +- 褚 +- 餚 +- 迂 +- 俨 +- 攔 +- 滥 +- 犒 +- 氓 +- 虏 +- 俏 +- 疙 +- 瘩 +- 綦 +- 駝 +- 騭 +- 鯉 +- 姒 +- 帧 +- 浇 +- 痢 +- 饥 +- 窥 +- 纭 +- 愫 +- 筛 +- 舗 +- 鲣 +- 惩 +- 釈 +- 琮 +- 淖 +- 胱 +- 铲 +- 旷 +- 碲 +- 狍 +- 渓 +- 燮 +- 揹 +- 镒 +- 垌 +- 邳 +- ㄧ +- 堀 +- 丢 +- 挣 +- 揗 +- 摂 +- 罅 +- 褓 +- 锷 +- 麾 +- 覲 +- 粿 +- 肄 +- 蜓 +- 噩 +- 濺 +- 矗 +- 翦 +- 饕 +- 鸮 +- ಋ +- 岿 +- 沔 +- 煉 +- 鬘 +- ឪ +- 亀 +- 婉 +- 孭 +- 暄 +- 枋 +- 溞 +- 睥 +- 磯 +- 谜 +- 嗤 +- 陞 +- Ė +- ̕ +- ۷ +- ଔ +- ᱶ +- 倩 +- 僵 +- 恙 +- 汙 +- 漚 +- 潲 +- 疵 +- 粽 +- 鬍 +- 썅 +- ­ +- ® +- Ŵ +- ̓ +- ٪ +- ୦ +- ៦ +- ᱸ +- Ủ +- ỵ +- 仏 +- 刨 +- 咇 +- 嚀 +- 娇 +- 嫦 +- 宦 +- 幺 +- 怵 +- 恳 +- 慤 +- 懦 +- 戛 +- 撷 +- 旣 +- 晖 +- 暅 +- 暱 +- 柞 +- 楸 +- 樞 +- 毯 +- 渦 +- 瀉 +- 灞 +- 爨 +- 琥 +- 瓢 +- 癢 +- 睨 +- 瞞 +- 砝 +- 秧 +- 筠 +- 箪 +- 篡 +- 粼 +- 縷 +- 翹 +- 菔 +- 蓆 +- 蕩 +- 衿 +- 袱 +- 谙 +- 豌 +- 跛 +- 辍 +- 逍 +- 逕 +- 鉅 +- 銮 +- 颐 +- 颙 +- 餒 +- 鵪 +- 鶉 +- 갰 +- 곌 +- 꺽 +- 꿰 +- 뎁 +- 땋 +- 롷 +- 룔 +- 멓 +- 뼌 +- 뼘 +- 뿍 +- 셍 +- 쉑 +- 슁 +- 쑈 +- 욤 +- 윅 +- 읔 +- 쭌 +- 챗 +- 촐 +- 켠 +- 콸 +- 큭 +- 팻 +- 햅 +- 嗨 +- 桧 +- 樺 +- 煞 +- 爰 +- 瑤 +- 祗 +- 緘 +- 莘 +- 鈉 +- 鵬 +- 吝 +- 吶 +- 鱸 +- 롸 +- 凈 +- 峙 +- 毙 +- 疤 +- 稷 +- 蓓 +- 蜴 +- 讣 +- 辙 +- 黛 +- 恃 +- 掏 +- 榨 +- 羨 +- 茉 +- 荽 +- 躁 +- 嫻 +- 泞 +- 畳 +- 腱 +- 蜊 +- 骚 +- 忻 +- 擢 +- 簧 +- 骁 +- 侈 +- 咙 +- 巅 +- 葆 +- 釗 +- 鬠 +- 锺 +- 鋆 +- 卤 +- 粪 +- 镂 +- 糴 +- 饿 +- 鼆 +- 锰 +- 鲢 +- 鸥 +- 亵 +- 晗 +- 焙 +- 筥 +- 锴 +- 镀 +- 騾 +- 麋 +- 砕 +- 驯 +- 拇 +- 燶 +- 蝮 +- 迥 +- 揃 +- 噁 +- 胴 +- 挿 +- 熨 +- 舵 +- 鋭 +- 閨 +- 懺 +- 倧 +- 馈 +- 鲗 +- 鸵 +- 呟 +- 孺 +- 姫 +- 璀 +- 憊 +- 饗 +- 冧 +- 忟 +- 挈 +- 晌 +- 梘 +- 沆 +- 滁 +- 謎 +- 跚 +- 雯 +- 餼 +- 佇 +- 妆 +- 撿 +- 竄 +- 豎 +- 뤼 +- ఔ +- ៥ +- 捌 +- 澹 +- 皋 +- 렜 +- 쇳 +- 숀 +- Ŋ +- Ѕ +- Ъ +- ॊ +- ፥ +- ៤ +- → +- 刮 +- 吨 +- 咚 +- 咱 +- 唬 +- 嗚 +- 嚐 +- 奂 +- 孥 +- 屐 +- 巩 +- 弋 +- 怜 +- 恼 +- 憫 +- 懞 +- 扳 +- 拃 +- 拈 +- 捏 +- 掗 +- 掸 +- 摅 +- 擱 +- 柘 +- 槻 +- 氦 +- 沱 +- 洶 +- 涕 +- 淥 +- 湃 +- 溧 +- 滄 +- 牀 +- 牦 +- 牻 +- 玘 +- 玟 +- 琰 +- 璽 +- 瓒 +- 畋 +- 痤 +- 瘡 +- 盯 +- 硎 +- 禀 +- 穫 +- 窜 +- 篠 +- 繋 +- 舫 +- 蓑 +- 藿 +- 蚺 +- 諦 +- 貲 +- 赍 +- 踱 +- 踴 +- 踹 +- 蹲 +- 躇 +- 躊 +- 轍 +- 遼 +- 遽 +- 铕 +- 锶 +- 阆 +- 頰 +- 骂 +- 骊 +- 鮪 +- 鰈 +- 鰜 +- 鹳 +- 黍 +- 겡 +- 깟 +- 끍 +- 넋 +- 넵 +- 뉠 +- 뉩 +- 늉 +- 뎠 +- 뒨 +- 땁 +- 띤 +- 뭄 +- 쌘 +- 쑨 +- 
엡 +- 쳔 +- 켐 +- 콱 +- 큔 +- 텄 +- 펩 +- 퓸 +- 핌 +- 홧 +- 휑 +- Ψ +- ሼ +- 劖 +- 喐 +- 拄 +- 枣 +- 瀏 +- 牯 +- 狹 +- 砷 +- 莒 +- 蕙 +- 딕 +- 폄 +- Ğ +- ḅ +- 駱 +- 俐 +- 僑 +- 婢 +- 澍 +- 絆 +- 繕 +- 薏 +- 诃 +- 遢 +- 邺 +- 閑 +- 飢 +- 饵 +- 鵲 +- 藕 +- 咿 +- 噎 +- 孰 +- 敕 +- 栢 +- 櫸 +- 涪 +- 澱 +- 甾 +- 羰 +- 蹴 +- 顰 +- 桢 +- 佤 +- 嗶 +- 奨 +- 嫉 +- 櫚 +- 瀕 +- 砻 +- 稈 +- 蜕 +- 閏 +- 傩 +- 喹 +- 昙 +- 棣 +- 淝 +- 玖 +- 誊 +- 镕 +- 阊 +- 隴 +- 馮 +- 哌 +- 炔 +- 肓 +- 諺 +- 賜 +- 邋 +- 閲 +- 饌 +- 娛 +- 蟒 +- 嚷 +- 剰 +- 寢 +- 捶 +- 斎 +- 犸 +- 竭 +- 铋 +- 锆 +- 鯨 +- 澀 +- 拧 +- 灏 +- 蟥 +- 颚 +- 锗 +- 砾 +- 钮 +- 譴 +- 疇 +- 铤 +- 廻 +- 邰 +- 噄 +- 捻 +- 藹 +- 踵 +- 鉢 +- 铢 +- 塀 +- 鐙 +- 蒴 +- 㪐 +- 吱 +- 圪 +- 掅 +- 矲 +- 禿 +- 脍 +- 蛻 +- 蜈 +- 蠹 +- 豇 +- 龛 +- 淵 +- 爿 +- 胿 +- 蚣 +- 㩿 +- 卞 +- 叱 +- 哣 +- 嵐 +- 撼 +- 昕 +- 殴 +- 沦 +- 滂 +- 瑆 +- 糋 +- 荥 +- 萋 +- 譬 +- 賒 +- 飨 +- Ÿ +- ḫ +- 嗅 +- 孜 +- 摷 +- 澧 +- 筍 +- 箇 +- 荘 +- 讎 +- 趷 +- 퓰 +- ሿ +- 잿 +- 뚠 +- 뺌 +- 쌉 +- 쌨 +- ˮ +- ̧ +- Ӯ +- Ղ +- ಝ +- ዡ +- ጵ +- ፁ +- ṅ +- ⁠ +- √ +- ♫ +- ♭ +- 䰧 +- 佚 +- 俎 +- 俠 +- 厴 +- 吭 +- 呗 +- 唥 +- 唪 +- 喽 +- 嗒 +- 嗷 +- 囑 +- 坭 +- 垓 +- 奓 +- 岘 +- 崆 +- 巉 +- 幟 +- 彫 +- 徭 +- 悒 +- 惋 +- 惮 +- 愜 +- 慚 +- 憚 +- 戯 +- 掕 +- 揞 +- 摻 +- 擳 +- 斃 +- 旌 +- 昊 +- 昺 +- 椤 +- 楦 +- 榛 +- 樵 +- 橞 +- 欉 +- 歙 +- 歛 +- 泾 +- 濞 +- 炬 +- 犊 +- 犷 +- 狒 +- 獠 +- 珩 +- 琚 +- 畲 +- 瘁 +- 瘌 +- 皈 +- 皓 +- 皚 +- 眯 +- 睄 +- 睬 +- 瞅 +- 硞 +- 碉 +- 祆 +- 綾 +- 肨 +- 芎 +- 苴 +- 茯 +- 蒡 +- 蓁 +- 蔊 +- 薷 +- 蘸 +- 虓 +- 蛀 +- 蜇 +- 蜍 +- 蟀 +- 蟋 +- 蠶 +- 訐 +- 詡 +- 詬 +- 誨 +- 譖 +- 谚 +- 貶 +- 賄 +- 蹌 +- 蹣 +- 蹺 +- 躼 +- 輟 +- 迾 +- 郃 +- 錬 +- 钒 +- 铷 +- 锚 +- 锹 +- 镥 +- 隕 +- 靭 +- 韻 +- 頷 +- 頹 +- 餮 +- 饅 +- 馩 +- 鳚 +- 鳶 +- 鸽 +- 黾 +- 鼬 +- 겋 +- 괭 +- 궂 +- 꽌 +- 꽥 +- 넝 +- 뇸 +- 덨 +- 땟 +- 떫 +- 떽 +- 뚸 +- 렝 +- 밈 +- 뻬 +- 뽜 +- 삣 +- 쉈 +- 쌜 +- 썜 +- 쐐 +- 쓕 +- 씰 +- 읒 +- 읜 +- 잽 +- 젱 +- 죗 +- 짊 +- 쮸 +- 쯘 +- 쯩 +- 칡 +- 캤 +- 켤 +- 쿱 +- 퀀 +- 탯 +- 퉤 +- 팼 +- 헙 +- 횔 +- 힝 +- 荀 +- 룟 +- 휸 +- 佟 +- 坷 +- 塱 +- 壢 +- 扈 +- 抾 +- 泱 +- 琵 +- 谛 +- 鏊 +- 鑿 +- 钕 +- 顎 +- 駒 +- 倘 +- 箴 +- ଐ +- 嗦 +- 嶋 +- 搁 +- 曙 +- 洎 +- 煜 +- 牘 +- 禱 +- 缪 +- 軚 +- 镐 +- 闰 +- 陲 +- 雒 +- 颊 +- 鸨 +- 麺 +- 黚 +- 黜 +- 宕 +- 挛 +- 摯 +- 沅 +- 謬 +- 錆 +- 颤 +- 暢 +- 冼 +- 勖 +- 勺 +- 啅 +- 孱 +- 幂 +- 斝 +- 橼 +- 珥 +- 踉 +- 鎭 +- 钯 +- 镆 +- 顼 +- 餡 +- 馭 +- 鲽 +- 鲿 +- 鹋 +- 鹩 +- 玲 +- 磊 +- 铂 +- 闫 +- 坍 +- 壕 +- 沤 +- 狡 +- 繊 +- 轔 +- 韋 +- 枷 +- 肼 +- 拮 +- 氡 +- 籐 +- 臥 +- 謾 +- 跷 +- 馔 +- 馗 +- 焕 +- 揽 +- 瘓 +- 蜣 +- 贅 +- 瘠 +- 沌 +- 걔 +- 餌 +- 嵴 +- 浄 +- 炱 +- 倖 +- 臆 +- 欒 +- 烨 +- 纥 +- 胭 +- 趖 +- 郛 +- 钳 +- 骹 +- 仞 +- 搾 +- 擂 +- 棧 +- 楣 +- 殻 +- 瘿 +- 簫 +- 裳 +- 霓 +- 昐 +- 栈 +- 蛍 +- 觚 +- 逵 +- 髻 +- 夔 +- 溥 +- 滲 +- 臍 +- 釵 +- 鈿 +- 閥 +- 鲾 +- 鹊 +- 唂 +- 寃 +- 戥 +- 烊 +- 碇 +- 稟 +- 缇 +- ៨ +- 匐 +- 奚 +- 懊 +- 曷 +- 楂 +- 螨 +- 趺 +- 镍 +- 펨 +- ± +- ̋ +- ໊ +- ṯ +- 嗯 +- 넒 +- 뙤 +- 뤠 +- ኸ +- ዪ +- ጐ +- ጿ +- ៩ +- ṉ +- ∆ +- ● +- 䆀 +- 佮 +- 佻 +- 俅 +- 儈 +- 剡 +- 劏 +- 厍 +- 咦 +- 咹 +- 啓 +- 喼 +- 嘸 +- 嚢 +- 圑 +- 塭 +- 妬 +- 庹 +- 弍 +- 徇 +- 忿 +- 悚 +- 惇 +- 戕 +- 戽 +- 拙 +- 拭 +- 捹 +- 掞 +- 掟 +- 掳 +- 摙 +- 柢 +- 桩 +- 桮 +- 梱 +- 梼 +- 榷 +- 樁 +- 橐 +- 殯 +- 毡 +- 氚 +- 汲 +- 涨 +- 淞 +- 淸 +- 渌 +- 溼 +- 煨 +- 熒 +- 玮 +- 璁 +- 璈 +- 疚 +- 疟 +- 瞑 +- 砒 +- 硃 +- 禡 +- 禦 +- 穌 +- 窠 +- 筜 +- 筼 +- 箓 +- 篳 +- 聼 +- 舢 +- 舨 +- 茔 +- 莴 +- 莽 +- 萺 +- 蓠 +- 蓺 +- 蔫 +- 虬 +- 蛏 +- 蛞 +- 蜢 +- 蝓 +- 螣 +- 袈 +- 裟 +- 諫 +- 诠 +- 谶 +- 賰 +- 趌 +- 轳 +- 辘 +- 酝 +- 鈣 +- 鉗 +- 鉸 +- 銭 +- 鍚 +- 鐡 +- 鐸 +- 顱 +- 飜 +- 饑 +- 馒 +- 駟 +- 鮟 +- 鯪 +- 鱇 +- 鳔 +- 鸾 +- 鹪 +- 굥 +- 귐 +- 꾜 +- 낏 +- 녈 +- 놥 +- 뉸 +- 댜 +- 떈 +- 떰 +- 롄 +- 맀 +- 먀 +- 멏 +- 멨 +- 몀 +- 뭍 +- 밌 +- 뱄 +- 뵤 +- 뻠 +- 숩 +- 쌂 +- 쑹 +- 씅 +- 앝 +- 옙 +- 좃 +- 줴 +- 짔 +- 쩠 +- 쬬 +- 칬 +- 켸 +- 쿰 +- 탰 +- 텟 +- 퇸 +- 팹 +- 폔 +- 퐈 +- 푤 +- 훙 +- 휀 +- 힉 +- 떔 +- ޥ +- † +- 罨 +- 혠 +- Ư +- 冉 +- 恽 +- 擷 +- 昀 +- 桟 +- 泗 +- 泸 +- 璐 +- 盃 +- 讶 +- 锂 +- 鬢 +- 뾱 +- ּ +- ऍ +- ഢ +- ኺ +- 傃 +- 嚥 +- 芰 +- 蚝 +- 龢 +- 俣 +- 儍 +- 兖 +- 厩 +- 吋 +- 垄 +- 娯 +- 娼 +- 挞 +- 敞 +- 熹 +- 珒 +- 笹 +- 罔 +- 翎 +- 
膦 +- 苳 +- 虜 +- 账 +- 钵 +- 俩 +- 儂 +- 劊 +- 呜 +- 庠 +- 挾 +- 斕 +- 梣 +- 櫈 +- 瀟 +- 癆 +- 碩 +- 绾 +- 荏 +- 褦 +- 諷 +- 鑠 +- 钞 +- 铯 +- 隹 +- 騅 +- 骢 +- 鲀 +- 倭 +- 傀 +- 儡 +- 巽 +- 捆 +- 柠 +- 汴 +- 縞 +- 萘 +- 蛎 +- 錠 +- 颉 +- 帷 +- 疃 +- 嗟 +- 洄 +- 灿 +- 猾 +- 硖 +- 祜 +- 紊 +- 衢 +- 诀 +- 躯 +- 醍 +- 醐 +- 鼾 +- 镫 +- 晁 +- 郸 +- 啻 +- 邯 +- 枠 +- 橢 +- 淌 +- 湮 +- 瘍 +- 襞 +- 髣 +- 綻 +- 佯 +- 唛 +- 嘿 +- 壌 +- 徨 +- 氙 +- 漪 +- 硼 +- 礫 +- 綢 +- 蒞 +- 蛰 +- 辇 +- 铳 +- 鰐 +- 擤 +- 猗 +- 姣 +- 孽 +- 搦 +- 杮 +- 枥 +- 柰 +- 棖 +- 煖 +- 箋 +- 綵 +- 胍 +- 苁 +- 苧 +- 茑 +- 莩 +- 薔 +- 衬 +- 衲 +- 諜 +- 賂 +- 邃 +- 铆 +- 隈 +- 甕 +- ኰ +- ⋯ +- 傖 +- 兎 +- 凊 +- 嗝 +- 坩 +- 塙 +- 櫥 +- 沓 +- 痹 +- 糉 +- 莸 +- 薟 +- 蠕 +- 踅 +- 鉎 +- 鲫 +- 麭 +- ǫ +- Ѹ +- Ր +- ḏ +- ♯ +- 剾 +- 甌 +- 絚 +- 跩 +- 蹬 +- 빻 +- 쾰 +- 퀭 +- ḷ +- 됴 +- 틔 +- "\x93" +- "\x94" +- Ð +- Ę +- Ʋ +- ǎ +- Ǹ +- ȃ +- ȏ +- Ʉ +- ̌ +- ̱ +- ۛ +- ॄ +- ॔ +- ୧ +- ቿ +- ኟ +- ዒ +- ዖ +- ጳ +- ጴ +- ጺ +- ឮ +- ḩ +- Ḫ +- ṇ +- ↔ +- ≥ +- ⻣ +- 䱽 +- 亩 +- 亳 +- 佘 +- 俟 +- 俦 +- 僻 +- 冴 +- 剎 +- 匆 +- 叟 +- 呉 +- 呦 +- 呷 +- 唈 +- 啵 +- 喩 +- 喰 +- 嘰 +- 塬 +- 壱 +- 壸 +- 妊 +- 婊 +- 婕 +- 婵 +- 岀 +- 嶙 +- 巔 +- 巿 +- 廪 +- 弸 +- 恺 +- 悗 +- 悸 +- 戾 +- 扻 +- 抆 +- 揳 +- 搓 +- 撬 +- 斡 +- 昃 +- 曱 +- 栻 +- 椋 +- 楋 +- 橿 +- 檜 +- 歕 +- 殄 +- 沥 +- 浚 +- 淤 +- 淩 +- 渇 +- 濘 +- 烚 +- 焘 +- 燜 +- 燧 +- 猁 +- 猄 +- 猖 +- 猞 +- 獗 +- 珐 +- 琨 +- 瑋 +- 甴 +- 疍 +- 痣 +- 痧 +- 癀 +- 眵 +- 眸 +- 瞎 +- 瞪 +- 砥 +- 礑 +- 祢 +- 禰 +- 稞 +- 箩 +- 簍 +- 糍 +- 糜 +- 紡 +- 緲 +- 縹 +- 繄 +- 罠 +- 羣 +- 聿 +- 脛 +- 脯 +- 艋 +- 芜 +- 苎 +- 荇 +- 菴 +- 萦 +- 薮 +- 蜷 +- 蠣 +- 褸 +- 訛 +- 詮 +- 諧 +- 譎 +- 谒 +- 貘 +- 贛 +- 贮 +- 躋 +- 郯 +- 鄄 +- 鄱 +- 酣 +- 醪 +- 醺 +- 鎌 +- 铪 +- 锣 +- 闔 +- 闳 +- 霾 +- 靳 +- 鞄 +- 鞅 +- 餋 +- 馏 +- 馥 +- 驥 +- 骧 +- 髴 +- 鯇 +- 鱗 +- 鲲 +- 鲻 +- 鳎 +- 鴿 +- 鷄 +- 鷯 +- 鸝 +- 鼙 +- 龠 +- 갉 +- 갼 +- 겊 +- 겝 +- 곘 +- 껸 +- 꼄 +- 꼳 +- 끅 +- 냘 +- 넚 +- 놘 +- 눟 +- 뉜 +- 늣 +- 댤 +- 덫 +- 뎄 +- 됙 +- 듈 +- 땍 +- 뜀 +- 롹 +- 륑 +- 맜 +- 멎 +- 뭠 +- 뭡 +- 뭥 +- 뮴 +- 밎 +- 밨 +- 뱁 +- 뷴 +- 빘 +- 뻰 +- 뿟 +- 뿡 +- 삻 +- 샅 +- 샨 +- 셤 +- 셩 +- 셴 +- 솎 +- 쉣 +- 쉿 +- 슌 +- 슾 +- 썻 +- 쎅 +- 쎔 +- 쏵 +- 쏸 +- 쒯 +- 앎 +- 얉 +- 얐 +- 엤 +- 옜 +- 옫 +- 읃 +- 졍 +- 좍 +- 좐 +- 죵 +- 줜 +- 쥔 +- 쥘 +- 짆 +- 짯 +- 쨀 +- 쨉 +- 쩧 +- 쭙 +- 챂 +- 촙 +- 췻 +- 츨 +- 칟 +- 켕 +- 큄 +- 큣 +- 퉜 +- 핟 +- 햑 +- 헴 +- 헸 +- 힛 +- ῖ +- 땃 +- 띔 +- 읎 +- 킁 +- "\x96" +- ǰ +- ഋ +- 咄 +- 媠 +- 漕 +- 潦 +- 舺 +- 阇 +- 굣 +- 뭅 +- 푀 +- ʙ +- 뱌 +- 䟴 +- 伶 +- 嚮 +- 揇 +- 敛 +- 椴 +- 槓 +- 殆 +- 犍 +- 瑄 +- 眩 +- 碁 +- 耙 +- 膣 +- 蔺 +- 诰 +- 赃 +- 赈 +- 赉 +- 踮 +- 钼 +- 鸯 +- 鹬 +- 尪 +- 倯 +- 匀 +- 匮 +- 吡 +- 啍 +- 啕 +- 啧 +- 啾 +- 嗄 +- 噛 +- 嚎 +- 妤 +- 娠 +- 媾 +- 嫣 +- 嵋 +- 彧 +- 悽 +- 拶 +- 掬 +- 摳 +- 摹 +- 擏 +- 旳 +- 枹 +- 柩 +- 汞 +- 獺 +- 珰 +- 珲 +- 瑳 +- 璉 +- 畝 +- 痉 +- 稜 +- 篋 +- 缮 +- 耦 +- 芃 +- 虻 +- 貞 +- 赂 +- 蹋 +- 酐 +- 鋤 +- 鏢 +- 鏰 +- 钐 +- 陝 +- 鹫 +- 坨 +- 覃 +- 〇 +- 傉 +- 哑 +- 嗓 +- 屹 +- 崭 +- 涞 +- 痪 +- 瘫 +- 粛 +- 縊 +- 苛 +- 蜉 +- 蜒 +- 蜿 +- 诡 +- 遡 +- 鵜 +- 齧 +- 虢 +- 颧 +- 咥 +- 杓 +- 桡 +- 簔 +- 綽 +- 飪 +- 诅 +- 딨 +- ၎ +- 덟 +- 얻 +- 젊 +- 옮 +- ٘ +- ଙ +- 꽤 +- 짧 +- Ḍ +- 훨 +- Ǧ +- 굉 +- 걍 +- 첫 +- 굳 +- Ẓ +- 浊 +- 聡 +- 荁 +- 뭘 +- ໋ +- 傕 +- 撸 +- 汜 +- 洙 +- 疽 +- 瘧 +- 癩 +- 纮 +- 蝥 +- 觐 +- 讃 +- 豔 +- ⿸ +- 疒 +- 硩 +- 伛 +- 姝 +- 庖 +- 徛 +- 攑 +- 枭 +- 烫 +- 睁 +- 矯 +- 竈 +- 绽 +- 翃 +- 聶 +- 胼 +- 膻 +- 芍 +- 荪 +- 蛸 +- 螟 +- 蠓 +- 谯 +- 跻 +- 蹒 +- 軋 +- 郏 +- 錘 +- 闍 +- 阖 +- 髎 +- 鮎 +- 鸲 +- 괜 +- 鱟 +- ṁ +- 䒏 +- 刣 +- 勍 +- 匏 +- 咾 +- 哆 +- 嚾 +- 囥 +- 墘 +- 忤 +- 怛 +- 扲 +- 扴 +- 攥 +- 昉 +- 栃 +- 栲 +- 栳 +- 榫 +- 欑 +- 氘 +- 烰 +- 牁 +- 牂 +- 瑀 +- 甪 +- 畷 +- 疕 +- 疶 +- 筅 +- 緁 +- 緹 +- 羶 +- 肟 +- 蝰 +- 袓 +- 誑 +- 诲 +- 谪 +- 迌 +- 郴 +- 郾 +- 铽 +- 镉 +- 镱 +- 颞 +- 髡 +- 鵠 +- 鼷 +- Ù +- ၤ +- ≡ +- ≪ +- 尕 +- 敧 +- 敨 +- 桅 +- 檨 +- 狛 +- 痀 +- 睑 +- 鑢 +- 閻 +- 骯 +- Ɔ +- Ӌ +- Ӷ +- Ầ +- ∨ +- Ⱅ +- ゔ +- 㓁 +- 䖙 +- 毵 +- 낋 +- 녘 +- 벘 +- 봣 +- 졉 +- "\x8D" +- "\x92" +- Ï +- Ơ +- ǃ +- ǐ +- ɑ +- ʔ +- ʹ +- ̐ +- ̟ +- ̠ +- ̨ +- ̪ +- ̲ +- Ѐ +- Ђ +- Ѓ +- ַ +- ٓ +- ٖ +- ٴ +- 
ۘ +- ऌ +- ऎ +- ऩ +- ॆ +- ઼ +- ଊ +- ୄ +- ୨ +- ୪ +- ୫ +- ୬ +- ୭ +- ୮ +- ୯ +- ఋ +- ་ +- ན +- ሓ +- ቓ +- ቚ +- ኜ +- ኻ +- ጇ +- ፗ +- ፦ +- ឦ +- ឫ +- ឳ +- ៎ +- ḡ +- ṟ +- Ẩ +- Ố +- Ὑ +- ₽ +- ∞ +- ∼ +- ≫ +- ⋅ +- ◦ +- ◯ +- ♂ +- ✅ +- ✔ +- ⟨ +- ⟩ +- Ⱎ +- 㧎 +- 䒐 +- 䴘 +- 䴙 +- 仄 +- 仝 +- 仱 +- 伉 +- 侖 +- 侷 +- 俍 +- 倣 +- 倻 +- 偌 +- 傭 +- 僫 +- 儆 +- 儑 +- 儕 +- 儬 +- 冑 +- 刍 +- 匾 +- 呎 +- 唌 +- 唎 +- 唏 +- 唖 +- 啋 +- 啪 +- 啮 +- 喾 +- 嗙 +- 噂 +- 嚓 +- 嚡 +- 囤 +- 囪 +- 坜 +- 坵 +- 埈 +- 埞 +- 埲 +- 埽 +- 堑 +- 墀 +- 壅 +- 壙 +- 奣 +- 妫 +- 娆 +- 嬴 +- 嬷 +- 尣 +- 尨 +- 屄 +- 屢 +- 岂 +- 岈 +- 岫 +- 峄 +- 峇 +- 嵖 +- 嵗 +- 嵯 +- 巯 +- 幃 +- 幄 +- 弔 +- 弢 +- 彊 +- 彥 +- 怙 +- 恂 +- 恣 +- 悌 +- 惚 +- 惬 +- 愷 +- 憍 +- 懍 +- 扂 +- 扽 +- 抌 +- 抦 +- 拑 +- 挹 +- 捅 +- 掻 +- 掾 +- 揆 +- 搐 +- 搡 +- 搤 +- 搩 +- 摃 +- 摑 +- 摖 +- 摵 +- 撨 +- 擛 +- 攕 +- 攘 +- 斂 +- 旆 +- 旮 +- 旯 +- 晷 +- 杙 +- 桄 +- 梆 +- 棨 +- 椏 +- 楗 +- 槱 +- 樫 +- 樾 +- 橈 +- 殮 +- 氈 +- 氖 +- 泅 +- 泠 +- 泯 +- 泷 +- 洘 +- 浉 +- 涓 +- 涝 +- 湫 +- 溅 +- 溋 +- 漉 +- 澈 +- 瀼 +- 灸 +- 烺 +- 焐 +- 熥 +- 熾 +- 牕 +- 犅 +- 玆 +- 玠 +- 玹 +- 珞 +- 珺 +- 璵 +- 瓘 +- 瓛 +- 瓮 +- 疡 +- 痟 +- 痼 +- 盱 +- 眙 +- 睞 +- 睢 +- 瞽 +- 矸 +- 矽 +- 砧 +- 磕 +- 磲 +- 秅 +- 穢 +- 窈 +- 窍 +- 窕 +- 窘 +- 笆 +- 筏 +- 筮 +- 筵 +- 箸 +- 簸 +- 籴 +- 粕 +- 粧 +- 粱 +- 絮 +- 綖 +- 繃 +- 繆 +- 纶 +- 纻 +- 绚 +- 缎 +- 缭 +- 缵 +- 罒 +- 罓 +- 翕 +- 翥 +- 聾 +- 腻 +- 膑 +- 膵 +- 苡 +- 茭 +- 荖 +- 莨 +- 菖 +- 菪 +- 萁 +- 葺 +- 蒟 +- 蓍 +- 蔑 +- 蕲 +- 薰 +- 藔 +- 藳 +- 虯 +- 蛉 +- 蛲 +- 蟆 +- 蟯 +- 衮 +- 袒 +- 袴 +- 覇 +- 觅 +- 觴 +- 詆 +- 詛 +- 諡 +- 謔 +- 讐 +- 讥 +- 讪 +- 讫 +- 诔 +- 诙 +- 诟 +- 诧 +- 诩 +- 诹 +- 谌 +- 谠 +- 赘 +- 赟 +- 赡 +- 赭 +- 趒 +- 跙 +- 蹆 +- 蹶 +- 軾 +- 輊 +- 輒 +- 輷 +- 轆 +- 轶 +- 轼 +- 辊 +- 辶 +- 迍 +- 邛 +- 邬 +- 酃 +- 醴 +- 鋐 +- 鎊 +- 鎧 +- 钲 +- 铖 +- 镛 +- 镠 +- 镡 +- 镤 +- 镳 +- 闱 +- 闷 +- 闾 +- 阉 +- 隗 +- 雺 +- 韬 +- 頤 +- 飧 +- 饯 +- 騃 +- 驛 +- 驺 +- 骄 +- 骘 +- 骷 +- 髅 +- 鮓 +- 鮨 +- 鰆 +- 鱉 +- 鲎 +- 鲟 +- 鲴 +- 鷲 +- 鸚 +- 鸳 +- 麂 +- 鼢 +- Ꝑ +- 갗 +- 걈 +- 걧 +- 겻 +- 괍 +- 괏 +- 귯 +- 깞 +- 깸 +- 꺅 +- 꺤 +- 껨 +- 꼇 +- 꽐 +- 꽜 +- 꽨 +- 꽹 +- 꾿 +- 꿑 +- 꿘 +- 꿜 +- 뀜 +- 뀰 +- 낢 +- 냣 +- 뉼 +- 늗 +- 닜 +- 닪 +- 닯 +- 닸 +- 댇 +- 뎡 +- 돚 +- 돠 +- 됀 +- 됑 +- 됫 +- 딿 +- 떌 +- 떱 +- 뗌 +- 뗏 +- 똡 +- 뙈 +- 뛌 +- 뛘 +- 뜷 +- 뜽 +- 띈 +- 띌 +- 랒 +- 랟 +- 랮 +- 럐 +- 럤 +- 렿 +- 뢴 +- 룃 +- 뤤 +- 릈 +- 맫 +- 맽 +- 먁 +- 먄 +- 먕 +- 먙 +- 뫠 +- 뫼 +- 묀 +- 묜 +- 뭑 +- 뭬 +- 믁 +- 믕 +- 밷 +- 뱍 +- 뱡 +- 벋 +- 벛 +- 벰 +- 벱 +- 벵 +- 볌 +- 볏 +- 볻 +- 봈 +- 봘 +- 봠 +- 봡 +- 봥 +- 붇 +- 뷘 +- 뷜 +- 뷸 +- 빱 +- 빶 +- 빾 +- 뺘 +- 뺸 +- 뼉 +- 뼜 +- 뽝 +- 뿝 +- 쁩 +- 삯 +- 샆 +- 셕 +- 셥 +- 셸 +- 솝 +- 쉪 +- 슦 +- 싰 +- 싷 +- 썃 +- 쎠 +- 쎤 +- 쎼 +- 쏱 +- 쐴 +- 쒀 +- 쒸 +- 쒹 +- 씃 +- 앏 +- 앜 +- 앟 +- 앺 +- 얖 +- 얠 +- 얬 +- 얾 +- 엏 +- 옂 +- 옉 +- 옘 +- 옭 +- 옼 +- 왐 +- 왝 +- 왬 +- 왯 +- 왱 +- 욌 +- 욧 +- 웝 +- 웟 +- 웡 +- 읓 +- 읗 +- 읭 +- 읺 +- 잍 +- 잛 +- 쟝 +- 쟬 +- 젔 +- 젭 +- 젰 +- 젹 +- 졀 +- 졔 +- 좇 +- 죈 +- 죔 +- 죤 +- 쥣 +- 쥰 +- 쩄 +- 쩟 +- 쩬 +- Ứ +- ኽ +- 놧 +- 뇰 +- 맇 +- 묙 +- 쁸 +- 쌕 +- ¤ +- ՚ +- ః +- ☆ +- 傈 +- 僳 +- 噤 +- 崑 +- 懣 +- 摠 +- 曽 +- 穂 +- 酺 +- 鉀 +- 麶 +- 黗 +- 됭 +- 싧 +- 쌧 +- ฃ +- 傱 +- 匍 +- 唁 +- 噱 +- 堃 +- 堯 +- 壠 +- 奅 +- 嫖 +- 峥 +- 崞 +- 嵬 +- 巳 +- 忝 +- 恁 +- 愴 +- 揤 +- 攣 +- 晦 +- 暸 +- 杻 +- 楮 +- 樨 +- 橂 +- 欹 +- 殒 +- 殡 +- 沬 +- 涤 +- 湟 +- 漯 +- 澶 +- 熁 +- 狯 +- 獴 +- 玑 +- 玜 +- 玳 +- 瓠 +- 痠 +- 痫 +- 癫 +- 砦 +- 硨 +- 簁 +- 糶 +- 緯 +- 缜 +- 胛 +- 脣 +- 腑 +- 芫 +- 茌 +- 荦 +- 蚩 +- 蜚 +- 蜞 +- 螋 +- 蠋 +- 裇 +- 裎 +- 褧 +- 覕 +- 詣 +- 誣 +- 酂 +- 钫 +- 钿 +- 铌 +- 阏 +- 餉 +- 馀 +- 駈 +- 髭 +- 鼯 +- 䴕 +- 伫 +- 侗 +- 俹 +- 偲 +- 勲 +- 呕 +- 嚏 +- 圉 +- 垠 +- 埤 +- 岜 +- 崮 +- 廆 +- 愃 +- 慄 +- 掴 +- 攒 +- 昵 +- 曦 +- 栀 +- 桁 +- 槌 +- 槛 +- 殚 +- 浞 +- 淯 +- 潷 +- 濮 +- 瀍 +- 燹 +- 猝 +- 獰 +- 瑮 +- 疱 +- 瞿 +- 筈 +- 羡 +- 耻 +- 胄 +- 胳 +- 蓀 +- 裬 +- 觑 +- 諲 +- 謖 +- 讹 +- 诣 +- 赓 +- 輘 +- 辕 +- 辫 +- 逡 +- 遑 +- 釁 +- 鈞 +- 鈾 +- 铟 +- 铮 +- 锔 +- 镎 +- 镗 +- 闕 +- 闼 +- 阑 +- 霰 +- 靛 +- 颔 +- 驀 +- 驪 +- 鰓 +- 鶇 +- 떻 +- 娣 +- 暎 +- 滦 +- 醮 +- 俵 +- 剽 +- 圹 +- 奘 +- 孛 +- 揷 +- 搅 +- 搥 +- 潢 +- 
焮 +- 燐 +- 獭 +- 笕 +- 絹 +- 翽 +- 臯 +- 蛊 +- 贄 +- 醯 +- 饴 +- 惺 +- 桝 +- 淅 +- 蝋 +- 馊 +- 뭔 +- 膾 +- 钪 +- 陛 +- ํ +- 蔦 +- +- +- +init: null +input_size: null +ctc_conf: + dropout_rate: 0.0 + ctc_type: builtin + reduce: true + ignore_nan_grad: null + zero_infinity: true + brctc_risk_strategy: exp + brctc_group_strategy: end + brctc_risk_factor: 0.0 +use_preprocessor: true +token_type: bpe +bpemodel: data/token_list/bpe_unigram50000/bpe.model +non_linguistic_symbols: null +cleaner: null +g2p: null +speech_volume_normalize: null +rir_scp: null +rir_apply_prob: 1.0 +noise_scp: null +noise_apply_prob: 1.0 +noise_db_range: '13_15' +short_noise_thres: 0.5 +frontend: default +frontend_conf: + n_fft: 512 + win_length: 400 + hop_length: 160 + fs: 16k +specaug: specaug +specaug_conf: + apply_time_warp: false + time_warp_window: 5 + time_warp_mode: bicubic + apply_freq_mask: true + freq_mask_width_range: + - 0 + - 27 + num_freq_mask: 2 + apply_time_mask: true + time_mask_width_ratio_range: + - 0.0 + - 0.05 + num_time_mask: 4 +normalize: global_mvn +normalize_conf: + stats_file: exp/s2t_stats_raw_bpe50000/train/feats_stats.npz +model: espnet +model_conf: + ctc_weight: 0.3 + lsm_weight: 0.1 + length_normalized_loss: false + sym_na: <na> +preencoder: null +preencoder_conf: {} +encoder: e_branchformer +encoder_conf: + output_size: 768 + attention_heads: 12 + attention_layer_type: selfattn + pos_enc_layer_type: abs_pos + rel_pos_type: latest + attention_qk_norm: false + use_flash_attn: true + cgmlp_linear_units: 3072 + cgmlp_conv_kernel: 31 + use_linear_after_conv: false + gate_activation: identity + num_blocks: 9 + dropout_rate: 0.1 + positional_dropout_rate: 0.1 + attention_dropout_rate: 0.1 + input_layer: conv2d + layer_drop_rate: 0.0 + linear_units: 3072 + positionwise_layer_type: linear + use_ffn: true + macaron_ffn: true + merge_conv_kernel: 31 +postencoder: null +postencoder_conf: {} +decoder: transformer +decoder_conf: + attention_heads: 12 + linear_units: 3072 + num_blocks: 9 + dropout_rate: 0.1 + positional_dropout_rate: 0.1 + self_attention_dropout_rate: 0.1 + src_attention_dropout_rate: 0.1 + self_attention_qk_norm: false + src_attention_qk_norm: false + self_attention_use_flash_attn: true + src_attention_use_flash_attn: true +preprocessor: s2t +preprocessor_conf: + text_prev_name: text_prev + text_ctc_name: text_ctc + fs: 16000 + na_symbol: <na> + speech_length: 30 + speech_resolution: 0.02 + speech_init_silence: 30 + text_prev_apply_prob: 0.5 + time_apply_prob: 0.5 + notime_symbol: <notimestamps> + first_time_symbol: <0.00> + last_time_symbol: <30.00> +required: +- output_dir +- token_list +version: '202310' +distributed: true diff --git a/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/acc.png b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/acc.png new file mode 100644 index 0000000000000000000000000000000000000000..8853c5f3ff99a599ed85ea77fbeabef96e34a551 Binary files /dev/null and b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/acc.png differ diff --git a/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/backward_time.png b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/backward_time.png new file mode 100644 index 0000000000000000000000000000000000000000..e37bd66df95d0642062db825c4361a51d9b3b046 Binary files /dev/null and
b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/backward_time.png differ diff --git a/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/cer.png b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/cer.png new file mode 100644 index 0000000000000000000000000000000000000000..b6b20af54573edce4fde3315e348c723226c82c9 Binary files /dev/null and b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/cer.png differ diff --git a/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/cer_ctc.png b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/cer_ctc.png new file mode 100644 index 0000000000000000000000000000000000000000..5feb6b1f23f7925f3fb2cad32a9dc0cc44f230a2 Binary files /dev/null and b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/cer_ctc.png differ diff --git a/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/clip.png b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/clip.png new file mode 100644 index 0000000000000000000000000000000000000000..a9b24f6cc646381cca9c7c86f6e9caa93d392363 Binary files /dev/null and b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/clip.png differ diff --git a/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/forward_time.png b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/forward_time.png new file mode 100644 index 0000000000000000000000000000000000000000..aee3ce0141d56c8625e4a5a487417641b75f36ba Binary files /dev/null and b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/forward_time.png differ diff --git a/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/gpu_max_cached_mem_GB.png b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/gpu_max_cached_mem_GB.png new file mode 100644 index 0000000000000000000000000000000000000000..45a9711b1672380b1543bf0202f7ea8714549e4a Binary files /dev/null and b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/gpu_max_cached_mem_GB.png differ diff --git a/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/grad_norm.png b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/grad_norm.png new file mode 100644 index 0000000000000000000000000000000000000000..00924f4379f744f4519768bc04aad1f374ca5c0f Binary files /dev/null and b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/grad_norm.png differ diff --git a/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/iter_time.png b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/iter_time.png new file mode 100644 index 0000000000000000000000000000000000000000..ce713e68019e5dbf547d774cba411b466fa9f3af Binary files /dev/null and 
b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/iter_time.png differ diff --git a/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/loss.png b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/loss.png new file mode 100644 index 0000000000000000000000000000000000000000..4aee7ca7fa6f29d69e6ff02f735d8105769a82bd Binary files /dev/null and b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/loss.png differ diff --git a/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/loss_att.png b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/loss_att.png new file mode 100644 index 0000000000000000000000000000000000000000..42c14c5dc55d0d92845280b87a22849d6569a746 Binary files /dev/null and b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/loss_att.png differ diff --git a/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/loss_ctc.png b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/loss_ctc.png new file mode 100644 index 0000000000000000000000000000000000000000..90e758a9b13dfb42158e79b3e5f37affd83e14aa Binary files /dev/null and b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/loss_ctc.png differ diff --git a/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/loss_scale.png b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/loss_scale.png new file mode 100644 index 0000000000000000000000000000000000000000..2d48607292409d98942f790b4ca148f4136f3210 Binary files /dev/null and b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/loss_scale.png differ diff --git a/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/optim0_lr0.png b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/optim0_lr0.png new file mode 100644 index 0000000000000000000000000000000000000000..69fb67fa2fc9a8188ac4291daf9b5ec1511d48c3 Binary files /dev/null and b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/optim0_lr0.png differ diff --git a/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/optim_step_time.png b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/optim_step_time.png new file mode 100644 index 0000000000000000000000000000000000000000..bdbe8df71deb27b5ad9d3e3af6a1fd0625784aa0 Binary files /dev/null and b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/optim_step_time.png differ diff --git a/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/train_time.png b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/train_time.png new file mode 100644 index 0000000000000000000000000000000000000000..f933bbd07194bb16c54fc478f67c9468bf784e0d Binary files /dev/null and 
b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/train_time.png differ diff --git a/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/wer.png b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/wer.png new file mode 100644 index 0000000000000000000000000000000000000000..6cc79bc697417d3b0321e85def9640a11fe9d98c Binary files /dev/null and b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/images/wer.png differ diff --git a/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/train.2.log b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/train.2.log new file mode 100644 index 0000000000000000000000000000000000000000..8f59a2d8e4a10e29cc5bc824a5528c23bbb9dab4 --- /dev/null +++ b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/train.2.log @@ -0,0 +1,1322 @@ +# Running on ip-10-0-194-127 +# Started at Mon Feb 26 17:43:20 UTC 2024 +# SLURMD_NODENAME=ip-10-0-194-127 +# SLURM_CLUSTER_NAME=external +# SLURM_CONF=/opt/slurm/etc/slurm.conf +# SLURM_CPUS_ON_NODE=80 +# SLURM_EXPORT_ENV=PATH +# SLURM_GET_USER_ENV=1 +# SLURM_GPUS_ON_NODE=8 +# SLURM_GPUS_PER_NODE=8 +# SLURM_GTIDS=0 +# SLURM_JOBID=81044 +# SLURM_JOB_ACCOUNT=wavlab +# SLURM_JOB_CPUS_PER_NODE='80(x2)' +# SLURM_JOB_END_TIME=1709574196 +# SLURM_JOB_GID=1879800513 +# SLURM_JOB_GPUS=0,1,2,3,4,5,6,7 +# SLURM_JOB_ID=81044 +# SLURM_JOB_NAME=exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/train.log +# SLURM_JOB_NODELIST=ip-10-0-194-127,ip-10-0-216-33 +# SLURM_JOB_NUM_NODES=2 +# SLURM_JOB_PARTITION=a40x +# SLURM_JOB_QOS=normal +# SLURM_JOB_START_TIME=1708969395 +# SLURM_JOB_UID=1879802134 +# SLURM_JOB_USER=pengyf +# SLURM_LOCALID=0 +# SLURM_MEM_PER_CPU=10000 +# SLURM_NNODES=2 +# SLURM_NODEID=0 +# SLURM_NODELIST=ip-10-0-194-127,ip-10-0-216-33 +# SLURM_NODE_ALIASES='(null)' +# SLURM_OPEN_MODE=a +# SLURM_PRIO_PROCESS=0 +# SLURM_PROCID=0 +# SLURM_SCRIPT_CONTEXT=prolog_task +# SLURM_SUBMIT_DIR=/weka/home-pengyf/espnet-owsm-train/egs2/owsm_v3.1_licensefree_nosa/s2t1 +# SLURM_SUBMIT_HOST=ip-172-17-22-214 +# SLURM_TASKS_PER_NODE='80(x2)' +# SLURM_TASK_PID=32642 +# SLURM_TOPOLOGY_ADDR=ip-10-0-194-127 +# SLURM_TOPOLOGY_ADDR_PATTERN=node +# SLURM_WORKING_CLUSTER=external:ip-172-17-55-206:6817:9984:109 +# srun --export=ALL python3 -m espnet2.bin.s2t_train --use_preprocessor true --bpemodel data/token_list/bpe_unigram50000/bpe.model --token_type bpe --token_list data/token_list/bpe_unigram50000/tokens.txt --non_linguistic_symbols none --cleaner none --g2p none --valid_data_path_and_name_and_type dump/raw/dev/wav.scp,speech,kaldi_ark --valid_shape_file exp/s2t_stats_raw_bpe50000/valid/speech_shape --resume true --fold_length 80000 --output_dir exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000 --config conf/train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn.yaml --frontend_conf fs=16k --normalize=global_mvn --normalize_conf stats_file=exp/s2t_stats_raw_bpe50000/train/feats_stats.npz --train_data_path_and_name_and_type exp/s2t_stats_raw_bpe50000/splits3/wav.scp,speech,kaldi_ark --train_shape_file exp/s2t_stats_raw_bpe50000/splits3/speech_shape --fold_length 150 --train_data_path_and_name_and_type 
exp/s2t_stats_raw_bpe50000/splits3/text.prev,text_prev,text --train_shape_file exp/s2t_stats_raw_bpe50000/splits3/text_prev_shape.bpe --fold_length 150 --train_data_path_and_name_and_type exp/s2t_stats_raw_bpe50000/splits3/text.ctc,text_ctc,text --train_shape_file exp/s2t_stats_raw_bpe50000/splits3/text_ctc_shape.bpe --fold_length 150 --train_data_path_and_name_and_type exp/s2t_stats_raw_bpe50000/splits3/text,text,text --train_shape_file exp/s2t_stats_raw_bpe50000/splits3/text_shape.bpe --multiple_iterator true --valid_data_path_and_name_and_type dump/raw/dev/text.prev,text_prev,text --valid_shape_file exp/s2t_stats_raw_bpe50000/valid/text_prev_shape.bpe --valid_data_path_and_name_and_type dump/raw/dev/text.ctc,text_ctc,text --valid_shape_file exp/s2t_stats_raw_bpe50000/valid/text_ctc_shape.bpe --valid_data_path_and_name_and_type dump/raw/dev/text,text,text --valid_shape_file exp/s2t_stats_raw_bpe50000/valid/text_shape.bpe --ngpu 8 --multiprocessing_distributed true --dist_launcher slurm --dist_init_method file:///weka/home-pengyf/espnet-owsm-train/egs2/owsm_v3.1_licensefree_nosa/s2t1/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/.dist_init_6161be84-6c49-455e-b1b4-8fb570936836 +/weka/home-pengyf/espnet-owsm-train/tools/venv/bin/python3 /weka/home-pengyf/espnet-owsm-train/espnet2/bin/s2t_train.py --use_preprocessor true --bpemodel data/token_list/bpe_unigram50000/bpe.model --token_type bpe --token_list data/token_list/bpe_unigram50000/tokens.txt --non_linguistic_symbols none --cleaner none --g2p none --valid_data_path_and_name_and_type dump/raw/dev/wav.scp,speech,kaldi_ark --valid_shape_file exp/s2t_stats_raw_bpe50000/valid/speech_shape --resume true --fold_length 80000 --output_dir exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000 --config conf/train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn.yaml --frontend_conf fs=16k --normalize=global_mvn --normalize_conf stats_file=exp/s2t_stats_raw_bpe50000/train/feats_stats.npz --train_data_path_and_name_and_type exp/s2t_stats_raw_bpe50000/splits3/wav.scp,speech,kaldi_ark --train_shape_file exp/s2t_stats_raw_bpe50000/splits3/speech_shape --fold_length 150 --train_data_path_and_name_and_type exp/s2t_stats_raw_bpe50000/splits3/text.prev,text_prev,text --train_shape_file exp/s2t_stats_raw_bpe50000/splits3/text_prev_shape.bpe --fold_length 150 --train_data_path_and_name_and_type exp/s2t_stats_raw_bpe50000/splits3/text.ctc,text_ctc,text --train_shape_file exp/s2t_stats_raw_bpe50000/splits3/text_ctc_shape.bpe --fold_length 150 --train_data_path_and_name_and_type exp/s2t_stats_raw_bpe50000/splits3/text,text,text --train_shape_file exp/s2t_stats_raw_bpe50000/splits3/text_shape.bpe --multiple_iterator true --valid_data_path_and_name_and_type dump/raw/dev/text.prev,text_prev,text --valid_shape_file exp/s2t_stats_raw_bpe50000/valid/text_prev_shape.bpe --valid_data_path_and_name_and_type dump/raw/dev/text.ctc,text_ctc,text --valid_shape_file exp/s2t_stats_raw_bpe50000/valid/text_ctc_shape.bpe --valid_data_path_and_name_and_type dump/raw/dev/text,text,text --valid_shape_file exp/s2t_stats_raw_bpe50000/valid/text_shape.bpe --ngpu 8 --multiprocessing_distributed true --dist_launcher slurm --dist_init_method file:///weka/home-pengyf/espnet-owsm-train/egs2/owsm_v3.1_licensefree_nosa/s2t1/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/.dist_init_6161be84-6c49-455e-b1b4-8fb570936836
+[ip-10-0-194-127:0/16] 2024-02-26 17:43:35,305 (s2t:464) INFO: Vocabulary size: 50002
+[ip-10-0-194-127:0/16] 2024-02-26 17:43:40,465 (abs_task:1271) INFO: pytorch.version=1.13.1+cu117, cuda.available=True, cudnn.version=8500, cudnn.benchmark=False, cudnn.deterministic=True
+[ip-10-0-194-127:0/16] 2024-02-26 17:43:40,473 (abs_task:1272) INFO: Model structure:
+ESPnetS2TModel(
+  (frontend): DefaultFrontend(
+    (stft): Stft(n_fft=512, win_length=400, hop_length=160, center=True, normalized=False, onesided=True)
+    (frontend): Frontend()
+    (logmel): LogMel(sr=16000, n_fft=512, n_mels=80, fmin=0, fmax=8000.0, htk=False)
+  )
+  (specaug): SpecAug(
+    (freq_mask): MaskAlongAxis(mask_width_range=[0, 27], num_mask=2, axis=freq)
+    (time_mask): MaskAlongAxisVariableMaxWidth(mask_width_ratio_range=[0.0, 0.05], num_mask=4, axis=time)
+  )
+  (normalize): GlobalMVN(stats_file=exp/s2t_stats_raw_bpe50000/train/feats_stats.npz, norm_means=True, norm_vars=True)
+  (encoder): EBranchformerEncoder(
+    (embed): Conv2dSubsampling(
+      (conv): Sequential(
+        (0): Conv2d(1, 768, kernel_size=(3, 3), stride=(2, 2))
+        (1): ReLU()
+        (2): Conv2d(768, 768, kernel_size=(3, 3), stride=(2, 2))
+        (3): ReLU()
+      )
+      (out): Sequential(
+        (0): Linear(in_features=14592, out_features=768, bias=True)
+        (1): PositionalEncoding(
+          (dropout): Dropout(p=0.1, inplace=False)
+        )
+      )
+    )
+    (encoders): MultiSequential(
+      (0): EBranchformerEncoderLayer(
+        (attn): MultiHeadedAttention(
+          (linear_q): Linear(in_features=768, out_features=768, bias=True)
+          (linear_k): Linear(in_features=768, out_features=768, bias=True)
+          (linear_v): Linear(in_features=768, out_features=768, bias=True)
+          (linear_out): Linear(in_features=768, out_features=768, bias=True)
+          (dropout): Identity()
+          (q_norm): Identity()
+          (k_norm): Identity()
+        )
+        (cgmlp): ConvolutionalGatingMLP(
+          (channel_proj1): Sequential(
+            (0): Linear(in_features=768, out_features=3072, bias=True)
+            (1): GELU(approximate='none')
+          )
+          (csgu): ConvolutionalSpatialGatingUnit(
+            (norm): LayerNorm((1536,), eps=1e-12, elementwise_affine=True)
+            (conv): Conv1d(1536, 1536, kernel_size=(31,), stride=(1,), padding=(15,), groups=1536)
+            (act): Identity()
+            (dropout): Dropout(p=0.1, inplace=False)
+          )
+          (channel_proj2): Linear(in_features=1536, out_features=768, bias=True)
+        )
+        (feed_forward): PositionwiseFeedForward(
+          (w_1): Linear(in_features=768, out_features=3072, bias=True)
+          (w_2): Linear(in_features=3072, out_features=768, bias=True)
+          (dropout): Dropout(p=0.1, inplace=False)
+          (activation): Swish()
+        )
+        (feed_forward_macaron): PositionwiseFeedForward(
+          (w_1): Linear(in_features=768, out_features=3072, bias=True)
+          (w_2): Linear(in_features=3072, out_features=768, bias=True)
+          (dropout): Dropout(p=0.1, inplace=False)
+          (activation): Swish()
+        )
+        (norm_ff): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_ff_macaron): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_mha): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_mlp): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_final): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (dropout): Dropout(p=0.1, inplace=False)
+        (depthwise_conv_fusion): Conv1d(1536, 1536, kernel_size=(31,), stride=(1,), padding=(15,), groups=1536)
+        (merge_proj): Linear(in_features=1536, out_features=768, bias=True)
+      )
+      (1-8): 8 x EBranchformerEncoderLayer( ...identical in structure to layer (0) above... )
+    )
+    (after_norm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+  )
+  (decoder): TransformerDecoder(
+    (embed): Sequential(
+      (0): Embedding(50002, 768)
+      (1): PositionalEncoding(
+        (dropout): Dropout(p=0.1, inplace=False)
+      )
+    )
+    (after_norm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+    (output_layer): Linear(in_features=768, out_features=50002, bias=True)
+    (decoders): MultiSequential(
+      (0): DecoderLayer(
+        (self_attn): MultiHeadedAttention(
+          (linear_q): Linear(in_features=768, out_features=768, bias=True)
+          (linear_k): Linear(in_features=768, out_features=768, bias=True)
+          (linear_v): Linear(in_features=768, out_features=768, bias=True)
+          (linear_out): Linear(in_features=768, out_features=768, bias=True)
+          (dropout): Identity()
+          (q_norm): Identity()
+          (k_norm): Identity()
+        )
+        (src_attn): MultiHeadedAttention(
+          (linear_q): Linear(in_features=768, out_features=768, bias=True)
+          (linear_k): Linear(in_features=768, out_features=768, bias=True)
+          (linear_v): Linear(in_features=768, out_features=768, bias=True)
+          (linear_out): Linear(in_features=768, out_features=768, bias=True)
+          (dropout): Identity()
+          (q_norm): Identity()
+          (k_norm): Identity()
+        )
+        (feed_forward): PositionwiseFeedForward(
+          (w_1): Linear(in_features=768, out_features=3072, bias=True)
+          (w_2): Linear(in_features=3072, out_features=768, bias=True)
+          (dropout): Dropout(p=0.1, inplace=False)
+          (activation): ReLU()
+        )
+        (norm1): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm2): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm3): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (dropout): Dropout(p=0.1, inplace=False)
+      )
+      (1-8): 8 x DecoderLayer( ...identical in structure to layer (0) above... )
+    )
+  )
+  (criterion_att): LabelSmoothingLoss(
+    (criterion): KLDivLoss()
+  )
+  (ctc): CTC(
+    (ctc_lo): Linear(in_features=768, out_features=50002, bias=True)
+    (ctc_loss): CTCLoss()
+  )
+)
+
+Model summary:
+    Class Name: ESPnetS2TModel
+    Total Number of model parameters: 366.65 M
+    Number of trainable parameters: 366.65 M (100.0%)
+    Size: 1.47 GB
+    Type: torch.float32
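One detail of the structure above worth decoding is the `Linear(in_features=14592, ...)` inside `Conv2dSubsampling`. Each `Conv2d(kernel_size=(3, 3), stride=(2, 2))` with no padding maps a frequency axis of size F to floor((F - 3) / 2) + 1, so the 80 log-mel bins become 39 and then 19, and flattening 768 channels x 19 bins gives 14592. A quick check:

```python
# Sanity check of the Conv2dSubsampling output width printed above:
# two Conv2d(kernel_size=3, stride=2, no padding) layers along the
# 80-bin log-mel axis, then flatten channels x remaining bins.
def conv_out(size: int, kernel: int = 3, stride: int = 2) -> int:
    return (size - kernel) // stride + 1

n_mels = 80
after_two_convs = conv_out(conv_out(n_mels))   # 80 -> 39 -> 19
print(768 * after_two_convs)                   # 14592, matching in_features above
```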
+[ip-10-0-194-127:0/16] 2024-02-26 17:43:40,473 (abs_task:1275) INFO: Optimizer:
+AdamW (
+Parameter Group 0
+    amsgrad: False
+    betas: [0.9, 0.98]
+    capturable: False
+    eps: 1e-06
+    foreach: None
+    initial_lr: 0.0005
+    lr: 1.6666666666666667e-09
+    maximize: False
+    weight_decay: 0.0
+)
+[ip-10-0-194-127:0/16] 2024-02-26 17:43:40,473 (abs_task:1276) INFO: Scheduler: PiecewiseLinearWarmupLR(warmup_steps_list=[0, 30000, 60000], warmup_lr_list=[0.0, 5e-05, 0.0005])
+[ip-10-0-194-127:0/16] 2024-02-26 17:43:40,476 (abs_task:1285) INFO: Saving the configuration in exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/config.yaml
+[ip-10-0-194-127:0/16] 2024-02-26 17:43:44,171 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-194-127:0/16] 2024-02-26 17:43:44,716 (abs_task:1663) INFO: [valid] dataset:
+ESPnetDataset(
+  speech: {"path": "dump/raw/dev/wav.scp", "type": "kaldi_ark"}
+  text_prev: {"path": "dump/raw/dev/text.prev", "type": "text"}
+  text_ctc: {"path": "dump/raw/dev/text.ctc", "type": "text"}
+  text: {"path": "dump/raw/dev/text", "type": "text"}
+  preprocess: )
+[ip-10-0-194-127:0/16] 2024-02-26 17:43:44,716 (abs_task:1664) INFO: [valid] Batch sampler: UnsortedBatchSampler(N-batch=2133, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/valid/speech_shape,
+[ip-10-0-194-127:0/16] 2024-02-26 17:43:44,717 (abs_task:1665) INFO: [valid] mini-batch sizes summary: N-batch=2133, mean=256.1, min=256, max=257
+wandb: Currently logged in as: pengyf. Use `wandb login --relogin` to force relogin
+wandb: Tracking run with wandb version 0.16.3
+wandb: Run data is saved locally in exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/wandb/run-20240226_174347-zw9g4dhv
+wandb: Run `wandb offline` to turn off syncing.
+wandb: Syncing run _weka_home-pengyf_espnet-owsm-train_egs2_owsm_v3.1_licensefree_nosa_s2t1
+wandb: ⭐️ View project at https://stability.wandb.io/pengyf/ESPnet_S2TTask
+wandb: 🚀 View run at https://stability.wandb.io/pengyf/ESPnet_S2TTask/runs/zw9g4dhv
+wandb: WARNING Serializing object of type list that is 400072 bytes
+wandb: WARNING Serializing object of type list that is 406488 bytes
+ip-10-0-194-127:32770:32770 [0] NCCL INFO Bootstrap : Using ens32:10.0.194.127<0>
+ip-10-0-194-127:32770:32770 [0] NCCL INFO NET/Plugin : No plugin found (libnccl-net.so), using internal implementation
+ip-10-0-194-127:32770:32770 [0] NCCL INFO cudaDriverVersion 12000
+NCCL version 2.14.3+cuda11.7
+[ip-10-0-194-127:0/16] 2024-02-26 17:43:51,253 (trainer:302) INFO: 1/45epoch started
+[ip-10-0-194-127:0/16] 2024-02-26 17:43:51,295 (multiple_iter_factory:32) INFO: Building 0th iter-factory...
+[ip-10-0-194-127:0/16] 2024-02-26 17:44:27,554 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-194-127:0/16] 2024-02-26 17:44:35,530 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+  speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"}
+  text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"}
+  text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"}
+  text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"}
+  preprocess: )
+[ip-10-0-194-127:0/16] 2024-02-26 17:44:35,530 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1,
+[ip-10-0-194-127:0/16] 2024-02-26 17:44:35,535 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[NCCL INFO init messages for ranks 0-15 trimmed: ip-10-0-194-127 hosts ranks 0-7 and ip-10-0-216-33 hosts ranks 8-15 (2 nodes x 8 GPUs); rings/trees connect via P2P/IPC within a node and NET/Socket across nodes (no InfiniBand device found), and all 16 communicators report Init COMPLETE]
+[ip-10-0-194-127:0/16] 2024-02-26 17:49:13,954 (distributed:1027) INFO: Reducer buckets have been rebuilt in this iteration.
+[ip-10-0-194-127:0/16] 2024-02-26 17:50:42,168 (trainer:762) INFO: 1epoch:train:1-100batch: iter_time=2.901, forward_time=0.228, loss_ctc=7.111e+03, loss_att=378.837, acc=1.735e-05, loss=2.398e+03, backward_time=0.226, grad_norm=1.351e+04, clip=100.000, loss_scale=6.554e+04, optim_step_time=0.047, optim0_lr0=8.583e-08, train_time=4.108
+[ip-10-0-194-127:0/16] 2024-02-26 17:52:09,125 (trainer:762) INFO: 1epoch:train:101-200batch: iter_time=1.287e-04, forward_time=0.211, loss_ctc=3.710e+03, loss_att=386.323, acc=7.526e-06, loss=1.384e+03, backward_time=0.237, grad_norm=1.736e+04, clip=100.000, loss_scale=6.554e+04, optim_step_time=0.047, optim0_lr0=2.525e-07, train_time=0.870
+[ip-10-0-194-127:0/16] 2024-02-26 17:53:36,961 (trainer:762) INFO: 1epoch:train:201-300batch: iter_time=1.209e-04, forward_time=0.208, loss_ctc=586.929, loss_att=393.938, acc=1.931e-05, loss=451.835, backward_time=0.228, grad_norm=2.394e+03, clip=100.000, loss_scale=6.554e+04, optim_step_time=0.046, optim0_lr0=4.192e-07, train_time=0.878
+[ip-10-0-194-127:0/16] 2024-02-26 17:55:05,579 (trainer:762) INFO: 1epoch:train:301-400batch: iter_time=1.247e-04, forward_time=0.208, loss_ctc=382.183, loss_att=374.263, acc=1.407e-05, loss=376.639, backward_time=0.227, grad_norm=239.076, clip=100.000, loss_scale=6.554e+04, optim_step_time=0.046, optim0_lr0=5.858e-07, train_time=0.886
+[ip-10-0-194-127:0/16] 2024-02-26 17:56:33,765 (trainer:762) INFO: 1epoch:train:401-500batch: iter_time=1.331e-04, forward_time=0.208, loss_ctc=398.318, loss_att=396.527, acc=1.945e-05, loss=397.064, backward_time=0.227, grad_norm=223.174, clip=100.000, loss_scale=6.554e+04, optim_step_time=0.046, optim0_lr0=7.525e-07, train_time=0.882
+[ip-10-0-194-127:0/16] 2024-02-26 17:58:01,106 (trainer:762) INFO: 1epoch:train:501-600batch: iter_time=1.193e-04, forward_time=0.208, loss_ctc=369.990, loss_att=375.082, acc=4.130e-05, loss=373.555, backward_time=0.228, grad_norm=185.619, clip=100.000, loss_scale=6.554e+04, optim_step_time=0.046, optim0_lr0=9.192e-07, train_time=0.873
+[ip-10-0-194-127:0/16] 2024-02-26 17:59:28,153 (trainer:762) INFO: 1epoch:train:601-700batch: iter_time=1.278e-04, forward_time=0.212, loss_ctc=397.123, loss_att=404.329, acc=3.762e-05, loss=402.167, backward_time=0.232, grad_norm=184.082, clip=100.000, loss_scale=6.554e+04, optim_step_time=0.046, optim0_lr0=1.086e-06, train_time=0.870
+[ip-10-0-194-127:0/16] 2024-02-26 18:00:54,078 (trainer:762) INFO: 1epoch:train:701-800batch: iter_time=1.385e-04, forward_time=0.206, loss_ctc=363.640, loss_att=374.369, acc=2.057e-04, loss=371.150, backward_time=0.225, grad_norm=156.166, clip=100.000, loss_scale=6.554e+04, optim_step_time=0.046, optim0_lr0=1.252e-06, train_time=0.859
+[ip-10-0-194-127:0/16] 2024-02-26 18:02:19,837 (trainer:762) INFO: 1epoch:train:801-900batch: iter_time=1.234e-04, forward_time=0.205, loss_ctc=331.091, loss_att=346.754, acc=0.003, loss=342.055, backward_time=0.225, grad_norm=119.672, clip=100.000, loss_scale=6.554e+04, optim_step_time=0.046, optim0_lr0=1.419e-06, train_time=0.857
+[ip-10-0-194-127:0/16] 2024-02-26 18:03:46,623 (trainer:762) INFO: 1epoch:train:901-1000batch: iter_time=1.240e-04, forward_time=0.206, loss_ctc=351.395, loss_att=365.098, acc=0.012, loss=360.987, backward_time=0.227, grad_norm=164.205, clip=100.000, loss_scale=6.554e+04, optim_step_time=0.046, optim0_lr0=1.586e-06, train_time=0.868
+[ip-10-0-194-127:0/16] 2024-02-26 18:05:14,082 (trainer:762) INFO: 1epoch:train:1001-1100batch: iter_time=1.227e-04, forward_time=0.206, loss_ctc=332.785, loss_att=348.034, acc=0.033, loss=343.459, backward_time=0.223, grad_norm=149.036, clip=100.000, loss_scale=6.554e+04, optim_step_time=0.046, optim0_lr0=1.752e-06, train_time=0.874
+[ip-10-0-194-127:0/16] 2024-02-26 18:06:41,517 (trainer:762) INFO: 1epoch:train:1101-1200batch: iter_time=1.259e-04, forward_time=0.208, loss_ctc=357.640, loss_att=373.473, acc=0.057, loss=368.723, backward_time=0.224, grad_norm=154.917, clip=100.000, loss_scale=6.554e+04, optim_step_time=0.046, optim0_lr0=1.919e-06, train_time=0.874
+[ip-10-0-194-127:0/16] 2024-02-26 18:08:08,555 (trainer:762) INFO: 1epoch:train:1201-1300batch: iter_time=1.268e-04, forward_time=0.208, loss_ctc=415.967, loss_att=427.118, acc=0.062, loss=423.773, backward_time=0.224, grad_norm=184.073, clip=100.000, loss_scale=6.554e+04, optim_step_time=0.046, optim0_lr0=2.086e-06, train_time=0.870
+[ip-10-0-194-127:0/16] 2024-02-26 18:09:35,413 (trainer:762) INFO: 1epoch:train:1301-1400batch: iter_time=1.230e-04, forward_time=0.211, loss_ctc=359.397, loss_att=372.169, acc=0.072, loss=368.338, backward_time=0.224, grad_norm=152.314, clip=100.000, loss_scale=6.554e+04, optim_step_time=0.046, optim0_lr0=2.253e-06, train_time=0.868
+[ip-10-0-194-127:0/16] 2024-02-26 18:11:01,145 (trainer:762) INFO: 1epoch:train:1401-1500batch: iter_time=1.312e-04, forward_time=0.206, loss_ctc=330.382, loss_att=341.968, acc=0.076, loss=338.492, backward_time=0.227, grad_norm=111.303, clip=100.000, loss_scale=6.554e+04, optim_step_time=0.046, optim0_lr0=2.419e-06, train_time=0.857
+[ip-10-0-194-127:0/16] 2024-02-26 18:12:27,341 (trainer:762) INFO: 1epoch:train:1501-1600batch: iter_time=1.321e-04, forward_time=0.205, loss_ctc=324.784, loss_att=334.818, acc=0.077, loss=331.808, backward_time=0.233, grad_norm=124.252, clip=100.000, loss_scale=6.554e+04, optim_step_time=0.046, optim0_lr0=2.586e-06, train_time=0.862
+[ip-10-0-194-127:0/16] 2024-02-26 18:13:54,065 (trainer:762) INFO: 1epoch:train:1601-1700batch: iter_time=1.206e-04, forward_time=0.207, loss_ctc=354.480, loss_att=358.645, acc=0.073, loss=357.395, backward_time=0.227, grad_norm=136.015, clip=100.000, loss_scale=6.554e+04, optim_step_time=0.046, optim0_lr0=2.752e-06, train_time=0.867
+[ip-10-0-194-127:0/16] 2024-02-26 18:15:21,439 (trainer:762) INFO: 1epoch:train:1701-1800batch: iter_time=1.213e-04, forward_time=0.207, loss_ctc=359.639, loss_att=359.558, acc=0.072, loss=359.582, backward_time=0.226, grad_norm=130.160, clip=100.000, loss_scale=6.554e+04, optim_step_time=0.046, optim0_lr0=2.919e-06, train_time=0.874
+[ip-10-0-194-127:0/16] 2024-02-26 18:16:48,534 (trainer:762) INFO: 1epoch:train:1801-1900batch: iter_time=1.276e-04, forward_time=0.207, loss_ctc=367.130, loss_att=361.095, acc=0.071, loss=362.906, backward_time=0.227, grad_norm=134.048, clip=100.000, loss_scale=6.554e+04, optim_step_time=0.046, optim0_lr0=3.086e-06, train_time=0.871
+[ip-10-0-194-127:0/16] 2024-02-26 18:18:15,342 (trainer:762) INFO: 1epoch:train:1901-2000batch: iter_time=1.238e-04, forward_time=0.206, loss_ctc=337.273, loss_att=331.043, acc=0.078, loss=332.912, backward_time=0.227, grad_norm=106.037, clip=100.000, loss_scale=6.554e+04, optim_step_time=0.046, optim0_lr0=3.253e-06, train_time=0.868
+[ip-10-0-194-127:0/16] 2024-02-26 18:19:42,745 (trainer:762) INFO: 1epoch:train:2001-2100batch: iter_time=1.203e-04, forward_time=0.210, loss_ctc=297.283, loss_att=292.412, acc=0.082, loss=293.874, backward_time=0.224, grad_norm=90.566, clip=100.000, loss_scale=1.311e+05, optim_step_time=0.047, optim0_lr0=3.419e-06, train_time=0.874
+[ip-10-0-194-127:0/16] 2024-02-26 18:21:11,014 (trainer:762) INFO: 1epoch:train:2101-2200batch: iter_time=1.266e-04, forward_time=0.206, loss_ctc=325.531, loss_att=315.570, acc=0.077, loss=318.559, backward_time=0.229, grad_norm=117.815, clip=100.000, loss_scale=1.311e+05, optim_step_time=0.046, optim0_lr0=3.586e-06, train_time=0.882
+[ip-10-0-194-127:0/16] 2024-02-26 18:22:37,790 (trainer:762) INFO: 1epoch:train:2201-2300batch: iter_time=1.205e-04, forward_time=0.206, loss_ctc=305.955, loss_att=296.228, acc=0.082, loss=299.146, backward_time=0.228, grad_norm=95.359, clip=100.000, loss_scale=1.311e+05, optim_step_time=0.046, optim0_lr0=3.753e-06, train_time=0.868
+[ip-10-0-194-127:0/16] 2024-02-26 18:24:05,639 (trainer:762) INFO: 1epoch:train:2301-2400batch: iter_time=1.239e-04, forward_time=0.206, loss_ctc=332.544, loss_att=319.134, acc=0.074, loss=323.157, backward_time=0.235, grad_norm=113.508, clip=100.000, loss_scale=1.311e+05, optim_step_time=0.046, optim0_lr0=3.919e-06, train_time=0.878
+[ip-10-0-194-127:0/16] 2024-02-26 18:25:34,120 (trainer:762) INFO: 1epoch:train:2401-2500batch: iter_time=1.273e-04, forward_time=0.205, loss_ctc=296.780, loss_att=286.950, acc=0.082, loss=289.899, backward_time=0.234, grad_norm=93.266, clip=100.000, loss_scale=1.311e+05, optim_step_time=0.047, optim0_lr0=4.086e-06, train_time=0.885
+[ip-10-0-194-127:0/16] 2024-02-26 18:27:01,815 (trainer:762) INFO: 1epoch:train:2501-2600batch: iter_time=1.256e-04, forward_time=0.205, loss_ctc=264.585, loss_att=256.608, acc=0.091, loss=259.001, backward_time=0.240, grad_norm=83.685, clip=100.000, loss_scale=1.311e+05, optim_step_time=0.047, optim0_lr0=4.253e-06, train_time=0.877
+[ip-10-0-194-127:0/16] 2024-02-26 18:28:28,842 (trainer:762) INFO: 1epoch:train:2601-2700batch: iter_time=1.339e-04, forward_time=0.206, loss_ctc=324.382, loss_att=309.150, acc=0.081, loss=313.720, backward_time=0.227, grad_norm=94.710, clip=100.000, loss_scale=1.311e+05, optim_step_time=0.046, optim0_lr0=4.419e-06, train_time=0.870
+[ip-10-0-194-127:0/16] 2024-02-26 18:29:56,912 (trainer:762) INFO: 1epoch:train:2701-2800batch: iter_time=1.178e-04, forward_time=0.211, loss_ctc=319.564, loss_att=303.544, acc=0.084, loss=308.350, backward_time=0.232, grad_norm=95.382, clip=100.000, loss_scale=1.311e+05, optim_step_time=0.046, optim0_lr0=4.586e-06, train_time=0.880
+[ip-10-0-194-127:0/16] 2024-02-26 18:31:25,676 (trainer:762) INFO: 1epoch:train:2801-2900batch: iter_time=1.259e-04, forward_time=0.206, loss_ctc=297.661, loss_att=283.667, acc=0.094, loss=287.865, backward_time=0.225, grad_norm=82.929, clip=100.000, loss_scale=1.311e+05, optim_step_time=0.046, optim0_lr0=4.752e-06, train_time=0.887
+[ip-10-0-194-127:0/16] 2024-02-26 18:32:54,602 (trainer:762) INFO: 1epoch:train:2901-3000batch: iter_time=1.191e-04, forward_time=0.205, loss_ctc=308.907, loss_att=292.756, acc=0.095, loss=297.602, backward_time=0.227, grad_norm=84.556, clip=100.000, loss_scale=1.311e+05, optim_step_time=0.046, optim0_lr0=4.919e-06, train_time=0.889
+[ip-10-0-194-127:0/16] 2024-02-26 18:34:22,742 (trainer:762) INFO: 1epoch:train:3001-3100batch: iter_time=1.262e-04, forward_time=0.204, loss_ctc=309.807, loss_att=293.250, acc=0.100, loss=298.217, backward_time=0.228, grad_norm=88.738, clip=100.000, loss_scale=1.311e+05, optim_step_time=0.046, optim0_lr0=5.086e-06, train_time=0.881
+[ip-10-0-194-127:0/16] 2024-02-26 18:35:50,469 (trainer:762) INFO: 1epoch:train:3101-3200batch: iter_time=1.191e-04, forward_time=0.207, loss_ctc=290.061, loss_att=275.076, acc=0.111, loss=279.572, backward_time=0.228, grad_norm=78.017, clip=100.000, loss_scale=1.311e+05, optim_step_time=0.046, optim0_lr0=5.252e-06, train_time=0.877
+[ip-10-0-194-127:0/16] 2024-02-26 18:37:16,944 (trainer:762) INFO: 1epoch:train:3201-3300batch: iter_time=1.259e-04, forward_time=0.207, loss_ctc=312.624, loss_att=294.140, acc=0.110, loss=299.686, backward_time=0.235, grad_norm=84.888, clip=100.000, loss_scale=1.311e+05, optim_step_time=0.047, optim0_lr0=5.419e-06, train_time=0.865
+[ip-10-0-194-127:0/16] 2024-02-26 18:38:43,514 (trainer:762) INFO: 1epoch:train:3301-3400batch: iter_time=1.228e-04, forward_time=0.207, loss_ctc=319.263, loss_att=299.785, acc=0.108, loss=305.629, backward_time=0.230, grad_norm=94.943, clip=100.000, loss_scale=1.311e+05, optim_step_time=0.046, optim0_lr0=5.586e-06, train_time=0.865
+[ip-10-0-194-127:0/16] 2024-02-26 18:40:10,639 (trainer:762) INFO: 1epoch:train:3401-3500batch: iter_time=1.229e-04, forward_time=0.210, loss_ctc=299.034, loss_att=281.820, acc=0.111, loss=286.984, backward_time=0.233, grad_norm=84.916, clip=100.000, loss_scale=1.311e+05, optim_step_time=0.046, optim0_lr0=5.753e-06, train_time=0.871
+[ip-10-0-194-127:0/16] 2024-02-26 18:41:37,678 (trainer:762) INFO: 1epoch:train:3501-3600batch: iter_time=1.193e-04, forward_time=0.206, loss_ctc=279.221, loss_att=264.745, acc=0.119, loss=269.088, backward_time=0.233, grad_norm=67.783, clip=100.000, loss_scale=1.311e+05, optim_step_time=0.046, optim0_lr0=5.919e-06, train_time=0.870
+[ip-10-0-194-127:0/16] 2024-02-26 18:43:04,840 (trainer:762) INFO: 1epoch:train:3601-3700batch: iter_time=1.193e-04, forward_time=0.205, loss_ctc=268.629, loss_att=254.368, acc=0.118, loss=258.646, backward_time=0.227, grad_norm=72.575, clip=100.000, loss_scale=1.311e+05, optim_step_time=0.046, optim0_lr0=6.086e-06, train_time=0.871
+[ip-10-0-194-127:0/16] 2024-02-26 18:44:32,750 (trainer:762) INFO: 1epoch:train:3701-3800batch: iter_time=1.209e-04, forward_time=0.205, loss_ctc=311.056, loss_att=290.889, acc=0.109, loss=296.939, backward_time=0.229, grad_norm=93.179, clip=100.000, loss_scale=1.311e+05, optim_step_time=0.046, optim0_lr0=6.253e-06, train_time=0.879
+[ip-10-0-194-127:0/16] 2024-02-26 18:46:01,515 (trainer:762) INFO: 1epoch:train:3801-3900batch: iter_time=1.213e-04, forward_time=0.204, loss_ctc=267.265, loss_att=252.796, acc=0.118, loss=257.137, backward_time=0.226, grad_norm=70.675, clip=100.000, loss_scale=1.311e+05, optim_step_time=0.046, optim0_lr0=6.419e-06, train_time=0.887
+[ip-10-0-194-127:0/16] 2024-02-26 18:47:30,577 (trainer:762) INFO: 1epoch:train:3901-4000batch: iter_time=1.223e-04, forward_time=0.204, loss_ctc=273.172, loss_att=256.213, acc=0.119, loss=261.301, backward_time=0.231, grad_norm=109.252, clip=100.000, loss_scale=1.311e+05, optim_step_time=0.046, optim0_lr0=6.586e-06, train_time=0.890
+[ip-10-0-194-127:0/16] 2024-02-26 18:48:58,271 (trainer:762) INFO: 1epoch:train:4001-4100batch: iter_time=1.195e-04, forward_time=0.206, loss_ctc=278.346, loss_att=261.453, acc=0.119, loss=266.521, backward_time=0.232, grad_norm=95.216, clip=100.000, loss_scale=2.621e+05, optim_step_time=0.046, optim0_lr0=6.752e-06, train_time=0.877
+[ip-10-0-194-127:0/16] 2024-02-26 18:50:25,700 (trainer:762) INFO: 1epoch:train:4101-4200batch: iter_time=1.285e-04, forward_time=0.204, loss_ctc=242.842, loss_att=230.135, acc=0.126, loss=233.947, backward_time=0.227, grad_norm=70.129, clip=100.000, loss_scale=2.621e+05, optim_step_time=0.046, optim0_lr0=6.919e-06, train_time=0.874
+[ip-10-0-194-127:0/16] 2024-02-26 18:51:52,896 (trainer:762) INFO: 1epoch:train:4201-4300batch: iter_time=1.219e-04, forward_time=0.211, loss_ctc=294.754, loss_att=274.524, acc=0.115, loss=280.593, backward_time=0.229, grad_norm=89.119, clip=100.000, loss_scale=2.621e+05, optim_step_time=0.046, optim0_lr0=7.086e-06, train_time=0.872
+[ip-10-0-194-127:0/16] 2024-02-26 18:53:19,807 (trainer:762) INFO: 1epoch:train:4301-4400batch: iter_time=1.294e-04, forward_time=0.206, loss_ctc=264.005, loss_att=246.970, acc=0.122, loss=252.080, backward_time=0.233, grad_norm=86.415, clip=100.000, loss_scale=2.621e+05, optim_step_time=0.046, optim0_lr0=7.253e-06, train_time=0.869
+[ip-10-0-194-127:0/16] 2024-02-26 18:54:47,139 (trainer:762) INFO: 1epoch:train:4401-4500batch: iter_time=1.276e-04, forward_time=0.206, loss_ctc=282.706, loss_att=264.177, acc=0.125, loss=269.736, backward_time=0.230, grad_norm=80.949, clip=100.000, loss_scale=2.621e+05, optim_step_time=0.046, optim0_lr0=7.419e-06, train_time=0.873
+[ip-10-0-194-127:0/16] 2024-02-26 18:56:16,184 (trainer:762) INFO: 1epoch:train:4501-4600batch: iter_time=1.305e-04, forward_time=0.205, loss_ctc=280.986, loss_att=261.920, acc=0.121, loss=267.640, backward_time=0.225, grad_norm=75.555, clip=100.000, loss_scale=2.621e+05, optim_step_time=0.046, optim0_lr0=7.586e-06, train_time=0.890
+[ip-10-0-194-127:0/16] 2024-02-26 18:57:47,096 (trainer:762) INFO: 1epoch:train:4601-4700batch: iter_time=1.290e-04, forward_time=0.204, loss_ctc=269.278, loss_att=249.948, acc=0.129, loss=255.747, backward_time=0.222, grad_norm=86.582, clip=100.000, loss_scale=2.621e+05, optim_step_time=0.046, optim0_lr0=7.753e-06, train_time=0.909
+[ip-10-0-194-127:0/16] 2024-02-26 18:59:16,373 (trainer:762) INFO: 1epoch:train:4701-4800batch: iter_time=1.262e-04, forward_time=0.206, loss_ctc=301.689, loss_att=278.379, acc=0.120, loss=285.372, backward_time=0.230, grad_norm=88.262, clip=100.000, loss_scale=2.621e+05, optim_step_time=0.046, optim0_lr0=7.919e-06, train_time=0.893
+[ip-10-0-194-127:0/16] 2024-02-26 19:00:44,796 (trainer:762) INFO: 1epoch:train:4801-4900batch: iter_time=1.229e-04, forward_time=0.205, loss_ctc=301.211, loss_att=277.859, acc=0.116, loss=284.864, backward_time=0.227, grad_norm=85.124, clip=100.000, loss_scale=2.621e+05, optim_step_time=0.046, optim0_lr0=8.086e-06, train_time=0.884
+[ip-10-0-194-127:0/16] 2024-02-26 19:02:12,615 (trainer:762) INFO: 1epoch:train:4901-5000batch: iter_time=1.187e-04, forward_time=0.205, loss_ctc=286.504, loss_att=264.270, acc=0.120, loss=270.940, backward_time=0.224, grad_norm=98.913, clip=100.000, loss_scale=2.621e+05, optim_step_time=0.046, optim0_lr0=8.252e-06, train_time=0.878
+[ip-10-0-194-127:0/16] 2024-02-26 19:02:16,370 (multiple_iter_factory:32) INFO: Building 1th iter-factory...
+[ip-10-0-194-127:0/16] 2024-02-26 19:02:53,542 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-194-127:0/16] 2024-02-26 19:03:02,248 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"}
+ preprocess: )
+[ip-10-0-194-127:0/16] 2024-02-26 19:03:02,248 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2,
+[ip-10-0-194-127:0/16] 2024-02-26 19:03:02,252 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-194-127:0/16] 2024-02-26 19:09:11,112 (trainer:762) INFO: 1epoch:train:5001-5100batch: iter_time=3.167, forward_time=0.208, loss_ctc=277.451, loss_att=256.593, acc=0.123, loss=262.850, backward_time=0.225, grad_norm=76.591, clip=100.000, loss_scale=2.621e+05, optim_step_time=0.047, optim0_lr0=8.419e-06, train_time=4.185
+[ip-10-0-194-127:0/16] 2024-02-26 19:10:39,833 (trainer:762) INFO: 1epoch:train:5101-5200batch: iter_time=1.317e-04, forward_time=0.207, loss_ctc=276.279, loss_att=253.885, acc=0.128, loss=260.603, backward_time=0.227, grad_norm=73.545, clip=100.000, loss_scale=2.621e+05, optim_step_time=0.047, optim0_lr0=8.586e-06, train_time=0.887
+[ip-10-0-194-127:0/16] 2024-02-26 19:12:08,672 (trainer:762) INFO: 1epoch:train:5201-5300batch: iter_time=1.207e-04, forward_time=0.205, loss_ctc=297.919, loss_att=272.415, acc=0.117, loss=280.067, backward_time=0.229, grad_norm=135.636, clip=100.000, loss_scale=2.621e+05, optim_step_time=0.047, optim0_lr0=8.753e-06, train_time=0.888
+[ip-10-0-194-127:0/16] 2024-02-26 19:13:36,929 (trainer:762) INFO: 1epoch:train:5301-5400batch: iter_time=1.296e-04, forward_time=0.206, loss_ctc=277.984, loss_att=254.871, acc=0.127, loss=261.805, backward_time=0.226, grad_norm=97.575, clip=100.000, loss_scale=2.621e+05, optim_step_time=0.047, optim0_lr0=8.919e-06, train_time=0.882
+[ip-10-0-194-127:0/16] 2024-02-26 19:15:05,821 (trainer:762) INFO: 1epoch:train:5401-5500batch: iter_time=1.266e-04, forward_time=0.211, loss_ctc=279.957, loss_att=256.448, acc=0.125, loss=263.501, backward_time=0.228, grad_norm=69.107, clip=100.000, loss_scale=2.621e+05, optim_step_time=0.047, optim0_lr0=9.086e-06, train_time=0.889
+[ip-10-0-194-127:0/16] 2024-02-26 19:16:33,731 (trainer:762) INFO: 1epoch:train:5501-5600batch: iter_time=1.264e-04, forward_time=0.206, loss_ctc=272.546, loss_att=248.638, acc=0.129, loss=255.810, backward_time=0.237, grad_norm=93.766, clip=100.000, loss_scale=2.621e+05, optim_step_time=0.047, optim0_lr0=9.252e-06, train_time=0.879
+[ip-10-0-194-127:0/16] 2024-02-26 19:18:00,284 (trainer:762) INFO: 1epoch:train:5601-5700batch: iter_time=1.214e-04, forward_time=0.208, loss_ctc=293.381, loss_att=267.041, acc=0.125, loss=274.943, backward_time=0.227, grad_norm=75.392, clip=100.000, loss_scale=2.621e+05, optim_step_time=0.047, optim0_lr0=9.419e-06, train_time=0.865
+[ip-10-0-194-127:0/16] 2024-02-26 19:19:27,773 (trainer:762) INFO: 1epoch:train:5701-5800batch: iter_time=1.215e-04, forward_time=0.207, loss_ctc=285.605, loss_att=259.623, acc=0.130, loss=267.418, backward_time=0.229, grad_norm=94.575, clip=100.000, loss_scale=2.621e+05, optim_step_time=0.047, optim0_lr0=9.586e-06, train_time=0.875
+[ip-10-0-194-127:0/16] 2024-02-26 19:20:53,985 (trainer:762) INFO: 1epoch:train:5801-5900batch: iter_time=1.238e-04, forward_time=0.206, loss_ctc=248.278, loss_att=226.111, acc=0.141, loss=232.761, backward_time=0.227, grad_norm=78.659, clip=100.000, loss_scale=2.621e+05, optim_step_time=0.047, optim0_lr0=9.753e-06, train_time=0.862
+[ip-10-0-194-127:0/16] 2024-02-26 19:22:20,153 (trainer:762) INFO: 1epoch:train:5901-6000batch: iter_time=1.221e-04, forward_time=0.206, loss_ctc=277.947, loss_att=251.078, acc=0.131, loss=259.139, backward_time=0.230, grad_norm=106.103, clip=100.000, loss_scale=2.621e+05, optim_step_time=0.047, optim0_lr0=9.919e-06, train_time=0.861
+[ip-10-0-194-127:0/16] 2024-02-26 19:23:46,531 (trainer:762) INFO: 1epoch:train:6001-6100batch: iter_time=1.218e-04, forward_time=0.206, loss_ctc=261.811, loss_att=237.940, acc=0.135, loss=245.101, backward_time=0.235, grad_norm=95.022, clip=100.000, loss_scale=5.243e+05, optim_step_time=0.047, optim0_lr0=1.009e-05, train_time=0.864
+[ip-10-0-194-127:0/16] 2024-02-26 19:25:12,885 (trainer:762) INFO: 1epoch:train:6101-6200batch: iter_time=1.249e-04, forward_time=0.208, loss_ctc=272.295, loss_att=246.242, acc=0.134, loss=254.058, backward_time=0.231, grad_norm=64.117, clip=100.000, loss_scale=5.243e+05, optim_step_time=0.047, optim0_lr0=1.025e-05, train_time=0.863
+[ip-10-0-194-127:0/16] 2024-02-26 19:26:41,467 (trainer:762) INFO: 1epoch:train:6201-6300batch: iter_time=1.198e-04, forward_time=0.213, loss_ctc=334.577, loss_att=298.030, acc=0.125, loss=308.994, backward_time=0.234, grad_norm=92.297, clip=100.000, loss_scale=5.243e+05, optim_step_time=0.047, optim0_lr0=1.042e-05, train_time=0.886
+[ip-10-0-194-127:0/16] 2024-02-26 19:28:09,431 (trainer:762) INFO: 1epoch:train:6301-6400batch: iter_time=1.258e-04, forward_time=0.208, loss_ctc=287.127, loss_att=257.654, acc=0.133, loss=266.496, backward_time=0.231, grad_norm=88.839, clip=100.000, loss_scale=5.243e+05, optim_step_time=0.047, optim0_lr0=1.059e-05, train_time=0.879
+[ip-10-0-194-127:0/16] 2024-02-26 19:29:36,002 (trainer:762) INFO: 1epoch:train:6401-6500batch: iter_time=1.205e-04, forward_time=0.207, loss_ctc=273.679, loss_att=244.497, acc=0.135, loss=253.251, backward_time=0.225, grad_norm=98.685, clip=100.000, loss_scale=5.243e+05, optim_step_time=0.047, optim0_lr0=1.075e-05, train_time=0.865
+[ip-10-0-194-127:0/16] 2024-02-26 19:31:05,591 (trainer:762) INFO: 1epoch:train:6501-6600batch: iter_time=1.226e-04, forward_time=0.206, loss_ctc=263.604, loss_att=236.343, acc=0.142, loss=244.521, backward_time=0.228, grad_norm=100.158, clip=100.000, loss_scale=5.243e+05, optim_step_time=0.047, optim0_lr0=1.092e-05, train_time=0.896
+[ip-10-0-194-127:0/16] 2024-02-26 19:32:35,318 (trainer:762) INFO: 1epoch:train:6601-6700batch: iter_time=1.196e-04, forward_time=0.207, loss_ctc=291.770, loss_att=258.547, acc=0.137, loss=268.514, backward_time=0.228, grad_norm=104.222, clip=100.000, loss_scale=5.243e+05, optim_step_time=0.047, optim0_lr0=1.109e-05, train_time=0.897
+[ip-10-0-194-127:0/16] 2024-02-26 19:34:02,613 (trainer:762) INFO: 1epoch:train:6701-6800batch: iter_time=1.188e-04, forward_time=0.208, loss_ctc=303.046, loss_att=268.186, acc=0.139, loss=278.644, backward_time=0.230, grad_norm=77.458, clip=100.000, loss_scale=5.243e+05, optim_step_time=0.047, optim0_lr0=1.125e-05, train_time=0.873
+[ip-10-0-194-127:0/16] 2024-02-26 19:35:29,671 (trainer:762) INFO: 1epoch:train:6801-6900batch: iter_time=1.213e-04, forward_time=0.208, loss_ctc=317.919, loss_att=278.917, acc=0.134, loss=290.617, backward_time=0.230, grad_norm=94.846, clip=100.000, loss_scale=5.243e+05, optim_step_time=0.047, optim0_lr0=1.142e-05, train_time=0.870
+[ip-10-0-194-127:0/16] 2024-02-26 19:36:58,003 (trainer:762) INFO: 1epoch:train:6901-7000batch: iter_time=1.214e-04, forward_time=0.207, loss_ctc=296.070, loss_att=259.075, acc=0.143, loss=270.173, backward_time=0.227, grad_norm=97.500, clip=100.000, loss_scale=5.243e+05, optim_step_time=0.047, optim0_lr0=1.159e-05, train_time=0.883
+[ip-10-0-194-127:0/16] 2024-02-26 19:38:25,431 (trainer:762) INFO: 1epoch:train:7001-7100batch: iter_time=1.193e-04, forward_time=0.212, loss_ctc=260.655, loss_att=231.750, acc=0.147, loss=240.422, backward_time=0.227, grad_norm=92.821, clip=100.000, loss_scale=5.243e+05, optim_step_time=0.047, optim0_lr0=1.175e-05, train_time=0.874
+[ip-10-0-194-127:0/16] 2024-02-26 19:39:52,531 (trainer:762) INFO: 1epoch:train:7101-7200batch: iter_time=1.297e-04, forward_time=0.208, loss_ctc=289.502, loss_att=253.895, acc=0.142, loss=264.577, backward_time=0.229, grad_norm=78.297, clip=100.000, loss_scale=5.243e+05, optim_step_time=0.047, optim0_lr0=1.192e-05, train_time=0.871
+[ip-10-0-194-127:0/16] 2024-02-26 19:41:19,293 (trainer:762) INFO: 1epoch:train:7201-7300batch: iter_time=1.193e-04, forward_time=0.208, loss_ctc=275.926, loss_att=242.022, acc=0.144, loss=252.193, backward_time=0.227, grad_norm=73.335, clip=100.000, loss_scale=5.243e+05, optim_step_time=0.047, optim0_lr0=1.209e-05, train_time=0.867
+[ip-10-0-194-127:0/16] 2024-02-26 19:42:46,166 (trainer:762) INFO: 1epoch:train:7301-7400batch: iter_time=1.229e-04, forward_time=0.208, loss_ctc=299.598, loss_att=260.355, acc=0.141, loss=272.128, backward_time=0.231, grad_norm=87.701, clip=100.000, loss_scale=5.243e+05, optim_step_time=0.047, optim0_lr0=1.225e-05, train_time=0.868
+[ip-10-0-194-127:0/16] 2024-02-26 19:44:13,950 (trainer:762) INFO: 1epoch:train:7401-7500batch: iter_time=1.237e-04, forward_time=0.207, loss_ctc=270.860, loss_att=238.578, acc=0.144, loss=248.263, backward_time=0.230, grad_norm=65.354, clip=100.000, loss_scale=5.243e+05, optim_step_time=0.047, optim0_lr0=1.242e-05, train_time=0.878
+[ip-10-0-194-127:0/16] 2024-02-26 19:45:40,538 (trainer:762) INFO: 1epoch:train:7501-7600batch: iter_time=1.185e-04, forward_time=0.207, loss_ctc=243.727, loss_att=215.062, acc=0.154, loss=223.662, backward_time=0.226, grad_norm=74.115, clip=100.000, loss_scale=5.243e+05, optim_step_time=0.047, optim0_lr0=1.259e-05, train_time=0.866
+[ip-10-0-194-127:0/16] 2024-02-26 19:47:08,284 (trainer:762) INFO: 1epoch:train:7601-7700batch: iter_time=1.236e-04, forward_time=0.207, loss_ctc=304.529, loss_att=263.530, acc=0.144, loss=275.829, backward_time=0.228, grad_norm=78.166, clip=100.000, loss_scale=5.243e+05, optim_step_time=0.047, optim0_lr0=1.275e-05, train_time=0.877
+[ip-10-0-194-127:0/16] 2024-02-26 19:48:37,260 (trainer:762) INFO: 1epoch:train:7701-7800batch: iter_time=1.195e-04, forward_time=0.205, loss_ctc=305.792, loss_att=262.582, acc=0.148, loss=275.545, backward_time=0.230, grad_norm=88.186, clip=100.000, loss_scale=5.243e+05, optim_step_time=0.047, optim0_lr0=1.292e-05, train_time=0.890
+[ip-10-0-194-127:0/16] 2024-02-26 19:50:05,805 (trainer:762) INFO: 1epoch:train:7801-7900batch: iter_time=1.264e-04, forward_time=0.205, loss_ctc=281.026, loss_att=243.983, acc=0.148, loss=255.096, backward_time=0.226, grad_norm=83.355, clip=100.000, loss_scale=5.243e+05, optim_step_time=0.047, optim0_lr0=1.309e-05, train_time=0.885
+[ip-10-0-194-127:0/16] 2024-02-26 19:51:34,021 (trainer:762) INFO: 1epoch:train:7901-8000batch: iter_time=1.310e-04, forward_time=0.212, loss_ctc=295.082, loss_att=254.133, acc=0.145, loss=266.417, backward_time=0.235, grad_norm=92.345, clip=100.000, loss_scale=5.243e+05, optim_step_time=0.047, optim0_lr0=1.325e-05, train_time=0.882
+[ip-10-0-194-127:0/16] 2024-02-26 19:53:02,160 (trainer:762) INFO: 1epoch:train:8001-8100batch: iter_time=1.232e-04, forward_time=0.207, loss_ctc=294.867, loss_att=253.557, acc=0.148, loss=265.950, backward_time=0.227, grad_norm=73.774, clip=100.000, loss_scale=1.049e+06, optim_step_time=0.047, optim0_lr0=1.342e-05, train_time=0.881
+[ip-10-0-194-127:0/16] 2024-02-26 19:54:30,100 (trainer:762) INFO: 1epoch:train:8101-8200batch: iter_time=1.216e-04, forward_time=0.207, loss_ctc=278.344, loss_att=239.802, acc=0.154, loss=251.365, backward_time=0.228, grad_norm=63.387, clip=100.000, loss_scale=1.049e+06, optim_step_time=0.047, optim0_lr0=1.359e-05, train_time=0.879
+[ip-10-0-194-127:0/16] 2024-02-26 19:55:59,580 (trainer:762) INFO: 1epoch:train:8201-8300batch: iter_time=1.207e-04, forward_time=0.208, loss_ctc=304.304, loss_att=258.932, acc=0.151, loss=272.544, backward_time=0.226, grad_norm=90.322, clip=100.000, loss_scale=1.049e+06, optim_step_time=0.047, optim0_lr0=1.375e-05, train_time=0.895
+[ip-10-0-194-127:0/16] 2024-02-26 19:57:29,470 (trainer:762) INFO: 1epoch:train:8301-8400batch: iter_time=1.222e-04, forward_time=0.207, loss_ctc=310.901, loss_att=264.509, acc=0.148, loss=278.426, backward_time=0.226, grad_norm=103.182, clip=100.000, loss_scale=1.049e+06, optim_step_time=0.047, optim0_lr0=1.392e-05, train_time=0.899
+[ip-10-0-194-127:0/16] 2024-02-26 19:58:57,890 (trainer:762) INFO: 1epoch:train:8401-8500batch: iter_time=1.213e-04, forward_time=0.207, loss_ctc=292.893, loss_att=249.679, acc=0.152, loss=262.644, backward_time=0.227, grad_norm=90.881, clip=100.000, loss_scale=1.049e+06, optim_step_time=0.047, optim0_lr0=1.409e-05, train_time=0.884
+[ip-10-0-194-127:0/16] 2024-02-26 20:00:26,010 (trainer:762) INFO: 1epoch:train:8501-8600batch: iter_time=1.221e-04, forward_time=0.207, loss_ctc=269.743, loss_att=232.757, acc=0.158, loss=243.853, backward_time=0.230, grad_norm=60.230, clip=100.000, loss_scale=1.049e+06, optim_step_time=0.047, optim0_lr0=1.425e-05, train_time=0.881
+[ip-10-0-194-127:0/16] 2024-02-26 20:01:52,263 (trainer:762) INFO: 1epoch:train:8601-8700batch: iter_time=1.225e-04, forward_time=0.207, loss_ctc=262.547, loss_att=225.659, acc=0.163, loss=236.725, backward_time=0.224, grad_norm=64.720, clip=100.000, loss_scale=1.049e+06, optim_step_time=0.047, optim0_lr0=1.442e-05, train_time=0.862
+[ip-10-0-194-127:0/16] 2024-02-26 20:03:20,399 (trainer:762) INFO: 1epoch:train:8701-8800batch: iter_time=1.311e-04, forward_time=0.208, loss_ctc=305.190, loss_att=260.249, acc=0.148, loss=273.731, backward_time=0.228, grad_norm=66.523, clip=100.000, loss_scale=1.049e+06, optim_step_time=0.047, optim0_lr0=1.459e-05, train_time=0.881
+[ip-10-0-194-127:0/16] 2024-02-26 20:04:47,007 (trainer:762) INFO: 1epoch:train:8801-8900batch: iter_time=1.203e-04, forward_time=0.212, loss_ctc=259.471, loss_att=223.594, acc=0.159, loss=234.357, backward_time=0.227, grad_norm=73.127, clip=100.000, loss_scale=1.049e+06, optim_step_time=0.047, optim0_lr0=1.475e-05, train_time=0.866
+[ip-10-0-194-127:0/16] 2024-02-26 20:06:13,643 (trainer:762) INFO: 1epoch:train:8901-9000batch: iter_time=1.261e-04, forward_time=0.207, loss_ctc=264.430, loss_att=228.027, acc=0.162, loss=238.948, backward_time=0.234, grad_norm=59.111, clip=100.000, loss_scale=1.049e+06, optim_step_time=0.047, optim0_lr0=1.492e-05, train_time=0.866
+[ip-10-0-194-127:0/16] 2024-02-26 20:07:39,912 (trainer:762) INFO: 1epoch:train:9001-9100batch: iter_time=1.262e-04, forward_time=0.208, loss_ctc=269.638, loss_att=232.835, acc=0.160, loss=243.876, backward_time=0.231, grad_norm=61.304, clip=100.000, loss_scale=1.049e+06, optim_step_time=0.047, optim0_lr0=1.509e-05, train_time=0.862
+[ip-10-0-194-127:0/16] 2024-02-26 20:09:07,842 (trainer:762) INFO: 1epoch:train:9101-9200batch: iter_time=1.263e-04, forward_time=0.206, loss_ctc=237.598, loss_att=206.452, acc=0.166, loss=215.796, backward_time=0.232, grad_norm=61.439, clip=100.000, loss_scale=1.049e+06, optim_step_time=0.047, optim0_lr0=1.525e-05, train_time=0.879
+[ip-10-0-194-127:0/16] 2024-02-26 20:10:34,379 (trainer:762) INFO: 1epoch:train:9201-9300batch: iter_time=1.203e-04, forward_time=0.207, loss_ctc=289.052, loss_att=246.029, acc=0.158, loss=258.936, backward_time=0.227, grad_norm=77.423, clip=100.000, loss_scale=1.049e+06, optim_step_time=0.047, optim0_lr0=1.542e-05, train_time=0.865
+[ip-10-0-194-127:0/16] 2024-02-26 20:12:00,684 (trainer:762) INFO: 1epoch:train:9301-9400batch: iter_time=1.206e-04, forward_time=0.207, loss_ctc=257.369, loss_att=220.702, acc=0.163, loss=231.702, backward_time=0.225, grad_norm=83.790, clip=100.000, loss_scale=1.049e+06, optim_step_time=0.047, optim0_lr0=1.559e-05, train_time=0.863
+[ip-10-0-194-127:0/16] 2024-02-26 20:13:28,746 (trainer:762) INFO: 1epoch:train:9401-9500batch: iter_time=1.240e-04, forward_time=0.207, loss_ctc=276.029, loss_att=237.682, acc=0.164, loss=249.186, backward_time=0.229, grad_norm=64.503, clip=100.000, loss_scale=1.049e+06, optim_step_time=0.047, optim0_lr0=1.575e-05, train_time=0.880
+[ip-10-0-194-127:0/16] 2024-02-26 20:14:55,384 (trainer:762) INFO: 1epoch:train:9501-9600batch: iter_time=1.239e-04, forward_time=0.208, loss_ctc=273.993, loss_att=235.115, acc=0.160, loss=246.778, backward_time=0.227, grad_norm=71.500, clip=100.000, loss_scale=1.049e+06, optim_step_time=0.047, optim0_lr0=1.592e-05, train_time=0.866
+[ip-10-0-194-127:0/16] 2024-02-26 20:16:21,901 (trainer:762) INFO: 1epoch:train:9601-9700batch: iter_time=1.219e-04, forward_time=0.207, loss_ctc=264.628, loss_att=226.467, acc=0.166, loss=237.916, backward_time=0.227, grad_norm=71.739, clip=100.000, loss_scale=1.049e+06, optim_step_time=0.047, optim0_lr0=1.609e-05, train_time=0.865
+[ip-10-0-194-127:0/16] 2024-02-26 20:17:49,563 (trainer:762) INFO: 1epoch:train:9701-9800batch: iter_time=1.264e-04, forward_time=0.214, loss_ctc=295.024, loss_att=250.532, acc=0.161, loss=263.880, backward_time=0.235, grad_norm=69.373, clip=100.000, loss_scale=1.049e+06, optim_step_time=0.047, optim0_lr0=1.625e-05, train_time=0.876
+[ip-10-0-194-127:0/16] 2024-02-26 20:19:16,139 (trainer:762) INFO: 1epoch:train:9801-9900batch: iter_time=1.245e-04, forward_time=0.208, loss_ctc=297.650, loss_att=251.969, acc=0.152, loss=265.674, backward_time=0.227, grad_norm=82.459, clip=100.000, loss_scale=1.049e+06, optim_step_time=0.047, optim0_lr0=1.642e-05, train_time=0.866
+[ip-10-0-194-127:0/16] 2024-02-26 20:20:45,152 (trainer:762) INFO: 1epoch:train:9901-10000batch: iter_time=1.248e-04, forward_time=0.206, loss_ctc=280.965, loss_att=238.298, acc=0.161, loss=251.098, backward_time=0.226, grad_norm=69.757, clip=100.000, loss_scale=1.049e+06, optim_step_time=0.047, optim0_lr0=1.659e-05, train_time=0.890
+[ip-10-0-194-127:0/16] 2024-02-26 20:20:48,363 (multiple_iter_factory:32) INFO: Building 2th iter-factory...
+[ip-10-0-194-127:0/16] 2024-02-26 20:21:25,427 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-194-127:0/16] 2024-02-26 20:21:33,440 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"}
+ preprocess: )
+[ip-10-0-194-127:0/16] 2024-02-26 20:21:33,441 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0,
+[ip-10-0-194-127:0/16] 2024-02-26 20:21:33,445 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-194-127:0/16] 2024-02-26 20:27:35,339 (trainer:762) INFO: 1epoch:train:10001-10100batch: iter_time=3.168, forward_time=0.208, loss_ctc=274.206, loss_att=233.204, acc=0.159, loss=245.505, backward_time=0.226, grad_norm=78.377, clip=100.000, loss_scale=2.097e+06, optim_step_time=0.047, optim0_lr0=1.675e-05, train_time=4.102
+[ip-10-0-194-127:0/16] 2024-02-26 20:29:02,786 (trainer:762) INFO: 1epoch:train:10101-10200batch: iter_time=1.240e-04, forward_time=0.213, loss_ctc=270.821, loss_att=229.759, acc=0.167, loss=242.078, backward_time=0.230, grad_norm=70.798, clip=100.000, loss_scale=2.097e+06, optim_step_time=0.047, optim0_lr0=1.692e-05, train_time=0.874
+[ip-10-0-194-127:0/16] 2024-02-26 20:30:30,312 (trainer:762) INFO: 1epoch:train:10201-10300batch: iter_time=1.189e-04, forward_time=0.208, loss_ctc=294.889, loss_att=248.095, acc=0.153, loss=262.133, backward_time=0.237, grad_norm=82.573, clip=100.000, loss_scale=2.097e+06, optim_step_time=0.047, optim0_lr0=1.709e-05, train_time=0.875
+[ip-10-0-194-127:0/16] 2024-02-26 20:32:00,062 (trainer:762) INFO: 1epoch:train:10301-10400batch: iter_time=1.249e-04, forward_time=0.206, loss_ctc=271.551, loss_att=231.102, acc=0.162, loss=243.237, backward_time=0.233, grad_norm=64.014, clip=100.000, loss_scale=2.097e+06, optim_step_time=0.047, optim0_lr0=1.725e-05, train_time=0.897
+[ip-10-0-194-127:0/16] 2024-02-26 20:33:27,147 (trainer:762) INFO: 1epoch:train:10401-10500batch: iter_time=1.262e-04, forward_time=0.208, loss_ctc=273.743, loss_att=234.023, acc=0.162, loss=245.939, backward_time=0.226, grad_norm=62.096, clip=100.000, loss_scale=2.097e+06, optim_step_time=0.047, optim0_lr0=1.742e-05, train_time=0.871
+[ip-10-0-194-127:0/16] 2024-02-26 20:34:54,694 (trainer:762) INFO: 1epoch:train:10501-10600batch: iter_time=1.212e-04, forward_time=0.208, loss_ctc=265.311, loss_att=226.444, acc=0.166, loss=238.104, backward_time=0.230, grad_norm=72.303, clip=100.000, loss_scale=2.097e+06, optim_step_time=0.047, optim0_lr0=1.759e-05, train_time=0.875
+[ip-10-0-194-127:0/16] 2024-02-26 20:36:22,632 (trainer:762) INFO: 1epoch:train:10601-10700batch: iter_time=1.242e-04, forward_time=0.208, loss_ctc=287.062, loss_att=244.508, acc=0.159, loss=257.274, backward_time=0.231, grad_norm=66.010, clip=100.000, loss_scale=2.097e+06, optim_step_time=0.047, optim0_lr0=1.775e-05, train_time=0.879
+[ip-10-0-194-127:0/16] 2024-02-26 20:37:51,353 (trainer:762) INFO: 1epoch:train:10701-10800batch: iter_time=1.278e-04, forward_time=0.207, loss_ctc=279.507, loss_att=234.790, acc=0.165, loss=248.205, backward_time=0.230, grad_norm=69.539, clip=100.000, loss_scale=2.097e+06, optim_step_time=0.047, optim0_lr0=1.792e-05, train_time=0.887
+[ip-10-0-194-127:0/16] 2024-02-26 20:39:19,738 (trainer:762) INFO: 1epoch:train:10801-10900batch: iter_time=1.256e-04, forward_time=0.206, loss_ctc=242.369, loss_att=206.944, acc=0.177, loss=217.572, backward_time=0.227, grad_norm=52.606, clip=100.000, loss_scale=2.097e+06, optim_step_time=0.047, optim0_lr0=1.809e-05, train_time=0.884
+[ip-10-0-194-127:0/16] 2024-02-26 20:40:48,838 (trainer:762) INFO: 1epoch:train:10901-11000batch: iter_time=1.228e-04, forward_time=0.206, loss_ctc=269.985, loss_att=229.851, acc=0.166, loss=241.891, backward_time=0.226, grad_norm=76.759, clip=100.000, loss_scale=2.097e+06, optim_step_time=0.047, optim0_lr0=1.825e-05, train_time=0.891
+[ip-10-0-194-127:0/16] 2024-02-26 20:42:16,178 (trainer:762) INFO: 1epoch:train:11001-11100batch: iter_time=1.253e-04, forward_time=0.206, loss_ctc=258.516, loss_att=219.082, acc=0.165, loss=230.912, backward_time=0.224, grad_norm=73.145, clip=100.000, loss_scale=2.097e+06, optim_step_time=0.047, optim0_lr0=1.842e-05, train_time=0.873
+[ip-10-0-194-127:0/16] 2024-02-26 20:43:45,206 (trainer:762) INFO: 1epoch:train:11101-11200batch: iter_time=1.218e-04, forward_time=0.212, loss_ctc=265.786, loss_att=227.889, acc=0.162, loss=239.258, backward_time=0.227, grad_norm=53.494, clip=100.000, loss_scale=2.097e+06, optim_step_time=0.047, optim0_lr0=1.859e-05, train_time=0.890
+[ip-10-0-194-127:0/16] 2024-02-26 20:45:13,786 (trainer:762) INFO: 1epoch:train:11201-11300batch: iter_time=1.222e-04, forward_time=0.209, loss_ctc=328.097, loss_att=271.344, acc=0.153, loss=288.370, backward_time=0.228, grad_norm=71.839, clip=100.000, loss_scale=2.097e+06, optim_step_time=0.047, optim0_lr0=1.875e-05, train_time=0.886
+[ip-10-0-194-127:0/16] 2024-02-26 20:46:42,127 (trainer:762) INFO: 1epoch:train:11301-11400batch: iter_time=1.214e-04, forward_time=0.206, loss_ctc=281.339, loss_att=236.820, acc=0.157, loss=250.176, backward_time=0.225, grad_norm=63.493, clip=100.000, loss_scale=2.097e+06, optim_step_time=0.047, optim0_lr0=1.892e-05, train_time=0.883
+[ip-10-0-194-127:0/16] 2024-02-26 20:48:10,670 (trainer:762) INFO: 1epoch:train:11401-11500batch: iter_time=1.208e-04, forward_time=0.207, loss_ctc=266.062, loss_att=224.188, acc=0.163, loss=236.750, backward_time=0.225, grad_norm=74.829, clip=100.000, loss_scale=2.097e+06, optim_step_time=0.047, optim0_lr0=1.909e-05, train_time=0.885
+[ip-10-0-194-127:0/16] 2024-02-26 20:49:39,328 (trainer:762) INFO: 1epoch:train:11501-11600batch: iter_time=1.218e-04, forward_time=0.208, loss_ctc=257.506, loss_att=220.133, acc=0.168, loss=231.345, backward_time=0.227, grad_norm=67.180, clip=100.000, loss_scale=2.097e+06, optim_step_time=0.047, optim0_lr0=1.925e-05, train_time=0.886
+[ip-10-0-194-127:0/16] 2024-02-26 20:51:09,208 (trainer:762) INFO: 1epoch:train:11601-11700batch: iter_time=1.204e-04, forward_time=0.205, loss_ctc=286.241, loss_att=241.440, acc=0.161, loss=254.880, backward_time=0.229, grad_norm=85.449, clip=100.000, loss_scale=2.097e+06, optim_step_time=0.047, optim0_lr0=1.942e-05, train_time=0.899
+[ip-10-0-194-127:0/16] 2024-02-26 20:52:38,609 (trainer:762) INFO: 1epoch:train:11701-11800batch: iter_time=1.221e-04, forward_time=0.207, loss_ctc=296.663, loss_att=249.582, acc=0.167, loss=263.707, backward_time=0.229, grad_norm=62.021, clip=100.000, loss_scale=2.097e+06, optim_step_time=0.047, optim0_lr0=1.959e-05, train_time=0.894
+[ip-10-0-194-127:0/16] 2024-02-26 20:54:08,239 (trainer:762) INFO: 1epoch:train:11801-11900batch: iter_time=1.213e-04, forward_time=0.206, loss_ctc=310.740, loss_att=258.270, acc=0.158, loss=274.011, backward_time=0.234, grad_norm=71.097, clip=100.000, loss_scale=2.097e+06, optim_step_time=0.047, optim0_lr0=1.975e-05, train_time=0.896
+[ip-10-0-194-127:0/16] 2024-02-26 20:55:37,687 (trainer:762) INFO: 1epoch:train:11901-12000batch: iter_time=1.229e-04, forward_time=0.205, loss_ctc=287.200, loss_att=239.229, acc=0.167, loss=253.621, backward_time=0.226, grad_norm=76.108, clip=100.000, loss_scale=2.097e+06, optim_step_time=0.047, optim0_lr0=1.992e-05, train_time=0.894
+[ip-10-0-194-127:0/16] 2024-02-26 20:57:06,610 (trainer:762) INFO: 1epoch:train:12001-12100batch: iter_time=1.202e-04, forward_time=0.210, loss_ctc=255.147, loss_att=215.622, acc=0.170, loss=227.479, backward_time=0.226, grad_norm=56.142, clip=100.000, loss_scale=4.194e+06, optim_step_time=0.047, optim0_lr0=2.009e-05, train_time=0.889
+[ip-10-0-194-127:0/16] 2024-02-26 20:58:35,163 (trainer:762) INFO: 1epoch:train:12101-12200batch: iter_time=1.200e-04, forward_time=0.207, loss_ctc=280.781, loss_att=235.336, acc=0.164, loss=248.969, backward_time=0.225, grad_norm=59.149, clip=100.000, loss_scale=4.194e+06, optim_step_time=0.047, optim0_lr0=2.025e-05, train_time=0.885
+[ip-10-0-194-127:0/16] 2024-02-26 21:00:04,869 (trainer:762) INFO: 1epoch:train:12201-12300batch: iter_time=1.227e-04, forward_time=0.206, loss_ctc=270.286, loss_att=225.498, acc=0.166, loss=238.934, backward_time=0.237, grad_norm=73.088, clip=100.000, loss_scale=4.194e+06, optim_step_time=0.047, optim0_lr0=2.042e-05, train_time=0.897
+[ip-10-0-194-127:0/16] 2024-02-26 21:01:34,534 (trainer:762) INFO: 1epoch:train:12301-12400batch: iter_time=1.308e-04, forward_time=0.206, loss_ctc=293.157, loss_att=244.544, acc=0.161, loss=259.128, backward_time=0.231, grad_norm=73.996, clip=100.000, loss_scale=4.194e+06, optim_step_time=0.047, optim0_lr0=2.059e-05, train_time=0.896
+[ip-10-0-194-127:0/16] 2024-02-26 21:03:05,237 (trainer:762) INFO: 1epoch:train:12401-12500batch: iter_time=1.234e-04, forward_time=0.204, loss_ctc=265.070, loss_att=222.643, acc=0.162, loss=235.371, backward_time=0.231, grad_norm=65.333, clip=100.000, loss_scale=4.194e+06, optim_step_time=0.047, optim0_lr0=2.075e-05, train_time=0.907
+[ip-10-0-194-127:0/16] 2024-02-26 21:04:35,638 (trainer:762) INFO: 1epoch:train:12501-12600batch: iter_time=1.200e-04, forward_time=0.203, loss_ctc=236.632, loss_att=199.922, acc=0.174, loss=210.935, backward_time=0.228, grad_norm=62.036, clip=100.000, loss_scale=4.194e+06, optim_step_time=0.047, optim0_lr0=2.092e-05, train_time=0.904
+[ip-10-0-194-127:0/16] 2024-02-26 21:06:05,135 (trainer:762) INFO: 1epoch:train:12601-12700batch: iter_time=1.306e-04, forward_time=0.207, loss_ctc=297.039, loss_att=246.556, acc=0.163, loss=261.701, backward_time=0.229, grad_norm=74.163, clip=100.000, loss_scale=4.194e+06, optim_step_time=0.047, optim0_lr0=2.109e-05, train_time=0.895
+[ip-10-0-194-127:0/16] 2024-02-26 21:07:34,700 (trainer:762) INFO: 1epoch:train:12701-12800batch: iter_time=1.216e-04, forward_time=0.206, loss_ctc=296.309, loss_att=245.351, acc=0.165, loss=260.638, backward_time=0.226, grad_norm=75.679, clip=100.000, loss_scale=4.194e+06, optim_step_time=0.047, optim0_lr0=2.125e-05, train_time=0.895
+[ip-10-0-194-127:0/16] 2024-02-26 21:09:03,857 (trainer:762) INFO: 1epoch:train:12801-12900batch: iter_time=1.219e-04, forward_time=0.207, loss_ctc=271.767, loss_att=228.559, acc=0.166, loss=241.522, backward_time=0.227, grad_norm=79.418, clip=100.000, loss_scale=4.194e+06, optim_step_time=0.047, optim0_lr0=2.142e-05, train_time=0.891
+[ip-10-0-194-127:0/16] 2024-02-26 21:10:33,239 (trainer:762) INFO: 1epoch:train:12901-13000batch: iter_time=1.241e-04, forward_time=0.207, loss_ctc=285.176, loss_att=238.270, acc=0.161, loss=252.342, backward_time=0.229, grad_norm=66.794, clip=100.000, loss_scale=4.194e+06, optim_step_time=0.047, optim0_lr0=2.159e-05, train_time=0.894
+[ip-10-0-194-127:0/16] 2024-02-26 21:12:02,035 (trainer:762) INFO: 1epoch:train:13001-13100batch: iter_time=1.220e-04, forward_time=0.213, loss_ctc=286.943, loss_att=239.227, acc=0.166, loss=253.542, backward_time=0.233, grad_norm=71.261, clip=100.000, loss_scale=4.194e+06, optim_step_time=0.047, optim0_lr0=2.175e-05, train_time=0.888
+[ip-10-0-194-127:0/16] 2024-02-26 21:13:29,414 (trainer:762) INFO: 1epoch:train:13101-13200batch: iter_time=1.234e-04, forward_time=0.208, loss_ctc=270.367, loss_att=225.939, acc=0.175, loss=239.267, backward_time=0.235, grad_norm=63.029, clip=100.000, loss_scale=4.194e+06, optim_step_time=0.047, optim0_lr0=2.192e-05, train_time=0.874
+[ip-10-0-194-127:0/16] 2024-02-26 21:14:56,551 (trainer:762) INFO: 1epoch:train:13201-13300batch: iter_time=1.234e-04, forward_time=0.209, loss_ctc=294.938, loss_att=243.522, acc=0.167, loss=258.947, backward_time=0.232, grad_norm=73.530, clip=100.000, loss_scale=4.194e+06, optim_step_time=0.047, optim0_lr0=2.209e-05, train_time=0.871
+[ip-10-0-194-127:0/16] 2024-02-26 21:16:23,770 (trainer:762) INFO: 1epoch:train:13301-13400batch: iter_time=1.288e-04, forward_time=0.208, loss_ctc=301.262, loss_att=249.999, acc=0.164, loss=265.378, backward_time=0.230, grad_norm=83.931, clip=100.000, loss_scale=4.194e+06, optim_step_time=0.047, optim0_lr0=2.225e-05, train_time=0.872
+[ip-10-0-194-127:0/16] 2024-02-26 21:17:53,861 (trainer:762) INFO: 1epoch:train:13401-13500batch: iter_time=1.288e-04, forward_time=0.206, loss_ctc=284.965, loss_att=234.533, acc=0.167, loss=249.663, backward_time=0.230, grad_norm=70.857, clip=100.000, loss_scale=4.194e+06, optim_step_time=0.047, optim0_lr0=2.242e-05, train_time=0.901
+[ip-10-0-194-127:0/16] 2024-02-26 21:19:23,429 (trainer:762) INFO: 1epoch:train:13501-13600batch: iter_time=1.323e-04, forward_time=0.205, loss_ctc=262.577, loss_att=221.048, acc=0.173, loss=233.507, backward_time=0.227, grad_norm=65.231, clip=100.000, loss_scale=4.194e+06, optim_step_time=0.047, optim0_lr0=2.259e-05, train_time=0.895
+[ip-10-0-194-127:0/16] 2024-02-26 21:20:52,697 (trainer:762) INFO: 1epoch:train:13601-13700batch: iter_time=1.345e-04, forward_time=0.205, loss_ctc=254.896, loss_att=211.852, acc=0.178, loss=224.766, backward_time=0.228, grad_norm=57.879, clip=100.000, loss_scale=4.194e+06, optim_step_time=0.047, optim0_lr0=2.275e-05, train_time=0.892
+[ip-10-0-194-127:0/16] 2024-02-26 21:22:22,695 (trainer:762) INFO: 1epoch:train:13701-13800batch: iter_time=1.226e-04, forward_time=0.206, loss_ctc=294.940, loss_att=244.213, acc=0.161, loss=259.431, backward_time=0.228, grad_norm=60.283, clip=100.000, loss_scale=4.194e+06, optim_step_time=0.047, optim0_lr0=2.292e-05, train_time=0.900
+[ip-10-0-194-127:0/16] 2024-02-26 21:23:51,163 (trainer:762) INFO: 1epoch:train:13801-13900batch: iter_time=1.217e-04, forward_time=0.206, loss_ctc=250.450, loss_att=210.920, acc=0.175, loss=222.779, backward_time=0.226, grad_norm=70.338, clip=100.000, loss_scale=4.194e+06, optim_step_time=0.047, optim0_lr0=2.309e-05, train_time=0.884
+[ip-10-0-194-127:0/16] 2024-02-26 21:25:18,807 (trainer:762) INFO: 1epoch:train:13901-14000batch: iter_time=1.204e-04, forward_time=0.207, loss_ctc=256.129, loss_att=214.181, acc=0.176, loss=226.766, backward_time=0.231, grad_norm=58.162, clip=100.000, loss_scale=4.194e+06, optim_step_time=0.047, optim0_lr0=2.325e-05, train_time=0.876
+[ip-10-0-194-127:0/16] 2024-02-26 21:26:46,376 (trainer:762) INFO: 1epoch:train:14001-14100batch: iter_time=1.153e-04, forward_time=0.213, loss_ctc=260.161, loss_att=220.731, acc=0.175, loss=232.560, backward_time=0.232, grad_norm=56.751, clip=100.000, loss_scale=8.389e+06, optim_step_time=0.047, optim0_lr0=2.342e-05, train_time=0.875
+[ip-10-0-194-127:0/16] 2024-02-26 21:28:12,398 (trainer:762) INFO: 1epoch:train:14101-14200batch: iter_time=1.262e-04, forward_time=0.206, loss_ctc=230.295, loss_att=194.488, acc=0.179, loss=205.230, backward_time=0.226, grad_norm=69.696, clip=100.000, loss_scale=8.389e+06, optim_step_time=0.047, optim0_lr0=2.359e-05, train_time=0.860
+[ip-10-0-194-127:0/16] 2024-02-26 21:29:39,642 (trainer:762) INFO: 1epoch:train:14201-14300batch: iter_time=1.311e-04, forward_time=0.208, loss_ctc=279.281, loss_att=233.178, acc=0.171, loss=247.009, backward_time=0.231, grad_norm=78.201, clip=100.000, loss_scale=8.389e+06, optim_step_time=0.048, optim0_lr0=2.375e-05, train_time=0.872
+[ip-10-0-194-127:0/16] 2024-02-26 21:31:07,058 (trainer:762) INFO: 1epoch:train:14301-14400batch: iter_time=1.209e-04, forward_time=0.208, loss_ctc=248.757, loss_att=207.664, acc=0.177, loss=219.992, backward_time=0.235, grad_norm=77.868, clip=100.000, loss_scale=8.389e+06, optim_step_time=0.047, optim0_lr0=2.392e-05, train_time=0.874
+[ip-10-0-194-127:0/16] 2024-02-26 21:32:37,153 (trainer:762) INFO: 1epoch:train:14401-14500batch: iter_time=1.303e-04, forward_time=0.206, loss_ctc=264.503, loss_att=225.138, acc=0.178, loss=236.947, backward_time=0.231, grad_norm=59.921, clip=100.000, loss_scale=8.389e+06, optim_step_time=0.047, optim0_lr0=2.409e-05, train_time=0.901
+[ip-10-0-194-127:0/16] 2024-02-26 21:34:07,078 (trainer:762) INFO: 1epoch:train:14501-14600batch: iter_time=1.272e-04, forward_time=0.206, loss_ctc=265.304, loss_att=222.492, acc=0.173, loss=235.335, backward_time=0.228, grad_norm=57.736, clip=100.000, loss_scale=8.389e+06, optim_step_time=0.047, optim0_lr0=2.425e-05, train_time=0.899
+[ip-10-0-194-127:0/16] 2024-02-26 21:35:35,682 (trainer:762) INFO: 1epoch:train:14601-14700batch: iter_time=1.268e-04, forward_time=0.205, loss_ctc=254.661, loss_att=213.470, acc=0.181, loss=225.828, backward_time=0.230, grad_norm=67.707, clip=100.000, loss_scale=8.389e+06, optim_step_time=0.047, optim0_lr0=2.442e-05, train_time=0.886
+[ip-10-0-194-127:0/16] 2024-02-26 21:37:03,796 (trainer:762) INFO: 1epoch:train:14701-14800batch: iter_time=1.240e-04, forward_time=0.208, loss_ctc=284.194, loss_att=237.421, acc=0.172, loss=251.453, backward_time=0.229, grad_norm=60.057, clip=100.000, loss_scale=8.389e+06, optim_step_time=0.047, optim0_lr0=2.459e-05, train_time=0.881
+[ip-10-0-194-127:0/16] 2024-02-26 21:38:32,415 (trainer:762) INFO: 1epoch:train:14801-14900batch: iter_time=1.207e-04, forward_time=0.207, loss_ctc=286.827, loss_att=237.332, acc=0.165, loss=252.180, backward_time=0.228, grad_norm=63.796, clip=100.000, loss_scale=8.389e+06, optim_step_time=0.048, optim0_lr0=2.475e-05, train_time=0.886
+[ip-10-0-194-127:0/16] 2024-02-26 21:39:59,409 (trainer:762) INFO: 1epoch:train:14901-15000batch: iter_time=1.238e-04, forward_time=0.208, loss_ctc=269.988, loss_att=224.008, acc=0.176, loss=237.802, backward_time=0.227, grad_norm=72.390, clip=100.000, loss_scale=8.389e+06, optim_step_time=0.047, optim0_lr0=2.492e-05, train_time=0.870
+[ip-10-0-194-127:0/16] 2024-02-26 21:54:20,516 (trainer:361) INFO: 1epoch results: [train] iter_time=0.062, forward_time=0.207, loss_ctc=361.118, loss_att=264.223, acc=0.130, loss=293.291, backward_time=0.229, grad_norm=307.142, clip=100.000, loss_scale=1.669e+06, optim_step_time=0.047, optim0_lr0=1.250e-05, train_time=0.944, time=3 hours, 56 minutes and 21.96 seconds, total_count=15000, gpu_max_cached_mem_GB=35.193, [valid] loss_ctc=214.855, cer_ctc=0.963, loss_att=189.728, acc=0.145, cer=0.728, wer=1.000, loss=197.266, time=14 minutes and 6.91 seconds, total_count=2133, gpu_max_cached_mem_GB=35.193
+[ip-10-0-194-127:0/16] 2024-02-26 21:54:22,703 (font_manager:1547) INFO: generated new fontManager
+[ip-10-0-194-127:0/16] 2024-02-26 21:54:32,529 (trainer:416) INFO: The best model has been updated: valid.acc, valid.total_count
+[ip-10-0-194-127:0/16] 2024-02-26 21:54:32,530 (trainer:290) INFO: 2/45epoch started. Estimated time to finish: 1 week, 15 hours and 50 minutes
+[ip-10-0-194-127:0/16] 2024-02-26 21:54:32,542 (multiple_iter_factory:32) INFO: Building 0th iter-factory...
+[ip-10-0-194-127:0/16] 2024-02-26 21:55:08,460 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-194-127:0/16] 2024-02-26 21:55:16,256 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"}
+ preprocess: )
+[ip-10-0-194-127:0/16] 2024-02-26 21:55:16,257 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2,
+[ip-10-0-194-127:0/16] 2024-02-26 21:55:16,261 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+srun: Job step aborted: Waiting up to 32 seconds for job step to finish.
+slurmstepd: error: *** REASON: burst_buffer/lua: Stage-out in progress ***
+slurmstepd: error: *** STEP 81044.0 ON ip-10-0-194-127 CANCELLED AT 2024-02-26T21:58:53 ***
+slurmstepd: error: *** REASON: burst_buffer/lua: Stage-out in progress ***
diff --git a/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/train.log b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/train.log
new file mode 100644
index 0000000000000000000000000000000000000000..70d17cf01f6b3e65afd5b3441b0ad4bb1f2d8c01
--- /dev/null
+++ b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/train.log
@@ -0,0 +1,4364 @@
+# Running on ip-10-0-216-33
+# Started at Mon Feb 26 22:03:32 UTC 2024
+# SLURMD_NODENAME=ip-10-0-216-33
+# SLURM_CLUSTER_NAME=external
+# SLURM_CONF=/opt/slurm/etc/slurm.conf
+# SLURM_CPUS_ON_NODE=80
+# SLURM_EXPORT_ENV=PATH
+# SLURM_GET_USER_ENV=1
+# SLURM_GPUS_ON_NODE=8
+# SLURM_GPUS_PER_NODE=8
+# SLURM_GTIDS=0
+# SLURM_JOBID=81480
+# SLURM_JOB_ACCOUNT=wavlab
+# SLURM_JOB_CPUS_PER_NODE='80(x2)'
+# SLURM_JOB_END_TIME=1709762608
+# SLURM_JOB_GID=1879800513
+# SLURM_JOB_GPUS=0,1,2,3,4,5,6,7
+# SLURM_JOB_ID=81480
+# SLURM_JOB_NAME=exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/train.log
+# SLURM_JOB_NODELIST=ip-10-0-216-33,ip-10-0-224-187
+# SLURM_JOB_NUM_NODES=2
+# SLURM_JOB_PARTITION=a40x
+# SLURM_JOB_QOS=normal
+# SLURM_JOB_START_TIME=1708985007
+# SLURM_JOB_UID=1879802134
+# SLURM_JOB_USER=pengyf
+# SLURM_LOCALID=0
+# SLURM_MEM_PER_CPU=10000
+# SLURM_NNODES=2
+# SLURM_NODEID=0
+# SLURM_NODELIST=ip-10-0-216-33,ip-10-0-224-187
+# SLURM_NODE_ALIASES='(null)'
+# SLURM_OPEN_MODE=a
+# SLURM_PRIO_PROCESS=0
+# SLURM_PROCID=0
+# SLURM_SCRIPT_CONTEXT=prolog_task
+# SLURM_SUBMIT_DIR=/weka/home-pengyf/espnet-owsm-train/egs2/owsm_v3.1_licensefree_nosa/s2t1
+# SLURM_SUBMIT_HOST=ip-172-17-22-214
+# SLURM_TASKS_PER_NODE='80(x2)'
+# SLURM_TASK_PID=311799
+# SLURM_TOPOLOGY_ADDR=ip-10-0-216-33
+# SLURM_TOPOLOGY_ADDR_PATTERN=node
+# SLURM_WORKING_CLUSTER=external:ip-172-17-55-206:6817:9984:109
+# srun --export=ALL python3 -m espnet2.bin.s2t_train --use_preprocessor true --bpemodel data/token_list/bpe_unigram50000/bpe.model --token_type bpe --token_list data/token_list/bpe_unigram50000/tokens.txt --non_linguistic_symbols none --cleaner none --g2p none --valid_data_path_and_name_and_type dump/raw/dev/wav.scp,speech,kaldi_ark --valid_shape_file exp/s2t_stats_raw_bpe50000/valid/speech_shape --resume true --fold_length 80000 --output_dir exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000 --config conf/train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn.yaml --frontend_conf fs=16k --normalize=global_mvn --normalize_conf stats_file=exp/s2t_stats_raw_bpe50000/train/feats_stats.npz --train_data_path_and_name_and_type exp/s2t_stats_raw_bpe50000/splits3/wav.scp,speech,kaldi_ark --train_shape_file exp/s2t_stats_raw_bpe50000/splits3/speech_shape --fold_length 150 --train_data_path_and_name_and_type exp/s2t_stats_raw_bpe50000/splits3/text.prev,text_prev,text --train_shape_file exp/s2t_stats_raw_bpe50000/splits3/text_prev_shape.bpe --fold_length 150 --train_data_path_and_name_and_type exp/s2t_stats_raw_bpe50000/splits3/text.ctc,text_ctc,text --train_shape_file exp/s2t_stats_raw_bpe50000/splits3/text_ctc_shape.bpe --fold_length 150 --train_data_path_and_name_and_type exp/s2t_stats_raw_bpe50000/splits3/text,text,text --train_shape_file exp/s2t_stats_raw_bpe50000/splits3/text_shape.bpe --multiple_iterator true --valid_data_path_and_name_and_type dump/raw/dev/text.prev,text_prev,text --valid_shape_file exp/s2t_stats_raw_bpe50000/valid/text_prev_shape.bpe --valid_data_path_and_name_and_type dump/raw/dev/text.ctc,text_ctc,text --valid_shape_file exp/s2t_stats_raw_bpe50000/valid/text_ctc_shape.bpe --valid_data_path_and_name_and_type dump/raw/dev/text,text,text --valid_shape_file exp/s2t_stats_raw_bpe50000/valid/text_shape.bpe --ngpu 8 --multiprocessing_distributed true --dist_launcher slurm --dist_init_method file:///weka/home-pengyf/espnet-owsm-train/egs2/owsm_v3.1_licensefree_nosa/s2t1/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/.dist_init_d681a8ba-15d8-4a99-88f9-39beba562b1a
+/weka/home-pengyf/espnet-owsm-train/tools/venv/bin/python3 /weka/home-pengyf/espnet-owsm-train/espnet2/bin/s2t_train.py --use_preprocessor true --bpemodel data/token_list/bpe_unigram50000/bpe.model --token_type bpe --token_list data/token_list/bpe_unigram50000/tokens.txt --non_linguistic_symbols none --cleaner none --g2p none --valid_data_path_and_name_and_type dump/raw/dev/wav.scp,speech,kaldi_ark --valid_shape_file exp/s2t_stats_raw_bpe50000/valid/speech_shape --resume true --fold_length 80000 --output_dir exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000 --config conf/train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn.yaml --frontend_conf fs=16k --normalize=global_mvn --normalize_conf stats_file=exp/s2t_stats_raw_bpe50000/train/feats_stats.npz --train_data_path_and_name_and_type exp/s2t_stats_raw_bpe50000/splits3/wav.scp,speech,kaldi_ark --train_shape_file exp/s2t_stats_raw_bpe50000/splits3/speech_shape --fold_length 150 --train_data_path_and_name_and_type exp/s2t_stats_raw_bpe50000/splits3/text.prev,text_prev,text --train_shape_file exp/s2t_stats_raw_bpe50000/splits3/text_prev_shape.bpe --fold_length 150 --train_data_path_and_name_and_type exp/s2t_stats_raw_bpe50000/splits3/text.ctc,text_ctc,text --train_shape_file exp/s2t_stats_raw_bpe50000/splits3/text_ctc_shape.bpe --fold_length 150 --train_data_path_and_name_and_type exp/s2t_stats_raw_bpe50000/splits3/text,text,text --train_shape_file exp/s2t_stats_raw_bpe50000/splits3/text_shape.bpe --multiple_iterator true --valid_data_path_and_name_and_type dump/raw/dev/text.prev,text_prev,text --valid_shape_file exp/s2t_stats_raw_bpe50000/valid/text_prev_shape.bpe --valid_data_path_and_name_and_type dump/raw/dev/text.ctc,text_ctc,text --valid_shape_file exp/s2t_stats_raw_bpe50000/valid/text_ctc_shape.bpe --valid_data_path_and_name_and_type dump/raw/dev/text,text,text --valid_shape_file exp/s2t_stats_raw_bpe50000/valid/text_shape.bpe --ngpu 8 --multiprocessing_distributed true --dist_launcher slurm --dist_init_method file:///weka/home-pengyf/espnet-owsm-train/egs2/owsm_v3.1_licensefree_nosa/s2t1/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/.dist_init_d681a8ba-15d8-4a99-88f9-39beba562b1a
+/weka/home-pengyf/espnet-owsm-train/tools/venv/bin/python3 /weka/home-pengyf/espnet-owsm-train/espnet2/bin/s2t_train.py --use_preprocessor true --bpemodel data/token_list/bpe_unigram50000/bpe.model --token_type bpe --token_list data/token_list/bpe_unigram50000/tokens.txt --non_linguistic_symbols none --cleaner none --g2p none --valid_data_path_and_name_and_type dump/raw/dev/wav.scp,speech,kaldi_ark --valid_shape_file exp/s2t_stats_raw_bpe50000/valid/speech_shape --resume true --fold_length 80000 --output_dir exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000 --config conf/train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn.yaml --frontend_conf fs=16k --normalize=global_mvn --normalize_conf stats_file=exp/s2t_stats_raw_bpe50000/train/feats_stats.npz --train_data_path_and_name_and_type exp/s2t_stats_raw_bpe50000/splits3/wav.scp,speech,kaldi_ark --train_shape_file exp/s2t_stats_raw_bpe50000/splits3/speech_shape --fold_length 150 --train_data_path_and_name_and_type exp/s2t_stats_raw_bpe50000/splits3/text.prev,text_prev,text --train_shape_file exp/s2t_stats_raw_bpe50000/splits3/text_prev_shape.bpe --fold_length 150 --train_data_path_and_name_and_type exp/s2t_stats_raw_bpe50000/splits3/text.ctc,text_ctc,text --train_shape_file exp/s2t_stats_raw_bpe50000/splits3/text_ctc_shape.bpe --fold_length 150 --train_data_path_and_name_and_type exp/s2t_stats_raw_bpe50000/splits3/text,text,text --train_shape_file exp/s2t_stats_raw_bpe50000/splits3/text_shape.bpe --multiple_iterator true --valid_data_path_and_name_and_type dump/raw/dev/text.prev,text_prev,text --valid_shape_file exp/s2t_stats_raw_bpe50000/valid/text_prev_shape.bpe --valid_data_path_and_name_and_type dump/raw/dev/text.ctc,text_ctc,text --valid_shape_file exp/s2t_stats_raw_bpe50000/valid/text_ctc_shape.bpe --valid_data_path_and_name_and_type dump/raw/dev/text,text,text --valid_shape_file exp/s2t_stats_raw_bpe50000/valid/text_shape.bpe --ngpu 8 --multiprocessing_distributed true --dist_launcher slurm --dist_init_method file:///weka/home-pengyf/espnet-owsm-train/egs2/owsm_v3.1_licensefree_nosa/s2t1/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/.dist_init_d681a8ba-15d8-4a99-88f9-39beba562b1a
+[ip-10-0-216-33:0/16] 2024-02-26 22:03:44,136 (distributed_c10d:319) INFO: Added key: store_based_barrier_key:1 to store for rank: 0
+[ip-10-0-216-33:0/16] 2024-02-26 22:03:50,057 (distributed_c10d:353) INFO: Rank 0: Completed store-based barrier for key:store_based_barrier_key:1 with 16 nodes.
+[ip-10-0-216-33:0/16] 2024-02-26 22:03:50,099 (s2t:464) INFO: Vocabulary size: 50002
+[ip-10-0-216-33:0/16] 2024-02-26 22:03:55,322 (abs_task:1271) INFO: pytorch.version=1.13.1+cu117, cuda.available=True, cudnn.version=8500, cudnn.benchmark=False, cudnn.deterministic=True
+[ip-10-0-216-33:0/16] 2024-02-26 22:03:55,329 (abs_task:1272) INFO: Model structure:
+ESPnetS2TModel(
+  (frontend): DefaultFrontend(
+    (stft): Stft(n_fft=512, win_length=400, hop_length=160, center=True, normalized=False, onesided=True)
+    (frontend): Frontend()
+    (logmel): LogMel(sr=16000, n_fft=512, n_mels=80, fmin=0, fmax=8000.0, htk=False)
+  )
+  (specaug): SpecAug(
+    (freq_mask): MaskAlongAxis(mask_width_range=[0, 27], num_mask=2, axis=freq)
+    (time_mask): MaskAlongAxisVariableMaxWidth(mask_width_ratio_range=[0.0, 0.05], num_mask=4, axis=time)
+  )
+  (normalize): GlobalMVN(stats_file=exp/s2t_stats_raw_bpe50000/train/feats_stats.npz, norm_means=True, norm_vars=True)
+  (encoder): EBranchformerEncoder(
+    (embed): Conv2dSubsampling(
+      (conv): Sequential(
+        (0): Conv2d(1, 768, kernel_size=(3, 3), stride=(2, 2))
+        (1): ReLU()
+        (2): Conv2d(768, 768, kernel_size=(3, 3), stride=(2, 2))
+        (3): ReLU()
+      )
+      (out): Sequential(
+        (0): Linear(in_features=14592, out_features=768, bias=True)
+        (1): PositionalEncoding(
+          (dropout): Dropout(p=0.1, inplace=False)
+        )
+      )
+    )
+    (encoders): MultiSequential(
+      (0): EBranchformerEncoderLayer(
+        (attn): MultiHeadedAttention(
+          (linear_q): Linear(in_features=768, out_features=768, bias=True)
+          (linear_k): Linear(in_features=768, out_features=768, bias=True)
+          (linear_v): Linear(in_features=768, out_features=768, bias=True)
+          (linear_out): Linear(in_features=768, out_features=768, bias=True)
+          (dropout): Identity()
+          (q_norm): Identity()
+          (k_norm): Identity()
+        )
+        (cgmlp): ConvolutionalGatingMLP(
+          (channel_proj1): Sequential(
+            (0): Linear(in_features=768, out_features=3072, bias=True)
+            (1): GELU(approximate='none')
+          )
+          (csgu): ConvolutionalSpatialGatingUnit(
+            (norm): LayerNorm((1536,), eps=1e-12, elementwise_affine=True)
+            (conv): Conv1d(1536, 1536, kernel_size=(31,), stride=(1,), padding=(15,), groups=1536)
+            (act): Identity()
+            (dropout): Dropout(p=0.1, inplace=False)
+          )
+          (channel_proj2): Linear(in_features=1536, out_features=768, bias=True)
+        )
+        (feed_forward): PositionwiseFeedForward(
+          (w_1): Linear(in_features=768, out_features=3072, bias=True)
+          (w_2): Linear(in_features=3072, out_features=768, bias=True)
+          (dropout): Dropout(p=0.1, inplace=False)
+          (activation): Swish()
+        )
+        (feed_forward_macaron): PositionwiseFeedForward(
+          (w_1): Linear(in_features=768, out_features=3072, bias=True)
+          (w_2): Linear(in_features=3072, out_features=768, bias=True)
+          (dropout): Dropout(p=0.1, inplace=False)
+          (activation): Swish()
+        )
+        (norm_ff): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_ff_macaron): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_mha): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_mlp): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_final): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (dropout): Dropout(p=0.1, inplace=False)
+        (depthwise_conv_fusion): Conv1d(1536, 1536, kernel_size=(31,), stride=(1,), padding=(15,), groups=1536)
+        (merge_proj): Linear(in_features=1536, out_features=768, bias=True)
+      )
+      (1): EBranchformerEncoderLayer(
+        (attn): MultiHeadedAttention(
+          (linear_q): Linear(in_features=768, out_features=768, bias=True)
+          (linear_k): Linear(in_features=768, out_features=768, bias=True)
+          (linear_v): Linear(in_features=768, out_features=768, bias=True)
+          (linear_out): Linear(in_features=768, out_features=768, bias=True)
+          (dropout): Identity()
+          (q_norm): Identity()
+          (k_norm): Identity()
+        )
+        (cgmlp): ConvolutionalGatingMLP(
+          (channel_proj1): Sequential(
+            (0): Linear(in_features=768, out_features=3072, bias=True)
+            (1): GELU(approximate='none')
+          )
+          (csgu): ConvolutionalSpatialGatingUnit(
+            (norm): LayerNorm((1536,), eps=1e-12, elementwise_affine=True)
+            (conv): Conv1d(1536, 1536, kernel_size=(31,), stride=(1,), padding=(15,), groups=1536)
+            (act): Identity()
+            (dropout): Dropout(p=0.1, inplace=False)
+          )
+          (channel_proj2): Linear(in_features=1536, out_features=768, bias=True)
+        )
+        (feed_forward): PositionwiseFeedForward(
+          (w_1): Linear(in_features=768, out_features=3072, bias=True)
+          (w_2): Linear(in_features=3072, out_features=768, bias=True)
+          (dropout): Dropout(p=0.1, inplace=False)
+          (activation): Swish()
+        )
+        (feed_forward_macaron): PositionwiseFeedForward(
+          (w_1): Linear(in_features=768, out_features=3072, bias=True)
+          (w_2): Linear(in_features=3072, out_features=768, bias=True)
+          (dropout): Dropout(p=0.1, inplace=False)
+          (activation): Swish()
+        )
+        (norm_ff): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_ff_macaron): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_mha): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_mlp): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_final): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (dropout): Dropout(p=0.1, inplace=False)
+        (depthwise_conv_fusion): Conv1d(1536, 1536, kernel_size=(31,), stride=(1,), padding=(15,), groups=1536)
+        (merge_proj): Linear(in_features=1536, out_features=768, bias=True)
+      )
+      (2): EBranchformerEncoderLayer(
+        (attn): MultiHeadedAttention(
+          (linear_q): Linear(in_features=768, out_features=768, bias=True)
+          (linear_k): Linear(in_features=768, out_features=768, bias=True)
+          (linear_v): Linear(in_features=768, out_features=768, bias=True)
+          (linear_out): Linear(in_features=768, out_features=768, bias=True)
+          (dropout): Identity()
+          (q_norm): Identity()
+          (k_norm): Identity()
+        )
+        (cgmlp): ConvolutionalGatingMLP(
+          (channel_proj1): Sequential(
+            (0): Linear(in_features=768, out_features=3072, bias=True)
+            (1): GELU(approximate='none')
+          )
+          (csgu): ConvolutionalSpatialGatingUnit(
+            (norm): LayerNorm((1536,), eps=1e-12, elementwise_affine=True)
+            (conv): Conv1d(1536, 1536, kernel_size=(31,), stride=(1,), padding=(15,), groups=1536)
+            (act): Identity()
+            (dropout): Dropout(p=0.1, inplace=False)
+          )
+          (channel_proj2): Linear(in_features=1536, out_features=768, bias=True)
+        )
+        (feed_forward): PositionwiseFeedForward(
+          (w_1): Linear(in_features=768, out_features=3072, bias=True)
+          (w_2): Linear(in_features=3072, out_features=768, bias=True)
+          (dropout): Dropout(p=0.1, inplace=False)
+          (activation): Swish()
+        )
+        (feed_forward_macaron): PositionwiseFeedForward(
+          (w_1): Linear(in_features=768, out_features=3072, bias=True)
+          (w_2): Linear(in_features=3072, out_features=768, bias=True)
+          (dropout): Dropout(p=0.1, inplace=False)
+          (activation): Swish()
+        )
+        (norm_ff): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_ff_macaron): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_mha): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_mlp): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_final): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (dropout): Dropout(p=0.1, inplace=False)
+        (depthwise_conv_fusion): Conv1d(1536, 1536, kernel_size=(31,), stride=(1,), padding=(15,), groups=1536)
+        (merge_proj): Linear(in_features=1536, out_features=768, bias=True)
+      )
+      (3): EBranchformerEncoderLayer(
+        (attn): MultiHeadedAttention(
+          (linear_q): Linear(in_features=768, out_features=768, bias=True)
+          (linear_k): Linear(in_features=768, out_features=768, bias=True)
+          (linear_v): Linear(in_features=768, out_features=768, bias=True)
+          (linear_out): Linear(in_features=768, out_features=768, bias=True)
+          (dropout): Identity()
+          (q_norm): Identity()
+          (k_norm): Identity()
+        )
+        (cgmlp): ConvolutionalGatingMLP(
+          (channel_proj1): Sequential(
+            (0): Linear(in_features=768, out_features=3072, bias=True)
+            (1): GELU(approximate='none')
+          )
+          (csgu): ConvolutionalSpatialGatingUnit(
+            (norm): LayerNorm((1536,), eps=1e-12, elementwise_affine=True)
+            (conv): Conv1d(1536, 1536, kernel_size=(31,), stride=(1,), padding=(15,), groups=1536)
+            (act): Identity()
+            (dropout): Dropout(p=0.1, inplace=False)
+          )
+          (channel_proj2): Linear(in_features=1536, out_features=768, bias=True)
+        )
+        (feed_forward): PositionwiseFeedForward(
+          (w_1): Linear(in_features=768, out_features=3072, bias=True)
+          (w_2): Linear(in_features=3072, out_features=768, bias=True)
+          (dropout): Dropout(p=0.1, inplace=False)
+          (activation): Swish()
+        )
+        (feed_forward_macaron): PositionwiseFeedForward(
+          (w_1): Linear(in_features=768, out_features=3072, bias=True)
+          (w_2): Linear(in_features=3072, out_features=768, bias=True)
+          (dropout): Dropout(p=0.1, inplace=False)
+          (activation): Swish()
+        )
+        (norm_ff): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_ff_macaron): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_mha): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_mlp): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_final): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (dropout): Dropout(p=0.1, inplace=False)
+        (depthwise_conv_fusion): Conv1d(1536, 1536, kernel_size=(31,), stride=(1,), padding=(15,), groups=1536)
+        (merge_proj): Linear(in_features=1536, out_features=768, bias=True)
+      )
+      (4): EBranchformerEncoderLayer(
+        (attn): MultiHeadedAttention(
+          (linear_q): Linear(in_features=768, out_features=768, bias=True)
+          (linear_k): Linear(in_features=768, out_features=768, bias=True)
+          (linear_v): Linear(in_features=768, out_features=768, bias=True)
+          (linear_out): Linear(in_features=768, out_features=768, bias=True)
+          (dropout): Identity()
+          (q_norm): Identity()
+          (k_norm): Identity()
+        )
+        (cgmlp): ConvolutionalGatingMLP(
+          (channel_proj1): Sequential(
+            (0): Linear(in_features=768, out_features=3072, bias=True)
+            (1): GELU(approximate='none')
+          )
+          (csgu): ConvolutionalSpatialGatingUnit(
+            (norm): LayerNorm((1536,), eps=1e-12, elementwise_affine=True)
+            (conv): Conv1d(1536, 1536, kernel_size=(31,), stride=(1,), padding=(15,), groups=1536)
+            (act): Identity()
+            (dropout): Dropout(p=0.1, inplace=False)
+          )
+          (channel_proj2): Linear(in_features=1536, out_features=768, bias=True)
+        )
+        (feed_forward): PositionwiseFeedForward(
+          (w_1): Linear(in_features=768, out_features=3072, bias=True)
+          (w_2): Linear(in_features=3072, out_features=768, bias=True)
+          (dropout): Dropout(p=0.1, inplace=False)
+          (activation): Swish()
+        )
+        (feed_forward_macaron): PositionwiseFeedForward(
+          (w_1): Linear(in_features=768, out_features=3072, bias=True)
+          (w_2): Linear(in_features=3072, out_features=768, bias=True)
+          (dropout): Dropout(p=0.1, inplace=False)
+          (activation): Swish()
+        )
+        (norm_ff): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_ff_macaron): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_mha): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_mlp): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_final): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (dropout): Dropout(p=0.1, inplace=False)
+        (depthwise_conv_fusion): Conv1d(1536, 1536, kernel_size=(31,), stride=(1,), padding=(15,), groups=1536)
+        (merge_proj): Linear(in_features=1536, out_features=768, bias=True)
+      )
+      (5): EBranchformerEncoderLayer(
+        (attn): MultiHeadedAttention(
+          (linear_q): Linear(in_features=768, out_features=768, bias=True)
+          (linear_k): Linear(in_features=768, out_features=768, bias=True)
+          (linear_v): Linear(in_features=768, out_features=768, bias=True)
+          (linear_out): Linear(in_features=768, out_features=768, bias=True)
+          (dropout): Identity()
+          (q_norm): Identity()
+          (k_norm): Identity()
+        )
+        (cgmlp): ConvolutionalGatingMLP(
+          (channel_proj1): Sequential(
+            (0): Linear(in_features=768, out_features=3072, bias=True)
+            (1): GELU(approximate='none')
+          )
+          (csgu): ConvolutionalSpatialGatingUnit(
+            (norm): LayerNorm((1536,), eps=1e-12, elementwise_affine=True)
+            (conv): Conv1d(1536, 1536, kernel_size=(31,), stride=(1,), padding=(15,), groups=1536)
+            (act): Identity()
+            (dropout): Dropout(p=0.1, inplace=False)
+          )
+          (channel_proj2): Linear(in_features=1536, out_features=768, bias=True)
+        )
+        (feed_forward): PositionwiseFeedForward(
+          (w_1): Linear(in_features=768, out_features=3072, bias=True)
+          (w_2): Linear(in_features=3072, out_features=768, bias=True)
+          (dropout): Dropout(p=0.1, inplace=False)
+          (activation): Swish()
+        )
+        (feed_forward_macaron): PositionwiseFeedForward(
+          (w_1): Linear(in_features=768, out_features=3072, bias=True)
+          (w_2): Linear(in_features=3072, out_features=768, bias=True)
+          (dropout): Dropout(p=0.1, inplace=False)
+          (activation): Swish()
+        )
+        (norm_ff): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_ff_macaron): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_mha): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_mlp): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_final): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (dropout): Dropout(p=0.1, inplace=False)
+        (depthwise_conv_fusion): Conv1d(1536, 1536, kernel_size=(31,), stride=(1,), padding=(15,), groups=1536)
+        (merge_proj): Linear(in_features=1536, out_features=768, bias=True)
+      )
+      (6): EBranchformerEncoderLayer(
+        (attn): MultiHeadedAttention(
+          (linear_q): Linear(in_features=768, out_features=768, bias=True)
+          (linear_k): Linear(in_features=768, out_features=768, bias=True)
+          (linear_v): Linear(in_features=768, out_features=768, bias=True)
+          (linear_out): Linear(in_features=768, out_features=768, bias=True)
+          (dropout): Identity()
+          (q_norm): Identity()
+          (k_norm): Identity()
+        )
+        (cgmlp): ConvolutionalGatingMLP(
+          (channel_proj1): Sequential(
+            (0): Linear(in_features=768, out_features=3072, bias=True)
+            (1): GELU(approximate='none')
+          )
+          (csgu): ConvolutionalSpatialGatingUnit(
+            (norm): LayerNorm((1536,), eps=1e-12, elementwise_affine=True)
+            (conv): Conv1d(1536, 1536, kernel_size=(31,), stride=(1,), padding=(15,), groups=1536)
+            (act): Identity()
+            (dropout): Dropout(p=0.1, inplace=False)
+          )
+          (channel_proj2): Linear(in_features=1536, out_features=768, bias=True)
+        )
+        (feed_forward): PositionwiseFeedForward(
+          (w_1): Linear(in_features=768, out_features=3072, bias=True)
+          (w_2): Linear(in_features=3072, out_features=768, bias=True)
+          (dropout): Dropout(p=0.1, inplace=False)
+          (activation): Swish()
+        )
+        (feed_forward_macaron): PositionwiseFeedForward(
+          (w_1): Linear(in_features=768, out_features=3072, bias=True)
+          (w_2): Linear(in_features=3072, out_features=768, bias=True)
+          (dropout): Dropout(p=0.1, inplace=False)
+          (activation): Swish()
+        )
+        (norm_ff): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_ff_macaron): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_mha): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_mlp): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_final): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (dropout): Dropout(p=0.1, inplace=False)
+        (depthwise_conv_fusion): Conv1d(1536, 1536, kernel_size=(31,), stride=(1,), padding=(15,), groups=1536)
+        (merge_proj): Linear(in_features=1536, out_features=768, bias=True)
+      )
+      (7): EBranchformerEncoderLayer(
+        (attn): MultiHeadedAttention(
+          (linear_q): Linear(in_features=768, out_features=768, bias=True)
+          (linear_k): Linear(in_features=768, out_features=768, bias=True)
+          (linear_v): Linear(in_features=768, out_features=768, bias=True)
+          (linear_out): Linear(in_features=768, out_features=768, bias=True)
+          (dropout): Identity()
+          (q_norm): Identity()
+          (k_norm): Identity()
+        )
+        (cgmlp): ConvolutionalGatingMLP(
+          (channel_proj1): Sequential(
+            (0): Linear(in_features=768, out_features=3072, bias=True)
+            (1): GELU(approximate='none')
+          )
+          (csgu): ConvolutionalSpatialGatingUnit(
+            (norm): LayerNorm((1536,), eps=1e-12, elementwise_affine=True)
+            (conv): Conv1d(1536, 1536, kernel_size=(31,), stride=(1,), padding=(15,), groups=1536)
+            (act): Identity()
+            (dropout): Dropout(p=0.1, inplace=False)
+          )
+          (channel_proj2): Linear(in_features=1536, out_features=768, bias=True)
+        )
+        (feed_forward): PositionwiseFeedForward(
+          (w_1): Linear(in_features=768, out_features=3072, bias=True)
+          (w_2): Linear(in_features=3072, out_features=768, bias=True)
+          (dropout): Dropout(p=0.1, inplace=False)
+          (activation): Swish()
+        )
+        (feed_forward_macaron): PositionwiseFeedForward(
+          (w_1): Linear(in_features=768, out_features=3072, bias=True)
+          (w_2): Linear(in_features=3072, out_features=768, bias=True)
+          (dropout): Dropout(p=0.1, inplace=False)
+          (activation): Swish()
+        )
+        (norm_ff): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_ff_macaron): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_mha): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_mlp): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_final): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (dropout): Dropout(p=0.1, inplace=False)
+        (depthwise_conv_fusion): Conv1d(1536, 1536, kernel_size=(31,), stride=(1,), padding=(15,), groups=1536)
+        (merge_proj): Linear(in_features=1536, out_features=768, bias=True)
+      )
+      (8): EBranchformerEncoderLayer(
+        (attn): MultiHeadedAttention(
+          (linear_q): Linear(in_features=768, out_features=768, bias=True)
+          (linear_k): Linear(in_features=768, out_features=768, bias=True)
+          (linear_v): Linear(in_features=768, out_features=768, bias=True)
+          (linear_out): Linear(in_features=768, out_features=768, bias=True)
+          (dropout): Identity()
+          (q_norm): Identity()
+          (k_norm): Identity()
+        )
+        (cgmlp): ConvolutionalGatingMLP(
+          (channel_proj1): Sequential(
+            (0): Linear(in_features=768, out_features=3072, bias=True)
+            (1): GELU(approximate='none')
+          )
+          (csgu): ConvolutionalSpatialGatingUnit(
+            (norm): LayerNorm((1536,), eps=1e-12, elementwise_affine=True)
+            (conv): Conv1d(1536, 1536, kernel_size=(31,), stride=(1,), padding=(15,), groups=1536)
+            (act): Identity()
+            (dropout): Dropout(p=0.1, inplace=False)
+          )
+          (channel_proj2): Linear(in_features=1536, out_features=768, bias=True)
+        )
+        (feed_forward): PositionwiseFeedForward(
+          (w_1): Linear(in_features=768, out_features=3072, bias=True)
+          (w_2): Linear(in_features=3072, out_features=768, bias=True)
+          (dropout): Dropout(p=0.1, inplace=False)
+          (activation): Swish()
+        )
+        (feed_forward_macaron): PositionwiseFeedForward(
+          (w_1): Linear(in_features=768, out_features=3072, bias=True)
+          (w_2): Linear(in_features=3072, out_features=768, bias=True)
+          (dropout): Dropout(p=0.1, inplace=False)
+          (activation): Swish()
+        )
+        (norm_ff): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_ff_macaron): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_mha): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_mlp): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm_final): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (dropout): Dropout(p=0.1, inplace=False)
+        (depthwise_conv_fusion): Conv1d(1536, 1536, kernel_size=(31,), stride=(1,), padding=(15,), groups=1536)
+        (merge_proj): Linear(in_features=1536, out_features=768, bias=True)
+      )
+    )
+    (after_norm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+  )
+  (decoder): TransformerDecoder(
+    (embed): Sequential(
+      (0): Embedding(50002, 768)
+      (1): PositionalEncoding(
+        (dropout): Dropout(p=0.1, inplace=False)
+      )
+    )
+    (after_norm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+    (output_layer): Linear(in_features=768, out_features=50002, bias=True)
+    (decoders): MultiSequential(
+      (0): DecoderLayer(
+        (self_attn): MultiHeadedAttention(
+          (linear_q): Linear(in_features=768, out_features=768, bias=True)
+          (linear_k): Linear(in_features=768, out_features=768, bias=True)
+          (linear_v): Linear(in_features=768, out_features=768, bias=True)
+          (linear_out): Linear(in_features=768, out_features=768, bias=True)
+          (dropout): Identity()
+          (q_norm): Identity()
+          (k_norm): Identity()
+        )
+        (src_attn): MultiHeadedAttention(
+          (linear_q): Linear(in_features=768, out_features=768, bias=True)
+          (linear_k): Linear(in_features=768, out_features=768, bias=True)
+          (linear_v): Linear(in_features=768, out_features=768, bias=True)
+          (linear_out): Linear(in_features=768, out_features=768, bias=True)
+          (dropout): Identity()
+          (q_norm): Identity()
+          (k_norm): Identity()
+        )
+        (feed_forward): PositionwiseFeedForward(
+          (w_1): Linear(in_features=768, out_features=3072, bias=True)
+          (w_2): Linear(in_features=3072, out_features=768, bias=True)
+          (dropout): Dropout(p=0.1, inplace=False)
+          (activation): ReLU()
+        )
+        (norm1): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm2): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm3): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (dropout): Dropout(p=0.1, inplace=False)
+      )
+      (1): DecoderLayer(
+        (self_attn): MultiHeadedAttention(
+          (linear_q): Linear(in_features=768, out_features=768, bias=True)
+          (linear_k): Linear(in_features=768, out_features=768, bias=True)
+          (linear_v): Linear(in_features=768, out_features=768, bias=True)
+          (linear_out): Linear(in_features=768, out_features=768, bias=True)
+          (dropout): Identity()
+          (q_norm): Identity()
+          (k_norm): Identity()
+        )
+        (src_attn): MultiHeadedAttention(
+          (linear_q): Linear(in_features=768, out_features=768, bias=True)
+          (linear_k): Linear(in_features=768, out_features=768, bias=True)
+          (linear_v): Linear(in_features=768, out_features=768, bias=True)
+          (linear_out): Linear(in_features=768, out_features=768, bias=True)
+          (dropout): Identity()
+          (q_norm): Identity()
+          (k_norm): Identity()
+        )
+        (feed_forward): PositionwiseFeedForward(
+          (w_1): Linear(in_features=768, out_features=3072, bias=True)
+          (w_2): Linear(in_features=3072, out_features=768, bias=True)
+          (dropout): Dropout(p=0.1, inplace=False)
+          (activation): ReLU()
+        )
+        (norm1): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm2): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm3): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (dropout): Dropout(p=0.1, inplace=False)
+      )
+      (2): DecoderLayer(
+        (self_attn): MultiHeadedAttention(
+          (linear_q): Linear(in_features=768, out_features=768, bias=True)
+          (linear_k): Linear(in_features=768, out_features=768, bias=True)
+          (linear_v): Linear(in_features=768, out_features=768, bias=True)
+          (linear_out): Linear(in_features=768, out_features=768, bias=True)
+          (dropout): Identity()
+          (q_norm): Identity()
+          (k_norm): Identity()
+        )
+        (src_attn): MultiHeadedAttention(
+          (linear_q): Linear(in_features=768, out_features=768, bias=True)
+          (linear_k): Linear(in_features=768, out_features=768, bias=True)
+          (linear_v): Linear(in_features=768, out_features=768, bias=True)
+          (linear_out): Linear(in_features=768, out_features=768, bias=True)
+          (dropout): Identity()
+          (q_norm): Identity()
+          (k_norm): Identity()
+        )
+        (feed_forward): PositionwiseFeedForward(
+          (w_1): Linear(in_features=768, out_features=3072, bias=True)
+          (w_2): Linear(in_features=3072, out_features=768, bias=True)
+          (dropout): Dropout(p=0.1, inplace=False)
+          (activation): ReLU()
+        )
+        (norm1): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm2): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm3): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (dropout): Dropout(p=0.1, inplace=False)
+      )
+      (3): DecoderLayer(
+        (self_attn): MultiHeadedAttention(
+          (linear_q): Linear(in_features=768, out_features=768, bias=True)
+          (linear_k): Linear(in_features=768, out_features=768, bias=True)
+          (linear_v): Linear(in_features=768, out_features=768, bias=True)
+          (linear_out): Linear(in_features=768, out_features=768, bias=True)
+          (dropout): Identity()
+          (q_norm): Identity()
+          (k_norm): Identity()
+        )
+        (src_attn): MultiHeadedAttention(
+          (linear_q): Linear(in_features=768, out_features=768, bias=True)
+          (linear_k): Linear(in_features=768, out_features=768, bias=True)
+          (linear_v): Linear(in_features=768, out_features=768, bias=True)
+          (linear_out): Linear(in_features=768, out_features=768, bias=True)
+          (dropout): Identity()
+          (q_norm): Identity()
+          (k_norm): Identity()
+        )
+        (feed_forward): PositionwiseFeedForward(
+          (w_1): Linear(in_features=768, out_features=3072, bias=True)
+          (w_2): Linear(in_features=3072, out_features=768, bias=True)
+          (dropout): Dropout(p=0.1, inplace=False)
+          (activation): ReLU()
+        )
+        (norm1): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm2): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm3): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (dropout): Dropout(p=0.1, inplace=False)
+      )
+      (4): DecoderLayer(
+        (self_attn): MultiHeadedAttention(
+          (linear_q): Linear(in_features=768, out_features=768, bias=True)
+          (linear_k): Linear(in_features=768, out_features=768, bias=True)
+          (linear_v): Linear(in_features=768, out_features=768, bias=True)
+          (linear_out): Linear(in_features=768, out_features=768, bias=True)
+          (dropout): Identity()
+          (q_norm): Identity()
+          (k_norm): Identity()
+        )
+        (src_attn): MultiHeadedAttention(
+          (linear_q): Linear(in_features=768, out_features=768, bias=True)
+          (linear_k): Linear(in_features=768, out_features=768, bias=True)
+          (linear_v): Linear(in_features=768, out_features=768, bias=True)
+          (linear_out): Linear(in_features=768, out_features=768, bias=True)
+          (dropout): Identity()
+          (q_norm): Identity()
+          (k_norm): Identity()
+        )
+        (feed_forward): PositionwiseFeedForward(
+          (w_1): Linear(in_features=768, out_features=3072, bias=True)
+          (w_2): Linear(in_features=3072, out_features=768, bias=True)
+          (dropout): Dropout(p=0.1, inplace=False)
+          (activation): ReLU()
+        )
+        (norm1): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm2): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm3): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (dropout): Dropout(p=0.1, inplace=False)
+      )
+      (5): DecoderLayer(
+        (self_attn): MultiHeadedAttention(
+          (linear_q): Linear(in_features=768, out_features=768, bias=True)
+          (linear_k): Linear(in_features=768, out_features=768, bias=True)
+          (linear_v): Linear(in_features=768, out_features=768, bias=True)
+          (linear_out): Linear(in_features=768, out_features=768, bias=True)
+          (dropout): Identity()
+          (q_norm): Identity()
+          (k_norm): Identity()
+        )
+        (src_attn): MultiHeadedAttention(
+          (linear_q): Linear(in_features=768, out_features=768, bias=True)
+          (linear_k): Linear(in_features=768, out_features=768, bias=True)
+          (linear_v): Linear(in_features=768, out_features=768, bias=True)
+          (linear_out): Linear(in_features=768, out_features=768, bias=True)
+          (dropout): Identity()
+          (q_norm): Identity()
+          (k_norm): Identity()
+        )
+        (feed_forward): PositionwiseFeedForward(
+          (w_1): Linear(in_features=768, out_features=3072, bias=True)
+          (w_2): Linear(in_features=3072, out_features=768, bias=True)
+          (dropout): Dropout(p=0.1, inplace=False)
+          (activation): ReLU()
+        )
+        (norm1): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm2): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm3): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (dropout): Dropout(p=0.1, inplace=False)
+      )
+      (6): DecoderLayer(
+        (self_attn): MultiHeadedAttention(
+          (linear_q): Linear(in_features=768, out_features=768, bias=True)
+          (linear_k): Linear(in_features=768, out_features=768, bias=True)
+          (linear_v): Linear(in_features=768, out_features=768, bias=True)
+          (linear_out): Linear(in_features=768, out_features=768, bias=True)
+          (dropout): Identity()
+          (q_norm): Identity()
+          (k_norm): Identity()
+        )
+        (src_attn): MultiHeadedAttention(
+          (linear_q): Linear(in_features=768, out_features=768, bias=True)
+          (linear_k): Linear(in_features=768, out_features=768, bias=True)
+          (linear_v): Linear(in_features=768, out_features=768, bias=True)
+          (linear_out): Linear(in_features=768, out_features=768, bias=True)
+          (dropout): Identity()
+          (q_norm): Identity()
+          (k_norm): Identity()
+        )
+        (feed_forward): PositionwiseFeedForward(
+          (w_1): Linear(in_features=768, out_features=3072, bias=True)
+          (w_2): Linear(in_features=3072, out_features=768, bias=True)
+          (dropout): Dropout(p=0.1, inplace=False)
+          (activation): ReLU()
+        )
+        (norm1): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm2): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm3): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (dropout): Dropout(p=0.1, inplace=False)
+      )
+      (7): DecoderLayer(
+        (self_attn): MultiHeadedAttention(
+          (linear_q): Linear(in_features=768, out_features=768, bias=True)
+          (linear_k): Linear(in_features=768, out_features=768, bias=True)
+          (linear_v): Linear(in_features=768, out_features=768, bias=True)
+          (linear_out): Linear(in_features=768, out_features=768, bias=True)
+          (dropout): Identity()
+          (q_norm): Identity()
+          (k_norm): Identity()
+        )
+        (src_attn): MultiHeadedAttention(
+          (linear_q): Linear(in_features=768, out_features=768, bias=True)
+          (linear_k): Linear(in_features=768, out_features=768, bias=True)
+          (linear_v): Linear(in_features=768, out_features=768, bias=True)
+          (linear_out): Linear(in_features=768, out_features=768, bias=True)
+          (dropout): Identity()
+          (q_norm): Identity()
+          (k_norm): Identity()
+        )
+        (feed_forward): PositionwiseFeedForward(
+          (w_1): Linear(in_features=768, out_features=3072, bias=True)
+          (w_2): Linear(in_features=3072, out_features=768, bias=True)
+          (dropout): Dropout(p=0.1, inplace=False)
+          (activation): ReLU()
+        )
+        (norm1): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm2): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm3): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (dropout): Dropout(p=0.1, inplace=False)
+      )
+      (8): DecoderLayer(
+        (self_attn): MultiHeadedAttention(
+          (linear_q): Linear(in_features=768, out_features=768, bias=True)
+          (linear_k): Linear(in_features=768, out_features=768, bias=True)
+          (linear_v): Linear(in_features=768, out_features=768, bias=True)
+          (linear_out): Linear(in_features=768, out_features=768, bias=True)
+          (dropout): Identity()
+          (q_norm): Identity()
+          (k_norm): Identity()
+        )
+        (src_attn): MultiHeadedAttention(
+          (linear_q): Linear(in_features=768, out_features=768, bias=True)
+          (linear_k): Linear(in_features=768, out_features=768, bias=True)
+          (linear_v): Linear(in_features=768, out_features=768, bias=True)
+          (linear_out): Linear(in_features=768, out_features=768, bias=True)
+          (dropout): Identity()
+          (q_norm): Identity()
+          (k_norm): Identity()
+        )
+        (feed_forward): PositionwiseFeedForward(
+          (w_1): Linear(in_features=768, out_features=3072, bias=True)
+          (w_2): Linear(in_features=3072, out_features=768, bias=True)
+          (dropout): Dropout(p=0.1, inplace=False)
+          (activation): ReLU()
+        )
+        (norm1): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm2): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (norm3): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+        (dropout): Dropout(p=0.1, inplace=False)
+      )
+    )
+  )
+  (criterion_att): LabelSmoothingLoss(
+    (criterion): KLDivLoss()
+  )
+  (ctc): CTC(
+    (ctc_lo): Linear(in_features=768, out_features=50002, bias=True)
+    (ctc_loss): CTCLoss()
+  )
+)
+
+Model summary:
+    Class Name: ESPnetS2TModel
+    Total Number of model parameters: 366.65 M
+    Number of trainable parameters: 366.65 M (100.0%)
+    Size: 1.47 GB
+    Type: torch.float32
+[ip-10-0-216-33:0/16] 2024-02-26 22:03:55,330 (abs_task:1275) INFO: Optimizer:
+AdamW (
+Parameter Group 0
+    amsgrad: False
+    betas: [0.9, 0.98]
+    capturable: False
+    eps: 1e-06
+    foreach: None
+    initial_lr: 0.0005
+    lr: 1.6666666666666667e-09
+    maximize: False
+    weight_decay: 0.0
+)
+[ip-10-0-216-33:0/16] 2024-02-26 22:03:55,330 (abs_task:1276) INFO: Scheduler: PiecewiseLinearWarmupLR(warmup_steps_list=[0, 30000, 60000], warmup_lr_list=[0.0, 5e-05, 0.0005])
+[ip-10-0-216-33:0/16] 2024-02-26 22:03:55,331 (abs_task:1285) INFO: Saving the configuration in exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/config.yaml
+[ip-10-0-216-33:0/16] 2024-02-26 22:03:59,181 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-26 22:03:59,737 (abs_task:1663) INFO: [valid] dataset:
+ESPnetDataset(
+  speech: {"path": "dump/raw/dev/wav.scp", "type": "kaldi_ark"}
+  text_prev: {"path": "dump/raw/dev/text.prev", "type": "text"}
+  text_ctc: {"path": "dump/raw/dev/text.ctc", "type": "text"}
+  text: {"path": "dump/raw/dev/text", "type": "text"}
+  preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-26 22:03:59,737 (abs_task:1664) INFO: [valid] Batch sampler: UnsortedBatchSampler(N-batch=2133, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/valid/speech_shape,
+[ip-10-0-216-33:0/16] 2024-02-26 22:03:59,738 (abs_task:1665) INFO: [valid] mini-batch sizes summary: N-batch=2133, mean=256.1, min=256, max=257
+wandb: Currently logged in as: pengyf. Use `wandb login --relogin` to force relogin
+wandb: Currently logged in as: pengyf. Use `wandb login --relogin` to force relogin
+wandb: Currently logged in as: pengyf. Use `wandb login --relogin` to force relogin
+wandb: Currently logged in as: pengyf. Use `wandb login --relogin` to force relogin
+wandb: Currently logged in as: pengyf. Use `wandb login --relogin` to force relogin
+wandb: Currently logged in as: pengyf. Use `wandb login --relogin` to force relogin
+wandb: Currently logged in as: pengyf. Use `wandb login --relogin` to force relogin
+wandb: Currently logged in as: pengyf. Use `wandb login --relogin` to force relogin
+wandb: Currently logged in as: pengyf. Use `wandb login --relogin` to force relogin
+wandb: Currently logged in as: pengyf. Use `wandb login --relogin` to force relogin
+wandb: Currently logged in as: pengyf. Use `wandb login --relogin` to force relogin
+wandb: Currently logged in as: pengyf. Use `wandb login --relogin` to force relogin
+wandb: Currently logged in as: pengyf. Use `wandb login --relogin` to force relogin
+wandb: Currently logged in as: pengyf. Use `wandb login --relogin` to force relogin
+wandb: Currently logged in as: pengyf. Use `wandb login --relogin` to force relogin
+wandb: Currently logged in as: pengyf. Use `wandb login --relogin` to force relogin
+wandb: - Waiting for wandb.init()... wandb: \ Waiting for wandb.init()... wandb: | Waiting for wandb.init()... wandb: / Waiting for wandb.init()... wandb: WARNING Serializing object of type list that is 406488 bytes
+wandb: Tracking run with wandb version 0.16.3
+wandb: Run data is saved locally in exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/wandb/run-20240226_220402-zw9g4dhv
+wandb: Run `wandb offline` to turn off syncing.
+wandb: Resuming run _weka_home-pengyf_espnet-owsm-train_egs2_owsm_v3.1_licensefree_nosa_s2t1
+wandb: ⭐️ View project at https://stability.wandb.io/pengyf/ESPnet_S2TTask
+wandb: 🚀 View run at https://stability.wandb.io/pengyf/ESPnet_S2TTask/runs/zw9g4dhv
+wandb: WARNING Serializing object of type list that is 400072 bytes
+wandb: WARNING Serializing object of type list that is 406488 bytes
+[ip-10-0-216-33:0/16] 2024-02-26 22:04:09,855 (trainer:168) INFO: The training was resumed using exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/checkpoint.pth
+ip-10-0-216-33:311880:311880 [0] NCCL INFO Bootstrap : Using ens32:10.0.216.33<0>
+ip-10-0-216-33:311880:311880 [0] NCCL INFO NET/Plugin : No plugin found (libnccl-net.so), using internal implementation
+ip-10-0-216-33:311880:311880 [0] NCCL INFO cudaDriverVersion 12000
+NCCL version 2.14.3+cuda11.7
+[ip-10-0-216-33:0/16] 2024-02-26 22:04:12,263 (trainer:302) INFO: 2/45epoch started
+[ip-10-0-216-33:0/16] 2024-02-26 22:04:12,307 (multiple_iter_factory:32) INFO: Building 0th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-26 22:04:48,404 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-26 22:04:56,324 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+  speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"}
+  text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"}
+  text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"}
+  text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"}
+  preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-26 22:04:56,324 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2,
+[ip-10-0-216-33:0/16] 2024-02-26 22:04:56,329 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+ip-10-0-216-33:311883:311883 [3] NCCL INFO cudaDriverVersion 12000
+ip-10-0-216-33:311883:311883 [3] NCCL INFO Bootstrap : Using ens32:10.0.216.33<0>
+ip-10-0-216-33:311883:311883 [3] NCCL INFO NET/Plugin : No plugin found (libnccl-net.so), using internal implementation
+ip-10-0-216-33:311883:312363 [3] NCCL INFO NET/IB : No device found.
+ip-10-0-216-33:311883:312363 [3] NCCL INFO NET/Socket : Using [0]ens32:10.0.216.33<0>
+ip-10-0-216-33:311883:312363 [3] NCCL INFO Using network Socket
+ip-10-0-216-33:311883:312363 [3] NCCL INFO Setting affinity for GPU 3 to 0f,ffff0000,000fffff
+ip-10-0-216-33:311883:312363 [3] NCCL INFO Trees [0] 4/-1/-1->3->2 [1] 4/-1/-1->3->2
+ip-10-0-216-33:311883:312363 [3] NCCL INFO Channel 00/0 : 3[201d0] -> 2[201c0] via P2P/IPC/read
+ip-10-0-216-33:311883:312363 [3] NCCL INFO Channel 01/0 : 3[201d0] -> 2[201c0] via P2P/IPC/read
+ip-10-0-216-33:311883:312363 [3] NCCL INFO Connected all rings
+ip-10-0-216-33:311883:312363 [3] NCCL INFO Channel 00/0 : 3[201d0] -> 4[901c0] via P2P/IPC/read
+ip-10-0-216-33:311883:312363 [3] NCCL INFO Channel 01/0 : 3[201d0] -> 4[901c0] via P2P/IPC/read
+ip-10-0-216-33:311883:312363 [3] NCCL INFO Connected all trees
+ip-10-0-216-33:311883:312363 [3] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512
+ip-10-0-216-33:311883:312363 [3] NCCL INFO 2 coll channels, 2 p2p channels, 2 p2p channels per peer
+ip-10-0-216-33:311883:312363 [3] NCCL INFO comm 0x6f267bc0 rank 3 nranks 16 cudaDev 3 busId 201d0 - Init COMPLETE
+ip-10-0-216-33:311884:311884 [4] NCCL INFO cudaDriverVersion 12000
+ip-10-0-216-33:311884:311884 [4] NCCL INFO Bootstrap : Using ens32:10.0.216.33<0>
+ip-10-0-216-33:311884:311884 [4] NCCL INFO NET/Plugin : No plugin found (libnccl-net.so), using internal implementation
+ip-10-0-216-33:311884:312360 [4] NCCL INFO NET/IB : No device found.
+ip-10-0-216-33:311884:312360 [4] NCCL INFO NET/Socket : Using [0]ens32:10.0.216.33<0>
+ip-10-0-216-33:311884:312360 [4] NCCL INFO Using network Socket
+ip-10-0-216-33:311884:312360 [4] NCCL INFO Setting affinity for GPU 4 to 0fffff00,00000fff,ff000000
+ip-10-0-216-33:311884:312360 [4] NCCL INFO Trees [0] 5/-1/-1->4->3 [1] 5/-1/-1->4->3
+ip-10-0-216-33:311884:312360 [4] NCCL INFO Channel 00/0 : 4[901c0] -> 3[201d0] via P2P/IPC/read
+ip-10-0-216-33:311884:312360 [4] NCCL INFO Channel 01/0 : 4[901c0] -> 3[201d0] via P2P/IPC/read
+ip-10-0-216-33:311884:312360 [4] NCCL INFO Connected all rings
+ip-10-0-216-33:311884:312360 [4] NCCL INFO Channel 00/0 : 4[901c0] -> 5[901d0] via P2P/IPC/read
+ip-10-0-216-33:311884:312360 [4] NCCL INFO Channel 01/0 : 4[901c0] -> 5[901d0] via P2P/IPC/read
+ip-10-0-216-33:311884:312360 [4] NCCL INFO Connected all trees
+ip-10-0-216-33:311884:312360 [4] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512
+ip-10-0-216-33:311884:312360 [4] NCCL INFO 2 coll channels, 2 p2p channels, 2 p2p channels per peer
+ip-10-0-216-33:311884:312360 [4] NCCL INFO comm 0x9d5b200 rank 4 nranks 16 cudaDev 4 busId 901c0 - Init COMPLETE
+ip-10-0-216-33:311881:311881 [1] NCCL INFO cudaDriverVersion 12000
+ip-10-0-216-33:311881:311881 [1] NCCL INFO Bootstrap : Using ens32:10.0.216.33<0>
+ip-10-0-216-33:311881:311881 [1] NCCL INFO NET/Plugin : No plugin found (libnccl-net.so), using internal implementation
+ip-10-0-216-33:311881:312361 [1] NCCL INFO NET/IB : No device found.
+ip-10-0-216-33:311881:312361 [1] NCCL INFO NET/Socket : Using [0]ens32:10.0.216.33<0>
+ip-10-0-216-33:311881:312361 [1] NCCL INFO Using network Socket
+ip-10-0-216-33:311881:312361 [1] NCCL INFO Setting affinity for GPU 1 to 0f,ffff0000,000fffff
+ip-10-0-216-33:311881:312361 [1] NCCL INFO Trees [0] 2/-1/-1->1->0 [1] 2/-1/-1->1->0
+ip-10-0-216-33:311881:312361 [1] NCCL INFO Channel 00/0 : 1[101d0] -> 8[101c0] [send] via NET/Socket/0
+ip-10-0-216-33:311881:312361 [1] NCCL INFO Channel 01/0 : 1[101d0] -> 8[101c0] [send] via NET/Socket/0
+ip-10-0-216-33:311881:312361 [1] NCCL INFO Connected all rings
+ip-10-0-216-33:311881:312361 [1] NCCL INFO Channel 00/0 : 1[101d0] -> 2[201c0] via P2P/IPC/read
+ip-10-0-216-33:311881:312361 [1] NCCL INFO Channel 01/0 : 1[101d0] -> 2[201c0] via P2P/IPC/read
+ip-10-0-216-33:311881:312361 [1] NCCL INFO Channel 00/0 : 1[101d0] -> 0[101c0] via P2P/IPC/read
+ip-10-0-216-33:311881:312361 [1] NCCL INFO Channel 01/0 : 1[101d0] -> 0[101c0] via P2P/IPC/read
+ip-10-0-216-33:311881:312361 [1] NCCL INFO Connected all trees
+ip-10-0-216-33:311881:312361 [1] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512
+ip-10-0-216-33:311881:312361 [1] NCCL INFO 2 coll channels, 2 p2p channels, 2 p2p channels per peer
+ip-10-0-216-33:311881:312361 [1] NCCL INFO comm 0x9a92260 rank 1 nranks 16 cudaDev 1 busId 101d0 - Init COMPLETE
+ip-10-0-216-33:311886:311886 [6] NCCL INFO cudaDriverVersion 12000
+ip-10-0-216-33:311886:311886 [6] NCCL INFO Bootstrap : Using ens32:10.0.216.33<0>
+ip-10-0-216-33:311886:311886 [6] NCCL INFO NET/Plugin : No plugin found (libnccl-net.so), using internal implementation
+ip-10-0-216-33:311886:312359 [6] NCCL INFO NET/IB : No device found.
+ip-10-0-216-33:311886:312359 [6] NCCL INFO NET/Socket : Using [0]ens32:10.0.216.33<0>
+ip-10-0-216-33:311886:312359 [6] NCCL INFO Using network Socket
+ip-10-0-216-33:311886:312359 [6] NCCL INFO Setting affinity for GPU 6 to 0fffff00,00000fff,ff000000
+ip-10-0-216-33:311886:312359 [6] NCCL INFO Trees [0] 7/-1/-1->6->5 [1] 7/-1/-1->6->5
+ip-10-0-216-33:311886:312359 [6] NCCL INFO Channel 00/0 : 6[a01c0] -> 5[901d0] via P2P/IPC/read
+ip-10-0-216-33:311886:312359 [6] NCCL INFO Channel 01/0 : 6[a01c0] -> 5[901d0] via P2P/IPC/read
+ip-10-0-216-33:311886:312359 [6] NCCL INFO Connected all rings
+ip-10-0-216-33:311886:312359 [6] NCCL INFO Channel 00/0 : 6[a01c0] -> 7[a01d0] via P2P/IPC/read
+ip-10-0-216-33:311886:312359 [6] NCCL INFO Channel 01/0 : 6[a01c0] -> 7[a01d0] via P2P/IPC/read
+ip-10-0-216-33:311886:312359 [6] NCCL INFO Connected all trees
+ip-10-0-216-33:311886:312359 [6] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512
+ip-10-0-216-33:311886:312359 [6] NCCL INFO 2 coll channels, 2 p2p channels, 2 p2p channels per peer
+ip-10-0-216-33:311886:312359 [6] NCCL INFO comm 0x42c0fbc0 rank 6 nranks 16 cudaDev 6 busId a01c0 - Init COMPLETE
+ip-10-0-216-33:311882:311882 [2] NCCL INFO cudaDriverVersion 12000
+ip-10-0-216-33:311882:311882 [2] NCCL INFO Bootstrap : Using ens32:10.0.216.33<0>
+ip-10-0-216-33:311882:311882 [2] NCCL INFO NET/Plugin : No plugin found (libnccl-net.so), using internal implementation
+ip-10-0-216-33:311882:312358 [2] NCCL INFO NET/IB : No device found.
+ip-10-0-216-33:311882:312358 [2] NCCL INFO NET/Socket : Using [0]ens32:10.0.216.33<0>
+ip-10-0-216-33:311882:312358 [2] NCCL INFO Using network Socket
+ip-10-0-216-33:311882:312358 [2] NCCL INFO Setting affinity for GPU 2 to 0f,ffff0000,000fffff
+ip-10-0-216-33:311882:312358 [2] NCCL INFO Trees [0] 3/-1/-1->2->1 [1] 3/-1/-1->2->1
+ip-10-0-216-33:311882:312358 [2] NCCL INFO Channel 00/0 : 2[201c0] -> 1[101d0] via P2P/IPC/read
+ip-10-0-216-33:311882:312358 [2] NCCL INFO Channel 01/0 : 2[201c0] -> 1[101d0] via P2P/IPC/read
+ip-10-0-216-33:311882:312358 [2] NCCL INFO Connected all rings
+ip-10-0-216-33:311882:312358 [2] NCCL INFO Channel 00/0 : 2[201c0] -> 3[201d0] via P2P/IPC/read
+ip-10-0-216-33:311882:312358 [2] NCCL INFO Channel 01/0 : 2[201c0] -> 3[201d0] via P2P/IPC/read
+ip-10-0-216-33:311882:312358 [2] NCCL INFO Connected all trees
+ip-10-0-216-33:311882:312358 [2] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512
+ip-10-0-216-33:311882:312358 [2] NCCL INFO 2 coll channels, 2 p2p channels, 2 p2p channels per peer
+ip-10-0-216-33:311882:312358 [2] NCCL INFO comm 0x511a1b90 rank 2 nranks 16 cudaDev 2 busId 201c0 - Init COMPLETE
+ip-10-0-224-187:3788824:3788824 [6] NCCL INFO cudaDriverVersion 12000
+ip-10-0-224-187:3788824:3788824 [6] NCCL INFO Bootstrap : Using ens32:10.0.224.187<0>
+ip-10-0-224-187:3788824:3788824 [6] NCCL INFO NET/Plugin : No plugin found (libnccl-net.so), using internal implementation
+ip-10-0-224-187:3788824:3789267 [6] NCCL INFO NET/IB : No device found.
+ip-10-0-224-187:3788824:3789267 [6] NCCL INFO NET/Socket : Using [0]ens32:10.0.224.187<0>
+ip-10-0-224-187:3788824:3789267 [6] NCCL INFO Using network Socket
+ip-10-0-224-187:3788824:3789267 [6] NCCL INFO Setting affinity for GPU 6 to 0fffff00,00000fff,ff000000
+ip-10-0-224-187:3788824:3789267 [6] NCCL INFO Trees [0] 15/-1/-1->14->13 [1] 15/-1/-1->14->13
+ip-10-0-224-187:3788824:3789267 [6] NCCL INFO Channel 00/0 : 14[a01c0] -> 13[901d0] via P2P/IPC/read
+ip-10-0-224-187:3788824:3789267 [6] NCCL INFO Channel 01/0 : 14[a01c0] -> 13[901d0] via P2P/IPC/read
+ip-10-0-224-187:3788824:3789267 [6] NCCL INFO Connected all rings
+ip-10-0-224-187:3788824:3789267 [6] NCCL INFO Channel 00/0 : 14[a01c0] -> 15[a01d0] via P2P/IPC/read
+ip-10-0-224-187:3788824:3789267 [6] NCCL INFO Channel 01/0 : 14[a01c0] -> 15[a01d0] via P2P/IPC/read
+ip-10-0-224-187:3788824:3789267 [6] NCCL INFO Connected all trees
+ip-10-0-224-187:3788824:3789267 [6] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512
+ip-10-0-224-187:3788824:3789267 [6] NCCL INFO 2 coll channels, 2 p2p channels, 2 p2p channels per peer
+ip-10-0-224-187:3788824:3789267 [6] NCCL INFO comm 0x704da1d0 rank 14 nranks 16 cudaDev 6 busId a01c0 - Init COMPLETE
+ip-10-0-224-187:3788822:3788822 [4] NCCL INFO cudaDriverVersion 12000
+ip-10-0-224-187:3788822:3788822 [4] NCCL INFO Bootstrap : Using ens32:10.0.224.187<0>
+ip-10-0-224-187:3788822:3788822 [4] NCCL INFO NET/Plugin : No plugin found (libnccl-net.so), using internal implementation
+ip-10-0-224-187:3788822:3789266 [4] NCCL INFO NET/IB : No device found.
+ip-10-0-224-187:3788822:3789266 [4] NCCL INFO NET/Socket : Using [0]ens32:10.0.224.187<0>
+ip-10-0-224-187:3788822:3789266 [4] NCCL INFO Using network Socket
+ip-10-0-224-187:3788822:3789266 [4] NCCL INFO Setting affinity for GPU 4 to 0fffff00,00000fff,ff000000
+ip-10-0-224-187:3788822:3789266 [4] NCCL INFO Trees [0] 13/-1/-1->12->11 [1] 13/-1/-1->12->11
+ip-10-0-224-187:3788822:3789266 [4] NCCL INFO Channel 00/0 : 12[901c0] -> 11[201d0] via P2P/IPC/read
+ip-10-0-224-187:3788822:3789266 [4] NCCL INFO Channel 01/0 : 12[901c0] -> 11[201d0] via P2P/IPC/read
+ip-10-0-224-187:3788822:3789266 [4] NCCL INFO Connected all rings
+ip-10-0-224-187:3788822:3789266 [4] NCCL INFO Channel 00/0 : 12[901c0] -> 13[901d0] via P2P/IPC/read
+ip-10-0-224-187:3788822:3789266 [4] NCCL INFO Channel 01/0 : 12[901c0] -> 13[901d0] via P2P/IPC/read
+ip-10-0-224-187:3788822:3789266 [4] NCCL INFO Connected all trees
+ip-10-0-224-187:3788822:3789266 [4] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512
+ip-10-0-224-187:3788822:3789266 [4] NCCL INFO 2 coll channels, 2 p2p channels, 2 p2p channels per peer
+ip-10-0-224-187:3788822:3789266 [4] NCCL INFO comm 0x51831900 rank 12 nranks 16 cudaDev 4 busId 901c0 - Init COMPLETE
+ip-10-0-224-187:3788826:3788826 [7] NCCL INFO cudaDriverVersion 12000
+ip-10-0-224-187:3788826:3788826 [7] NCCL INFO Bootstrap : Using ens32:10.0.224.187<0>
+ip-10-0-224-187:3788826:3788826 [7] NCCL INFO NET/Plugin : No plugin found (libnccl-net.so), using internal implementation
+ip-10-0-224-187:3788826:3789273 [7] NCCL INFO NET/IB : No device found.
+ip-10-0-224-187:3788826:3789273 [7] NCCL INFO NET/Socket : Using [0]ens32:10.0.224.187<0>
+ip-10-0-224-187:3788826:3789273 [7] NCCL INFO Using network Socket
+ip-10-0-224-187:3788826:3789273 [7] NCCL INFO Setting affinity for GPU 7 to 0fffff00,00000fff,ff000000
+ip-10-0-224-187:3788826:3789273 [7] NCCL INFO Trees [0] -1/-1/-1->15->14 [1] -1/-1/-1->15->14
+ip-10-0-224-187:3788826:3789273 [7] NCCL INFO Channel 00/0 : 15[a01d0] -> 14[a01c0] via P2P/IPC/read
+ip-10-0-224-187:3788826:3789273 [7] NCCL INFO Channel 01/0 : 15[a01d0] -> 14[a01c0] via P2P/IPC/read
+ip-10-0-224-187:3788826:3789273 [7] NCCL INFO Connected all rings
+ip-10-0-224-187:3788826:3789273 [7] NCCL INFO Connected all trees
+ip-10-0-224-187:3788826:3789273 [7] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512
+ip-10-0-224-187:3788826:3789273 [7] NCCL INFO 2 coll channels, 2 p2p channels, 2 p2p channels per peer
+ip-10-0-224-187:3788826:3789273 [7] NCCL INFO comm 0x42154f70 rank 15 nranks 16 cudaDev 7 busId a01d0 - Init COMPLETE
+ip-10-0-224-187:3788818:3788818 [0] NCCL INFO cudaDriverVersion 12000
+ip-10-0-224-187:3788818:3788818 [0] NCCL INFO Bootstrap : Using ens32:10.0.224.187<0>
+ip-10-0-224-187:3788818:3788818 [0] NCCL INFO NET/Plugin : No plugin found (libnccl-net.so), using internal implementation
+ip-10-0-224-187:3788818:3789270 [0] NCCL INFO NET/IB : No device found.
+ip-10-0-224-187:3788818:3789270 [0] NCCL INFO NET/Socket : Using [0]ens32:10.0.224.187<0>
+ip-10-0-224-187:3788818:3789270 [0] NCCL INFO Using network Socket
+ip-10-0-224-187:3788818:3789270 [0] NCCL INFO Setting affinity for GPU 0 to 0f,ffff0000,000fffff
+ip-10-0-224-187:3788818:3789270 [0] NCCL INFO Trees [0] 9/-1/-1->8->0 [1] 9/0/-1->8->-1
+ip-10-0-224-187:3788818:3789278 [0] NCCL INFO NET/Socket: Using 2 threads and 8 sockets per thread
+ip-10-0-224-187:3788818:3789270 [0] NCCL INFO Channel 00/0 : 1[101d0] -> 8[101c0] [receive] via NET/Socket/0
+ip-10-0-224-187:3788818:3789278 [0] NCCL INFO NET/Socket: Using 2 threads and 8 sockets per thread
+ip-10-0-224-187:3788818:3789270 [0] NCCL INFO Channel 01/0 : 1[101d0] -> 8[101c0] [receive] via NET/Socket/0
+ip-10-0-224-187:3788818:3789270 [0] NCCL INFO Channel 00/0 : 8[101c0] -> 15[a01d0] via P2P/IPC/read
+ip-10-0-224-187:3788818:3789270 [0] NCCL INFO Channel 01/0 : 8[101c0] -> 15[a01d0] via P2P/IPC/read
+ip-10-0-224-187:3788818:3789270 [0] NCCL INFO Connected all rings
+ip-10-0-224-187:3788818:3789270 [0] NCCL INFO Channel 00/0 : 8[101c0] -> 9[101d0] via P2P/IPC/read
+ip-10-0-224-187:3788818:3789270 [0] NCCL INFO Channel 01/0 : 8[101c0] -> 9[101d0] via P2P/IPC/read
+ip-10-0-224-187:3788818:3789278 [0] NCCL INFO NET/Socket: Using 2 threads and 8 sockets per thread
+ip-10-0-224-187:3788818:3789270 [0] NCCL INFO Channel 00/0 : 0[101c0] -> 8[101c0] [receive] via NET/Socket/0
+ip-10-0-224-187:3788818:3789278 [0] NCCL INFO NET/Socket: Using 2 threads and 8 sockets per thread
+ip-10-0-224-187:3788818:3789270 [0] NCCL INFO Channel 01/0 : 0[101c0] -> 8[101c0] [receive] via NET/Socket/0
+ip-10-0-224-187:3788818:3789270 [0] NCCL INFO Channel 00/0 : 8[101c0] -> 0[101c0] [send] via NET/Socket/0
+ip-10-0-224-187:3788818:3789270 [0] NCCL INFO Channel 01/0 : 8[101c0] -> 0[101c0] [send] via NET/Socket/0
+ip-10-0-224-187:3788818:3789270 [0] NCCL INFO Connected all trees
+ip-10-0-224-187:3788818:3789270 [0] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512
+ip-10-0-224-187:3788818:3789270 [0] NCCL INFO 2 coll channels, 2 p2p channels, 2 p2p channels per peer
+ip-10-0-224-187:3788818:3789270 [0] NCCL INFO comm 0x804dad0 rank 8 nranks 16 cudaDev 0 busId 101c0 - Init COMPLETE
+ip-10-0-224-187:3788823:3788823 [5] NCCL INFO cudaDriverVersion 12000
+ip-10-0-224-187:3788823:3788823 [5] NCCL INFO Bootstrap : Using ens32:10.0.224.187<0>
+ip-10-0-224-187:3788823:3788823 [5] NCCL INFO NET/Plugin : No plugin found (libnccl-net.so), using internal implementation
+ip-10-0-224-187:3788823:3789269 [5] NCCL INFO NET/IB : No device found.
+ip-10-0-224-187:3788823:3789269 [5] NCCL INFO NET/Socket : Using [0]ens32:10.0.224.187<0>
+ip-10-0-224-187:3788823:3789269 [5] NCCL INFO Using network Socket
+ip-10-0-224-187:3788823:3789269 [5] NCCL INFO Setting affinity for GPU 5 to 0fffff00,00000fff,ff000000
+ip-10-0-224-187:3788823:3789269 [5] NCCL INFO Trees [0] 14/-1/-1->13->12 [1] 14/-1/-1->13->12
+ip-10-0-224-187:3788823:3789269 [5] NCCL INFO Channel 00/0 : 13[901d0] -> 12[901c0] via P2P/IPC/read
+ip-10-0-224-187:3788823:3789269 [5] NCCL INFO Channel 01/0 : 13[901d0] -> 12[901c0] via P2P/IPC/read
+ip-10-0-224-187:3788823:3789269 [5] NCCL INFO Connected all rings
+ip-10-0-224-187:3788823:3789269 [5] NCCL INFO Channel 00/0 : 13[901d0] -> 14[a01c0] via P2P/IPC/read
+ip-10-0-224-187:3788823:3789269 [5] NCCL INFO Channel 01/0 : 13[901d0] -> 14[a01c0] via P2P/IPC/read
+ip-10-0-224-187:3788823:3789269 [5] NCCL INFO Connected all trees
+ip-10-0-224-187:3788823:3789269 [5] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512
+ip-10-0-224-187:3788823:3789269 [5] NCCL INFO 2 coll channels, 2 p2p channels, 2 p2p channels per peer
+ip-10-0-224-187:3788823:3789269 [5] NCCL INFO comm 0xa4dcd80 rank 13 nranks 16 cudaDev 5 busId 901d0 - Init COMPLETE
+ip-10-0-224-187:3788821:3788821 [3] NCCL INFO cudaDriverVersion 12000
+ip-10-0-224-187:3788821:3788821 [3] NCCL INFO Bootstrap : Using ens32:10.0.224.187<0>
+ip-10-0-224-187:3788821:3788821 [3] NCCL INFO NET/Plugin : No plugin found (libnccl-net.so), using internal implementation
+ip-10-0-224-187:3788821:3789268 [3] NCCL INFO NET/IB : No device found.
+ip-10-0-224-187:3788821:3789268 [3] NCCL INFO NET/Socket : Using [0]ens32:10.0.224.187<0>
+ip-10-0-224-187:3788821:3789268 [3] NCCL INFO Using network Socket
+ip-10-0-224-187:3788821:3789268 [3] NCCL INFO Setting affinity for GPU 3 to 0f,ffff0000,000fffff
+ip-10-0-224-187:3788821:3789268 [3] NCCL INFO Trees [0] 12/-1/-1->11->10 [1] 12/-1/-1->11->10
+ip-10-0-224-187:3788821:3789268 [3] NCCL INFO Channel 00/0 : 11[201d0] -> 10[201c0] via P2P/IPC/read
+ip-10-0-224-187:3788821:3789268 [3] NCCL INFO Channel 01/0 : 11[201d0] -> 10[201c0] via P2P/IPC/read
+ip-10-0-224-187:3788821:3789268 [3] NCCL INFO Connected all rings
+ip-10-0-224-187:3788821:3789268 [3] NCCL INFO Channel 00/0 : 11[201d0] -> 12[901c0] via P2P/IPC/read
+ip-10-0-224-187:3788821:3789268 [3] NCCL INFO Channel 01/0 : 11[201d0] -> 12[901c0] via P2P/IPC/read
+ip-10-0-224-187:3788821:3789268 [3] NCCL INFO Connected all trees
+ip-10-0-224-187:3788821:3789268 [3] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512
+ip-10-0-224-187:3788821:3789268 [3] NCCL INFO 2 coll channels, 2 p2p channels, 2 p2p channels per peer
+ip-10-0-224-187:3788821:3789268 [3] NCCL INFO comm 0x729fe240 rank 11 nranks 16 cudaDev 3 busId 201d0 - Init COMPLETE
+ip-10-0-224-187:3788819:3788819 [1] NCCL INFO cudaDriverVersion 12000
+ip-10-0-224-187:3788819:3788819 [1] NCCL INFO Bootstrap : Using ens32:10.0.224.187<0>
+ip-10-0-224-187:3788819:3788819 [1] NCCL INFO NET/Plugin : No plugin found (libnccl-net.so), using internal implementation
+ip-10-0-224-187:3788819:3789272 [1] NCCL INFO NET/IB : No device found.
+ip-10-0-224-187:3788819:3789272 [1] NCCL INFO NET/Socket : Using [0]ens32:10.0.224.187<0> +ip-10-0-224-187:3788819:3789272 [1] NCCL INFO Using network Socket +ip-10-0-224-187:3788819:3789272 [1] NCCL INFO Setting affinity for GPU 1 to 0f,ffff0000,000fffff +ip-10-0-224-187:3788819:3789272 [1] NCCL INFO Trees [0] 10/-1/-1->9->8 [1] 10/-1/-1->9->8 +ip-10-0-224-187:3788819:3789272 [1] NCCL INFO Channel 00/0 : 9[101d0] -> 0[101c0] [send] via NET/Socket/0 +ip-10-0-224-187:3788819:3789272 [1] NCCL INFO Channel 01/0 : 9[101d0] -> 0[101c0] [send] via NET/Socket/0 +ip-10-0-224-187:3788819:3789272 [1] NCCL INFO Connected all rings +ip-10-0-224-187:3788819:3789272 [1] NCCL INFO Channel 00/0 : 9[101d0] -> 10[201c0] via P2P/IPC/read +ip-10-0-224-187:3788819:3789272 [1] NCCL INFO Channel 01/0 : 9[101d0] -> 10[201c0] via P2P/IPC/read +ip-10-0-224-187:3788819:3789272 [1] NCCL INFO Channel 00/0 : 9[101d0] -> 8[101c0] via P2P/IPC/read +ip-10-0-224-187:3788819:3789272 [1] NCCL INFO Channel 01/0 : 9[101d0] -> 8[101c0] via P2P/IPC/read +ip-10-0-224-187:3788819:3789272 [1] NCCL INFO Connected all trees +ip-10-0-224-187:3788819:3789272 [1] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512 +ip-10-0-224-187:3788819:3789272 [1] NCCL INFO 2 coll channels, 2 p2p channels, 2 p2p channels per peer +ip-10-0-224-187:3788819:3789272 [1] NCCL INFO comm 0x1236d920 rank 9 nranks 16 cudaDev 1 busId 101d0 - Init COMPLETE +ip-10-0-216-33:311887:311887 [7] NCCL INFO cudaDriverVersion 12000 +ip-10-0-216-33:311887:311887 [7] NCCL INFO Bootstrap : Using ens32:10.0.216.33<0> +ip-10-0-216-33:311887:311887 [7] NCCL INFO NET/Plugin : No plugin found (libnccl-net.so), using internal implementation +ip-10-0-216-33:311887:312362 [7] NCCL INFO NET/IB : No device found. +ip-10-0-216-33:311887:312362 [7] NCCL INFO NET/Socket : Using [0]ens32:10.0.216.33<0> +ip-10-0-216-33:311887:312362 [7] NCCL INFO Using network Socket +ip-10-0-216-33:311887:312362 [7] NCCL INFO Setting affinity for GPU 7 to 0fffff00,00000fff,ff000000 +ip-10-0-216-33:311887:312362 [7] NCCL INFO Trees [0] -1/-1/-1->7->6 [1] -1/-1/-1->7->6 +ip-10-0-216-33:311887:312362 [7] NCCL INFO Channel 00/0 : 7[a01d0] -> 6[a01c0] via P2P/IPC/read +ip-10-0-216-33:311887:312362 [7] NCCL INFO Channel 01/0 : 7[a01d0] -> 6[a01c0] via P2P/IPC/read +ip-10-0-216-33:311887:312362 [7] NCCL INFO Connected all rings +ip-10-0-216-33:311887:312362 [7] NCCL INFO Connected all trees +ip-10-0-216-33:311887:312362 [7] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512 +ip-10-0-216-33:311887:312362 [7] NCCL INFO 2 coll channels, 2 p2p channels, 2 p2p channels per peer +ip-10-0-216-33:311887:312362 [7] NCCL INFO comm 0x6ee1f170 rank 7 nranks 16 cudaDev 7 busId a01d0 - Init COMPLETE +ip-10-0-216-33:311885:311885 [5] NCCL INFO cudaDriverVersion 12000 +ip-10-0-216-33:311885:311885 [5] NCCL INFO Bootstrap : Using ens32:10.0.216.33<0> +ip-10-0-216-33:311885:311885 [5] NCCL INFO NET/Plugin : No plugin found (libnccl-net.so), using internal implementation +ip-10-0-216-33:311885:312364 [5] NCCL INFO NET/IB : No device found. 
+ip-10-0-216-33:311885:312364 [5] NCCL INFO NET/Socket : Using [0]ens32:10.0.216.33<0> +ip-10-0-216-33:311885:312364 [5] NCCL INFO Using network Socket +ip-10-0-216-33:311885:312364 [5] NCCL INFO Setting affinity for GPU 5 to 0fffff00,00000fff,ff000000 +ip-10-0-216-33:311885:312364 [5] NCCL INFO Trees [0] 6/-1/-1->5->4 [1] 6/-1/-1->5->4 +ip-10-0-216-33:311885:312364 [5] NCCL INFO Channel 00/0 : 5[901d0] -> 4[901c0] via P2P/IPC/read +ip-10-0-216-33:311885:312364 [5] NCCL INFO Channel 01/0 : 5[901d0] -> 4[901c0] via P2P/IPC/read +ip-10-0-216-33:311885:312364 [5] NCCL INFO Connected all rings +ip-10-0-216-33:311885:312364 [5] NCCL INFO Channel 00/0 : 5[901d0] -> 6[a01c0] via P2P/IPC/read +ip-10-0-216-33:311885:312364 [5] NCCL INFO Channel 01/0 : 5[901d0] -> 6[a01c0] via P2P/IPC/read +ip-10-0-216-33:311885:312364 [5] NCCL INFO Connected all trees +ip-10-0-216-33:311885:312364 [5] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512 +ip-10-0-216-33:311885:312364 [5] NCCL INFO 2 coll channels, 2 p2p channels, 2 p2p channels per peer +ip-10-0-216-33:311885:312364 [5] NCCL INFO comm 0x45e56440 rank 5 nranks 16 cudaDev 5 busId 901d0 - Init COMPLETE +ip-10-0-224-187:3788820:3788820 [2] NCCL INFO cudaDriverVersion 12000 +ip-10-0-224-187:3788820:3788820 [2] NCCL INFO Bootstrap : Using ens32:10.0.224.187<0> +ip-10-0-224-187:3788820:3788820 [2] NCCL INFO NET/Plugin : No plugin found (libnccl-net.so), using internal implementation +ip-10-0-224-187:3788820:3789271 [2] NCCL INFO NET/IB : No device found. +ip-10-0-224-187:3788820:3789271 [2] NCCL INFO NET/Socket : Using [0]ens32:10.0.224.187<0> +ip-10-0-224-187:3788820:3789271 [2] NCCL INFO Using network Socket +ip-10-0-224-187:3788820:3789271 [2] NCCL INFO Setting affinity for GPU 2 to 0f,ffff0000,000fffff +ip-10-0-224-187:3788820:3789271 [2] NCCL INFO Trees [0] 11/-1/-1->10->9 [1] 11/-1/-1->10->9 +ip-10-0-224-187:3788820:3789271 [2] NCCL INFO Channel 00/0 : 10[201c0] -> 9[101d0] via P2P/IPC/read +ip-10-0-224-187:3788820:3789271 [2] NCCL INFO Channel 01/0 : 10[201c0] -> 9[101d0] via P2P/IPC/read +ip-10-0-224-187:3788820:3789271 [2] NCCL INFO Connected all rings +ip-10-0-224-187:3788820:3789271 [2] NCCL INFO Channel 00/0 : 10[201c0] -> 11[201d0] via P2P/IPC/read +ip-10-0-224-187:3788820:3789271 [2] NCCL INFO Channel 01/0 : 10[201c0] -> 11[201d0] via P2P/IPC/read +ip-10-0-224-187:3788820:3789271 [2] NCCL INFO Connected all trees +ip-10-0-224-187:3788820:3789271 [2] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512 +ip-10-0-224-187:3788820:3789271 [2] NCCL INFO 2 coll channels, 2 p2p channels, 2 p2p channels per peer +ip-10-0-224-187:3788820:3789271 [2] NCCL INFO comm 0x965e2c0 rank 10 nranks 16 cudaDev 2 busId 201c0 - Init COMPLETE +ip-10-0-216-33:311880:312357 [0] NCCL INFO NET/IB : No device found. 
+ip-10-0-216-33:311880:312357 [0] NCCL INFO NET/Socket : Using [0]ens32:10.0.216.33<0> +ip-10-0-216-33:311880:312357 [0] NCCL INFO Using network Socket +ip-10-0-216-33:311880:312357 [0] NCCL INFO Setting affinity for GPU 0 to 0f,ffff0000,000fffff +ip-10-0-216-33:311880:312357 [0] NCCL INFO Channel 00/02 : 0 7 6 5 4 3 2 1 8 15 14 13 12 11 10 9 +ip-10-0-216-33:311880:312357 [0] NCCL INFO Channel 01/02 : 0 7 6 5 4 3 2 1 8 15 14 13 12 11 10 9 +ip-10-0-216-33:311880:312357 [0] NCCL INFO Trees [0] 1/8/-1->0->-1 [1] 1/-1/-1->0->8 +ip-10-0-216-33:311880:312455 [0] NCCL INFO NET/Socket: Using 2 threads and 8 sockets per thread +ip-10-0-216-33:311880:312357 [0] NCCL INFO Channel 00/0 : 9[101d0] -> 0[101c0] [receive] via NET/Socket/0 +ip-10-0-216-33:311880:312455 [0] NCCL INFO NET/Socket: Using 2 threads and 8 sockets per thread +ip-10-0-216-33:311880:312357 [0] NCCL INFO Channel 01/0 : 9[101d0] -> 0[101c0] [receive] via NET/Socket/0 +ip-10-0-216-33:311880:312357 [0] NCCL INFO Channel 00/0 : 0[101c0] -> 7[a01d0] via P2P/IPC/read +ip-10-0-216-33:311880:312357 [0] NCCL INFO Channel 01/0 : 0[101c0] -> 7[a01d0] via P2P/IPC/read +ip-10-0-216-33:311880:312357 [0] NCCL INFO Connected all rings +ip-10-0-216-33:311880:312357 [0] NCCL INFO Channel 00/0 : 0[101c0] -> 1[101d0] via P2P/IPC/read +ip-10-0-216-33:311880:312357 [0] NCCL INFO Channel 01/0 : 0[101c0] -> 1[101d0] via P2P/IPC/read +ip-10-0-216-33:311880:312455 [0] NCCL INFO NET/Socket: Using 2 threads and 8 sockets per thread +ip-10-0-216-33:311880:312357 [0] NCCL INFO Channel 00/0 : 8[101c0] -> 0[101c0] [receive] via NET/Socket/0 +ip-10-0-216-33:311880:312455 [0] NCCL INFO NET/Socket: Using 2 threads and 8 sockets per thread +ip-10-0-216-33:311880:312357 [0] NCCL INFO Channel 01/0 : 8[101c0] -> 0[101c0] [receive] via NET/Socket/0 +ip-10-0-216-33:311880:312357 [0] NCCL INFO Channel 00/0 : 0[101c0] -> 8[101c0] [send] via NET/Socket/0 +ip-10-0-216-33:311880:312357 [0] NCCL INFO Channel 01/0 : 0[101c0] -> 8[101c0] [send] via NET/Socket/0 +ip-10-0-216-33:311880:312357 [0] NCCL INFO Connected all trees +ip-10-0-216-33:311880:312357 [0] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512 +ip-10-0-216-33:311880:312357 [0] NCCL INFO 2 coll channels, 2 p2p channels, 2 p2p channels per peer +ip-10-0-216-33:311880:312357 [0] NCCL INFO comm 0x90d2b80 rank 0 nranks 16 cudaDev 0 busId 101c0 - Init COMPLETE +[ip-10-0-216-33:0/16] 2024-02-26 22:11:41,417 (distributed:1027) INFO: Reducer buckets have been rebuilt in this iteration. 
+[ip-10-0-216-33:0/16] 2024-02-26 22:19:15,625 (trainer:762) INFO: 2epoch:train:1-500batch: iter_time=0.878, forward_time=0.211, loss_ctc=256.046, loss_att=215.117, acc=0.175, loss=227.395, backward_time=0.230, grad_norm=71.288, clip=100.000, loss_scale=8.389e+06, optim_step_time=0.075, optim0_lr0=2.542e-05, train_time=1.806
+[ip-10-0-216-33:0/16] 2024-02-26 22:26:45,984 (trainer:762) INFO: 2epoch:train:501-1000batch: iter_time=1.560e-04, forward_time=0.205, loss_ctc=264.017, loss_att=222.907, acc=0.179, loss=235.240, backward_time=0.226, grad_norm=78.517, clip=100.000, loss_scale=8.389e+06, optim_step_time=0.074, optim0_lr0=2.625e-05, train_time=0.901
+[ip-10-0-216-33:0/16] 2024-02-26 22:34:18,504 (trainer:762) INFO: 2epoch:train:1001-1500batch: iter_time=1.508e-04, forward_time=0.207, loss_ctc=252.138, loss_att=216.662, acc=0.180, loss=227.305, backward_time=0.227, grad_norm=74.385, clip=100.000, loss_scale=1.678e+07, optim_step_time=0.074, optim0_lr0=2.709e-05, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-02-26 22:41:52,449 (trainer:762) INFO: 2epoch:train:1501-2000batch: iter_time=1.466e-04, forward_time=0.206, loss_ctc=244.770, loss_att=214.569, acc=0.180, loss=223.629, backward_time=0.228, grad_norm=78.179, clip=100.000, loss_scale=1.678e+07, optim_step_time=0.074, optim0_lr0=2.792e-05, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-02-26 22:49:28,708 (trainer:762) INFO: 2epoch:train:2001-2500batch: iter_time=1.502e-04, forward_time=0.205, loss_ctc=248.980, loss_att=224.210, acc=0.179, loss=231.641, backward_time=0.225, grad_norm=76.967, clip=100.000, loss_scale=1.678e+07, optim_step_time=0.074, optim0_lr0=2.875e-05, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-02-26 22:56:57,090 (trainer:762) INFO: 2epoch:train:2501-3000batch: iter_time=1.455e-04, forward_time=0.206, loss_ctc=222.364, loss_att=203.249, acc=0.183, loss=208.983, backward_time=0.227, grad_norm=80.769, clip=100.000, loss_scale=1.678e+07, optim_step_time=0.074, optim0_lr0=2.959e-05, train_time=0.896
+[ip-10-0-216-33:0/16] 2024-02-26 23:04:26,477 (trainer:762) INFO: 2epoch:train:3001-3500batch: iter_time=1.456e-04, forward_time=0.206, loss_ctc=225.000, loss_att=210.333, acc=0.184, loss=214.733, backward_time=0.229, grad_norm=83.386, clip=100.000, loss_scale=3.355e+07, optim_step_time=0.074, optim0_lr0=3.042e-05, train_time=0.898
+[ip-10-0-216-33:0/16] 2024-02-26 23:11:59,520 (trainer:762) INFO: 2epoch:train:3501-4000batch: iter_time=1.482e-04, forward_time=0.205, loss_ctc=229.381, loss_att=220.539, acc=0.181, loss=223.191, backward_time=0.229, grad_norm=86.785, clip=100.000, loss_scale=3.355e+07, optim_step_time=0.074, optim0_lr0=3.125e-05, train_time=0.906
+[ip-10-0-216-33:0/16] 2024-02-26 23:19:31,753 (trainer:762) INFO: 2epoch:train:4001-4500batch: iter_time=1.463e-04, forward_time=0.206, loss_ctc=222.672, loss_att=220.824, acc=0.183, loss=221.379, backward_time=0.227, grad_norm=83.928, clip=100.000, loss_scale=3.355e+07, optim_step_time=0.073, optim0_lr0=3.209e-05, train_time=0.904
+[ip-10-0-216-33:0/16] 2024-02-26 23:27:06,817 (trainer:762) INFO: 2epoch:train:4501-5000batch: iter_time=1.458e-04, forward_time=0.206, loss_ctc=210.652, loss_att=208.617, acc=0.190, loss=209.228, backward_time=0.226, grad_norm=82.040, clip=100.000, loss_scale=3.355e+07, optim_step_time=0.074, optim0_lr0=3.292e-05, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-02-26 23:27:13,412 (multiple_iter_factory:32) INFO: Building 1th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-26 23:27:50,474 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-26 23:27:58,432 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-26 23:27:58,433 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1,
+[ip-10-0-216-33:0/16] 2024-02-26 23:27:58,437 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-26 23:42:10,656 (trainer:762) INFO: 2epoch:train:5001-5500batch: iter_time=0.876, forward_time=0.207, loss_ctc=200.892, loss_att=204.306, acc=0.189, loss=203.281, backward_time=0.230, grad_norm=81.583, clip=100.000, loss_scale=6.711e+07, optim_step_time=0.073, optim0_lr0=3.375e-05, train_time=1.807
+[ip-10-0-216-33:0/16] 2024-02-26 23:49:42,183 (trainer:762) INFO: 2epoch:train:5501-6000batch: iter_time=1.485e-04, forward_time=0.208, loss_ctc=199.205, loss_att=212.214, acc=0.192, loss=208.312, backward_time=0.229, grad_norm=77.359, clip=100.000, loss_scale=6.711e+07, optim_step_time=0.073, optim0_lr0=3.459e-05, train_time=0.903
+[ip-10-0-216-33:0/16] 2024-02-26 23:57:15,796 (trainer:762) INFO: 2epoch:train:6001-6500batch: iter_time=1.494e-04, forward_time=0.206, loss_ctc=191.261, loss_att=206.538, acc=0.194, loss=201.955, backward_time=0.233, grad_norm=77.979, clip=100.000, loss_scale=6.711e+07, optim_step_time=0.073, optim0_lr0=3.542e-05, train_time=0.907
+[ip-10-0-216-33:0/16] 2024-02-27 00:04:42,410 (trainer:762) INFO: 2epoch:train:6501-7000batch: iter_time=1.455e-04, forward_time=0.208, loss_ctc=187.001, loss_att=204.608, acc=0.195, loss=199.326, backward_time=0.229, grad_norm=75.064, clip=100.000, loss_scale=6.711e+07, optim_step_time=0.073, optim0_lr0=3.625e-05, train_time=0.893
+[ip-10-0-216-33:0/16] 2024-02-27 00:12:17,971 (trainer:762) INFO: 2epoch:train:7001-7500batch: iter_time=1.487e-04, forward_time=0.207, loss_ctc=186.698, loss_att=214.496, acc=0.193, loss=206.157, backward_time=0.230, grad_norm=73.954, clip=100.000, loss_scale=1.342e+08, optim_step_time=0.073, optim0_lr0=3.709e-05, train_time=0.911
+[ip-10-0-216-33:0/16] 2024-02-27 00:19:45,681 (trainer:762) INFO: 2epoch:train:7501-8000batch: iter_time=1.459e-04, forward_time=0.207, loss_ctc=171.050, loss_att=193.820, acc=0.197, loss=186.989, backward_time=0.231, grad_norm=66.564, clip=100.000, loss_scale=1.342e+08, optim_step_time=0.073, optim0_lr0=3.792e-05, train_time=0.895
+[ip-10-0-216-33:0/16] 2024-02-27 00:27:16,194 (trainer:762) INFO: 2epoch:train:8001-8500batch: iter_time=1.467e-04, forward_time=0.208, loss_ctc=170.404, loss_att=200.856, acc=0.199, loss=191.720, backward_time=0.230, grad_norm=66.618, clip=100.000, loss_scale=1.342e+08, optim_step_time=0.074, optim0_lr0=3.875e-05, train_time=0.901
+[ip-10-0-216-33:0/16] 2024-02-27 00:34:48,445 (trainer:762) INFO: 2epoch:train:8501-9000batch: iter_time=1.544e-04, forward_time=0.207, loss_ctc=175.160, loss_att=210.792, acc=0.196, loss=200.102, backward_time=0.226, grad_norm=72.661, clip=100.000, loss_scale=1.342e+08, optim_step_time=0.074, optim0_lr0=3.959e-05, train_time=0.904
+[ip-10-0-216-33:0/16] 2024-02-27 00:42:14,488 (trainer:762) INFO: 2epoch:train:9001-9500batch: iter_time=1.511e-04, forward_time=0.209, loss_ctc=172.136, loss_att=212.137, acc=0.199, loss=200.137, backward_time=0.229, grad_norm=68.953, clip=100.000, loss_scale=2.684e+08, optim_step_time=0.074, optim0_lr0=4.042e-05, train_time=0.892
+[ip-10-0-216-33:0/16] 2024-02-27 00:49:47,158 (trainer:762) INFO: 2epoch:train:9501-10000batch: iter_time=1.526e-04, forward_time=0.207, loss_ctc=164.211, loss_att=199.953, acc=0.206, loss=189.231, backward_time=0.230, grad_norm=64.642, clip=100.000, loss_scale=2.684e+08, optim_step_time=0.073, optim0_lr0=4.125e-05, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-02-27 00:49:51,618 (multiple_iter_factory:32) INFO: Building 2th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-27 00:50:29,712 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-27 00:50:38,084 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-27 00:50:38,084 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0,
+[ip-10-0-216-33:0/16] 2024-02-27 00:50:38,088 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-27 01:04:55,686 (trainer:762) INFO: 2epoch:train:10001-10500batch: iter_time=0.901, forward_time=0.208, loss_ctc=158.463, loss_att=195.850, acc=0.204, loss=184.634, backward_time=0.226, grad_norm=64.366, clip=100.000, loss_scale=2.684e+08, optim_step_time=0.074, optim0_lr0=4.209e-05, train_time=1.817
+[ip-10-0-216-33:0/16] 2024-02-27 01:12:23,475 (trainer:762) INFO: 2epoch:train:10501-11000batch: iter_time=1.477e-04, forward_time=0.209, loss_ctc=155.730, loss_att=203.552, acc=0.207, loss=189.205, backward_time=0.227, grad_norm=62.679, clip=100.000, loss_scale=2.684e+08, optim_step_time=0.074, optim0_lr0=4.292e-05, train_time=0.895
+[ip-10-0-216-33:0/16] 2024-02-27 01:19:52,735 (trainer:762) INFO: 2epoch:train:11001-11500batch: iter_time=1.530e-04, forward_time=0.209, loss_ctc=151.626, loss_att=198.325, acc=0.210, loss=184.315, backward_time=0.228, grad_norm=62.646, clip=100.000, loss_scale=5.369e+08, optim_step_time=0.074, optim0_lr0=4.375e-05, train_time=0.898
+[ip-10-0-216-33:0/16] 2024-02-27 01:27:25,077 (trainer:762) INFO: 2epoch:train:11501-12000batch: iter_time=1.498e-04, forward_time=0.208, loss_ctc=151.375, loss_att=197.014, acc=0.210, loss=183.322, backward_time=0.229, grad_norm=61.172, clip=100.000, loss_scale=5.369e+08, optim_step_time=0.074, optim0_lr0=4.459e-05, train_time=0.904
+[ip-10-0-216-33:0/16] 2024-02-27 01:34:53,578 (trainer:762) INFO: 2epoch:train:12001-12500batch: iter_time=1.487e-04, forward_time=0.209, loss_ctc=149.224, loss_att=206.618, acc=0.209, loss=189.400, backward_time=0.228, grad_norm=59.147, clip=100.000, loss_scale=5.369e+08, optim_step_time=0.074, optim0_lr0=4.542e-05, train_time=0.897
+[ip-10-0-216-33:0/16] 2024-02-27 01:42:26,491 (trainer:762) INFO: 2epoch:train:12501-13000batch: iter_time=1.507e-04, forward_time=0.206, loss_ctc=139.965, loss_att=185.753, acc=0.215, loss=172.016, backward_time=0.229, grad_norm=56.759, clip=100.000, loss_scale=5.369e+08, optim_step_time=0.074, optim0_lr0=4.625e-05, train_time=0.906
+[ip-10-0-216-33:0/16] 2024-02-27 01:49:56,749 (trainer:762) INFO: 2epoch:train:13001-13500batch: iter_time=1.556e-04, forward_time=0.209, loss_ctc=138.346, loss_att=193.083, acc=0.217, loss=176.662, backward_time=0.232, grad_norm=56.796, clip=100.000, loss_scale=1.074e+09, optim_step_time=0.074, optim0_lr0=4.709e-05, train_time=0.900
+[ip-10-0-216-33:0/16] 2024-02-27 01:57:33,250 (trainer:762) INFO: 2epoch:train:13501-14000batch: iter_time=1.487e-04, forward_time=0.207, loss_ctc=142.910, loss_att=202.412, acc=0.215, loss=184.561, backward_time=0.228, grad_norm=58.482, clip=100.000, loss_scale=1.074e+09, optim_step_time=0.074, optim0_lr0=4.792e-05, train_time=0.913
+[ip-10-0-216-33:0/16] 2024-02-27 02:04:59,661 (trainer:762) INFO: 2epoch:train:14001-14500batch: iter_time=1.471e-04, forward_time=0.209, loss_ctc=141.863, loss_att=203.245, acc=0.219, loss=184.831, backward_time=0.226, grad_norm=59.454, clip=100.000, loss_scale=1.074e+09, optim_step_time=0.074, optim0_lr0=4.875e-05, train_time=0.893
+[ip-10-0-216-33:0/16] 2024-02-27 02:12:30,672 (trainer:762) INFO: 2epoch:train:14501-15000batch: iter_time=1.487e-04, forward_time=0.208, loss_ctc=136.369, loss_att=192.084, acc=0.228, loss=175.369, backward_time=0.227, grad_norm=54.535, clip=100.000, loss_scale=1.074e+09, optim_step_time=0.074, optim0_lr0=4.959e-05, train_time=0.902
+[ip-10-0-216-33:0/16] 2024-02-27 02:26:25,064 (trainer:361) INFO: 2epoch results: [train] iter_time=0.089, forward_time=0.207, loss_ctc=188.664, loss_att=206.489, acc=0.197, loss=201.142, backward_time=0.228, grad_norm=70.589, clip=100.000, loss_scale=2.847e+08, optim_step_time=0.074, optim0_lr0=3.750e-05, train_time=0.993, time=4 hours, 8 minutes and 34.41 seconds, total_count=30000, gpu_max_cached_mem_GB=36.488, [valid] loss_ctc=157.611, cer_ctc=0.761, loss_att=155.798, acc=0.181, cer=0.695, wer=1.000, loss=156.342, time=13 minutes and 37.86 seconds, total_count=4266, gpu_max_cached_mem_GB=36.488
+[ip-10-0-216-33:0/16] 2024-02-27 02:26:35,611 (trainer:416) INFO: The best model has been updated: valid.acc, valid.total_count
+[ip-10-0-216-33:0/16] 2024-02-27 02:26:35,613 (trainer:290) INFO: 3/45epoch started. Estimated time to finish: 1 week, 20 hours and 2 minutes
+[ip-10-0-216-33:0/16] 2024-02-27 02:26:35,620 (multiple_iter_factory:32) INFO: Building 0th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-27 02:27:11,863 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-27 02:27:20,162 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-27 02:27:20,162 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0,
+[ip-10-0-216-33:0/16] 2024-02-27 02:27:20,167 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-27 02:41:13,923 (trainer:762) INFO: 3epoch:train:1-500batch: iter_time=0.826, forward_time=0.209, loss_ctc=135.945, loss_att=193.788, acc=0.222, loss=176.435, backward_time=0.236, grad_norm=57.775, clip=100.000, loss_scale=2.147e+09, optim_step_time=0.076, optim0_lr0=5.377e-05, train_time=1.756
+[ip-10-0-216-33:0/16] 2024-02-27 02:48:48,093 (trainer:762) INFO: 3epoch:train:501-1000batch: iter_time=1.616e-04, forward_time=0.207, loss_ctc=128.326, loss_att=188.941, acc=0.228, loss=170.757, backward_time=0.231, grad_norm=52.671, clip=100.000, loss_scale=2.147e+09, optim_step_time=0.076, optim0_lr0=6.127e-05, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-02-27 02:56:18,565 (trainer:762) INFO: 3epoch:train:1001-1500batch: iter_time=1.595e-04, forward_time=0.209, loss_ctc=136.890, loss_att=192.633, acc=0.229, loss=175.910, backward_time=0.235, grad_norm=60.697, clip=100.000, loss_scale=2.147e+09, optim_step_time=0.076, optim0_lr0=6.877e-05, train_time=0.901
+[ip-10-0-216-33:0/16] 2024-02-27 03:03:44,536 (trainer:762) INFO: 3epoch:train:1501-2000batch: iter_time=1.589e-04, forward_time=0.209, loss_ctc=127.544, loss_att=187.574, acc=0.235, loss=169.565, backward_time=0.231, grad_norm=54.367, clip=100.000, loss_scale=2.147e+09, optim_step_time=0.076, optim0_lr0=7.627e-05, train_time=0.892
+[ip-10-0-216-33:0/16] 2024-02-27 03:11:13,737 (trainer:762) INFO: 3epoch:train:2001-2500batch: iter_time=1.620e-04, forward_time=0.209, loss_ctc=132.618, loss_att=194.409, acc=0.238, loss=175.872, backward_time=0.233, grad_norm=56.408, clip=100.000, loss_scale=4.295e+09, optim_step_time=0.077, optim0_lr0=8.377e-05, train_time=0.898
+[ip-10-0-216-33:0/16] 2024-02-27 03:18:43,505 (trainer:762) INFO: 3epoch:train:2501-3000batch: iter_time=1.600e-04, forward_time=0.209, loss_ctc=118.924, loss_att=177.962, acc=0.247, loss=160.251, backward_time=0.234, grad_norm=52.048, clip=100.000, loss_scale=4.295e+09, optim_step_time=0.076, optim0_lr0=9.127e-05, train_time=0.899
+[ip-10-0-216-33:0/16] 2024-02-27 03:26:15,388 (trainer:762) INFO: 3epoch:train:3001-3500batch: iter_time=1.655e-04, forward_time=0.210, loss_ctc=125.405, loss_att=190.989, acc=0.255, loss=171.313, backward_time=0.230, grad_norm=56.934, clip=100.000, loss_scale=4.295e+09, optim_step_time=0.076, optim0_lr0=9.877e-05, train_time=0.903
+[ip-10-0-216-33:0/16] 2024-02-27 03:33:43,188 (trainer:762) INFO: 3epoch:train:3501-4000batch: iter_time=1.567e-04, forward_time=0.208, loss_ctc=121.248, loss_att=174.649, acc=0.270, loss=158.629, backward_time=0.233, grad_norm=56.527, clip=100.000, loss_scale=4.295e+09, optim_step_time=0.076, optim0_lr0=1.063e-04, train_time=0.895
+[ip-10-0-216-33:0/16] 2024-02-27 03:41:14,821 (trainer:762) INFO: 3epoch:train:4001-4500batch: iter_time=1.580e-04, forward_time=0.209, loss_ctc=123.202, loss_att=171.310, acc=0.282, loss=156.878, backward_time=0.234, grad_norm=63.597, clip=100.000, loss_scale=8.590e+09, optim_step_time=0.076, optim0_lr0=1.138e-04, train_time=0.903
+[ip-10-0-216-33:0/16] 2024-02-27 03:48:45,888 (trainer:762) INFO: 3epoch:train:4501-5000batch: iter_time=1.564e-04, forward_time=0.209, loss_ctc=124.991, loss_att=172.746, acc=0.298, loss=158.420, backward_time=0.233, grad_norm=67.897, clip=100.000, loss_scale=8.590e+09, optim_step_time=0.076, optim0_lr0=1.213e-04, train_time=0.902
+[ip-10-0-216-33:0/16] 2024-02-27 03:48:51,600 (multiple_iter_factory:32) INFO: Building 1th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-27 03:49:28,552 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-27 03:49:36,441 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-27 03:49:36,441 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1,
+[ip-10-0-216-33:0/16] 2024-02-27 03:49:36,446 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-27 04:03:51,603 (trainer:762) INFO: 3epoch:train:5001-5500batch: iter_time=0.858, forward_time=0.208, loss_ctc=121.894, loss_att=160.912, acc=0.307, loss=149.207, backward_time=0.230, grad_norm=69.924, clip=100.000, loss_scale=8.590e+09, optim_step_time=0.075, optim0_lr0=1.288e-04, train_time=1.811
+[ip-10-0-216-33:0/16] 2024-02-27 04:11:20,815 (trainer:762) INFO: 3epoch:train:5501-6000batch: iter_time=1.456e-04, forward_time=0.209, loss_ctc=113.492, loss_att=150.486, acc=0.330, loss=139.388, backward_time=0.232, grad_norm=64.511, clip=100.000, loss_scale=8.590e+09, optim_step_time=0.075, optim0_lr0=1.363e-04, train_time=0.898
+[ip-10-0-216-33:0/16] 2024-02-27 04:18:56,625 (trainer:762) INFO: 3epoch:train:6001-6500batch: iter_time=1.478e-04, forward_time=0.208, loss_ctc=121.604, loss_att=148.927, acc=0.343, loss=140.730, backward_time=0.231, grad_norm=69.876, clip=100.000, loss_scale=1.718e+10, optim_step_time=0.075, optim0_lr0=1.438e-04, train_time=0.911
+[ip-10-0-216-33:0/16] 2024-02-27 04:26:33,295 (trainer:762) INFO: 3epoch:train:6501-7000batch: iter_time=1.517e-04, forward_time=0.209, loss_ctc=111.241, loss_att=136.597, acc=0.371, loss=128.990, backward_time=0.233, grad_norm=63.932, clip=100.000, loss_scale=1.718e+10, optim_step_time=0.075, optim0_lr0=1.513e-04, train_time=0.913
+[ip-10-0-216-33:0/16] 2024-02-27 04:34:08,281 (trainer:762) INFO: 3epoch:train:7001-7500batch: iter_time=1.503e-04, forward_time=0.209, loss_ctc=114.210, loss_att=137.943, acc=0.390, loss=130.823, backward_time=0.231, grad_norm=69.867, clip=100.000, loss_scale=1.718e+10, optim_step_time=0.075, optim0_lr0=1.588e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-02-27 04:41:40,310 (trainer:762) INFO: 3epoch:train:7501-8000batch: iter_time=1.490e-04, forward_time=0.208, loss_ctc=102.651, loss_att=120.730, acc=0.405, loss=115.306, backward_time=0.233, grad_norm=64.007, clip=100.000, loss_scale=1.718e+10, optim_step_time=0.075, optim0_lr0=1.663e-04, train_time=0.904
+[ip-10-0-216-33:0/16] 2024-02-27 04:49:09,708 (trainer:762) INFO: 3epoch:train:8001-8500batch: iter_time=1.509e-04, forward_time=0.210, loss_ctc=107.805, loss_att=124.129, acc=0.434, loss=119.232, backward_time=0.230, grad_norm=67.397, clip=100.000, loss_scale=3.436e+10, optim_step_time=0.075, optim0_lr0=1.738e-04, train_time=0.899
+[ip-10-0-216-33:0/16] 2024-02-27 04:56:38,541 (trainer:762) INFO: 3epoch:train:8501-9000batch: iter_time=1.493e-04, forward_time=0.208, loss_ctc=104.751, loss_att=114.477, acc=0.439, loss=111.559, backward_time=0.233, grad_norm=66.045, clip=100.000, loss_scale=3.436e+10, optim_step_time=0.075, optim0_lr0=1.813e-04, train_time=0.897
+[ip-10-0-216-33:0/16] 2024-02-27 05:04:10,795 (trainer:762) INFO: 3epoch:train:9001-9500batch: iter_time=1.486e-04, forward_time=0.210, loss_ctc=105.849, loss_att=111.652, acc=0.451, loss=109.911, backward_time=0.233, grad_norm=64.720, clip=100.000, loss_scale=3.436e+10, optim_step_time=0.075, optim0_lr0=1.888e-04, train_time=0.904
+[ip-10-0-216-33:0/16] 2024-02-27 05:11:43,972 (trainer:762) INFO: 3epoch:train:9501-10000batch: iter_time=1.495e-04, forward_time=0.209, loss_ctc=107.498, loss_att=113.025, acc=0.464, loss=111.367, backward_time=0.231, grad_norm=64.861, clip=100.000, loss_scale=3.436e+10, optim_step_time=0.075, optim0_lr0=1.963e-04, train_time=0.906
+[ip-10-0-216-33:0/16] 2024-02-27 05:11:48,750 (multiple_iter_factory:32) INFO: Building 2th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-27 05:12:26,339 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-27 05:12:34,270 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-27 05:12:34,270 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2,
+[ip-10-0-216-33:0/16] 2024-02-27 05:12:34,275 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-27 05:26:48,665 (trainer:762) INFO: 3epoch:train:10001-10500batch: iter_time=0.870, forward_time=0.209, loss_ctc=104.848, loss_att=105.161, acc=0.468, loss=105.067, backward_time=0.232, grad_norm=60.487, clip=100.000, loss_scale=6.872e+10, optim_step_time=0.075, optim0_lr0=2.038e-04, train_time=1.809
+[ip-10-0-216-33:0/16] 2024-02-27 05:34:17,580 (trainer:762) INFO: 3epoch:train:10501-11000batch: iter_time=1.533e-04, forward_time=0.210, loss_ctc=97.527, loss_att=95.828, acc=0.494, loss=96.337, backward_time=0.236, grad_norm=54.260, clip=100.000, loss_scale=6.872e+10, optim_step_time=0.075, optim0_lr0=2.113e-04, train_time=0.898
+[ip-10-0-216-33:0/16] 2024-02-27 05:41:46,444 (trainer:762) INFO: 3epoch:train:11001-11500batch: iter_time=1.555e-04, forward_time=0.207, loss_ctc=105.042, loss_att=99.354, acc=0.492, loss=101.061, backward_time=0.229, grad_norm=59.958, clip=100.000, loss_scale=6.872e+10, optim_step_time=0.075, optim0_lr0=2.188e-04, train_time=0.897
+[ip-10-0-216-33:0/16] 2024-02-27 05:49:14,719 (trainer:762) INFO: 3epoch:train:11501-12000batch: iter_time=1.496e-04, forward_time=0.210, loss_ctc=96.238, loss_att=90.789, acc=0.515, loss=92.424, backward_time=0.229, grad_norm=51.190, clip=100.000, loss_scale=6.872e+10, optim_step_time=0.075, optim0_lr0=2.263e-04, train_time=0.896
+[ip-10-0-216-33:0/16] 2024-02-27 05:56:40,991 (trainer:762) INFO: 3epoch:train:12001-12500batch: iter_time=1.573e-04, forward_time=0.209, loss_ctc=99.778, loss_att=94.010, acc=0.527, loss=95.740, backward_time=0.229, grad_norm=54.650, clip=100.000, loss_scale=1.374e+11, optim_step_time=0.075, optim0_lr0=2.338e-04, train_time=0.892
+[ip-10-0-216-33:0/16] 2024-02-27 06:04:14,310 (trainer:762) INFO: 3epoch:train:12501-13000batch: iter_time=1.460e-04, forward_time=0.209, loss_ctc=89.097, loss_att=81.066, acc=0.540, loss=83.475, backward_time=0.231, grad_norm=49.938, clip=100.000, loss_scale=1.374e+11, optim_step_time=0.075, optim0_lr0=2.413e-04, train_time=0.906
+[ip-10-0-216-33:0/16] 2024-02-27 06:11:48,631 (trainer:762) INFO: 3epoch:train:13001-13500batch: iter_time=1.531e-04, forward_time=0.208, loss_ctc=94.148, loss_att=85.374, acc=0.559, loss=88.006, backward_time=0.234, grad_norm=52.360, clip=100.000, loss_scale=1.374e+11, optim_step_time=0.075, optim0_lr0=2.488e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-02-27 06:19:23,950 (trainer:762) INFO: 3epoch:train:13501-14000batch: iter_time=1.515e-04, forward_time=0.209, loss_ctc=91.811, loss_att=80.539, acc=0.555, loss=83.921, backward_time=0.233, grad_norm=49.335, clip=100.000, loss_scale=1.374e+11, optim_step_time=0.075, optim0_lr0=2.563e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-02-27 06:26:56,849 (trainer:762) INFO: 3epoch:train:14001-14500batch: iter_time=1.593e-04, forward_time=0.209, loss_ctc=94.035, loss_att=81.132, acc=0.557, loss=85.003, backward_time=0.236, grad_norm=52.272, clip=100.000, loss_scale=2.749e+11, optim_step_time=0.075, optim0_lr0=2.638e-04, train_time=0.906
+[ip-10-0-216-33:0/16] 2024-02-27 06:34:32,508 (trainer:762) INFO: 3epoch:train:14501-15000batch: iter_time=1.549e-04, forward_time=0.209, loss_ctc=95.925, loss_att=82.365, acc=0.564, loss=86.433, backward_time=0.231, grad_norm=52.475, clip=100.000, loss_scale=2.749e+11, optim_step_time=0.075, optim0_lr0=2.713e-04, train_time=0.911
+[ip-10-0-216-33:0/16] 2024-02-27 06:48:34,517 (trainer:361) INFO: 3epoch results: [train] iter_time=0.085, forward_time=0.209, loss_ctc=111.818, loss_att=135.317, acc=0.390, loss=128.267, backward_time=0.232, grad_norm=59.366, clip=100.000, loss_scale=5.469e+10, optim_step_time=0.076, optim0_lr0=1.625e-04, train_time=0.992, time=4 hours, 8 minutes and 15.84 seconds, total_count=45000, gpu_max_cached_mem_GB=36.488, [valid] loss_ctc=122.762, cer_ctc=0.641, loss_att=94.393, acc=0.368, cer=0.564, wer=1.000, loss=102.904, time=13 minutes and 42.76 seconds, total_count=6399, gpu_max_cached_mem_GB=36.488
+[ip-10-0-216-33:0/16] 2024-02-27 06:48:43,958 (trainer:416) INFO: The best model has been updated: valid.acc, valid.total_count
+[ip-10-0-216-33:0/16] 2024-02-27 06:48:43,964 (trainer:290) INFO: 4/45epoch started. Estimated time to finish: 1 week, 15 hours and 35 minutes
+[ip-10-0-216-33:0/16] 2024-02-27 06:48:43,971 (multiple_iter_factory:32) INFO: Building 0th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-27 06:49:19,869 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-27 06:49:28,426 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-27 06:49:28,426 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0,
+[ip-10-0-216-33:0/16] 2024-02-27 06:49:28,431 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-27 07:03:19,395 (trainer:762) INFO: 4epoch:train:1-500batch: iter_time=0.838, forward_time=0.209, loss_ctc=89.908, loss_att=76.108, acc=0.581, loss=80.248, backward_time=0.231, grad_norm=47.409, clip=100.000, loss_scale=2.749e+11, optim_step_time=0.076, optim0_lr0=2.788e-04, train_time=1.751
+[ip-10-0-216-33:0/16] 2024-02-27 07:10:54,532 (trainer:762) INFO: 4epoch:train:501-1000batch: iter_time=1.619e-04, forward_time=0.208, loss_ctc=91.105, loss_att=76.163, acc=0.583, loss=80.645, backward_time=0.230, grad_norm=47.963, clip=100.000, loss_scale=2.749e+11, optim_step_time=0.076, optim0_lr0=2.863e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-02-27 07:18:30,465 (trainer:762) INFO: 4epoch:train:1001-1500batch: iter_time=1.577e-04, forward_time=0.209, loss_ctc=87.124, loss_att=72.397, acc=0.562, loss=76.815, backward_time=0.232, grad_norm=47.813, clip=100.000, loss_scale=5.498e+11, optim_step_time=0.077, optim0_lr0=2.938e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-02-27 07:26:01,344 (trainer:762) INFO: 4epoch:train:1501-2000batch: iter_time=1.586e-04, forward_time=0.209, loss_ctc=92.127, loss_att=74.093, acc=0.588, loss=79.504, backward_time=0.236, grad_norm=49.163, clip=100.000, loss_scale=5.498e+11, optim_step_time=0.076, optim0_lr0=3.013e-04, train_time=0.901
+[ip-10-0-216-33:0/16] 2024-02-27 07:33:38,071 (trainer:762) INFO: 4epoch:train:2001-2500batch: iter_time=1.631e-04, forward_time=0.208, loss_ctc=83.559, loss_att=67.890, acc=0.586, loss=72.591, backward_time=0.235, grad_norm=44.398, clip=100.000, loss_scale=5.498e+11, optim_step_time=0.077, optim0_lr0=3.088e-04, train_time=0.913
+[ip-10-0-216-33:0/16] 2024-02-27 07:41:12,036 (trainer:762) INFO: 4epoch:train:2501-3000batch: iter_time=1.622e-04, forward_time=0.210, loss_ctc=92.337, loss_att=73.519, acc=0.589, loss=79.164, backward_time=0.232, grad_norm=48.594, clip=100.000, loss_scale=5.498e+11, optim_step_time=0.077, optim0_lr0=3.163e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-02-27 07:48:45,675 (trainer:762) INFO: 4epoch:train:3001-3500batch: iter_time=1.652e-04, forward_time=0.208, loss_ctc=90.005, loss_att=71.692, acc=0.592, loss=77.186, backward_time=0.236, grad_norm=49.674, clip=100.000, loss_scale=1.100e+12, optim_step_time=0.076, optim0_lr0=3.238e-04, train_time=0.907
+[ip-10-0-216-33:0/16] 2024-02-27 07:56:17,666 (trainer:762) INFO: 4epoch:train:3501-4000batch: iter_time=1.619e-04, forward_time=0.209, loss_ctc=85.248, loss_att=68.407, acc=0.605, loss=73.459, backward_time=0.231, grad_norm=45.809, clip=100.000, loss_scale=1.100e+12, optim_step_time=0.076, optim0_lr0=3.313e-04, train_time=0.904
+[ip-10-0-216-33:0/16] 2024-02-27 08:03:52,552 (trainer:762) INFO: 4epoch:train:4001-4500batch: iter_time=1.608e-04, forward_time=0.209, loss_ctc=88.440, loss_att=69.571, acc=0.609, loss=75.232, backward_time=0.233, grad_norm=44.831, clip=100.000, loss_scale=1.100e+12, optim_step_time=0.076, optim0_lr0=3.388e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-02-27 08:11:22,675 (trainer:762) INFO: 4epoch:train:4501-5000batch: iter_time=1.637e-04, forward_time=0.209, loss_ctc=89.398, loss_att=69.407, acc=0.614, loss=75.404, backward_time=0.230, grad_norm=46.979, clip=100.000, loss_scale=1.100e+12, optim_step_time=0.077, optim0_lr0=3.463e-04, train_time=0.900
+[ip-10-0-216-33:0/16] 2024-02-27 08:11:27,356 (multiple_iter_factory:32) INFO: Building 1th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-27 08:12:03,891 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-27 08:12:11,898 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-27 08:12:11,898 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2,
+[ip-10-0-216-33:0/16] 2024-02-27 08:12:11,903 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-27 08:26:27,989 (trainer:762) INFO: 4epoch:train:5001-5500batch: iter_time=0.859, forward_time=0.209, loss_ctc=83.607, loss_att=65.366, acc=0.622, loss=70.838, backward_time=0.231, grad_norm=43.176, clip=100.000, loss_scale=2.199e+12, optim_step_time=0.075, optim0_lr0=3.538e-04, train_time=1.810
+[ip-10-0-216-33:0/16] 2024-02-27 08:33:56,866 (trainer:762) INFO: 4epoch:train:5501-6000batch: iter_time=1.521e-04, forward_time=0.210, loss_ctc=85.301, loss_att=65.728, acc=0.623, loss=71.600, backward_time=0.238, grad_norm=44.177, clip=100.000, loss_scale=2.199e+12, optim_step_time=0.075, optim0_lr0=3.613e-04, train_time=0.898
+[ip-10-0-216-33:0/16] 2024-02-27 08:41:24,628 (trainer:762) INFO: 4epoch:train:6001-6500batch: iter_time=1.516e-04, forward_time=0.208, loss_ctc=81.639, loss_att=62.222, acc=0.604, loss=68.047, backward_time=0.234, grad_norm=46.103, clip=100.000, loss_scale=2.199e+12, optim_step_time=0.075, optim0_lr0=3.688e-04, train_time=0.895
+[ip-10-0-216-33:0/16] 2024-02-27 08:48:51,164 (trainer:762) INFO: 4epoch:train:6501-7000batch: iter_time=1.513e-04, forward_time=0.210, loss_ctc=86.938, loss_att=65.196, acc=0.624, loss=71.718, backward_time=0.230, grad_norm=42.860, clip=100.000, loss_scale=2.199e+12, optim_step_time=0.075, optim0_lr0=3.763e-04, train_time=0.893
+[ip-10-0-216-33:0/16] 2024-02-27 08:56:22,696 (trainer:762) INFO: 4epoch:train:7001-7500batch: iter_time=1.504e-04, forward_time=0.208, loss_ctc=79.158, loss_att=60.141, acc=0.621, loss=65.846, backward_time=0.235, grad_norm=41.406, clip=100.000, loss_scale=4.398e+12, optim_step_time=0.075, optim0_lr0=3.838e-04, train_time=0.903
+[ip-10-0-216-33:0/16] 2024-02-27 09:03:58,953 (trainer:762) INFO: 4epoch:train:7501-8000batch: iter_time=1.487e-04, forward_time=0.209, loss_ctc=87.307, loss_att=65.402, acc=0.623, loss=71.973, backward_time=0.235, grad_norm=46.315, clip=100.000, loss_scale=4.398e+12, optim_step_time=0.075, optim0_lr0=3.913e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-02-27 09:11:26,596 (trainer:762) INFO: 4epoch:train:8001-8500batch: iter_time=1.504e-04, forward_time=0.209, loss_ctc=84.155, loss_att=63.665, acc=0.626, loss=69.812, backward_time=0.226, grad_norm=43.179, clip=100.000, loss_scale=4.398e+12, optim_step_time=0.075, optim0_lr0=3.988e-04, train_time=0.895
+[ip-10-0-216-33:0/16] 2024-02-27 09:19:00,692 (trainer:762) INFO: 4epoch:train:8501-9000batch: iter_time=1.504e-04, forward_time=0.208, loss_ctc=80.802, loss_att=61.303, acc=0.636, loss=67.152, backward_time=0.225, grad_norm=42.446, clip=100.000, loss_scale=4.398e+12, optim_step_time=0.075, optim0_lr0=4.063e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-02-27 09:26:30,820 (trainer:762) INFO: 4epoch:train:9001-9500batch: iter_time=1.545e-04, forward_time=0.210, loss_ctc=83.775, loss_att=62.770, acc=0.638, loss=69.072, backward_time=0.233, grad_norm=42.727, clip=100.000, loss_scale=8.796e+12, optim_step_time=0.075, optim0_lr0=4.138e-04, train_time=0.900
+[ip-10-0-216-33:0/16] 2024-02-27 09:34:08,536 (trainer:762) INFO: 4epoch:train:9501-10000batch: iter_time=1.500e-04, forward_time=0.208, loss_ctc=85.144, loss_att=62.825, acc=0.641, loss=69.520, backward_time=0.224, grad_norm=43.320, clip=100.000, loss_scale=8.796e+12, optim_step_time=0.075, optim0_lr0=4.213e-04, train_time=0.915
+[ip-10-0-216-33:0/16] 2024-02-27 09:34:13,301 (multiple_iter_factory:32) INFO: Building 2th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-27 09:34:50,461 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-27 09:34:58,431 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-27 09:34:58,432 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1,
+[ip-10-0-216-33:0/16] 2024-02-27 09:34:58,437 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-27 09:49:42,174 (trainer:762) INFO: 4epoch:train:10001-10500batch: iter_time=0.870, forward_time=0.209, loss_ctc=79.965, loss_att=59.814, acc=0.647, loss=65.859, backward_time=0.231, grad_norm=40.840, clip=100.000, loss_scale=8.796e+12, optim_step_time=0.075, optim0_lr0=4.288e-04, train_time=1.867
+[ip-10-0-216-33:0/16] 2024-02-27 09:57:24,148 (trainer:762) INFO: 4epoch:train:10501-11000batch: iter_time=1.492e-04, forward_time=0.209, loss_ctc=81.199, loss_att=60.307, acc=0.647, loss=66.575, backward_time=0.236, grad_norm=41.587, clip=100.000, loss_scale=8.796e+12, optim_step_time=0.075, optim0_lr0=4.363e-04, train_time=0.924
+[ip-10-0-216-33:0/16] 2024-02-27 10:04:54,705 (trainer:762) INFO: 4epoch:train:11001-11500batch: iter_time=1.587e-04, forward_time=0.208, loss_ctc=78.102, loss_att=57.441, acc=0.627, loss=63.639, backward_time=0.234, grad_norm=41.880, clip=100.000, loss_scale=1.759e+13, optim_step_time=0.076, optim0_lr0=4.438e-04, train_time=0.901
+[ip-10-0-216-33:0/16] 2024-02-27 10:12:30,589 (trainer:762) INFO: 4epoch:train:11501-12000batch: iter_time=1.512e-04, forward_time=0.210, loss_ctc=83.247, loss_att=60.272, acc=0.645, loss=67.165, backward_time=0.231, grad_norm=41.062, clip=100.000, loss_scale=1.759e+13, optim_step_time=0.075, optim0_lr0=4.513e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-02-27 10:20:03,359 (trainer:762) INFO: 4epoch:train:12001-12500batch: iter_time=1.574e-04, forward_time=0.208, loss_ctc=75.921, loss_att=55.904, acc=0.641, loss=61.909, backward_time=0.232, grad_norm=39.086, clip=100.000, loss_scale=1.759e+13, optim_step_time=0.075, optim0_lr0=4.588e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-02-27 10:27:39,323 (trainer:762) INFO: 4epoch:train:12501-13000batch: iter_time=1.506e-04, forward_time=0.210, loss_ctc=83.684, loss_att=61.040, acc=0.641, loss=67.834, backward_time=0.242, grad_norm=43.179, clip=100.000, loss_scale=1.759e+13, optim_step_time=0.075, optim0_lr0=4.663e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-02-27 10:35:13,836 (trainer:762) INFO: 4epoch:train:13001-13500batch: iter_time=1.480e-04, forward_time=0.208, loss_ctc=81.368, loss_att=59.631, acc=0.644, loss=66.152, backward_time=0.235, grad_norm=42.340, clip=100.000, loss_scale=3.518e+13, optim_step_time=0.075, optim0_lr0=4.738e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-02-27 10:42:47,918 (trainer:762) INFO: 4epoch:train:13501-14000batch: iter_time=1.479e-04, forward_time=0.209, loss_ctc=77.680, loss_att=57.072, acc=0.655, loss=63.254, backward_time=0.235, grad_norm=42.128, clip=100.000, loss_scale=3.518e+13, optim_step_time=0.075, optim0_lr0=4.813e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-02-27 10:50:21,924 (trainer:762) INFO: 4epoch:train:14001-14500batch: iter_time=1.538e-04, forward_time=0.210, loss_ctc=80.874, loss_att=59.017, acc=0.655, loss=65.574, backward_time=0.238, grad_norm=39.740, clip=100.000, loss_scale=3.518e+13, optim_step_time=0.075, optim0_lr0=4.888e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-02-27 10:57:55,813 (trainer:762) INFO: 4epoch:train:14501-15000batch: iter_time=1.558e-04, forward_time=0.209, loss_ctc=82.264, loss_att=59.042, acc=0.658, loss=66.009, backward_time=0.234, grad_norm=41.081, clip=100.000, loss_scale=3.518e+13, optim_step_time=0.075, optim0_lr0=4.963e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-02-27 11:11:55,600 (trainer:361) INFO: 4epoch results: [train] iter_time=0.086, forward_time=0.209, loss_ctc=84.379, loss_att=64.780, acc=0.621, loss=70.660, backward_time=0.233, grad_norm=44.042, clip=100.000, loss_scale=9.328e+12, optim_step_time=0.076, optim0_lr0=3.875e-04, train_time=0.997, time=4 hours, 9 minutes and 29.05 seconds, total_count=60000, gpu_max_cached_mem_GB=36.488, [valid] loss_ctc=105.137, cer_ctc=0.547, loss_att=71.042, acc=0.482, cer=0.478, wer=1.000, loss=81.270, time=13 minutes and 42.29 seconds, total_count=8532, gpu_max_cached_mem_GB=36.488
+[ip-10-0-216-33:0/16] 2024-02-27 11:12:04,705 (trainer:416) INFO: The best model has been updated: valid.acc, valid.total_count
+[ip-10-0-216-33:0/16] 2024-02-27 11:12:04,710 (trainer:290) INFO: 5/45epoch started. Estimated time to finish: 1 week, 11 hours and 27 minutes
+[ip-10-0-216-33:0/16] 2024-02-27 11:12:04,718 (multiple_iter_factory:32) INFO: Building 0th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-27 11:12:40,711 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-27 11:12:49,091 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+  speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"}
+  text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"}
+  text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"}
+  text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"}
+  preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-27 11:12:49,092 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0,
+[ip-10-0-216-33:0/16] 2024-02-27 11:12:49,096 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-27 11:26:47,490 (trainer:762) INFO: 5epoch:train:1-500batch: iter_time=0.842, forward_time=0.209, loss_ctc=78.099, loss_att=57.223, acc=0.649, loss=63.486, backward_time=0.235, grad_norm=39.053, clip=100.000, loss_scale=7.037e+13, optim_step_time=0.076, optim0_lr0=4.990e-04, train_time=1.765
+[ip-10-0-216-33:0/16] 2024-02-27 11:34:24,900 (trainer:762) INFO: 5epoch:train:501-1000batch: iter_time=1.574e-04, forward_time=0.209, loss_ctc=81.470, loss_att=59.317, acc=0.653, loss=65.963, backward_time=0.237, grad_norm=40.972, clip=100.000, loss_scale=7.037e+13, optim_step_time=0.077, optim0_lr0=4.969e-04, train_time=0.915
+[ip-10-0-216-33:0/16] 2024-02-27 11:42:04,851 (trainer:762) INFO: 5epoch:train:1001-1500batch: iter_time=1.622e-04, forward_time=0.209, loss_ctc=75.352, loss_att=54.506, acc=0.665, loss=60.760, backward_time=0.236, grad_norm=36.750, clip=100.000, loss_scale=7.037e+13, optim_step_time=0.076, optim0_lr0=4.949e-04, train_time=0.920
+[ip-10-0-216-33:0/16] 2024-02-27 11:49:35,868 (trainer:762) INFO: 5epoch:train:1501-2000batch: iter_time=1.559e-04, forward_time=0.209, loss_ctc=76.724, loss_att=55.314, acc=0.664, loss=61.737, backward_time=0.234, grad_norm=37.156, clip=100.000, loss_scale=7.037e+13, optim_step_time=0.076, optim0_lr0=4.929e-04, train_time=0.902
+[ip-10-0-216-33:0/16] 2024-02-27 11:57:11,605 (trainer:762) INFO: 5epoch:train:2001-2500batch: iter_time=1.596e-04, forward_time=0.208, loss_ctc=76.989, loss_att=55.658, acc=0.659, loss=62.057, backward_time=0.232, grad_norm=38.117, clip=100.000, loss_scale=1.407e+14, optim_step_time=0.076, optim0_lr0=4.909e-04, train_time=0.911
+[ip-10-0-216-33:0/16] 2024-02-27 12:04:46,983 (trainer:762) INFO: 5epoch:train:2501-3000batch: iter_time=1.605e-04, forward_time=0.209, loss_ctc=78.420, loss_att=55.917, acc=0.657, loss=62.668, backward_time=0.234, grad_norm=39.851, clip=100.000, loss_scale=1.407e+14, optim_step_time=0.076, optim0_lr0=4.889e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-02-27 12:12:25,549 (trainer:762) INFO: 5epoch:train:3001-3500batch: iter_time=1.553e-04, forward_time=0.209, loss_ctc=78.645, loss_att=55.853, acc=0.674, loss=62.690, backward_time=0.231, grad_norm=37.075, clip=100.000, loss_scale=1.407e+14, optim_step_time=0.076, optim0_lr0=4.870e-04, train_time=0.917
+[ip-10-0-216-33:0/16] 2024-02-27 12:20:01,183 (trainer:762) INFO: 5epoch:train:3501-4000batch: iter_time=1.587e-04, forward_time=0.208, loss_ctc=76.079, loss_att=54.128, acc=0.661, loss=60.714, backward_time=0.235, grad_norm=36.804, clip=100.000, loss_scale=1.407e+14, optim_step_time=0.076, optim0_lr0=4.851e-04, train_time=0.911
+[ip-10-0-216-33:0/16] 2024-02-27 12:27:39,654 (trainer:762) INFO: 5epoch:train:4001-4500batch: iter_time=1.593e-04, forward_time=0.210, loss_ctc=77.331, loss_att=54.769, acc=0.669, loss=61.537, backward_time=0.235, grad_norm=37.711, clip=100.000, loss_scale=2.815e+14, optim_step_time=0.076, optim0_lr0=4.832e-04, train_time=0.917
+[ip-10-0-216-33:0/16] 2024-02-27 12:35:13,140 (trainer:762) INFO: 5epoch:train:4501-5000batch: iter_time=1.565e-04, forward_time=0.208, loss_ctc=78.513, loss_att=55.151, acc=0.659, loss=62.159, backward_time=0.236, grad_norm=37.515, clip=100.000, loss_scale=2.815e+14, optim_step_time=0.076, optim0_lr0=4.813e-04, train_time=0.907
+[ip-10-0-216-33:0/16] 2024-02-27 12:35:17,971 (multiple_iter_factory:32) INFO: Building 1th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-27 12:35:54,485 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-27 12:36:03,208 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+  speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"}
+  text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"}
+  text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"}
+  text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"}
+  preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-27 12:36:03,209 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1,
+[ip-10-0-216-33:0/16] 2024-02-27 12:36:03,213 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-27 12:50:19,737 (trainer:762) INFO: 5epoch:train:5001-5500batch: iter_time=0.862, forward_time=0.208, loss_ctc=71.945, loss_att=51.314, acc=0.677, loss=57.503, backward_time=0.231, grad_norm=34.374, clip=100.000, loss_scale=2.815e+14, optim_step_time=0.075, optim0_lr0=4.795e-04, train_time=1.813
+[ip-10-0-216-33:0/16] 2024-02-27 12:57:45,762 (trainer:762) INFO: 5epoch:train:5501-6000batch: iter_time=1.489e-04, forward_time=0.209, loss_ctc=74.887, loss_att=53.196, acc=0.681, loss=59.703, backward_time=0.234, grad_norm=35.571, clip=100.000, loss_scale=2.815e+14, optim_step_time=0.075, optim0_lr0=4.776e-04, train_time=0.892
+[ip-10-0-216-33:0/16] 2024-02-27 13:05:23,737 (trainer:762) INFO: 5epoch:train:6001-6500batch: iter_time=1.485e-04, forward_time=0.208, loss_ctc=69.972, loss_att=49.428, acc=0.690, loss=55.591, backward_time=0.236, grad_norm=33.388, clip=100.000, loss_scale=5.629e+14, optim_step_time=0.075, optim0_lr0=4.758e-04, train_time=0.916
+[ip-10-0-216-33:0/16] 2024-02-27 13:12:56,462 (trainer:762) INFO: 5epoch:train:6501-7000batch: iter_time=1.454e-04, forward_time=0.209, loss_ctc=71.738, loss_att=50.553, acc=0.689, loss=56.908, backward_time=0.236, grad_norm=33.246, clip=100.000, loss_scale=5.629e+14, optim_step_time=0.075, optim0_lr0=4.740e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-02-27 13:20:31,269 (trainer:762) INFO: 5epoch:train:7001-7500batch: iter_time=1.525e-04, forward_time=0.208, loss_ctc=71.918, loss_att=50.815, acc=0.685, loss=57.146, backward_time=0.237, grad_norm=34.711, clip=100.000, loss_scale=5.629e+14, optim_step_time=0.075, optim0_lr0=4.723e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-02-27 13:28:07,320 (trainer:762) INFO: 5epoch:train:7501-8000batch: iter_time=1.455e-04, forward_time=0.208, loss_ctc=73.038, loss_att=50.965, acc=0.682, loss=57.587, backward_time=0.237, grad_norm=33.418, clip=100.000, loss_scale=5.629e+14, optim_step_time=0.075, optim0_lr0=4.705e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-02-27 13:35:37,698 (trainer:762) INFO: 5epoch:train:8001-8500batch: iter_time=1.480e-04, forward_time=0.210, loss_ctc=73.700, loss_att=51.441, acc=0.696, loss=58.118, backward_time=0.237, grad_norm=33.600, clip=100.000, loss_scale=1.126e+15, optim_step_time=0.075, optim0_lr0=4.688e-04, train_time=0.901
+[ip-10-0-216-33:0/16] 2024-02-27 13:43:12,690 (trainer:762) INFO: 5epoch:train:8501-9000batch: iter_time=1.476e-04, forward_time=0.207, loss_ctc=70.588, loss_att=49.394, acc=0.685, loss=55.752, backward_time=0.237, grad_norm=33.395, clip=100.000, loss_scale=1.126e+15, optim_step_time=0.075, optim0_lr0=4.671e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-02-27 13:50:51,586 (trainer:762) INFO: 5epoch:train:9001-9500batch: iter_time=1.498e-04, forward_time=0.209, loss_ctc=72.179, loss_att=50.129, acc=0.691, loss=56.744, backward_time=0.237, grad_norm=34.855, clip=100.000, loss_scale=1.126e+15, optim_step_time=0.075, optim0_lr0=4.654e-04, train_time=0.918
+[ip-10-0-216-33:0/16] 2024-02-27 13:58:20,527 (trainer:762) INFO: 5epoch:train:9501-10000batch: iter_time=1.435e-04, forward_time=0.208, loss_ctc=73.841, loss_att=50.899, acc=0.681, loss=57.781, backward_time=0.232, grad_norm=34.180, clip=100.000, loss_scale=1.126e+15, optim_step_time=0.075, optim0_lr0=4.637e-04, train_time=0.898
+[ip-10-0-216-33:0/16] 2024-02-27 13:58:25,577 (multiple_iter_factory:32) INFO: Building 2th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-27 13:59:03,193 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-27 13:59:11,421 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+  speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"}
+  text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"}
+  text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"}
+  text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"}
+  preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-27 13:59:11,422 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2,
+[ip-10-0-216-33:0/16] 2024-02-27 13:59:11,426 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-27 14:15:57,202 (trainer:762) INFO: 5epoch:train:10001-10500batch: iter_time=1.173, forward_time=0.209, loss_ctc=67.739, loss_att=47.632, acc=0.697, loss=53.664, backward_time=0.231, grad_norm=35.429, clip=100.000, loss_scale=2.252e+15, optim_step_time=0.075, optim0_lr0=4.621e-04, train_time=2.113
+[ip-10-0-216-33:0/16] 2024-02-27 14:23:31,557 (trainer:762) INFO: 5epoch:train:10501-11000batch: iter_time=1.448e-04, forward_time=0.210, loss_ctc=70.352, loss_att=49.517, acc=0.700, loss=55.767, backward_time=0.236, grad_norm=32.618, clip=100.000, loss_scale=2.252e+15, optim_step_time=0.075, optim0_lr0=4.604e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-02-27 14:30:57,929 (trainer:762) INFO: 5epoch:train:11001-11500batch: iter_time=1.461e-04, forward_time=0.209, loss_ctc=66.046, loss_att=46.009, acc=0.709, loss=52.020, backward_time=0.235, grad_norm=30.961, clip=100.000, loss_scale=2.252e+15, optim_step_time=0.075, optim0_lr0=4.588e-04, train_time=0.892
+[ip-10-0-216-33:0/16] 2024-02-27 14:38:31,329 (trainer:762) INFO: 5epoch:train:11501-12000batch: iter_time=1.475e-04, forward_time=0.210, loss_ctc=67.486, loss_att=47.029, acc=0.707, loss=53.166, backward_time=0.233, grad_norm=31.119, clip=100.000, loss_scale=2.252e+15, optim_step_time=0.075, optim0_lr0=4.572e-04, train_time=0.907
+[ip-10-0-216-33:0/16] 2024-02-27 14:45:57,128 (trainer:762) INFO: 5epoch:train:12001-12500batch: iter_time=1.483e-04, forward_time=0.209, loss_ctc=68.263, loss_att=47.686, acc=0.702, loss=53.859, backward_time=0.231, grad_norm=33.056, clip=100.000, loss_scale=4.504e+15, optim_step_time=0.075, optim0_lr0=4.556e-04, train_time=0.891
+[ip-10-0-216-33:0/16] 2024-02-27 14:53:31,370 (trainer:762) INFO: 5epoch:train:12501-13000batch: iter_time=1.474e-04, forward_time=0.209, loss_ctc=69.519, loss_att=47.872, acc=0.698, loss=54.366, backward_time=0.232, grad_norm=31.249, clip=100.000, loss_scale=4.504e+15, optim_step_time=0.075, optim0_lr0=4.541e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-02-27 15:01:06,605 (trainer:762) INFO: 5epoch:train:13001-13500batch: iter_time=1.480e-04, forward_time=0.209, loss_ctc=70.029, loss_att=48.234, acc=0.712, loss=54.772, backward_time=0.237, grad_norm=31.055, clip=100.000, loss_scale=4.504e+15, optim_step_time=0.075, optim0_lr0=4.525e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-02-27 15:08:43,091 (trainer:762) INFO: 5epoch:train:13501-14000batch: iter_time=1.515e-04, forward_time=0.210, loss_ctc=67.284, loss_att=46.431, acc=0.701, loss=52.687, backward_time=0.240, grad_norm=31.950, clip=100.000, loss_scale=4.504e+15, optim_step_time=0.075, optim0_lr0=4.510e-04, train_time=0.913
+[ip-10-0-216-33:0/16] 2024-02-27 15:16:13,577 (trainer:762) INFO: 5epoch:train:14001-14500batch: iter_time=1.565e-04, forward_time=0.209, loss_ctc=68.770, loss_att=47.324, acc=0.707, loss=53.757, backward_time=0.237, grad_norm=33.846, clip=100.000, loss_scale=9.007e+15, optim_step_time=0.075, optim0_lr0=4.495e-04, train_time=0.901
+[ip-10-0-216-33:0/16] 2024-02-27 15:23:48,672 (trainer:762) INFO: 5epoch:train:14501-15000batch: iter_time=1.540e-04, forward_time=0.210, loss_ctc=70.077, loss_att=47.984, acc=0.697, loss=54.612, backward_time=0.239, grad_norm=33.259, clip=100.000, loss_scale=9.007e+15, optim_step_time=0.075, optim0_lr0=4.480e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-02-27 15:37:51,128 (trainer:361) INFO: 5epoch results: [train] iter_time=0.096, forward_time=0.209, loss_ctc=72.900, loss_att=51.390, acc=0.683, loss=57.843, backward_time=0.235, grad_norm=34.876, clip=100.000, loss_scale=1.792e+15, optim_step_time=0.076, optim0_lr0=4.721e-04, train_time=1.007, time=4 hours, 11 minutes and 59.87 seconds, total_count=75000, gpu_max_cached_mem_GB=36.488, [valid] loss_ctc=83.757, cer_ctc=0.462, loss_att=52.534, acc=0.573, cer=0.420, wer=1.000, loss=61.901, time=13 minutes and 46.25 seconds, total_count=10665, gpu_max_cached_mem_GB=36.488
+[ip-10-0-216-33:0/16] 2024-02-27 15:38:00,792 (trainer:416) INFO: The best model has been updated: valid.acc, valid.total_count
+[ip-10-0-216-33:0/16] 2024-02-27 15:38:00,799 (trainer:290) INFO: 6/45epoch started. Estimated time to finish: 1 week, 7 hours and 38 minutes
+[ip-10-0-216-33:0/16] 2024-02-27 15:38:00,807 (multiple_iter_factory:32) INFO: Building 0th iter-factory...
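The per-interval records are regular enough to chart directly. Below is a minimal, standard-library-only parsing sketch; `train.log` is a placeholder path for wherever this log is stored:

```python
import re

# Matches e.g. "5epoch:train:5001-5500batch: iter_time=0.862, forward_time=..."
PAT = re.compile(r"(\d+)epoch:train:(\d+)-(\d+)batch: (.*)")

def parse(line):
    m = PAT.search(line)
    if m is None:
        return None  # skip dataset/sampler/epoch-results lines
    epoch, start, end, rest = m.groups()
    fields = {}
    for kv in rest.split(", "):
        key, _, value = kv.partition("=")
        try:
            fields[key] = float(value)  # handles 1.574e-04, 100.000, 7.037e+13
        except ValueError:
            pass  # ignore any non-numeric fragment
    return int(epoch), int(end), fields

with open("train.log") as f:  # placeholder path
    points = [p for p in map(parse, f) if p is not None]

# e.g. the attention-loss curve over training:
loss_att_curve = [(epoch, batch, f["loss_att"]) for epoch, batch, f in points]
```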
+[ip-10-0-216-33:0/16] 2024-02-27 15:38:37,182 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-27 15:38:45,275 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+  speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"}
+  text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"}
+  text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"}
+  text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"}
+  preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-27 15:38:45,275 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2,
+[ip-10-0-216-33:0/16] 2024-02-27 15:38:45,279 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-27 15:52:45,687 (trainer:762) INFO: 6epoch:train:1-500batch: iter_time=0.845, forward_time=0.208, loss_ctc=67.586, loss_att=46.720, acc=0.705, loss=52.980, backward_time=0.231, grad_norm=33.341, clip=100.000, loss_scale=9.007e+15, optim_step_time=0.076, optim0_lr0=4.465e-04, train_time=1.769
+[ip-10-0-216-33:0/16] 2024-02-27 16:00:23,649 (trainer:762) INFO: 6epoch:train:501-1000batch: iter_time=1.580e-04, forward_time=0.209, loss_ctc=70.295, loss_att=48.599, acc=0.707, loss=55.108, backward_time=0.232, grad_norm=32.098, clip=100.000, loss_scale=9.007e+15, optim_step_time=0.076, optim0_lr0=4.450e-04, train_time=0.916
+[ip-10-0-216-33:0/16] 2024-02-27 16:08:01,595 (trainer:762) INFO: 6epoch:train:1001-1500batch: iter_time=1.583e-04, forward_time=0.209, loss_ctc=68.225, loss_att=46.862, acc=0.704, loss=53.271, backward_time=0.224, grad_norm=32.100, clip=100.000, loss_scale=1.801e+16, optim_step_time=0.076, optim0_lr0=4.435e-04, train_time=0.916
+[ip-10-0-216-33:0/16] 2024-02-27 16:15:37,945 (trainer:762) INFO: 6epoch:train:1501-2000batch: iter_time=1.614e-04, forward_time=0.208, loss_ctc=63.373, loss_att=43.921, acc=0.725, loss=49.757, backward_time=0.232, grad_norm=29.323, clip=100.000, loss_scale=1.801e+16, optim_step_time=0.076, optim0_lr0=4.421e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-02-27 16:23:14,089 (trainer:762) INFO: 6epoch:train:2001-2500batch: iter_time=1.565e-04, forward_time=0.209, loss_ctc=61.601, loss_att=43.094, acc=0.723, loss=48.647, backward_time=0.232, grad_norm=30.103, clip=100.000, loss_scale=1.801e+16, optim_step_time=0.076, optim0_lr0=4.406e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-02-27 16:30:50,219 (trainer:762) INFO: 6epoch:train:2501-3000batch: iter_time=1.576e-04, forward_time=0.209, loss_ctc=65.737, loss_att=45.271, acc=0.712, loss=51.411, backward_time=0.236, grad_norm=30.929, clip=100.000, loss_scale=1.801e+16, optim_step_time=0.076, optim0_lr0=4.392e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-02-27 16:38:29,122 (trainer:762) INFO: 6epoch:train:3001-3500batch: iter_time=1.471e-04, forward_time=0.208, loss_ctc=64.790, loss_att=44.976, acc=0.718, loss=50.920, backward_time=0.232, grad_norm=31.579, clip=100.000, loss_scale=3.603e+16, optim_step_time=0.076, optim0_lr0=4.378e-04, train_time=0.918
+[ip-10-0-216-33:0/16] 2024-02-27 16:46:01,558 (trainer:762) INFO: 6epoch:train:3501-4000batch: iter_time=1.464e-04, forward_time=0.209, loss_ctc=68.739, loss_att=46.938, acc=0.718, loss=53.478, backward_time=0.237, grad_norm=31.106, clip=100.000, loss_scale=3.603e+16, optim_step_time=0.075, optim0_lr0=4.364e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-02-27 16:53:34,499 (trainer:762) INFO: 6epoch:train:4001-4500batch: iter_time=1.471e-04, forward_time=0.208, loss_ctc=61.897, loss_att=42.818, acc=0.724, loss=48.542, backward_time=0.237, grad_norm=30.098, clip=100.000, loss_scale=3.603e+16, optim_step_time=0.075, optim0_lr0=4.351e-04, train_time=0.906
+[ip-10-0-216-33:0/16] 2024-02-27 17:01:20,979 (trainer:762) INFO: 6epoch:train:4501-5000batch: iter_time=0.003, forward_time=0.209, loss_ctc=64.274, loss_att=44.586, acc=0.725, loss=50.493, backward_time=0.235, grad_norm=32.178, clip=100.000, loss_scale=3.603e+16, optim_step_time=0.075, optim0_lr0=4.337e-04, train_time=0.933
+[ip-10-0-216-33:0/16] 2024-02-27 17:01:25,688 (multiple_iter_factory:32) INFO: Building 1th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-27 17:02:02,483 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-27 17:02:10,505 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+  speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"}
+  text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"}
+  text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"}
+  text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"}
+  preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-27 17:02:10,506 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1,
+[ip-10-0-216-33:0/16] 2024-02-27 17:02:10,510 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-27 17:17:00,706 (trainer:762) INFO: 6epoch:train:5001-5500batch: iter_time=0.888, forward_time=0.209, loss_ctc=65.050, loss_att=44.363, acc=0.718, loss=50.569, backward_time=0.239, grad_norm=32.010, clip=100.000, loss_scale=7.206e+16, optim_step_time=0.075, optim0_lr0=4.323e-04, train_time=1.879
+[ip-10-0-216-33:0/16] 2024-02-27 17:24:40,574 (trainer:762) INFO: 6epoch:train:5501-6000batch: iter_time=1.535e-04, forward_time=0.209, loss_ctc=67.182, loss_att=46.296, acc=0.719, loss=52.562, backward_time=0.233, grad_norm=32.510, clip=100.000, loss_scale=7.206e+16, optim_step_time=0.075, optim0_lr0=4.310e-04, train_time=0.919
+[ip-10-0-216-33:0/16] 2024-02-27 17:32:08,392 (trainer:762) INFO: 6epoch:train:6001-6500batch: iter_time=1.501e-04, forward_time=0.208, loss_ctc=64.818, loss_att=44.459, acc=0.717, loss=50.567, backward_time=0.237, grad_norm=31.276, clip=100.000, loss_scale=7.206e+16, optim_step_time=0.075, optim0_lr0=4.297e-04, train_time=0.895
+[ip-10-0-216-33:0/16] 2024-02-27 17:39:48,309 (trainer:762) INFO: 6epoch:train:6501-7000batch: iter_time=1.505e-04, forward_time=0.209, loss_ctc=61.070, loss_att=42.062, acc=0.735, loss=47.764, backward_time=0.237, grad_norm=35.136, clip=100.000, loss_scale=7.206e+16, optim_step_time=0.075, optim0_lr0=4.283e-04, train_time=0.920
+[ip-10-0-216-33:0/16] 2024-02-27 17:47:18,171 (trainer:762) INFO: 6epoch:train:7001-7500batch: iter_time=1.489e-04, forward_time=0.209, loss_ctc=59.201, loss_att=41.252, acc=0.734, loss=46.636, backward_time=0.232, grad_norm=28.837, clip=100.000, loss_scale=1.441e+17, optim_step_time=0.075, optim0_lr0=4.270e-04, train_time=0.899
+[ip-10-0-216-33:0/16] 2024-02-27 17:54:50,177 (trainer:762) INFO: 6epoch:train:7501-8000batch: iter_time=1.515e-04, forward_time=0.210, loss_ctc=63.383, loss_att=43.480, acc=0.722, loss=49.451, backward_time=0.240, grad_norm=30.185, clip=100.000, loss_scale=1.441e+17, optim_step_time=0.075, optim0_lr0=4.258e-04, train_time=0.904
+[ip-10-0-216-33:0/16] 2024-02-27 18:02:29,623 (trainer:762) INFO: 6epoch:train:8001-8500batch: iter_time=1.476e-04, forward_time=0.208, loss_ctc=62.483, loss_att=43.059, acc=0.728, loss=48.886, backward_time=0.232, grad_norm=30.214, clip=100.000, loss_scale=1.441e+17, optim_step_time=0.075, optim0_lr0=4.245e-04, train_time=0.919
+[ip-10-0-216-33:0/16] 2024-02-27 18:09:58,035 (trainer:762) INFO: 6epoch:train:8501-9000batch: iter_time=1.477e-04, forward_time=0.210, loss_ctc=66.005, loss_att=45.109, acc=0.728, loss=51.378, backward_time=0.235, grad_norm=32.121, clip=100.000, loss_scale=1.441e+17, optim_step_time=0.075, optim0_lr0=4.232e-04, train_time=0.897
+[ip-10-0-216-33:0/16] 2024-02-27 18:17:34,801 (trainer:762) INFO: 6epoch:train:9001-9500batch: iter_time=1.494e-04, forward_time=0.208, loss_ctc=59.733, loss_att=41.128, acc=0.733, loss=46.709, backward_time=0.237, grad_norm=31.719, clip=100.000, loss_scale=2.882e+17, optim_step_time=0.075, optim0_lr0=4.219e-04, train_time=0.913
+[ip-10-0-216-33:0/16] 2024-02-27 18:25:09,337 (trainer:762) INFO: 6epoch:train:9501-10000batch: iter_time=1.470e-04, forward_time=0.209, loss_ctc=62.050, loss_att=42.895, acc=0.735, loss=48.642, backward_time=0.241, grad_norm=30.371, clip=100.000, loss_scale=2.882e+17, optim_step_time=0.075, optim0_lr0=4.207e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-02-27 18:25:14,560 (multiple_iter_factory:32) INFO: Building 2th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-27 18:25:51,816 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-27 18:26:00,018 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+  speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"}
+  text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"}
+  text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"}
+  text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"}
+  preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-27 18:26:00,018 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0,
+[ip-10-0-216-33:0/16] 2024-02-27 18:26:00,023 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-27 18:40:36,691 (trainer:762) INFO: 6epoch:train:10001-10500batch: iter_time=0.906, forward_time=0.209, loss_ctc=62.941, loss_att=42.752, acc=0.727, loss=48.809, backward_time=0.233, grad_norm=31.245, clip=100.000, loss_scale=2.882e+17, optim_step_time=0.075, optim0_lr0=4.195e-04, train_time=1.854
+[ip-10-0-216-33:0/16] 2024-02-27 18:48:14,742 (trainer:762) INFO: 6epoch:train:10501-11000batch: iter_time=1.533e-04, forward_time=0.209, loss_ctc=64.964, loss_att=44.693, acc=0.729, loss=50.774, backward_time=0.235, grad_norm=31.216, clip=100.000, loss_scale=2.882e+17, optim_step_time=0.075, optim0_lr0=4.182e-04, train_time=0.916
+[ip-10-0-216-33:0/16] 2024-02-27 18:55:49,684 (trainer:762) INFO: 6epoch:train:11001-11500batch: iter_time=1.575e-04, forward_time=0.210, loss_ctc=62.915, loss_att=43.124, acc=0.726, loss=49.061, backward_time=0.234, grad_norm=30.338, clip=100.000, loss_scale=5.765e+17, optim_step_time=0.075, optim0_lr0=4.170e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-02-27 19:03:23,605 (trainer:762) INFO: 6epoch:train:11501-12000batch: iter_time=1.470e-04, forward_time=0.209, loss_ctc=58.921, loss_att=40.451, acc=0.745, loss=45.992, backward_time=0.237, grad_norm=28.504, clip=100.000, loss_scale=5.765e+17, optim_step_time=0.075, optim0_lr0=4.158e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-02-27 19:11:01,160 (trainer:762) INFO: 6epoch:train:12001-12500batch: iter_time=1.536e-04, forward_time=0.210, loss_ctc=57.176, loss_att=39.822, acc=0.742, loss=45.028, backward_time=0.238, grad_norm=29.198, clip=100.000, loss_scale=5.765e+17, optim_step_time=0.075, optim0_lr0=4.146e-04, train_time=0.915
+[ip-10-0-216-33:0/16] 2024-02-27 19:18:32,432 (trainer:762) INFO: 6epoch:train:12501-13000batch: iter_time=1.523e-04, forward_time=0.208, loss_ctc=61.335, loss_att=41.977, acc=0.731, loss=47.784, backward_time=0.241, grad_norm=30.590, clip=100.000, loss_scale=5.765e+17, optim_step_time=0.075, optim0_lr0=4.134e-04, train_time=0.902
+[ip-10-0-216-33:0/16] 2024-02-27 19:26:11,918 (trainer:762) INFO: 6epoch:train:13001-13500batch: iter_time=1.573e-04, forward_time=0.210, loss_ctc=60.353, loss_att=41.645, acc=0.738, loss=47.257, backward_time=0.230, grad_norm=29.761, clip=100.000, loss_scale=1.153e+18, optim_step_time=0.075, optim0_lr0=4.123e-04, train_time=0.919
+[ip-10-0-216-33:0/16] 2024-02-27 19:33:43,453 (trainer:762) INFO: 6epoch:train:13501-14000batch: iter_time=1.522e-04, forward_time=0.209, loss_ctc=64.273, loss_att=43.639, acc=0.736, loss=49.829, backward_time=0.235, grad_norm=30.641, clip=100.000, loss_scale=1.153e+18, optim_step_time=0.075, optim0_lr0=4.111e-04, train_time=0.903
+[ip-10-0-216-33:0/16] 2024-02-27 19:41:18,927 (trainer:762) INFO: 6epoch:train:14001-14500batch: iter_time=1.514e-04, forward_time=0.210, loss_ctc=57.865, loss_att=39.840, acc=0.740, loss=45.248, backward_time=0.236, grad_norm=28.762, clip=100.000, loss_scale=1.153e+18, optim_step_time=0.075, optim0_lr0=4.100e-04, train_time=0.911
+[ip-10-0-216-33:0/16] 2024-02-27 19:48:47,743 (trainer:762) INFO: 6epoch:train:14501-15000batch: iter_time=1.483e-04, forward_time=0.209, loss_ctc=59.996, loss_att=41.406, acc=0.743, loss=46.983, backward_time=0.240, grad_norm=29.913, clip=100.000, loss_scale=1.153e+18, optim_step_time=0.075, optim0_lr0=4.088e-04, train_time=0.897
+[ip-10-0-216-33:0/16] 2024-02-27 20:07:33,786 (trainer:361) INFO: 6epoch results: [train] iter_time=0.088, forward_time=0.209, loss_ctc=63.274, loss_att=43.575, acc=0.726, loss=49.484, backward_time=0.235, grad_norm=30.913, clip=100.000, loss_scale=3.056e+17, optim_step_time=0.075, optim0_lr0=4.268e-04, train_time=1.003, time=4 hours, 11 minutes and 2.93 seconds, total_count=90000, gpu_max_cached_mem_GB=36.488, [valid] loss_ctc=69.057, cer_ctc=0.374, loss_att=47.036, acc=0.613, cer=0.398, wer=1.000, loss=53.642, time=18 minutes and 29.77 seconds, total_count=12798, gpu_max_cached_mem_GB=36.488
+[ip-10-0-216-33:0/16] 2024-02-27 20:07:43,212 (trainer:416) INFO: The best model has been updated: valid.acc, valid.total_count
+[ip-10-0-216-33:0/16] 2024-02-27 20:07:43,231 (trainer:470) INFO: The model files were removed: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/1epoch.pth
+[ip-10-0-216-33:0/16] 2024-02-27 20:07:43,232 (trainer:290) INFO: 7/45epoch started. Estimated time to finish: 1 week, 4 hours and 3 minutes
+[ip-10-0-216-33:0/16] 2024-02-27 20:07:43,240 (multiple_iter_factory:32) INFO: Building 0th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-27 20:08:19,191 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-27 20:08:27,606 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+  speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"}
+  text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"}
+  text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"}
+  text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"}
+  preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-27 20:08:27,606 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0,
+[ip-10-0-216-33:0/16] 2024-02-27 20:08:27,611 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-27 20:22:23,156 (trainer:762) INFO: 7epoch:train:1-500batch: iter_time=0.839, forward_time=0.210, loss_ctc=62.995, loss_att=43.472, acc=0.738, loss=49.329, backward_time=0.232, grad_norm=39.714, clip=100.000, loss_scale=2.306e+18, optim_step_time=0.075, optim0_lr0=4.077e-04, train_time=1.760
+[ip-10-0-216-33:0/16] 2024-02-27 20:30:00,005 (trainer:762) INFO: 7epoch:train:501-1000batch: iter_time=1.529e-04, forward_time=0.208, loss_ctc=60.657, loss_att=41.588, acc=0.744, loss=47.309, backward_time=0.237, grad_norm=29.663, clip=100.000, loss_scale=2.306e+18, optim_step_time=0.075, optim0_lr0=4.066e-04, train_time=0.913
+[ip-10-0-216-33:0/16] 2024-02-27 20:37:39,562 (trainer:762) INFO: 7epoch:train:1001-1500batch: iter_time=1.538e-04, forward_time=0.210, loss_ctc=57.001, loss_att=39.554, acc=0.756, loss=44.788, backward_time=0.240, grad_norm=28.101, clip=100.000, loss_scale=2.306e+18, optim_step_time=0.075, optim0_lr0=4.054e-04, train_time=0.919
+[ip-10-0-216-33:0/16] 2024-02-27 20:45:13,804 (trainer:762) INFO: 7epoch:train:1501-2000batch: iter_time=1.519e-04, forward_time=0.209, loss_ctc=57.672, loss_att=40.320, acc=0.746, loss=45.526, backward_time=0.238, grad_norm=28.277, clip=100.000, loss_scale=2.306e+18, optim_step_time=0.075, optim0_lr0=4.043e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-02-27 20:52:50,037 (trainer:762) INFO: 7epoch:train:2001-2500batch: iter_time=1.504e-04, forward_time=0.208, loss_ctc=63.232, loss_att=43.016, acc=0.738, loss=49.081, backward_time=0.237, grad_norm=33.016, clip=100.000, loss_scale=4.612e+18, optim_step_time=0.075, optim0_lr0=4.032e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-02-27 21:00:26,890 (trainer:762) INFO: 7epoch:train:2501-3000batch: iter_time=1.514e-04, forward_time=0.209, loss_ctc=58.012, loss_att=39.809, acc=0.744, loss=45.270, backward_time=0.239, grad_norm=29.078, clip=100.000, loss_scale=4.612e+18, optim_step_time=0.075, optim0_lr0=4.021e-04, train_time=0.913
+[ip-10-0-216-33:0/16] 2024-02-27 21:08:07,561 (trainer:762) INFO: 7epoch:train:3001-3500batch: iter_time=0.004, forward_time=0.209, loss_ctc=60.592, loss_att=41.807, acc=0.737, loss=47.443, backward_time=0.240, grad_norm=30.753, clip=100.000, loss_scale=4.612e+18, optim_step_time=0.075, optim0_lr0=4.011e-04, train_time=0.921
+[ip-10-0-216-33:0/16] 2024-02-27 21:15:42,499 (trainer:762) INFO: 7epoch:train:3501-4000batch: iter_time=1.508e-04, forward_time=0.208, loss_ctc=62.557, loss_att=42.758, acc=0.740, loss=48.698, backward_time=0.237, grad_norm=31.295, clip=100.000, loss_scale=4.612e+18, optim_step_time=0.075, optim0_lr0=4.000e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-02-27 21:23:23,811 (trainer:762) INFO: 7epoch:train:4001-4500batch: iter_time=1.492e-04, forward_time=0.209, loss_ctc=59.029, loss_att=40.827, acc=0.753, loss=46.288, backward_time=0.236, grad_norm=29.177, clip=100.000, loss_scale=9.223e+18, optim_step_time=0.075, optim0_lr0=3.989e-04, train_time=0.922
+[ip-10-0-216-33:0/16] 2024-02-27 21:30:59,725 (trainer:762) INFO: 7epoch:train:4501-5000batch: iter_time=1.531e-04, forward_time=0.208, loss_ctc=59.757, loss_att=40.638, acc=0.744, loss=46.374, backward_time=0.227, grad_norm=29.281, clip=100.000, loss_scale=9.223e+18, optim_step_time=0.076, optim0_lr0=3.979e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-02-27 21:31:05,439 (multiple_iter_factory:32) INFO: Building 1th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-27 21:31:41,945 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-27 21:31:50,434 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+  speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"}
+  text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"}
+  text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"}
+  text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"}
+  preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-27 21:31:50,434 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1,
+[ip-10-0-216-33:0/16] 2024-02-27 21:31:50,439 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-27 21:46:06,579 (trainer:762) INFO: 7epoch:train:5001-5500batch: iter_time=0.882, forward_time=0.209, loss_ctc=60.957, loss_att=42.119, acc=0.746, loss=47.770, backward_time=0.235, grad_norm=29.848, clip=100.000, loss_scale=9.223e+18, optim_step_time=0.075, optim0_lr0=3.968e-04, train_time=1.813
+[ip-10-0-216-33:0/16] 2024-02-27 21:53:38,177 (trainer:762) INFO: 7epoch:train:5501-6000batch: iter_time=1.494e-04, forward_time=0.210, loss_ctc=58.646, loss_att=40.111, acc=0.751, loss=45.671, backward_time=0.231, grad_norm=32.839, clip=100.000, loss_scale=9.223e+18, optim_step_time=0.075, optim0_lr0=3.958e-04, train_time=0.903
+[ip-10-0-216-33:0/16] 2024-02-27 22:01:15,502 (trainer:762) INFO: 7epoch:train:6001-6500batch: iter_time=0.008, forward_time=0.210, loss_ctc=55.707, loss_att=38.482, acc=0.763, loss=43.649, backward_time=0.233, grad_norm=28.117, clip=100.000, loss_scale=1.845e+19, optim_step_time=0.075, optim0_lr0=3.948e-04, train_time=0.914
+[ip-10-0-216-33:0/16] 2024-02-27 22:08:46,811 (trainer:762) INFO: 7epoch:train:6501-7000batch: iter_time=1.456e-04, forward_time=0.208, loss_ctc=56.054, loss_att=39.295, acc=0.751, loss=44.323, backward_time=0.237, grad_norm=28.317, clip=100.000, loss_scale=1.845e+19, optim_step_time=0.075, optim0_lr0=3.937e-04, train_time=0.902
+[ip-10-0-216-33:0/16] 2024-02-27 22:16:21,198 (trainer:762) INFO: 7epoch:train:7001-7500batch: iter_time=1.473e-04, forward_time=0.209, loss_ctc=60.521, loss_att=41.714, acc=0.744, loss=47.356, backward_time=0.232, grad_norm=34.682, clip=100.000, loss_scale=1.845e+19, optim_step_time=0.075, optim0_lr0=3.927e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-02-27 22:23:56,377 (trainer:762) INFO: 7epoch:train:7501-8000batch: iter_time=1.477e-04, forward_time=0.208, loss_ctc=56.190, loss_att=38.820, acc=0.749, loss=44.031, backward_time=0.232, grad_norm=29.071, clip=100.000, loss_scale=1.845e+19, optim_step_time=0.075, optim0_lr0=3.917e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-02-27 22:31:36,304 (trainer:762) INFO: 7epoch:train:8001-8500batch: iter_time=1.470e-04, forward_time=0.209, loss_ctc=58.922, loss_att=40.494, acc=0.744, loss=46.023, backward_time=0.233, grad_norm=29.829, clip=100.000, loss_scale=3.689e+19, optim_step_time=0.075, optim0_lr0=3.907e-04, train_time=0.920
+[ip-10-0-216-33:0/16] 2024-02-27 22:39:08,003 (trainer:762) INFO: 7epoch:train:8501-9000batch: iter_time=1.475e-04, forward_time=0.209, loss_ctc=60.680, loss_att=41.491, acc=0.746, loss=47.247, backward_time=0.231, grad_norm=30.221, clip=100.000, loss_scale=3.689e+19, optim_step_time=0.075, optim0_lr0=3.897e-04, train_time=0.903
+[ip-10-0-216-33:0/16] 2024-02-27 22:46:42,292 (trainer:762) INFO: 7epoch:train:9001-9500batch: iter_time=1.435e-04, forward_time=0.210, loss_ctc=57.246, loss_att=39.603, acc=0.759, loss=44.896, backward_time=0.234, grad_norm=27.989, clip=100.000, loss_scale=3.689e+19, optim_step_time=0.075, optim0_lr0=3.888e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-02-27 22:54:15,034 (trainer:762) INFO: 7epoch:train:9501-10000batch: iter_time=1.434e-04, forward_time=0.207, loss_ctc=57.692, loss_att=39.336, acc=0.751, loss=44.842, backward_time=0.235, grad_norm=30.214, clip=100.000, loss_scale=3.689e+19, optim_step_time=0.075, optim0_lr0=3.878e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-02-27 22:54:21,388 (multiple_iter_factory:32) INFO: Building 2th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-27 22:54:58,747 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-27 22:55:07,402 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+  speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"}
+  text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"}
+  text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"}
+  text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"}
+  preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-27 22:55:07,402 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2,
+[ip-10-0-216-33:0/16] 2024-02-27 22:55:07,407 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-27 23:09:34,986 (trainer:762) INFO: 7epoch:train:10001-10500batch: iter_time=0.895, forward_time=0.209, loss_ctc=59.693, loss_att=41.072, acc=0.751, loss=46.658, backward_time=0.236, grad_norm=30.837, clip=100.000, loss_scale=7.379e+19, optim_step_time=0.075, optim0_lr0=3.868e-04, train_time=1.840
+[ip-10-0-216-33:0/16] 2024-02-27 23:17:09,418 (trainer:762) INFO: 7epoch:train:10501-11000batch: iter_time=1.502e-04, forward_time=0.210, loss_ctc=57.517, loss_att=39.300, acc=0.755, loss=44.765, backward_time=0.240, grad_norm=29.002, clip=100.000, loss_scale=7.379e+19, optim_step_time=0.075, optim0_lr0=3.859e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-02-27 23:24:40,185 (trainer:762) INFO: 7epoch:train:11001-11500batch: iter_time=1.531e-04, forward_time=0.209, loss_ctc=54.011, loss_att=37.379, acc=0.769, loss=42.368, backward_time=0.233, grad_norm=26.912, clip=100.000, loss_scale=7.379e+19, optim_step_time=0.075, optim0_lr0=3.849e-04, train_time=0.901
+[ip-10-0-216-33:0/16] 2024-02-27 23:32:13,712 (trainer:762) INFO: 7epoch:train:11501-12000batch: iter_time=1.489e-04, forward_time=0.210, loss_ctc=55.046, loss_att=38.435, acc=0.756, loss=43.418, backward_time=0.234, grad_norm=27.379, clip=100.000, loss_scale=7.379e+19, optim_step_time=0.075, optim0_lr0=3.840e-04, train_time=0.907
+[ip-10-0-216-33:0/16] 2024-02-27 23:39:50,930 (trainer:762) INFO: 7epoch:train:12001-12500batch: iter_time=1.485e-04, forward_time=0.208, loss_ctc=59.010, loss_att=40.716, acc=0.749, loss=46.204, backward_time=0.235, grad_norm=33.270, clip=100.000, loss_scale=1.476e+20, optim_step_time=0.075, optim0_lr0=3.830e-04, train_time=0.914
+[ip-10-0-216-33:0/16] 2024-02-27 23:47:21,732 (trainer:762) INFO: 7epoch:train:12501-13000batch: iter_time=1.488e-04, forward_time=0.209, loss_ctc=54.867, loss_att=37.925, acc=0.755, loss=43.008, backward_time=0.236, grad_norm=28.485, clip=100.000, loss_scale=1.476e+20, optim_step_time=0.075, optim0_lr0=3.821e-04, train_time=0.901
+[ip-10-0-216-33:0/16] 2024-02-27 23:55:02,691 (trainer:762) INFO: 7epoch:train:13001-13500batch: iter_time=1.454e-04, forward_time=0.208, loss_ctc=57.374, loss_att=39.616, acc=0.749, loss=44.944, backward_time=0.235, grad_norm=29.680, clip=100.000, loss_scale=1.476e+20, optim_step_time=0.075, optim0_lr0=3.812e-04, train_time=0.922
+[ip-10-0-216-33:0/16] 2024-02-28 00:02:39,713 (trainer:762) INFO: 7epoch:train:13501-14000batch: iter_time=1.496e-04, forward_time=0.209, loss_ctc=59.246, loss_att=40.659, acc=0.751, loss=46.235, backward_time=0.237, grad_norm=29.520, clip=100.000, loss_scale=1.476e+20, optim_step_time=0.075, optim0_lr0=3.802e-04, train_time=0.914
+[ip-10-0-216-33:0/16] 2024-02-28 00:10:20,529 (trainer:762) INFO: 7epoch:train:14001-14500batch: iter_time=1.485e-04, forward_time=0.207, loss_ctc=56.111, loss_att=38.776, acc=0.764, loss=43.976, backward_time=0.230, grad_norm=29.550, clip=100.000, loss_scale=2.951e+20, optim_step_time=0.076, optim0_lr0=3.793e-04, train_time=0.921
+[ip-10-0-216-33:0/16] 2024-02-28 00:17:57,666 (trainer:762) INFO: 7epoch:train:14501-15000batch: iter_time=1.455e-04, forward_time=0.209, loss_ctc=56.817, loss_att=38.683, acc=0.754, loss=44.123, backward_time=0.233, grad_norm=30.193, clip=100.000, loss_scale=2.951e+20, optim_step_time=0.075, optim0_lr0=3.784e-04, train_time=0.914
+[ip-10-0-216-33:0/16] 2024-02-28 00:32:33,141 (trainer:361) INFO: 7epoch results: [train] iter_time=0.088, forward_time=0.209, loss_ctc=58.460, loss_att=40.261, acc=0.750, loss=45.720, backward_time=0.235, grad_norm=30.144, clip=100.000, loss_scale=5.872e+19, optim_step_time=0.075, optim0_lr0=3.925e-04, train_time=1.001, time=4 hours, 10 minutes and 41.98 seconds, total_count=105000, gpu_max_cached_mem_GB=36.488, [valid] loss_ctc=64.304, cer_ctc=0.346, loss_att=43.848, acc=0.644, cer=0.313, wer=0.999, loss=49.985, time=14 minutes and 7.63 seconds, total_count=14931, gpu_max_cached_mem_GB=36.488
+[ip-10-0-216-33:0/16] 2024-02-28 00:32:42,437 (trainer:416) INFO: The best model has been updated: valid.acc, valid.total_count
+[ip-10-0-216-33:0/16] 2024-02-28 00:32:42,457 (trainer:470) INFO: The model files were removed: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/2epoch.pth
+[ip-10-0-216-33:0/16] 2024-02-28 00:32:42,458 (trainer:290) INFO: 8/45epoch started. Estimated time to finish: 6 days, 23 hours and 40 minutes
+[ip-10-0-216-33:0/16] 2024-02-28 00:32:42,465 (multiple_iter_factory:32) INFO: Building 0th iter-factory...
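One pattern worth noting in these records: `loss_scale` climbs in exact powers of two and holds each value for about 2000 steps before doubling. That behaviour is consistent with dynamic loss scaling in mixed-precision training (for example, PyTorch's `GradScaler` defaults of `growth_factor=2.0` and `growth_interval=2000`); the trainer configuration itself is not shown in this excerpt, so treat this as an inference from the numbers:

```python
# The epoch-4 value 1.759e+13 is 2**44; doubling it reproduces the sequence
# of loss_scale values seen above through epochs 4 and 5.
scale = 2.0 ** 44
for _ in range(4):
    scale *= 2.0
    print(f"{scale:.3e}")  # 3.518e+13, 7.037e+13, 1.407e+14, 2.815e+14
```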
+[ip-10-0-216-33:0/16] 2024-02-28 00:33:18,158 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-28 00:33:26,007 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+  speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"}
+  text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"}
+  text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"}
+  text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"}
+  preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-28 00:33:26,008 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1,
+[ip-10-0-216-33:0/16] 2024-02-28 00:33:26,012 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-28 00:47:26,538 (trainer:762) INFO: 8epoch:train:1-500batch: iter_time=0.843, forward_time=0.206, loss_ctc=61.629, loss_att=41.608, acc=0.742, loss=47.614, backward_time=0.240, grad_norm=32.378, clip=100.000, loss_scale=2.951e+20, optim_step_time=0.075, optim0_lr0=3.775e-04, train_time=1.768
+[ip-10-0-216-33:0/16] 2024-02-28 00:55:05,189 (trainer:762) INFO: 8epoch:train:501-1000batch: iter_time=1.423e-04, forward_time=0.208, loss_ctc=56.314, loss_att=38.416, acc=0.759, loss=43.785, backward_time=0.228, grad_norm=28.765, clip=100.000, loss_scale=2.951e+20, optim_step_time=0.075, optim0_lr0=3.766e-04, train_time=0.917
+[ip-10-0-216-33:0/16] 2024-02-28 01:02:43,846 (trainer:762) INFO: 8epoch:train:1001-1500batch: iter_time=1.476e-04, forward_time=0.209, loss_ctc=55.902, loss_att=38.559, acc=0.758, loss=43.762, backward_time=0.235, grad_norm=28.687, clip=100.000, loss_scale=5.903e+20, optim_step_time=0.075, optim0_lr0=3.757e-04, train_time=0.917
+[ip-10-0-216-33:0/16] 2024-02-28 01:10:16,933 (trainer:762) INFO: 8epoch:train:1501-2000batch: iter_time=1.430e-04, forward_time=0.208, loss_ctc=55.030, loss_att=38.249, acc=0.754, loss=43.284, backward_time=0.239, grad_norm=36.102, clip=100.000, loss_scale=5.903e+20, optim_step_time=0.075, optim0_lr0=3.749e-04, train_time=0.906
+[ip-10-0-216-33:0/16] 2024-02-28 01:17:49,778 (trainer:762) INFO: 8epoch:train:2001-2500batch: iter_time=1.499e-04, forward_time=0.210, loss_ctc=59.827, loss_att=40.777, acc=0.747, loss=46.492, backward_time=0.237, grad_norm=32.459, clip=100.000, loss_scale=5.903e+20, optim_step_time=0.075, optim0_lr0=3.740e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-02-28 01:25:23,263 (trainer:762) INFO: 8epoch:train:2501-3000batch: iter_time=1.461e-04, forward_time=0.209, loss_ctc=57.704, loss_att=39.050, acc=0.751, loss=44.646, backward_time=0.233, grad_norm=30.979, clip=100.000, loss_scale=5.903e+20, optim_step_time=0.075, optim0_lr0=3.731e-04, train_time=0.907
+[ip-10-0-216-33:0/16] 2024-02-28 01:32:56,662 (trainer:762) INFO: 8epoch:train:3001-3500batch: iter_time=1.463e-04, forward_time=0.209, loss_ctc=60.076, loss_att=40.578, acc=0.754, loss=46.427, backward_time=0.234, grad_norm=43.863, clip=100.000, loss_scale=1.181e+21, optim_step_time=0.075, optim0_lr0=3.722e-04, train_time=0.907
+[ip-10-0-216-33:0/16] 2024-02-28 01:40:36,153 (trainer:762) INFO: 8epoch:train:3501-4000batch: iter_time=1.487e-04, forward_time=0.208, loss_ctc=58.596, loss_att=40.013, acc=0.751, loss=45.588, backward_time=0.233, grad_norm=30.268, clip=100.000, loss_scale=1.181e+21, optim_step_time=0.075, optim0_lr0=3.714e-04, train_time=0.919
+[ip-10-0-216-33:0/16] 2024-02-28 01:48:17,342 (trainer:762) INFO: 8epoch:train:4001-4500batch: iter_time=1.459e-04, forward_time=0.208, loss_ctc=58.604, loss_att=39.909, acc=0.755, loss=45.518, backward_time=0.234, grad_norm=30.349, clip=100.000, loss_scale=1.181e+21, optim_step_time=0.075, optim0_lr0=3.705e-04, train_time=0.922
+[ip-10-0-216-33:0/16] 2024-02-28 01:55:55,745 (trainer:762) INFO: 8epoch:train:4501-5000batch: iter_time=1.486e-04, forward_time=0.207, loss_ctc=55.452, loss_att=37.753, acc=0.761, loss=43.063, backward_time=0.238, grad_norm=32.389, clip=100.000, loss_scale=1.181e+21, optim_step_time=0.075, optim0_lr0=3.697e-04, train_time=0.917
+[ip-10-0-216-33:0/16] 2024-02-28 01:56:00,211 (multiple_iter_factory:32) INFO: Building 1th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-28 01:56:37,246 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-28 01:56:45,227 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+  speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"}
+  text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"}
+  text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"}
+  text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"}
+  preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-28 01:56:45,227 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2,
+[ip-10-0-216-33:0/16] 2024-02-28 01:56:45,231 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-28 02:11:00,103 (trainer:762) INFO: 8epoch:train:5001-5500batch: iter_time=0.881, forward_time=0.208, loss_ctc=59.940, loss_att=40.458, acc=0.749, loss=46.302, backward_time=0.236, grad_norm=30.593, clip=100.000, loss_scale=2.361e+21, optim_step_time=0.075, optim0_lr0=3.689e-04, train_time=1.808
+[ip-10-0-216-33:0/16] 2024-02-28 02:18:34,855 (trainer:762) INFO: 8epoch:train:5501-6000batch: iter_time=1.424e-04, forward_time=0.208, loss_ctc=55.152, loss_att=37.439, acc=0.765, loss=42.753, backward_time=0.235, grad_norm=27.387, clip=100.000, loss_scale=2.361e+21, optim_step_time=0.075, optim0_lr0=3.680e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-02-28 02:26:09,459 (trainer:762) INFO: 8epoch:train:6001-6500batch: iter_time=1.420e-04, forward_time=0.208, loss_ctc=54.700, loss_att=37.689, acc=0.764, loss=42.792, backward_time=0.234, grad_norm=28.088, clip=100.000, loss_scale=2.361e+21, optim_step_time=0.075, optim0_lr0=3.672e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-02-28 02:33:38,113 (trainer:762) INFO: 8epoch:train:6501-7000batch: iter_time=1.418e-04, forward_time=0.210, loss_ctc=53.886, loss_att=37.269, acc=0.760, loss=42.254, backward_time=0.228, grad_norm=28.899, clip=100.000, loss_scale=2.361e+21, optim_step_time=0.075, optim0_lr0=3.664e-04, train_time=0.897
+[ip-10-0-216-33:0/16] 2024-02-28 02:41:15,721 (trainer:762) INFO: 8epoch:train:7001-7500batch: iter_time=1.442e-04, forward_time=0.206, loss_ctc=58.757, loss_att=39.703, acc=0.753, loss=45.419, backward_time=0.240, grad_norm=32.552, clip=100.000, loss_scale=4.722e+21, optim_step_time=0.075, optim0_lr0=3.656e-04, train_time=0.915
+[ip-10-0-216-33:0/16] 2024-02-28 02:48:55,806 (trainer:762) INFO: 8epoch:train:7501-8000batch: iter_time=1.495e-04, forward_time=0.207, loss_ctc=56.292, loss_att=38.018, acc=0.757, loss=43.500, backward_time=0.233, grad_norm=29.519, clip=100.000, loss_scale=4.722e+21, optim_step_time=0.075, optim0_lr0=3.647e-04, train_time=0.920
+[ip-10-0-216-33:0/16] 2024-02-28 02:56:37,392 (trainer:762) INFO: 8epoch:train:8001-8500batch: iter_time=1.455e-04, forward_time=0.209, loss_ctc=58.666, loss_att=39.527, acc=0.760, loss=45.269, backward_time=0.227, grad_norm=34.102, clip=100.000, loss_scale=4.722e+21, optim_step_time=0.075, optim0_lr0=3.639e-04, train_time=0.923
+[ip-10-0-216-33:0/16] 2024-02-28 03:04:15,471 (trainer:762) INFO: 8epoch:train:8501-9000batch: iter_time=0.002, forward_time=0.207, loss_ctc=57.105, loss_att=39.081, acc=0.756, loss=44.488, backward_time=0.235, grad_norm=30.053, clip=100.000, loss_scale=4.722e+21, optim_step_time=0.075, optim0_lr0=3.631e-04, train_time=0.916
+[ip-10-0-216-33:0/16] 2024-02-28 03:11:54,423 (trainer:762) INFO: 8epoch:train:9001-9500batch: iter_time=1.446e-04, forward_time=0.208, loss_ctc=57.147, loss_att=38.837, acc=0.760, loss=44.330, backward_time=0.235, grad_norm=30.883, clip=100.000, loss_scale=9.445e+21, optim_step_time=0.075, optim0_lr0=3.623e-04, train_time=0.918
+[ip-10-0-216-33:0/16] 2024-02-28 03:19:32,102 (trainer:762) INFO: 8epoch:train:9501-10000batch: iter_time=1.424e-04, forward_time=0.207, loss_ctc=54.132, loss_att=36.826, acc=0.766, loss=42.018, backward_time=0.233, grad_norm=28.885, clip=100.000, loss_scale=9.445e+21, optim_step_time=0.075, optim0_lr0=3.615e-04, train_time=0.915
+[ip-10-0-216-33:0/16] 2024-02-28 03:19:36,744 (multiple_iter_factory:32) INFO: Building 2th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-28 03:20:13,983 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-28 03:20:22,126 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+  speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"}
+  text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"}
+  text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"}
+  text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"}
+  preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-28 03:20:22,127 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0,
+[ip-10-0-216-33:0/16] 2024-02-28 03:20:22,131 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-28 03:34:42,426 (trainer:762) INFO: 8epoch:train:10001-10500batch: iter_time=0.886, forward_time=0.208, loss_ctc=58.790, loss_att=39.735, acc=0.754, loss=45.452, backward_time=0.227, grad_norm=31.441, clip=100.000, loss_scale=9.445e+21, optim_step_time=0.075, optim0_lr0=3.608e-04, train_time=1.820
+[ip-10-0-216-33:0/16] 2024-02-28 03:42:22,322 (trainer:762) INFO: 8epoch:train:10501-11000batch: iter_time=1.503e-04, forward_time=0.207, loss_ctc=53.757, loss_att=36.617, acc=0.769, loss=41.759, backward_time=0.233, grad_norm=29.520, clip=100.000, loss_scale=9.445e+21, optim_step_time=0.075, optim0_lr0=3.600e-04, train_time=0.920
+[ip-10-0-216-33:0/16] 2024-02-28 03:50:01,648 (trainer:762) INFO: 8epoch:train:11001-11500batch: iter_time=1.491e-04, forward_time=0.209, loss_ctc=53.725, loss_att=36.938, acc=0.769, loss=41.974, backward_time=0.237, grad_norm=35.040, clip=100.000, loss_scale=1.889e+22, optim_step_time=0.075, optim0_lr0=3.592e-04, train_time=0.918
+[ip-10-0-216-33:0/16] 2024-02-28 03:57:42,279 (trainer:762) INFO: 8epoch:train:11501-12000batch: iter_time=1.473e-04, forward_time=0.208, loss_ctc=52.884, loss_att=36.669, acc=0.764, loss=41.533, backward_time=0.237, grad_norm=30.513, clip=100.000, loss_scale=1.889e+22, optim_step_time=0.075, optim0_lr0=3.584e-04, train_time=0.921
+[ip-10-0-216-33:0/16] 2024-02-28 04:05:22,948 (trainer:762) INFO: 8epoch:train:12001-12500batch: iter_time=1.488e-04, forward_time=0.207, loss_ctc=57.456, loss_att=38.959, acc=0.757, loss=44.509, backward_time=0.240, grad_norm=30.584, clip=100.000, loss_scale=1.889e+22, optim_step_time=0.075, optim0_lr0=3.577e-04, train_time=0.921
+[ip-10-0-216-33:0/16] 2024-02-28 04:12:56,726 (trainer:762) INFO: 8epoch:train:12501-13000batch: iter_time=1.513e-04, forward_time=0.209, loss_ctc=55.207, loss_att=37.313, acc=0.761, loss=42.681, backward_time=0.235, grad_norm=29.365, clip=100.000, loss_scale=1.889e+22, optim_step_time=0.075, optim0_lr0=3.569e-04, train_time=0.907
+[ip-10-0-216-33:0/16] 2024-02-28 04:20:34,225 (trainer:762) INFO: 8epoch:train:13001-13500batch: iter_time=1.478e-04, forward_time=0.209, loss_ctc=57.401, loss_att=38.592, acc=0.764, loss=44.235, backward_time=0.231, grad_norm=29.859, clip=100.000, loss_scale=3.778e+22, optim_step_time=0.075, optim0_lr0=3.562e-04, train_time=0.915
+[ip-10-0-216-33:0/16] 2024-02-28 04:28:10,992 (trainer:762) INFO: 8epoch:train:13501-14000batch: iter_time=1.471e-04, forward_time=0.208, loss_ctc=56.060, loss_att=38.319, acc=0.761, loss=43.641, backward_time=0.236, grad_norm=30.422, clip=100.000, loss_scale=3.778e+22, optim_step_time=0.075, optim0_lr0=3.554e-04, train_time=0.913
+[ip-10-0-216-33:0/16] 2024-02-28 04:35:46,024 (trainer:762) INFO: 8epoch:train:14001-14500batch: iter_time=1.448e-04, forward_time=0.210, loss_ctc=55.922, loss_att=38.105, acc=0.765, loss=43.450, backward_time=0.243, grad_norm=30.300, clip=100.000, loss_scale=3.778e+22, optim_step_time=0.075, optim0_lr0=3.547e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-02-28 04:43:16,717 (trainer:762) INFO: 8epoch:train:14501-15000batch: iter_time=1.481e-04, forward_time=0.209, loss_ctc=53.201, loss_att=36.226, acc=0.770, loss=41.318, backward_time=0.236, grad_norm=28.763, clip=100.000, loss_scale=3.778e+22, optim_step_time=0.075, optim0_lr0=3.539e-04, train_time=0.901
+[ip-10-0-216-33:0/16] 2024-02-28 04:57:23,326 (trainer:361) INFO: 8epoch results: [train] iter_time=0.087, forward_time=0.208, loss_ctc=56.644, loss_att=38.574, acc=0.759, loss=43.995, backward_time=0.235, grad_norm=31.100, clip=100.000, loss_scale=1.002e+22, optim_step_time=0.075, optim0_lr0=3.653e-04, train_time=1.002, time=4 hours, 10 minutes and 49.45 seconds, total_count=120000, gpu_max_cached_mem_GB=36.488, [valid] loss_ctc=60.437, cer_ctc=0.333, loss_att=44.204, acc=0.648, cer=0.377, wer=1.000, loss=49.074, time=13 minutes and 51.01 seconds, total_count=17064, gpu_max_cached_mem_GB=36.488
+[ip-10-0-216-33:0/16] 2024-02-28 04:57:33,153 (trainer:416) INFO: The best model has been updated: valid.acc, valid.total_count
+[ip-10-0-216-33:0/16] 2024-02-28 04:57:33,168 (trainer:470) INFO: The model files were removed: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/3epoch.pth
+[ip-10-0-216-33:0/16] 2024-02-28 04:57:33,169 (trainer:290) INFO: 9/45epoch started. Estimated time to finish: 6 days, 19 hours and 16 minutes
+[ip-10-0-216-33:0/16] 2024-02-28 04:57:33,176 (multiple_iter_factory:32) INFO: Building 0th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-28 04:58:09,077 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-28 04:58:17,478 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+  speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"}
+  text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"}
+  text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"}
+  text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"}
+  preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-28 04:58:17,478 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0,
+[ip-10-0-216-33:0/16] 2024-02-28 04:58:17,483 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-28 05:12:11,693 (trainer:762) INFO: 9epoch:train:1-500batch: iter_time=0.836, forward_time=0.209, loss_ctc=51.998, loss_att=35.799, acc=0.774, loss=40.659, backward_time=0.229, grad_norm=28.062, clip=100.000, loss_scale=7.556e+22, optim_step_time=0.075, optim0_lr0=3.532e-04, train_time=1.757
+[ip-10-0-216-33:0/16] 2024-02-28 05:19:43,279 (trainer:762) INFO: 9epoch:train:501-1000batch: iter_time=1.466e-04, forward_time=0.209, loss_ctc=54.912, loss_att=37.388, acc=0.761, loss=42.645, backward_time=0.230, grad_norm=29.987, clip=100.000, loss_scale=7.556e+22, optim_step_time=0.075, optim0_lr0=3.525e-04, train_time=0.903
+[ip-10-0-216-33:0/16] 2024-02-28 05:27:19,301 (trainer:762) INFO: 9epoch:train:1001-1500batch: iter_time=1.473e-04, forward_time=0.208, loss_ctc=58.066, loss_att=39.094, acc=0.762, loss=44.786, backward_time=0.229, grad_norm=31.793, clip=100.000, loss_scale=7.556e+22, optim_step_time=0.075, optim0_lr0=3.517e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-02-28 05:34:52,623 (trainer:762) INFO: 9epoch:train:1501-2000batch: iter_time=1.456e-04, forward_time=0.210, loss_ctc=56.046, loss_att=38.131, acc=0.766, loss=43.505, backward_time=0.233, grad_norm=32.375, clip=100.000, loss_scale=7.556e+22, optim_step_time=0.075, optim0_lr0=3.510e-04, train_time=0.906
+[ip-10-0-216-33:0/16] 2024-02-28 05:42:26,349 (trainer:762) INFO: 9epoch:train:2001-2500batch: iter_time=1.436e-04, forward_time=0.208, loss_ctc=54.287, loss_att=36.973, acc=0.765, loss=42.167, backward_time=0.232, grad_norm=29.008, clip=100.000, loss_scale=1.511e+23, optim_step_time=0.075, optim0_lr0=3.503e-04, train_time=0.907
+[ip-10-0-216-33:0/16] 2024-02-28 05:50:04,575 (trainer:762) INFO: 9epoch:train:2501-3000batch: iter_time=1.461e-04, forward_time=0.208, loss_ctc=54.092, loss_att=37.111, acc=0.775, loss=42.205, backward_time=0.232, grad_norm=28.601, clip=100.000, loss_scale=1.511e+23, optim_step_time=0.076, optim0_lr0=3.496e-04, train_time=0.916
+[ip-10-0-216-33:0/16] 2024-02-28 05:57:41,255 (trainer:762) INFO: 9epoch:train:3001-3500batch: iter_time=1.449e-04, forward_time=0.208, loss_ctc=52.614, loss_att=35.717, acc=0.765, loss=40.786, backward_time=0.235, grad_norm=29.473, clip=100.000, loss_scale=1.511e+23, optim_step_time=0.075, optim0_lr0=3.489e-04, train_time=0.913
+[ip-10-0-216-33:0/16] 2024-02-28 06:05:22,814 (trainer:762) INFO: 9epoch:train:3501-4000batch: iter_time=1.482e-04, forward_time=0.205, loss_ctc=56.100, loss_att=38.178, acc=0.766, loss=43.554, backward_time=0.228, grad_norm=29.356, clip=100.000, loss_scale=1.511e+23, optim_step_time=0.076, optim0_lr0=3.482e-04, train_time=0.923
+[ip-10-0-216-33:0/16] 2024-02-28 06:12:56,206 (trainer:762) INFO: 9epoch:train:4001-4500batch: iter_time=1.489e-04, forward_time=0.209, loss_ctc=51.624, loss_att=35.487, acc=0.773, loss=40.328, backward_time=0.231, grad_norm=27.249, clip=100.000, loss_scale=3.022e+23, optim_step_time=0.076, optim0_lr0=3.475e-04, train_time=0.907
+[ip-10-0-216-33:0/16] 2024-02-28 06:20:31,307 (trainer:762) INFO: 9epoch:train:4501-5000batch: iter_time=1.456e-04, forward_time=0.208, loss_ctc=53.344, loss_att=36.526, acc=0.770, loss=41.571, backward_time=0.238, grad_norm=28.544, clip=100.000, loss_scale=3.022e+23, optim_step_time=0.076, optim0_lr0=3.468e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-02-28 06:20:36,416 (multiple_iter_factory:32) INFO: Building 1th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-28 06:21:13,513 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-28 06:21:22,108 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+  speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"}
+  text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"}
+  text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"}
+  text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"}
+  preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-28 06:21:22,108 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1,
+[ip-10-0-216-33:0/16] 2024-02-28 06:21:22,112 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-28 06:35:44,340 (trainer:762) INFO: 9epoch:train:5001-5500batch: iter_time=0.870, forward_time=0.208, loss_ctc=51.039, loss_att=35.002, acc=0.778, loss=39.813, backward_time=0.235, grad_norm=26.892, clip=100.000, loss_scale=3.022e+23, optim_step_time=0.075, optim0_lr0=3.461e-04, train_time=1.826
+[ip-10-0-216-33:0/16] 2024-02-28 06:43:17,073 (trainer:762) INFO: 9epoch:train:5501-6000batch: iter_time=1.431e-04, forward_time=0.210, loss_ctc=53.501, loss_att=36.548, acc=0.765, loss=41.634, backward_time=0.236, grad_norm=33.185, clip=100.000, loss_scale=3.022e+23, optim_step_time=0.075, optim0_lr0=3.454e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-02-28 06:50:48,814 (trainer:762) INFO: 9epoch:train:6001-6500batch: iter_time=1.433e-04, forward_time=0.210, loss_ctc=56.592, loss_att=37.999, acc=0.767, loss=43.577, backward_time=0.230, grad_norm=31.168, clip=100.000, loss_scale=6.045e+23, optim_step_time=0.075, optim0_lr0=3.447e-04, train_time=0.903
+[ip-10-0-216-33:0/16] 2024-02-28 06:58:23,923 (trainer:762) INFO: 9epoch:train:6501-7000batch: iter_time=1.412e-04, forward_time=0.209, loss_ctc=54.595, loss_att=37.241, acc=0.771, loss=42.447, backward_time=0.230, grad_norm=39.097, clip=100.000, loss_scale=6.045e+23, optim_step_time=0.075, optim0_lr0=3.440e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-02-28 07:06:04,526 (trainer:762) INFO: 9epoch:train:7001-7500batch: iter_time=1.430e-04, forward_time=0.208, loss_ctc=53.319, loss_att=36.310, acc=0.769, loss=41.413, backward_time=0.232, grad_norm=28.730, clip=100.000, loss_scale=6.045e+23, optim_step_time=0.075, optim0_lr0=3.433e-04, train_time=0.921
+[ip-10-0-216-33:0/16] 2024-02-28 07:13:38,086 (trainer:762) INFO: 9epoch:train:7501-8000batch: iter_time=1.449e-04, forward_time=0.208, loss_ctc=52.983, loss_att=36.254, acc=0.779, loss=41.273, backward_time=0.240, grad_norm=28.424, clip=100.000, loss_scale=6.045e+23, optim_step_time=0.075, optim0_lr0=3.427e-04, train_time=0.907
+[ip-10-0-216-33:0/16] 2024-02-28 07:21:16,553 (trainer:762) INFO: 9epoch:train:8001-8500batch: iter_time=1.435e-04, forward_time=0.208, loss_ctc=51.710, loss_att=35.149, acc=0.769, loss=40.117, backward_time=0.237, grad_norm=30.131, clip=100.000, loss_scale=1.209e+24, optim_step_time=0.075, optim0_lr0=3.420e-04, train_time=0.917
+[ip-10-0-216-33:0/16] 2024-02-28 07:28:53,125 (trainer:762) INFO: 9epoch:train:8501-9000batch: iter_time=1.458e-04, forward_time=0.208, loss_ctc=54.977, loss_att=37.431, acc=0.770, loss=42.695, backward_time=0.237, grad_norm=29.380, clip=100.000, loss_scale=1.209e+24, optim_step_time=0.075, optim0_lr0=3.413e-04, train_time=0.913
+[ip-10-0-216-33:0/16] 2024-02-28 07:36:25,302 (trainer:762) INFO: 9epoch:train:9001-9500batch: iter_time=1.419e-04, forward_time=0.210, loss_ctc=50.752, loss_att=34.864, acc=0.776, loss=39.630, backward_time=0.231, grad_norm=31.511, clip=100.000, loss_scale=1.209e+24, optim_step_time=0.076, optim0_lr0=3.407e-04, train_time=0.904
+[ip-10-0-216-33:0/16] 2024-02-28 07:43:56,842 (trainer:762) INFO: 9epoch:train:9501-10000batch: iter_time=1.417e-04, forward_time=0.208, loss_ctc=52.238, loss_att=35.854, acc=0.774, loss=40.769, backward_time=0.229, grad_norm=28.566, clip=100.000, loss_scale=1.209e+24, optim_step_time=0.075, optim0_lr0=3.400e-04, train_time=0.903
+[ip-10-0-216-33:0/16] 2024-02-28 07:44:01,912 (multiple_iter_factory:32) INFO: Building 2th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-28 07:44:38,881 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-02-28 07:44:47,488 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-02-28 07:44:47,489 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2, +[ip-10-0-216-33:0/16] 2024-02-28 07:44:47,493 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-02-28 07:59:14,385 (trainer:762) INFO: 9epoch:train:10001-10500batch: iter_time=0.867, forward_time=0.209, loss_ctc=50.401, loss_att=34.563, acc=0.781, loss=39.314, backward_time=0.234, grad_norm=26.697, clip=100.000, loss_scale=2.418e+24, optim_step_time=0.075, optim0_lr0=3.394e-04, train_time=1.835 +[ip-10-0-216-33:0/16] 2024-02-28 08:06:52,252 (trainer:762) INFO: 9epoch:train:10501-11000batch: iter_time=1.501e-04, forward_time=0.208, loss_ctc=52.666, loss_att=35.954, acc=0.768, loss=40.967, backward_time=0.234, grad_norm=28.222, clip=100.000, loss_scale=2.418e+24, optim_step_time=0.075, optim0_lr0=3.387e-04, train_time=0.915 +[ip-10-0-216-33:0/16] 2024-02-28 08:14:31,059 (trainer:762) INFO: 9epoch:train:11001-11500batch: iter_time=1.534e-04, forward_time=0.207, loss_ctc=55.787, loss_att=37.522, acc=0.770, loss=43.001, backward_time=0.231, grad_norm=30.183, clip=100.000, loss_scale=2.418e+24, optim_step_time=0.075, optim0_lr0=3.381e-04, train_time=0.917 +[ip-10-0-216-33:0/16] 2024-02-28 08:22:08,307 (trainer:762) INFO: 9epoch:train:11501-12000batch: iter_time=1.469e-04, forward_time=0.209, loss_ctc=53.691, loss_att=36.616, acc=0.775, loss=41.739, backward_time=0.234, grad_norm=29.588, clip=100.000, loss_scale=2.418e+24, optim_step_time=0.075, optim0_lr0=3.374e-04, train_time=0.914 +[ip-10-0-216-33:0/16] 2024-02-28 08:29:48,077 (trainer:762) INFO: 9epoch:train:12001-12500batch: iter_time=1.530e-04, forward_time=0.208, loss_ctc=52.705, loss_att=35.783, acc=0.772, loss=40.860, backward_time=0.236, grad_norm=29.020, clip=100.000, loss_scale=4.836e+24, optim_step_time=0.075, optim0_lr0=3.368e-04, train_time=0.919 +[ip-10-0-216-33:0/16] 2024-02-28 08:37:21,464 (trainer:762) INFO: 9epoch:train:12501-13000batch: iter_time=1.536e-04, forward_time=0.210, loss_ctc=52.290, loss_att=35.832, acc=0.782, loss=40.769, backward_time=0.235, grad_norm=27.796, clip=100.000, loss_scale=4.836e+24, optim_step_time=0.075, optim0_lr0=3.361e-04, train_time=0.907 +[ip-10-0-216-33:0/16] 2024-02-28 08:45:01,274 (trainer:762) INFO: 9epoch:train:13001-13500batch: iter_time=1.524e-04, forward_time=0.207, loss_ctc=50.608, loss_att=34.517, acc=0.773, loss=39.345, backward_time=0.231, grad_norm=28.941, clip=100.000, loss_scale=4.836e+24, optim_step_time=0.075, optim0_lr0=3.355e-04, train_time=0.919 +[ip-10-0-216-33:0/16] 2024-02-28 08:52:39,081 (trainer:762) INFO: 9epoch:train:13501-14000batch: iter_time=1.495e-04, forward_time=0.209, loss_ctc=53.940, loss_att=36.718, acc=0.774, loss=41.885, backward_time=0.235, 
grad_norm=30.108, clip=100.000, loss_scale=4.836e+24, optim_step_time=0.075, optim0_lr0=3.349e-04, train_time=0.915 +[ip-10-0-216-33:0/16] 2024-02-28 09:00:13,592 (trainer:762) INFO: 9epoch:train:14001-14500batch: iter_time=1.535e-04, forward_time=0.209, loss_ctc=49.844, loss_att=34.289, acc=0.779, loss=38.955, backward_time=0.232, grad_norm=27.753, clip=100.000, loss_scale=9.671e+24, optim_step_time=0.075, optim0_lr0=3.343e-04, train_time=0.909 +[ip-10-0-216-33:0/16] 2024-02-28 09:07:50,776 (trainer:762) INFO: 9epoch:train:14501-15000batch: iter_time=1.465e-04, forward_time=0.209, loss_ctc=51.679, loss_att=35.474, acc=0.776, loss=40.336, backward_time=0.237, grad_norm=28.156, clip=100.000, loss_scale=9.671e+24, optim_step_time=0.075, optim0_lr0=3.336e-04, train_time=0.914 +[ip-10-0-216-33:0/16] 2024-02-28 09:22:08,669 (trainer:361) INFO: 9epoch results: [train] iter_time=0.086, forward_time=0.208, loss_ctc=53.280, loss_att=36.344, acc=0.771, loss=41.425, backward_time=0.233, grad_norm=29.600, clip=100.000, loss_scale=1.924e+24, optim_step_time=0.075, optim0_lr0=3.431e-04, train_time=1.001, time=4 hours, 10 minutes and 33.55 seconds, total_count=135000, gpu_max_cached_mem_GB=36.488, [valid] loss_ctc=54.791, cer_ctc=0.304, loss_att=39.448, acc=0.673, cer=0.318, wer=0.999, loss=44.051, time=14 minutes and 1.72 seconds, total_count=19197, gpu_max_cached_mem_GB=36.488 +[ip-10-0-216-33:0/16] 2024-02-28 09:22:17,984 (trainer:416) INFO: The best model has been updated: valid.acc, valid.total_count +[ip-10-0-216-33:0/16] 2024-02-28 09:22:18,017 (trainer:470) INFO: The model files were removed: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/4epoch.pth +[ip-10-0-216-33:0/16] 2024-02-28 09:22:18,018 (trainer:290) INFO: 10/45epoch started. Estimated time to finish: 6 days, 14 hours and 51 minutes +[ip-10-0-216-33:0/16] 2024-02-28 09:22:18,025 (multiple_iter_factory:32) INFO: Building 0th iter-factory... 
+[ip-10-0-216-33:0/16] 2024-02-28 09:22:53,663 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-02-28 09:23:02,102 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-02-28 09:23:02,102 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2, +[ip-10-0-216-33:0/16] 2024-02-28 09:23:02,107 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-02-28 09:36:56,356 (trainer:762) INFO: 10epoch:train:1-500batch: iter_time=0.843, forward_time=0.209, loss_ctc=49.222, loss_att=33.626, acc=0.777, loss=38.305, backward_time=0.235, grad_norm=27.948, clip=100.000, loss_scale=9.671e+24, optim_step_time=0.075, optim0_lr0=3.330e-04, train_time=1.756 +[ip-10-0-216-33:0/16] 2024-02-28 09:44:30,626 (trainer:762) INFO: 10epoch:train:501-1000batch: iter_time=1.468e-04, forward_time=0.208, loss_ctc=51.510, loss_att=35.495, acc=0.771, loss=40.299, backward_time=0.235, grad_norm=29.339, clip=100.000, loss_scale=9.671e+24, optim_step_time=0.075, optim0_lr0=3.324e-04, train_time=0.908 +[ip-10-0-216-33:0/16] 2024-02-28 09:52:02,008 (trainer:762) INFO: 10epoch:train:1001-1500batch: iter_time=1.485e-04, forward_time=0.210, loss_ctc=49.845, loss_att=34.186, acc=0.780, loss=38.884, backward_time=0.229, grad_norm=27.975, clip=100.000, loss_scale=1.934e+25, optim_step_time=0.075, optim0_lr0=3.318e-04, train_time=0.903 +[ip-10-0-216-33:0/16] 2024-02-28 09:59:38,948 (trainer:762) INFO: 10epoch:train:1501-2000batch: iter_time=1.468e-04, forward_time=0.208, loss_ctc=51.972, loss_att=35.383, acc=0.772, loss=40.360, backward_time=0.233, grad_norm=29.522, clip=100.000, loss_scale=1.934e+25, optim_step_time=0.075, optim0_lr0=3.312e-04, train_time=0.914 +[ip-10-0-216-33:0/16] 2024-02-28 10:07:13,635 (trainer:762) INFO: 10epoch:train:2001-2500batch: iter_time=1.467e-04, forward_time=0.208, loss_ctc=51.252, loss_att=35.130, acc=0.773, loss=39.967, backward_time=0.236, grad_norm=29.638, clip=100.000, loss_scale=1.934e+25, optim_step_time=0.075, optim0_lr0=3.306e-04, train_time=0.909 +[ip-10-0-216-33:0/16] 2024-02-28 10:14:51,514 (trainer:762) INFO: 10epoch:train:2501-3000batch: iter_time=1.469e-04, forward_time=0.209, loss_ctc=54.867, loss_att=37.606, acc=0.770, loss=42.784, backward_time=0.239, grad_norm=30.668, clip=100.000, loss_scale=1.934e+25, optim_step_time=0.075, optim0_lr0=3.300e-04, train_time=0.916 +[ip-10-0-216-33:0/16] 2024-02-28 10:22:32,895 (trainer:762) INFO: 10epoch:train:3001-3500batch: iter_time=1.477e-04, forward_time=0.208, loss_ctc=54.902, loss_att=37.192, acc=0.770, loss=42.505, backward_time=0.235, grad_norm=29.310, clip=100.000, loss_scale=3.869e+25, optim_step_time=0.075, optim0_lr0=3.294e-04, train_time=0.923 +[ip-10-0-216-33:0/16] 2024-02-28 10:30:08,323 (trainer:762) INFO: 10epoch:train:3501-4000batch: iter_time=1.489e-04, forward_time=0.207, loss_ctc=48.229, loss_att=33.432, acc=0.779, loss=37.871, backward_time=0.238, grad_norm=27.236, 
clip=100.000, loss_scale=3.869e+25, optim_step_time=0.075, optim0_lr0=3.288e-04, train_time=0.911 +[ip-10-0-216-33:0/16] 2024-02-28 10:37:49,310 (trainer:762) INFO: 10epoch:train:4001-4500batch: iter_time=1.482e-04, forward_time=0.207, loss_ctc=51.069, loss_att=34.591, acc=0.771, loss=39.534, backward_time=0.242, grad_norm=29.088, clip=100.000, loss_scale=3.869e+25, optim_step_time=0.075, optim0_lr0=3.282e-04, train_time=0.922 +[ip-10-0-216-33:0/16] 2024-02-28 10:45:26,709 (trainer:762) INFO: 10epoch:train:4501-5000batch: iter_time=1.479e-04, forward_time=0.207, loss_ctc=52.465, loss_att=35.664, acc=0.769, loss=40.705, backward_time=0.241, grad_norm=28.895, clip=100.000, loss_scale=3.869e+25, optim_step_time=0.075, optim0_lr0=3.276e-04, train_time=0.915 +[ip-10-0-216-33:0/16] 2024-02-28 10:45:31,310 (multiple_iter_factory:32) INFO: Building 1th iter-factory... +[ip-10-0-216-33:0/16] 2024-02-28 10:46:08,100 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-02-28 10:46:16,784 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-02-28 10:46:16,784 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0, +[ip-10-0-216-33:0/16] 2024-02-28 10:46:16,788 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-02-28 11:00:38,084 (trainer:762) INFO: 10epoch:train:5001-5500batch: iter_time=0.888, forward_time=0.207, loss_ctc=48.445, loss_att=33.302, acc=0.781, loss=37.845, backward_time=0.234, grad_norm=28.652, clip=100.000, loss_scale=7.737e+25, optim_step_time=0.075, optim0_lr0=3.270e-04, train_time=1.823 +[ip-10-0-216-33:0/16] 2024-02-28 11:08:14,551 (trainer:762) INFO: 10epoch:train:5501-6000batch: iter_time=1.442e-04, forward_time=0.209, loss_ctc=50.861, loss_att=35.048, acc=0.774, loss=39.792, backward_time=0.234, grad_norm=32.199, clip=100.000, loss_scale=7.737e+25, optim_step_time=0.075, optim0_lr0=3.265e-04, train_time=0.913 +[ip-10-0-216-33:0/16] 2024-02-28 11:15:50,123 (trainer:762) INFO: 10epoch:train:6001-6500batch: iter_time=1.472e-04, forward_time=0.209, loss_ctc=48.968, loss_att=33.647, acc=0.784, loss=38.243, backward_time=0.238, grad_norm=27.746, clip=100.000, loss_scale=7.737e+25, optim_step_time=0.075, optim0_lr0=3.259e-04, train_time=0.911 +[ip-10-0-216-33:0/16] 2024-02-28 11:23:22,067 (trainer:762) INFO: 10epoch:train:6501-7000batch: iter_time=1.424e-04, forward_time=0.209, loss_ctc=50.981, loss_att=34.670, acc=0.776, loss=39.563, backward_time=0.234, grad_norm=28.774, clip=100.000, loss_scale=7.737e+25, optim_step_time=0.075, optim0_lr0=3.253e-04, train_time=0.904 +[ip-10-0-216-33:0/16] 2024-02-28 11:31:00,973 (trainer:762) INFO: 10epoch:train:7001-7500batch: iter_time=1.428e-04, forward_time=0.207, loss_ctc=50.277, loss_att=34.486, acc=0.777, loss=39.223, backward_time=0.230, grad_norm=29.192, clip=100.000, loss_scale=1.547e+26, optim_step_time=0.075, optim0_lr0=3.247e-04, train_time=0.918 +[ip-10-0-216-33:0/16] 
2024-02-28 11:38:42,792 (trainer:762) INFO: 10epoch:train:7501-8000batch: iter_time=1.454e-04, forward_time=0.208, loss_ctc=53.761, loss_att=36.904, acc=0.773, loss=41.961, backward_time=0.233, grad_norm=37.100, clip=100.000, loss_scale=1.547e+26, optim_step_time=0.075, optim0_lr0=3.242e-04, train_time=0.923 +[ip-10-0-216-33:0/16] 2024-02-28 11:46:26,225 (trainer:762) INFO: 10epoch:train:8001-8500batch: iter_time=1.487e-04, forward_time=0.208, loss_ctc=53.676, loss_att=36.525, acc=0.774, loss=41.670, backward_time=0.237, grad_norm=29.093, clip=100.000, loss_scale=1.547e+26, optim_step_time=0.075, optim0_lr0=3.236e-04, train_time=0.927 +[ip-10-0-216-33:0/16] 2024-02-28 11:54:00,672 (trainer:762) INFO: 10epoch:train:8501-9000batch: iter_time=1.494e-04, forward_time=0.207, loss_ctc=47.630, loss_att=32.958, acc=0.782, loss=37.360, backward_time=0.229, grad_norm=28.446, clip=100.000, loss_scale=1.547e+26, optim_step_time=0.075, optim0_lr0=3.230e-04, train_time=0.909 +[ip-10-0-216-33:0/16] 2024-02-28 12:01:39,022 (trainer:762) INFO: 10epoch:train:9001-9500batch: iter_time=1.443e-04, forward_time=0.208, loss_ctc=50.325, loss_att=34.043, acc=0.774, loss=38.928, backward_time=0.240, grad_norm=29.568, clip=100.000, loss_scale=3.095e+26, optim_step_time=0.075, optim0_lr0=3.225e-04, train_time=0.916 +[ip-10-0-216-33:0/16] 2024-02-28 12:09:13,083 (trainer:762) INFO: 10epoch:train:9501-10000batch: iter_time=1.469e-04, forward_time=0.207, loss_ctc=51.776, loss_att=35.214, acc=0.772, loss=40.183, backward_time=0.238, grad_norm=29.709, clip=100.000, loss_scale=3.095e+26, optim_step_time=0.075, optim0_lr0=3.219e-04, train_time=0.908 +[ip-10-0-216-33:0/16] 2024-02-28 12:09:17,856 (multiple_iter_factory:32) INFO: Building 2th iter-factory... +[ip-10-0-216-33:0/16] 2024-02-28 12:09:54,848 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-02-28 12:10:03,315 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-02-28 12:10:03,316 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1, +[ip-10-0-216-33:0/16] 2024-02-28 12:10:03,320 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-02-28 12:24:24,833 (trainer:762) INFO: 10epoch:train:10001-10500batch: iter_time=0.883, forward_time=0.207, loss_ctc=47.727, loss_att=32.901, acc=0.784, loss=37.349, backward_time=0.230, grad_norm=27.715, clip=100.000, loss_scale=3.095e+26, optim_step_time=0.075, optim0_lr0=3.214e-04, train_time=1.823 +[ip-10-0-216-33:0/16] 2024-02-28 12:31:58,077 (trainer:762) INFO: 10epoch:train:10501-11000batch: iter_time=1.521e-04, forward_time=0.210, loss_ctc=50.143, loss_att=34.525, acc=0.777, loss=39.211, backward_time=0.233, grad_norm=31.115, clip=100.000, loss_scale=3.095e+26, optim_step_time=0.075, optim0_lr0=3.208e-04, train_time=0.906 +[ip-10-0-216-33:0/16] 2024-02-28 12:39:34,777 (trainer:762) INFO: 10epoch:train:11001-11500batch: iter_time=1.535e-04, forward_time=0.207, 
loss_ctc=48.356, loss_att=33.199, acc=0.787, loss=37.746, backward_time=0.234, grad_norm=28.197, clip=100.000, loss_scale=6.190e+26, optim_step_time=0.075, optim0_lr0=3.203e-04, train_time=0.913 +[ip-10-0-216-33:0/16] 2024-02-28 12:47:17,728 (trainer:762) INFO: 10epoch:train:11501-12000batch: iter_time=1.508e-04, forward_time=0.208, loss_ctc=50.405, loss_att=34.340, acc=0.778, loss=39.160, backward_time=0.237, grad_norm=28.449, clip=100.000, loss_scale=6.190e+26, optim_step_time=0.075, optim0_lr0=3.197e-04, train_time=0.926 +[ip-10-0-216-33:0/16] 2024-02-28 12:54:48,967 (trainer:762) INFO: 10epoch:train:12001-12500batch: iter_time=1.503e-04, forward_time=0.209, loss_ctc=49.608, loss_att=33.977, acc=0.780, loss=38.667, backward_time=0.234, grad_norm=37.166, clip=100.000, loss_scale=6.190e+26, optim_step_time=0.075, optim0_lr0=3.192e-04, train_time=0.902 +[ip-10-0-216-33:0/16] 2024-02-28 13:02:21,492 (trainer:762) INFO: 10epoch:train:12501-13000batch: iter_time=1.511e-04, forward_time=0.211, loss_ctc=52.907, loss_att=36.317, acc=0.776, loss=41.294, backward_time=0.234, grad_norm=30.522, clip=100.000, loss_scale=6.190e+26, optim_step_time=0.075, optim0_lr0=3.186e-04, train_time=0.905 +[ip-10-0-216-33:0/16] 2024-02-28 13:09:56,429 (trainer:762) INFO: 10epoch:train:13001-13500batch: iter_time=1.476e-04, forward_time=0.209, loss_ctc=52.953, loss_att=36.095, acc=0.776, loss=41.152, backward_time=0.235, grad_norm=29.083, clip=100.000, loss_scale=1.238e+27, optim_step_time=0.075, optim0_lr0=3.181e-04, train_time=0.910 +[ip-10-0-216-33:0/16] 2024-02-28 13:17:28,449 (trainer:762) INFO: 10epoch:train:13501-14000batch: iter_time=1.447e-04, forward_time=0.209, loss_ctc=47.021, loss_att=32.561, acc=0.785, loss=36.899, backward_time=0.233, grad_norm=27.403, clip=100.000, loss_scale=1.238e+27, optim_step_time=0.075, optim0_lr0=3.176e-04, train_time=0.904 +[ip-10-0-216-33:0/16] 2024-02-28 13:25:03,137 (trainer:762) INFO: 10epoch:train:14001-14500batch: iter_time=1.523e-04, forward_time=0.209, loss_ctc=49.487, loss_att=33.566, acc=0.778, loss=38.342, backward_time=0.240, grad_norm=27.951, clip=100.000, loss_scale=1.238e+27, optim_step_time=0.075, optim0_lr0=3.170e-04, train_time=0.909 +[ip-10-0-216-33:0/16] 2024-02-28 13:32:42,521 (trainer:762) INFO: 10epoch:train:14501-15000batch: iter_time=1.486e-04, forward_time=0.208, loss_ctc=51.052, loss_att=34.714, acc=0.775, loss=39.615, backward_time=0.233, grad_norm=28.879, clip=100.000, loss_scale=1.238e+27, optim_step_time=0.075, optim0_lr0=3.165e-04, train_time=0.919 +[ip-10-0-216-33:0/16] 2024-02-28 13:46:53,735 (trainer:361) INFO: 10epoch results: [train] iter_time=0.087, forward_time=0.208, loss_ctc=50.723, loss_att=34.710, acc=0.776, loss=39.514, backward_time=0.235, grad_norm=29.553, clip=100.000, loss_scale=3.282e+26, optim_step_time=0.075, optim0_lr0=3.246e-04, train_time=1.001, time=4 hours, 10 minutes and 39.37 seconds, total_count=150000, gpu_max_cached_mem_GB=36.488, [valid] loss_ctc=53.295, cer_ctc=0.294, loss_att=38.250, acc=0.679, cer=0.349, wer=1.000, loss=42.764, time=13 minutes and 56.01 seconds, total_count=21330, gpu_max_cached_mem_GB=36.488 +[ip-10-0-216-33:0/16] 2024-02-28 13:47:03,608 (trainer:416) INFO: The best model has been updated: valid.acc, valid.total_count +[ip-10-0-216-33:0/16] 2024-02-28 13:47:03,631 (trainer:470) INFO: The model files were removed: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/5epoch.pth +[ip-10-0-216-33:0/16] 2024-02-28 13:47:03,631 (trainer:290) INFO: 11/45epoch 
started. Estimated time to finish: 6 days, 10 hours and 26 minutes +[ip-10-0-216-33:0/16] 2024-02-28 13:47:03,640 (multiple_iter_factory:32) INFO: Building 0th iter-factory... +[ip-10-0-216-33:0/16] 2024-02-28 13:47:40,150 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-02-28 13:47:48,634 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-02-28 13:47:48,634 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0, +[ip-10-0-216-33:0/16] 2024-02-28 13:47:48,639 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-02-28 14:01:52,564 (trainer:762) INFO: 11epoch:train:1-500batch: iter_time=0.820, forward_time=0.209, loss_ctc=51.023, loss_att=34.981, acc=0.784, loss=39.793, backward_time=0.231, grad_norm=29.352, clip=100.000, loss_scale=2.476e+27, optim_step_time=0.075, optim0_lr0=3.160e-04, train_time=1.778 +[ip-10-0-216-33:0/16] 2024-02-28 14:09:29,447 (trainer:762) INFO: 11epoch:train:501-1000batch: iter_time=1.487e-04, forward_time=0.209, loss_ctc=50.240, loss_att=34.482, acc=0.783, loss=39.209, backward_time=0.236, grad_norm=28.985, clip=100.000, loss_scale=2.476e+27, optim_step_time=0.075, optim0_lr0=3.154e-04, train_time=0.913 +[ip-10-0-216-33:0/16] 2024-02-28 14:17:07,122 (trainer:762) INFO: 11epoch:train:1001-1500batch: iter_time=1.486e-04, forward_time=0.209, loss_ctc=50.301, loss_att=34.292, acc=0.772, loss=39.095, backward_time=0.235, grad_norm=29.901, clip=100.000, loss_scale=2.476e+27, optim_step_time=0.075, optim0_lr0=3.149e-04, train_time=0.915 +[ip-10-0-216-33:0/16] 2024-02-28 14:24:48,402 (trainer:762) INFO: 11epoch:train:1501-2000batch: iter_time=1.448e-04, forward_time=0.207, loss_ctc=49.760, loss_att=34.030, acc=0.789, loss=38.749, backward_time=0.235, grad_norm=27.658, clip=100.000, loss_scale=2.476e+27, optim_step_time=0.075, optim0_lr0=3.144e-04, train_time=0.922 +[ip-10-0-216-33:0/16] 2024-02-28 14:32:26,378 (trainer:762) INFO: 11epoch:train:2001-2500batch: iter_time=1.449e-04, forward_time=0.209, loss_ctc=51.963, loss_att=35.699, acc=0.781, loss=40.578, backward_time=0.233, grad_norm=31.300, clip=100.000, loss_scale=4.952e+27, optim_step_time=0.076, optim0_lr0=3.139e-04, train_time=0.916 +[ip-10-0-216-33:0/16] 2024-02-28 14:40:01,686 (trainer:762) INFO: 11epoch:train:2501-3000batch: iter_time=1.490e-04, forward_time=0.209, loss_ctc=50.326, loss_att=34.357, acc=0.779, loss=39.148, backward_time=0.230, grad_norm=29.879, clip=100.000, loss_scale=4.952e+27, optim_step_time=0.075, optim0_lr0=3.134e-04, train_time=0.910 +[ip-10-0-216-33:0/16] 2024-02-28 14:47:33,375 (trainer:762) INFO: 11epoch:train:3001-3500batch: iter_time=1.466e-04, forward_time=0.209, loss_ctc=51.458, loss_att=35.234, acc=0.778, loss=40.101, backward_time=0.233, grad_norm=101.998, clip=100.000, loss_scale=4.952e+27, optim_step_time=0.076, optim0_lr0=3.129e-04, train_time=0.903 +[ip-10-0-216-33:0/16] 2024-02-28 14:55:07,832 (trainer:762) INFO: 
11epoch:train:3501-4000batch: iter_time=1.456e-04, forward_time=0.211, loss_ctc=51.695, loss_att=34.959, acc=0.796, loss=39.980, backward_time=0.232, grad_norm=28.639, clip=100.000, loss_scale=4.952e+27, optim_step_time=0.075, optim0_lr0=3.123e-04, train_time=0.909 +[ip-10-0-216-33:0/16] 2024-02-28 15:02:47,857 (trainer:762) INFO: 11epoch:train:4001-4500batch: iter_time=1.449e-04, forward_time=0.209, loss_ctc=52.412, loss_att=35.434, acc=0.787, loss=40.527, backward_time=0.234, grad_norm=28.813, clip=100.000, loss_scale=9.904e+27, optim_step_time=0.075, optim0_lr0=3.118e-04, train_time=0.920 +[ip-10-0-216-33:0/16] 2024-02-28 15:10:17,314 (trainer:762) INFO: 11epoch:train:4501-5000batch: iter_time=1.437e-04, forward_time=0.207, loss_ctc=48.465, loss_att=33.363, acc=0.784, loss=37.893, backward_time=0.228, grad_norm=27.865, clip=100.000, loss_scale=9.904e+27, optim_step_time=0.075, optim0_lr0=3.113e-04, train_time=0.899 +[ip-10-0-216-33:0/16] 2024-02-28 15:10:22,314 (multiple_iter_factory:32) INFO: Building 1th iter-factory... +[ip-10-0-216-33:0/16] 2024-02-28 15:10:59,328 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-02-28 15:11:07,327 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-02-28 15:11:07,328 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2, +[ip-10-0-216-33:0/16] 2024-02-28 15:11:07,332 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-02-28 15:25:21,356 (trainer:762) INFO: 11epoch:train:5001-5500batch: iter_time=0.869, forward_time=0.208, loss_ctc=49.561, loss_att=34.006, acc=0.788, loss=38.672, backward_time=0.226, grad_norm=31.965, clip=100.000, loss_scale=9.904e+27, optim_step_time=0.075, optim0_lr0=3.108e-04, train_time=1.808 +[ip-10-0-216-33:0/16] 2024-02-28 15:33:03,460 (trainer:762) INFO: 11epoch:train:5501-6000batch: iter_time=1.381e-04, forward_time=0.206, loss_ctc=49.070, loss_att=33.789, acc=0.786, loss=38.373, backward_time=0.232, grad_norm=28.946, clip=100.000, loss_scale=9.904e+27, optim_step_time=0.075, optim0_lr0=3.103e-04, train_time=0.924 +[ip-10-0-216-33:0/16] 2024-02-28 15:40:43,446 (trainer:762) INFO: 11epoch:train:6001-6500batch: iter_time=1.421e-04, forward_time=0.206, loss_ctc=49.250, loss_att=33.772, acc=0.775, loss=38.416, backward_time=0.232, grad_norm=29.553, clip=100.000, loss_scale=1.981e+28, optim_step_time=0.075, optim0_lr0=3.098e-04, train_time=0.920 +[ip-10-0-216-33:0/16] 2024-02-28 15:48:19,427 (trainer:762) INFO: 11epoch:train:6501-7000batch: iter_time=1.446e-04, forward_time=0.209, loss_ctc=49.307, loss_att=33.598, acc=0.792, loss=38.310, backward_time=0.232, grad_norm=27.584, clip=100.000, loss_scale=1.981e+28, optim_step_time=0.075, optim0_lr0=3.093e-04, train_time=0.912 +[ip-10-0-216-33:0/16] 2024-02-28 15:55:54,273 (trainer:762) INFO: 11epoch:train:7001-7500batch: iter_time=1.427e-04, forward_time=0.208, loss_ctc=50.966, loss_att=34.997, acc=0.784, loss=39.787, 
backward_time=0.233, grad_norm=30.642, clip=100.000, loss_scale=1.981e+28, optim_step_time=0.075, optim0_lr0=3.089e-04, train_time=0.909 +[ip-10-0-216-33:0/16] 2024-02-28 16:03:26,812 (trainer:762) INFO: 11epoch:train:7501-8000batch: iter_time=1.428e-04, forward_time=0.209, loss_ctc=49.737, loss_att=33.968, acc=0.781, loss=38.699, backward_time=0.235, grad_norm=38.244, clip=100.000, loss_scale=1.981e+28, optim_step_time=0.075, optim0_lr0=3.084e-04, train_time=0.905 +[ip-10-0-216-33:0/16] 2024-02-28 16:11:07,217 (trainer:762) INFO: 11epoch:train:8001-8500batch: iter_time=1.412e-04, forward_time=0.207, loss_ctc=50.499, loss_att=34.483, acc=0.781, loss=39.288, backward_time=0.242, grad_norm=29.388, clip=100.000, loss_scale=3.961e+28, optim_step_time=0.075, optim0_lr0=3.079e-04, train_time=0.921 +[ip-10-0-216-33:0/16] 2024-02-28 16:18:51,021 (trainer:762) INFO: 11epoch:train:8501-9000batch: iter_time=1.420e-04, forward_time=0.209, loss_ctc=50.719, loss_att=34.463, acc=0.797, loss=39.340, backward_time=0.234, grad_norm=27.851, clip=100.000, loss_scale=3.961e+28, optim_step_time=0.075, optim0_lr0=3.074e-04, train_time=0.927 +[ip-10-0-216-33:0/16] 2024-02-28 16:26:26,275 (trainer:762) INFO: 11epoch:train:9001-9500batch: iter_time=1.408e-04, forward_time=0.209, loss_ctc=51.516, loss_att=34.877, acc=0.790, loss=39.869, backward_time=0.233, grad_norm=28.977, clip=100.000, loss_scale=3.961e+28, optim_step_time=0.075, optim0_lr0=3.069e-04, train_time=0.910 +[ip-10-0-216-33:0/16] 2024-02-28 16:34:02,139 (trainer:762) INFO: 11epoch:train:9501-10000batch: iter_time=1.458e-04, forward_time=0.208, loss_ctc=47.564, loss_att=32.790, acc=0.787, loss=37.222, backward_time=0.234, grad_norm=27.489, clip=100.000, loss_scale=3.961e+28, optim_step_time=0.075, optim0_lr0=3.064e-04, train_time=0.911 +[ip-10-0-216-33:0/16] 2024-02-28 16:34:06,862 (multiple_iter_factory:32) INFO: Building 2th iter-factory... 
+[ip-10-0-216-33:0/16] 2024-02-28 16:34:44,344 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-02-28 16:34:52,348 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-02-28 16:34:52,348 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1, +[ip-10-0-216-33:0/16] 2024-02-28 16:34:52,352 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-02-28 16:49:23,483 (trainer:762) INFO: 11epoch:train:10001-10500batch: iter_time=0.880, forward_time=0.209, loss_ctc=48.987, loss_att=33.680, acc=0.790, loss=38.272, backward_time=0.237, grad_norm=29.332, clip=100.000, loss_scale=7.923e+28, optim_step_time=0.075, optim0_lr0=3.059e-04, train_time=1.842 +[ip-10-0-216-33:0/16] 2024-02-28 16:57:01,861 (trainer:762) INFO: 11epoch:train:10501-11000batch: iter_time=1.447e-04, forward_time=0.209, loss_ctc=48.610, loss_att=33.447, acc=0.788, loss=37.996, backward_time=0.231, grad_norm=29.548, clip=100.000, loss_scale=7.923e+28, optim_step_time=0.075, optim0_lr0=3.055e-04, train_time=0.917 +[ip-10-0-216-33:0/16] 2024-02-28 17:04:37,244 (trainer:762) INFO: 11epoch:train:11001-11500batch: iter_time=1.487e-04, forward_time=0.208, loss_ctc=48.705, loss_att=33.422, acc=0.776, loss=38.007, backward_time=0.233, grad_norm=30.429, clip=100.000, loss_scale=7.923e+28, optim_step_time=0.075, optim0_lr0=3.050e-04, train_time=0.911 +[ip-10-0-216-33:0/16] 2024-02-28 17:12:13,348 (trainer:762) INFO: 11epoch:train:11501-12000batch: iter_time=1.494e-04, forward_time=0.210, loss_ctc=48.848, loss_att=33.359, acc=0.793, loss=38.006, backward_time=0.235, grad_norm=28.340, clip=100.000, loss_scale=7.923e+28, optim_step_time=0.075, optim0_lr0=3.045e-04, train_time=0.912 +[ip-10-0-216-33:0/16] 2024-02-28 17:19:53,845 (trainer:762) INFO: 11epoch:train:12001-12500batch: iter_time=1.500e-04, forward_time=0.207, loss_ctc=50.119, loss_att=34.571, acc=0.787, loss=39.236, backward_time=0.235, grad_norm=30.465, clip=100.000, loss_scale=1.585e+29, optim_step_time=0.075, optim0_lr0=3.041e-04, train_time=0.921 +[ip-10-0-216-33:0/16] 2024-02-28 17:27:26,757 (trainer:762) INFO: 11epoch:train:12501-13000batch: iter_time=1.478e-04, forward_time=0.209, loss_ctc=49.308, loss_att=33.609, acc=0.783, loss=38.319, backward_time=0.229, grad_norm=29.202, clip=100.000, loss_scale=1.585e+29, optim_step_time=0.075, optim0_lr0=3.036e-04, train_time=0.906 +[ip-10-0-216-33:0/16] 2024-02-28 17:35:01,184 (trainer:762) INFO: 11epoch:train:13001-13500batch: iter_time=1.505e-04, forward_time=0.210, loss_ctc=49.874, loss_att=34.156, acc=0.783, loss=38.871, backward_time=0.233, grad_norm=30.569, clip=100.000, loss_scale=1.585e+29, optim_step_time=0.075, optim0_lr0=3.031e-04, train_time=0.909 +[ip-10-0-216-33:0/16] 2024-02-28 17:42:36,532 (trainer:762) INFO: 11epoch:train:13501-14000batch: iter_time=1.492e-04, forward_time=0.210, loss_ctc=50.247, loss_att=34.143, acc=0.800, loss=38.974, backward_time=0.233, 
grad_norm=27.997, clip=100.000, loss_scale=1.585e+29, optim_step_time=0.075, optim0_lr0=3.027e-04, train_time=0.910 +[ip-10-0-216-33:0/16] 2024-02-28 17:50:06,316 (trainer:762) INFO: 11epoch:train:14001-14500batch: iter_time=1.489e-04, forward_time=0.211, loss_ctc=50.867, loss_att=34.511, acc=0.792, loss=39.418, backward_time=0.236, grad_norm=28.992, clip=100.000, loss_scale=3.169e+29, optim_step_time=0.075, optim0_lr0=3.022e-04, train_time=0.899 +[ip-10-0-216-33:0/16] 2024-02-28 17:57:40,171 (trainer:762) INFO: 11epoch:train:14501-15000batch: iter_time=1.478e-04, forward_time=0.209, loss_ctc=47.004, loss_att=32.523, acc=0.788, loss=36.867, backward_time=0.230, grad_norm=28.879, clip=100.000, loss_scale=3.169e+29, optim_step_time=0.075, optim0_lr0=3.017e-04, train_time=0.907 +[ip-10-0-216-33:0/16] 2024-02-28 18:11:56,580 (trainer:361) INFO: 11epoch results: [train] iter_time=0.086, forward_time=0.209, loss_ctc=49.947, loss_att=34.166, acc=0.786, loss=38.900, backward_time=0.233, grad_norm=31.959, clip=100.000, loss_scale=6.305e+28, optim_step_time=0.075, optim0_lr0=3.087e-04, train_time=1.002, time=4 hours, 10 minutes and 53.73 seconds, total_count=165000, gpu_max_cached_mem_GB=36.488, [valid] loss_ctc=50.615, cer_ctc=0.282, loss_att=38.464, acc=0.680, cer=0.302, wer=1.000, loss=42.110, time=13 minutes and 58.9 seconds, total_count=23463, gpu_max_cached_mem_GB=36.488 +[ip-10-0-216-33:0/16] 2024-02-28 18:12:05,725 (trainer:416) INFO: The best model has been updated: valid.acc, valid.total_count +[ip-10-0-216-33:0/16] 2024-02-28 18:12:05,744 (trainer:470) INFO: The model files were removed: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/6epoch.pth +[ip-10-0-216-33:0/16] 2024-02-28 18:12:05,745 (trainer:290) INFO: 12/45epoch started. Estimated time to finish: 6 days, 6 hours and 2 minutes +[ip-10-0-216-33:0/16] 2024-02-28 18:12:05,753 (multiple_iter_factory:32) INFO: Building 0th iter-factory... 
+[ip-10-0-216-33:0/16] 2024-02-28 18:12:41,862 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-02-28 18:12:50,305 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-02-28 18:12:50,305 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2, +[ip-10-0-216-33:0/16] 2024-02-28 18:12:50,310 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-02-28 18:26:50,167 (trainer:762) INFO: 12epoch:train:1-500batch: iter_time=0.835, forward_time=0.210, loss_ctc=47.246, loss_att=32.445, acc=0.792, loss=36.885, backward_time=0.229, grad_norm=29.129, clip=100.000, loss_scale=3.169e+29, optim_step_time=0.076, optim0_lr0=3.013e-04, train_time=1.769 +[ip-10-0-216-33:0/16] 2024-02-28 18:34:28,848 (trainer:762) INFO: 12epoch:train:501-1000batch: iter_time=1.467e-04, forward_time=0.208, loss_ctc=50.504, loss_att=34.956, acc=0.793, loss=39.620, backward_time=0.232, grad_norm=30.207, clip=100.000, loss_scale=3.169e+29, optim_step_time=0.075, optim0_lr0=3.008e-04, train_time=0.917 +[ip-10-0-216-33:0/16] 2024-02-28 18:42:08,251 (trainer:762) INFO: 12epoch:train:1001-1500batch: iter_time=1.504e-04, forward_time=0.209, loss_ctc=50.480, loss_att=34.388, acc=0.790, loss=39.215, backward_time=0.235, grad_norm=31.515, clip=100.000, loss_scale=6.338e+29, optim_step_time=0.075, optim0_lr0=3.004e-04, train_time=0.919 +[ip-10-0-216-33:0/16] 2024-02-28 18:49:44,337 (trainer:762) INFO: 12epoch:train:1501-2000batch: iter_time=1.473e-04, forward_time=0.209, loss_ctc=48.986, loss_att=33.253, acc=0.790, loss=37.973, backward_time=0.237, grad_norm=29.009, clip=100.000, loss_scale=6.338e+29, optim_step_time=0.075, optim0_lr0=2.999e-04, train_time=0.912 +[ip-10-0-216-33:0/16] 2024-02-28 18:57:22,359 (trainer:762) INFO: 12epoch:train:2001-2500batch: iter_time=1.460e-04, forward_time=0.207, loss_ctc=47.194, loss_att=33.091, acc=0.791, loss=37.322, backward_time=0.233, grad_norm=29.337, clip=100.000, loss_scale=6.338e+29, optim_step_time=0.075, optim0_lr0=2.995e-04, train_time=0.916 +[ip-10-0-216-33:0/16] 2024-02-28 19:04:57,656 (trainer:762) INFO: 12epoch:train:2501-3000batch: iter_time=1.460e-04, forward_time=0.209, loss_ctc=49.786, loss_att=33.638, acc=0.785, loss=38.482, backward_time=0.235, grad_norm=29.645, clip=100.000, loss_scale=6.338e+29, optim_step_time=0.075, optim0_lr0=2.990e-04, train_time=0.910 +[ip-10-0-216-33:0/16] 2024-02-28 19:12:32,824 (trainer:762) INFO: 12epoch:train:3001-3500batch: iter_time=1.464e-04, forward_time=0.207, loss_ctc=48.230, loss_att=32.904, acc=0.782, loss=37.502, backward_time=0.232, grad_norm=30.739, clip=100.000, loss_scale=1.268e+30, optim_step_time=0.075, optim0_lr0=2.986e-04, train_time=0.910 +[ip-10-0-216-33:0/16] 2024-02-28 19:20:05,623 (trainer:762) INFO: 12epoch:train:3501-4000batch: iter_time=1.527e-04, forward_time=0.208, loss_ctc=49.262, loss_att=33.455, acc=0.779, loss=38.197, backward_time=0.238, grad_norm=44.256, 
clip=100.000, loss_scale=1.268e+30, optim_step_time=0.075, optim0_lr0=2.981e-04, train_time=0.905 +[ip-10-0-216-33:0/16] 2024-02-28 19:27:37,863 (trainer:762) INFO: 12epoch:train:4001-4500batch: iter_time=1.518e-04, forward_time=0.210, loss_ctc=50.647, loss_att=34.064, acc=0.791, loss=39.039, backward_time=0.231, grad_norm=29.950, clip=100.000, loss_scale=1.268e+30, optim_step_time=0.075, optim0_lr0=2.977e-04, train_time=0.904 +[ip-10-0-216-33:0/16] 2024-02-28 19:35:10,926 (trainer:762) INFO: 12epoch:train:4501-5000batch: iter_time=1.485e-04, forward_time=0.208, loss_ctc=47.005, loss_att=32.194, acc=0.785, loss=36.638, backward_time=0.235, grad_norm=28.931, clip=100.000, loss_scale=1.268e+30, optim_step_time=0.075, optim0_lr0=2.973e-04, train_time=0.906 +[ip-10-0-216-33:0/16] 2024-02-28 19:35:15,558 (multiple_iter_factory:32) INFO: Building 1th iter-factory... +[ip-10-0-216-33:0/16] 2024-02-28 19:35:52,989 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-02-28 19:36:01,914 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-02-28 19:36:01,914 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0, +[ip-10-0-216-33:0/16] 2024-02-28 19:36:01,919 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-02-28 19:50:23,013 (trainer:762) INFO: 12epoch:train:5001-5500batch: iter_time=0.874, forward_time=0.208, loss_ctc=46.579, loss_att=31.918, acc=0.795, loss=36.316, backward_time=0.243, grad_norm=30.743, clip=100.000, loss_scale=2.535e+30, optim_step_time=0.075, optim0_lr0=2.968e-04, train_time=1.824 +[ip-10-0-216-33:0/16] 2024-02-28 19:58:03,683 (trainer:762) INFO: 12epoch:train:5501-6000batch: iter_time=1.478e-04, forward_time=0.209, loss_ctc=49.734, loss_att=34.408, acc=0.796, loss=39.006, backward_time=0.239, grad_norm=29.235, clip=100.000, loss_scale=2.535e+30, optim_step_time=0.075, optim0_lr0=2.964e-04, train_time=0.921 +[ip-10-0-216-33:0/16] 2024-02-28 20:05:40,374 (trainer:762) INFO: 12epoch:train:6001-6500batch: iter_time=1.480e-04, forward_time=0.209, loss_ctc=49.761, loss_att=33.965, acc=0.792, loss=38.704, backward_time=0.233, grad_norm=30.579, clip=100.000, loss_scale=2.535e+30, optim_step_time=0.075, optim0_lr0=2.960e-04, train_time=0.913 +[ip-10-0-216-33:0/16] 2024-02-28 20:13:19,967 (trainer:762) INFO: 12epoch:train:6501-7000batch: iter_time=1.422e-04, forward_time=0.208, loss_ctc=48.641, loss_att=33.008, acc=0.792, loss=37.698, backward_time=0.235, grad_norm=28.963, clip=100.000, loss_scale=2.535e+30, optim_step_time=0.075, optim0_lr0=2.955e-04, train_time=0.919 +[ip-10-0-216-33:0/16] 2024-02-28 20:20:56,847 (trainer:762) INFO: 12epoch:train:7001-7500batch: iter_time=1.471e-04, forward_time=0.208, loss_ctc=46.871, loss_att=32.937, acc=0.792, loss=37.117, backward_time=0.238, grad_norm=39.204, clip=100.000, loss_scale=5.071e+30, optim_step_time=0.075, optim0_lr0=2.951e-04, train_time=0.914 +[ip-10-0-216-33:0/16] 
2024-02-28 20:28:34,722 (trainer:762) INFO: 12epoch:train:7501-8000batch: iter_time=1.470e-04, forward_time=0.206, loss_ctc=49.382, loss_att=33.338, acc=0.786, loss=38.151, backward_time=0.236, grad_norm=30.060, clip=100.000, loss_scale=5.071e+30, optim_step_time=0.075, optim0_lr0=2.947e-04, train_time=0.915 +[ip-10-0-216-33:0/16] 2024-02-28 20:36:09,645 (trainer:762) INFO: 12epoch:train:8001-8500batch: iter_time=1.430e-04, forward_time=0.208, loss_ctc=47.602, loss_att=32.568, acc=0.784, loss=37.079, backward_time=0.238, grad_norm=29.811, clip=100.000, loss_scale=5.071e+30, optim_step_time=0.075, optim0_lr0=2.942e-04, train_time=0.910 +[ip-10-0-216-33:0/16] 2024-02-28 20:43:39,058 (trainer:762) INFO: 12epoch:train:8501-9000batch: iter_time=1.444e-04, forward_time=0.209, loss_ctc=48.398, loss_att=32.909, acc=0.782, loss=37.556, backward_time=0.231, grad_norm=29.612, clip=100.000, loss_scale=5.071e+30, optim_step_time=0.075, optim0_lr0=2.938e-04, train_time=0.899 +[ip-10-0-216-33:0/16] 2024-02-28 20:51:13,915 (trainer:762) INFO: 12epoch:train:9001-9500batch: iter_time=1.437e-04, forward_time=0.209, loss_ctc=49.953, loss_att=33.638, acc=0.794, loss=38.532, backward_time=0.233, grad_norm=29.698, clip=100.000, loss_scale=1.014e+31, optim_step_time=0.075, optim0_lr0=2.934e-04, train_time=0.909 +[ip-10-0-216-33:0/16] 2024-02-28 20:58:51,375 (trainer:762) INFO: 12epoch:train:9501-10000batch: iter_time=1.428e-04, forward_time=0.206, loss_ctc=46.660, loss_att=31.731, acc=0.787, loss=36.210, backward_time=0.231, grad_norm=28.998, clip=100.000, loss_scale=1.014e+31, optim_step_time=0.075, optim0_lr0=2.930e-04, train_time=0.915 +[ip-10-0-216-33:0/16] 2024-02-28 20:58:56,587 (multiple_iter_factory:32) INFO: Building 2th iter-factory... +[ip-10-0-216-33:0/16] 2024-02-28 20:59:33,501 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-02-28 20:59:42,294 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-02-28 20:59:42,294 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1, +[ip-10-0-216-33:0/16] 2024-02-28 20:59:42,299 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-02-28 21:14:04,831 (trainer:762) INFO: 12epoch:train:10001-10500batch: iter_time=0.890, forward_time=0.208, loss_ctc=46.086, loss_att=31.543, acc=0.797, loss=35.906, backward_time=0.235, grad_norm=28.965, clip=100.000, loss_scale=1.014e+31, optim_step_time=0.075, optim0_lr0=2.926e-04, train_time=1.827 +[ip-10-0-216-33:0/16] 2024-02-28 21:21:37,044 (trainer:762) INFO: 12epoch:train:10501-11000batch: iter_time=1.453e-04, forward_time=0.210, loss_ctc=48.939, loss_att=33.764, acc=0.798, loss=38.317, backward_time=0.236, grad_norm=39.947, clip=100.000, loss_scale=1.014e+31, optim_step_time=0.075, optim0_lr0=2.921e-04, train_time=0.904 +[ip-10-0-216-33:0/16] 2024-02-28 21:29:08,341 (trainer:762) INFO: 12epoch:train:11001-11500batch: iter_time=1.553e-04, forward_time=0.209, 
loss_ctc=48.674, loss_att=33.321, acc=0.795, loss=37.927, backward_time=0.234, grad_norm=29.252, clip=100.000, loss_scale=2.028e+31, optim_step_time=0.075, optim0_lr0=2.917e-04, train_time=0.902
+[ip-10-0-216-33:0/16] 2024-02-28 21:36:42,209 (trainer:762) INFO: 12epoch:train:11501-12000batch: iter_time=1.518e-04, forward_time=0.211, loss_ctc=47.974, loss_att=32.523, acc=0.794, loss=37.158, backward_time=0.236, grad_norm=28.828, clip=100.000, loss_scale=2.028e+31, optim_step_time=0.075, optim0_lr0=2.913e-04, train_time=0.907
+[ip-10-0-216-33:0/16] 2024-02-28 21:44:13,693 (trainer:762) INFO: 12epoch:train:12001-12500batch: iter_time=1.464e-04, forward_time=0.209, loss_ctc=46.404, loss_att=32.637, acc=0.795, loss=36.767, backward_time=0.230, grad_norm=30.410, clip=100.000, loss_scale=2.028e+31, optim_step_time=0.075, optim0_lr0=2.909e-04, train_time=0.903
+[ip-10-0-216-33:0/16] 2024-02-28 21:51:46,450 (trainer:762) INFO: 12epoch:train:12501-13000batch: iter_time=1.515e-04, forward_time=0.210, loss_ctc=48.466, loss_att=32.848, acc=0.789, loss=37.533, backward_time=0.235, grad_norm=29.161, clip=100.000, loss_scale=2.028e+31, optim_step_time=0.075, optim0_lr0=2.905e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-02-28 21:59:18,428 (trainer:762) INFO: 12epoch:train:13001-13500batch: iter_time=1.466e-04, forward_time=0.208, loss_ctc=46.989, loss_att=32.240, acc=0.787, loss=36.665, backward_time=0.235, grad_norm=30.940, clip=100.000, loss_scale=4.056e+31, optim_step_time=0.075, optim0_lr0=2.901e-04, train_time=0.904
+[ip-10-0-216-33:0/16] 2024-02-28 22:06:52,815 (trainer:762) INFO: 12epoch:train:13501-14000batch: iter_time=1.515e-04, forward_time=0.210, loss_ctc=48.058, loss_att=32.719, acc=0.783, loss=37.320, backward_time=0.238, grad_norm=30.501, clip=100.000, loss_scale=4.056e+31, optim_step_time=0.075, optim0_lr0=2.897e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-02-28 22:14:27,805 (trainer:762) INFO: 12epoch:train:14001-14500batch: iter_time=1.494e-04, forward_time=0.209, loss_ctc=49.487, loss_att=33.286, acc=0.795, loss=38.146, backward_time=0.236, grad_norm=29.518, clip=100.000, loss_scale=4.056e+31, optim_step_time=0.075, optim0_lr0=2.893e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-02-28 22:22:00,945 (trainer:762) INFO: 12epoch:train:14501-15000batch: iter_time=1.477e-04, forward_time=0.210, loss_ctc=46.186, loss_att=31.526, acc=0.789, loss=35.924, backward_time=0.239, grad_norm=29.768, clip=100.000, loss_scale=4.056e+31, optim_step_time=0.075, optim0_lr0=2.889e-04, train_time=0.906
+[ip-10-0-216-33:0/16] 2024-02-28 22:36:13,917 (trainer:361) INFO: 12epoch results: [train] iter_time=0.087, forward_time=0.209, loss_ctc=48.339, loss_att=33.040, acc=0.790, loss=37.630, backward_time=0.235, grad_norm=30.897, clip=100.000, loss_scale=1.075e+31, optim_step_time=0.075, optim0_lr0=2.950e-04, train_time=0.999, time=4 hours, 10 minutes and 11.47 seconds, total_count=180000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=50.169, cer_ctc=0.275, loss_att=36.519, acc=0.690, cer=0.348, wer=1.000, loss=40.614, time=13 minutes and 56.28 seconds, total_count=25596, gpu_max_cached_mem_GB=36.516
+[ip-10-0-216-33:0/16] 2024-02-28 22:36:23,367 (trainer:416) INFO: The best model has been updated: valid.acc, valid.total_count
+[ip-10-0-216-33:0/16] 2024-02-28 22:36:23,385 (trainer:470) INFO: The model files were removed: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/7epoch.pth
+[ip-10-0-216-33:0/16] 2024-02-28 22:36:23,386 (trainer:290) INFO: 13/45epoch started. Estimated time to finish: 6 days, 1 hour and 36 minutes
+[ip-10-0-216-33:0/16] 2024-02-28 22:36:23,393 (multiple_iter_factory:32) INFO: Building 0th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-28 22:36:59,528 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-28 22:37:07,590 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-28 22:37:07,590 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2,
+[ip-10-0-216-33:0/16] 2024-02-28 22:37:07,595 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-28 22:51:08,150 (trainer:762) INFO: 13epoch:train:1-500batch: iter_time=0.831, forward_time=0.209, loss_ctc=46.619, loss_att=32.154, acc=0.789, loss=36.494, backward_time=0.231, grad_norm=31.718, clip=100.000, loss_scale=8.113e+31, optim_step_time=0.075, optim0_lr0=2.885e-04, train_time=1.769
+[ip-10-0-216-33:0/16] 2024-02-28 22:58:45,791 (trainer:762) INFO: 13epoch:train:501-1000batch: iter_time=1.499e-04, forward_time=0.207, loss_ctc=47.888, loss_att=32.394, acc=0.786, loss=37.042, backward_time=0.236, grad_norm=30.907, clip=100.000, loss_scale=8.113e+31, optim_step_time=0.075, optim0_lr0=2.881e-04, train_time=0.915
+[ip-10-0-216-33:0/16] 2024-02-28 23:06:21,725 (trainer:762) INFO: 13epoch:train:1001-1500batch: iter_time=1.534e-04, forward_time=0.209, loss_ctc=49.048, loss_att=33.552, acc=0.788, loss=38.201, backward_time=0.234, grad_norm=31.765, clip=100.000, loss_scale=8.113e+31, optim_step_time=0.075, optim0_lr0=2.877e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-02-28 23:13:54,009 (trainer:762) INFO: 13epoch:train:1501-2000batch: iter_time=1.534e-04, forward_time=0.210, loss_ctc=49.792, loss_att=33.618, acc=0.791, loss=38.470, backward_time=0.235, grad_norm=32.460, clip=100.000, loss_scale=8.113e+31, optim_step_time=0.075, optim0_lr0=2.873e-04, train_time=0.904
+[ip-10-0-216-33:0/16] 2024-02-28 23:21:28,141 (trainer:762) INFO: 13epoch:train:2001-2500batch: iter_time=1.514e-04, forward_time=0.208, loss_ctc=49.856, loss_att=34.040, acc=0.787, loss=38.785, backward_time=0.230, grad_norm=30.904, clip=100.000, loss_scale=1.623e+32, optim_step_time=0.075, optim0_lr0=2.869e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-02-28 23:29:02,590 (trainer:762) INFO: 13epoch:train:2501-3000batch: iter_time=1.504e-04, forward_time=0.209, loss_ctc=47.701, loss_att=32.480, acc=0.798, loss=37.046, backward_time=0.234, grad_norm=30.379, clip=100.000, loss_scale=1.623e+32, optim_step_time=0.075, optim0_lr0=2.865e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-02-28 23:36:36,996 (trainer:762) INFO: 13epoch:train:3001-3500batch: iter_time=1.509e-04, forward_time=0.210, loss_ctc=48.650, loss_att=33.054, acc=0.789, loss=37.733, backward_time=0.239, grad_norm=30.431, clip=100.000, loss_scale=1.623e+32, optim_step_time=0.075, optim0_lr0=2.861e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-02-28 23:44:09,541 (trainer:762) INFO: 13epoch:train:3501-4000batch: iter_time=1.485e-04, forward_time=0.209, loss_ctc=48.180, loss_att=33.595, acc=0.791, loss=37.970, backward_time=0.232, grad_norm=30.940, clip=100.000, loss_scale=1.623e+32, optim_step_time=0.075, optim0_lr0=2.857e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-02-28 23:51:43,255 (trainer:762) INFO: 13epoch:train:4001-4500batch: iter_time=1.484e-04, forward_time=0.210, loss_ctc=48.972, loss_att=33.520, acc=0.795, loss=38.155, backward_time=0.234, grad_norm=30.186, clip=100.000, loss_scale=3.245e+32, optim_step_time=0.075, optim0_lr0=2.853e-04, train_time=0.907
+[ip-10-0-216-33:0/16] 2024-02-28 23:59:17,445 (trainer:762) INFO: 13epoch:train:4501-5000batch: iter_time=1.479e-04, forward_time=0.209, loss_ctc=47.018, loss_att=32.249, acc=0.803, loss=36.680, backward_time=0.237, grad_norm=27.930, clip=100.000, loss_scale=3.245e+32, optim_step_time=0.075, optim0_lr0=2.849e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-02-28 23:59:22,570 (multiple_iter_factory:32) INFO: Building 1th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-28 23:59:59,351 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-29 00:00:08,112 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-29 00:00:08,112 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0,
+[ip-10-0-216-33:0/16] 2024-02-29 00:00:08,116 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-29 00:14:32,389 (trainer:762) INFO: 13epoch:train:5001-5500batch: iter_time=0.868, forward_time=0.209, loss_ctc=46.190, loss_att=31.729, acc=0.791, loss=36.067, backward_time=0.232, grad_norm=30.632, clip=100.000, loss_scale=3.245e+32, optim_step_time=0.075, optim0_lr0=2.846e-04, train_time=1.830
+[ip-10-0-216-33:0/16] 2024-02-29 00:22:03,956 (trainer:762) INFO: 13epoch:train:5501-6000batch: iter_time=1.446e-04, forward_time=0.208, loss_ctc=47.189, loss_att=31.970, acc=0.788, loss=36.536, backward_time=0.234, grad_norm=29.986, clip=100.000, loss_scale=3.245e+32, optim_step_time=0.075, optim0_lr0=2.842e-04, train_time=0.903
+[ip-10-0-216-33:0/16] 2024-02-29 00:29:34,663 (trainer:762) INFO: 13epoch:train:6001-6500batch: iter_time=1.427e-04, forward_time=0.210, loss_ctc=47.840, loss_att=32.885, acc=0.792, loss=37.372, backward_time=0.232, grad_norm=30.200, clip=100.000, loss_scale=6.490e+32, optim_step_time=0.075, optim0_lr0=2.838e-04, train_time=0.901
+[ip-10-0-216-33:0/16] 2024-02-29 00:37:08,877 (trainer:762) INFO: 13epoch:train:6501-7000batch: iter_time=1.411e-04, forward_time=0.209, loss_ctc=48.897, loss_att=33.104, acc=0.794, loss=37.842, backward_time=0.230, grad_norm=30.965, clip=100.000, loss_scale=6.490e+32, optim_step_time=0.075, optim0_lr0=2.834e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-02-29 00:44:39,577 (trainer:762) INFO: 13epoch:train:7001-7500batch: iter_time=1.401e-04, forward_time=0.210, loss_ctc=49.134, loss_att=33.479, acc=0.790, loss=38.176, backward_time=0.237, grad_norm=29.618, clip=100.000, loss_scale=6.490e+32, optim_step_time=0.075, optim0_lr0=2.830e-04, train_time=0.901
+[ip-10-0-216-33:0/16] 2024-02-29 00:52:17,365 (trainer:762) INFO: 13epoch:train:7501-8000batch: iter_time=1.444e-04, forward_time=0.208, loss_ctc=46.855, loss_att=32.039, acc=0.801, loss=36.484, backward_time=0.234, grad_norm=28.698, clip=100.000, loss_scale=6.490e+32, optim_step_time=0.075, optim0_lr0=2.827e-04, train_time=0.915
+[ip-10-0-216-33:0/16] 2024-02-29 00:59:49,880 (trainer:762) INFO: 13epoch:train:8001-8500batch: iter_time=1.412e-04, forward_time=0.210, loss_ctc=47.998, loss_att=32.705, acc=0.791, loss=37.293, backward_time=0.233, grad_norm=30.273, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=2.823e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-02-29 01:07:24,720 (trainer:762) INFO: 13epoch:train:8501-9000batch: iter_time=1.402e-04, forward_time=0.208, loss_ctc=47.542, loss_att=33.050, acc=0.794, loss=37.398, backward_time=0.236, grad_norm=30.590, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=2.819e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-02-29 01:15:01,020 (trainer:762) INFO: 13epoch:train:9001-9500batch: iter_time=1.425e-04, forward_time=0.209, loss_ctc=48.364, loss_att=33.032, acc=0.797, loss=37.632, backward_time=0.231, grad_norm=29.406, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=2.815e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-02-29 01:22:36,449 (trainer:762) INFO: 13epoch:train:9501-10000batch: iter_time=1.401e-04, forward_time=0.207, loss_ctc=46.435, loss_att=31.867, acc=0.805, loss=36.237, backward_time=0.235, grad_norm=28.688, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=2.812e-04, train_time=0.911
+[ip-10-0-216-33:0/16] 2024-02-29 01:22:41,486 (multiple_iter_factory:32) INFO: Building 2th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-29 01:23:19,908 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-29 01:23:29,241 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-29 01:23:29,242 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1,
+[ip-10-0-216-33:0/16] 2024-02-29 01:23:29,246 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-29 01:37:43,649 (trainer:762) INFO: 13epoch:train:10001-10500batch: iter_time=0.884, forward_time=0.209, loss_ctc=45.527, loss_att=31.304, acc=0.793, loss=35.571, backward_time=0.228, grad_norm=30.135, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.808e-04, train_time=1.814
+[ip-10-0-216-33:0/16] 2024-02-29 01:45:17,701 (trainer:762) INFO: 13epoch:train:10501-11000batch: iter_time=1.442e-04, forward_time=0.209, loss_ctc=46.644, loss_att=31.692, acc=0.790, loss=36.177, backward_time=0.232, grad_norm=29.379, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.804e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-02-29 01:52:46,802 (trainer:762) INFO: 13epoch:train:11001-11500batch: iter_time=1.440e-04, forward_time=0.209, loss_ctc=47.699, loss_att=32.645, acc=0.794, loss=37.161, backward_time=0.237, grad_norm=32.783, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.801e-04, train_time=0.898
+[ip-10-0-216-33:0/16] 2024-02-29 02:00:24,914 (trainer:762) INFO: 13epoch:train:11501-12000batch: iter_time=1.471e-04, forward_time=0.209, loss_ctc=48.470, loss_att=32.888, acc=0.795, loss=37.563, backward_time=0.240, grad_norm=31.877, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.797e-04, train_time=0.916
+[ip-10-0-216-33:0/16] 2024-02-29 02:08:02,104 (trainer:762) INFO: 13epoch:train:12001-12500batch: iter_time=1.489e-04, forward_time=0.206, loss_ctc=48.605, loss_att=33.304, acc=0.791, loss=37.894, backward_time=0.230, grad_norm=30.608, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.793e-04, train_time=0.914
+[ip-10-0-216-33:0/16] 2024-02-29 02:15:46,780 (trainer:762) INFO: 13epoch:train:12501-13000batch: iter_time=1.490e-04, forward_time=0.206, loss_ctc=46.666, loss_att=31.917, acc=0.802, loss=36.341, backward_time=0.228, grad_norm=29.809, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.790e-04, train_time=0.929
+[ip-10-0-216-33:0/16] 2024-02-29 02:17:14,968 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
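The recurring `WARNING: The grad norm is nan. Skipping updating the model.` entries are the expected behavior of mixed-precision training with dynamic loss scaling: on overflow the optimizer step is skipped and the scale is backed off (visible when `loss_scale` drops between entries), and the scale is grown again after a stretch of successful steps (the doublings from 2.028e+31 up through 5.192e+33 above). A minimal sketch of that update rule, in the spirit of `torch.cuda.amp.GradScaler`; the class name and constants here are illustrative assumptions, not ESPnet's implementation:

```python
import math

# Minimal dynamic loss scaling in the spirit of torch.cuda.amp.GradScaler.
# Hypothetical sketch: names and constants are illustrative, not ESPnet's code.
class LossScaler:
    def __init__(self, scale=2.0**16, growth_factor=2.0,
                 backoff_factor=0.5, growth_interval=2000):
        self.scale = scale
        self.growth_factor = growth_factor    # grow scale after enough good steps
        self.backoff_factor = backoff_factor  # shrink scale on overflow
        self.growth_interval = growth_interval
        self._good_steps = 0

    def step(self, grad_norm: float) -> bool:
        """Return True if the optimizer should update, False to skip this batch."""
        if math.isnan(grad_norm) or math.isinf(grad_norm):
            # Overflow: skip the update and back off the scale, matching the
            # "Skipping updating the model" warnings in the log.
            self.scale *= self.backoff_factor
            self._good_steps = 0
            return False
        self._good_steps += 1
        if self._good_steps % self.growth_interval == 0:
            # Periodic growth explains loss_scale doubling between entries.
            self.scale *= self.growth_factor
        return True
```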
+[ip-10-0-216-33:0/16] 2024-02-29 02:23:30,807 (trainer:762) INFO: 13epoch:train:13001-13500batch: iter_time=1.493e-04, forward_time=0.206, loss_ctc=47.620, loss_att=32.406, acc=0.793, loss=36.970, backward_time=0.232, grad_norm=30.534, clip=100.000, loss_scale=3.080e+33, optim_step_time=0.075, optim0_lr0=2.786e-04, train_time=0.928
+[ip-10-0-216-33:0/16] 2024-02-29 02:31:14,451 (trainer:762) INFO: 13epoch:train:13501-14000batch: iter_time=1.497e-04, forward_time=0.208, loss_ctc=47.310, loss_att=33.002, acc=0.795, loss=37.294, backward_time=0.232, grad_norm=31.058, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.782e-04, train_time=0.927
+[ip-10-0-216-33:0/16] 2024-02-29 02:38:51,477 (trainer:762) INFO: 13epoch:train:14001-14500batch: iter_time=1.468e-04, forward_time=0.209, loss_ctc=48.371, loss_att=33.037, acc=0.798, loss=37.638, backward_time=0.233, grad_norm=38.660, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.779e-04, train_time=0.914
+[ip-10-0-216-33:0/16] 2024-02-29 02:46:33,411 (trainer:762) INFO: 13epoch:train:14501-15000batch: iter_time=1.494e-04, forward_time=0.210, loss_ctc=46.188, loss_att=31.659, acc=0.806, loss=36.018, backward_time=0.231, grad_norm=31.145, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.775e-04, train_time=0.924
+[ip-10-0-216-33:0/16] 2024-02-29 03:00:36,200 (trainer:361) INFO: 13epoch results: [train] iter_time=0.086, forward_time=0.209, loss_ctc=47.776, loss_att=32.679, acc=0.794, loss=37.208, backward_time=0.233, grad_norm=30.755, clip=100.000, loss_scale=1.390e+33, optim_step_time=0.075, optim0_lr0=2.829e-04, train_time=1.000, time=4 hours, 10 minutes and 24.75 seconds, total_count=195000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=48.938, cer_ctc=0.271, loss_att=36.502, acc=0.693, cer=0.344, wer=1.000, loss=40.233, time=13 minutes and 47.76 seconds, total_count=27729, gpu_max_cached_mem_GB=36.516
+[ip-10-0-216-33:0/16] 2024-02-29 03:00:45,821 (trainer:416) INFO: The best model has been updated: valid.acc, valid.total_count
+[ip-10-0-216-33:0/16] 2024-02-29 03:00:45,868 (trainer:470) INFO: The model files were removed: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/8epoch.pth
+[ip-10-0-216-33:0/16] 2024-02-29 03:00:45,868 (trainer:290) INFO: 14/45epoch started. Estimated time to finish: 5 days, 21 hours and 10 minutes
+[ip-10-0-216-33:0/16] 2024-02-29 03:00:45,876 (multiple_iter_factory:32) INFO: Building 0th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-29 03:01:21,624 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-29 03:01:29,822 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-29 03:01:29,822 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2,
+[ip-10-0-216-33:0/16] 2024-02-29 03:01:29,827 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-29 03:15:23,829 (trainer:762) INFO: 14epoch:train:1-500batch: iter_time=0.846, forward_time=0.210, loss_ctc=48.457, loss_att=33.156, acc=0.801, loss=37.746, backward_time=0.231, grad_norm=30.604, clip=100.000, loss_scale=4.704e+33, optim_step_time=0.075, optim0_lr0=2.772e-04, train_time=1.756
+[ip-10-0-216-33:0/16] 2024-02-29 03:22:58,749 (trainer:762) INFO: 14epoch:train:501-1000batch: iter_time=1.505e-04, forward_time=0.210, loss_ctc=47.934, loss_att=32.812, acc=0.795, loss=37.349, backward_time=0.232, grad_norm=30.876, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.768e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-02-29 03:30:31,281 (trainer:762) INFO: 14epoch:train:1001-1500batch: iter_time=1.524e-04, forward_time=0.209, loss_ctc=47.084, loss_att=32.496, acc=0.796, loss=36.872, backward_time=0.237, grad_norm=31.510, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.765e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-02-29 03:38:07,836 (trainer:762) INFO: 14epoch:train:1501-2000batch: iter_time=1.464e-04, forward_time=0.209, loss_ctc=47.602, loss_att=32.975, acc=0.801, loss=37.363, backward_time=0.234, grad_norm=31.192, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.761e-04, train_time=0.913
+[ip-10-0-216-33:0/16] 2024-02-29 03:40:46,854 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-02-29 03:45:41,813 (trainer:762) INFO: 14epoch:train:2001-2500batch: iter_time=1.476e-04, forward_time=0.210, loss_ctc=48.408, loss_att=33.114, acc=0.794, loss=37.702, backward_time=0.236, grad_norm=32.107, clip=100.000, loss_scale=6.035e+33, optim_step_time=0.075, optim0_lr0=2.758e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-02-29 03:53:19,854 (trainer:762) INFO: 14epoch:train:2501-3000batch: iter_time=1.507e-04, forward_time=0.208, loss_ctc=49.734, loss_att=34.081, acc=0.793, loss=38.777, backward_time=0.235, grad_norm=34.660, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.754e-04, train_time=0.916
+[ip-10-0-216-33:0/16] 2024-02-29 04:00:52,939 (trainer:762) INFO: 14epoch:train:3001-3500batch: iter_time=1.503e-04, forward_time=0.211, loss_ctc=47.953, loss_att=32.901, acc=0.797, loss=37.417, backward_time=0.232, grad_norm=31.518, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.751e-04, train_time=0.906
+[ip-10-0-216-33:0/16] 2024-02-29 04:08:24,686 (trainer:762) INFO: 14epoch:train:3501-4000batch: iter_time=1.491e-04, forward_time=0.209, loss_ctc=48.551, loss_att=32.881, acc=0.798, loss=37.582, backward_time=0.236, grad_norm=32.140, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.747e-04, train_time=0.903
+[ip-10-0-216-33:0/16] 2024-02-29 04:13:00,589 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-02-29 04:16:01,616 (trainer:762) INFO: 14epoch:train:4001-4500batch: iter_time=1.513e-04, forward_time=0.210, loss_ctc=46.760, loss_att=31.891, acc=0.802, loss=36.352, backward_time=0.235, grad_norm=31.374, clip=100.000, loss_scale=6.493e+33, optim_step_time=0.075, optim0_lr0=2.744e-04, train_time=0.914
+[ip-10-0-216-33:0/16] 2024-02-29 04:23:33,926 (trainer:762) INFO: 14epoch:train:4501-5000batch: iter_time=1.473e-04, forward_time=0.210, loss_ctc=47.024, loss_att=32.518, acc=0.801, loss=36.870, backward_time=0.229, grad_norm=30.417, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.740e-04, train_time=0.904
+[ip-10-0-216-33:0/16] 2024-02-29 04:23:39,039 (multiple_iter_factory:32) INFO: Building 1th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-29 04:24:15,567 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-29 04:24:23,499 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-29 04:24:23,499 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0,
+[ip-10-0-216-33:0/16] 2024-02-29 04:24:23,504 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-29 04:38:48,896 (trainer:762) INFO: 14epoch:train:5001-5500batch: iter_time=0.895, forward_time=0.208, loss_ctc=47.610, loss_att=32.632, acc=0.803, loss=37.126, backward_time=0.236, grad_norm=30.402, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.737e-04, train_time=1.830
+[ip-10-0-216-33:0/16] 2024-02-29 04:39:42,436 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-02-29 04:46:20,601 (trainer:762) INFO: 14epoch:train:5501-6000batch: iter_time=1.471e-04, forward_time=0.209, loss_ctc=47.504, loss_att=32.547, acc=0.797, loss=37.034, backward_time=0.232, grad_norm=31.093, clip=100.000, loss_scale=2.893e+33, optim_step_time=0.075, optim0_lr0=2.734e-04, train_time=0.903
+[ip-10-0-216-33:0/16] 2024-02-29 04:53:54,868 (trainer:762) INFO: 14epoch:train:6001-6500batch: iter_time=1.463e-04, forward_time=0.210, loss_ctc=46.615, loss_att=32.142, acc=0.797, loss=36.484, backward_time=0.229, grad_norm=31.035, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.730e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-02-29 05:01:27,717 (trainer:762) INFO: 14epoch:train:6501-7000batch: iter_time=1.443e-04, forward_time=0.209, loss_ctc=46.759, loss_att=32.500, acc=0.802, loss=36.778, backward_time=0.232, grad_norm=31.182, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.727e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-02-29 05:09:04,644 (trainer:762) INFO: 14epoch:train:7001-7500batch: iter_time=1.454e-04, forward_time=0.208, loss_ctc=47.823, loss_att=32.745, acc=0.797, loss=37.268, backward_time=0.229, grad_norm=31.536, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.723e-04, train_time=0.914
+[ip-10-0-216-33:0/16] 2024-02-29 05:16:38,575 (trainer:762) INFO: 14epoch:train:7501-8000batch: iter_time=1.460e-04, forward_time=0.208, loss_ctc=48.897, loss_att=33.662, acc=0.794, loss=38.232, backward_time=0.228, grad_norm=34.101, clip=100.000, loss_scale=4.891e+33, optim_step_time=0.075, optim0_lr0=2.720e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-02-29 05:22:55,549 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-02-29 05:24:08,694 (trainer:762) INFO: 14epoch:train:8001-8500batch: iter_time=1.473e-04, forward_time=0.210, loss_ctc=47.713, loss_att=32.725, acc=0.798, loss=37.222, backward_time=0.229, grad_norm=30.919, clip=100.000, loss_scale=4.771e+33, optim_step_time=0.075, optim0_lr0=2.717e-04, train_time=0.900
+[ip-10-0-216-33:0/16] 2024-02-29 05:31:38,615 (trainer:762) INFO: 14epoch:train:8501-9000batch: iter_time=1.470e-04, forward_time=0.210, loss_ctc=48.059, loss_att=32.601, acc=0.799, loss=37.239, backward_time=0.232, grad_norm=31.573, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.713e-04, train_time=0.900
+[ip-10-0-216-33:0/16] 2024-02-29 05:39:12,104 (trainer:762) INFO: 14epoch:train:9001-9500batch: iter_time=1.450e-04, forward_time=0.209, loss_ctc=45.856, loss_att=31.445, acc=0.804, loss=35.769, backward_time=0.236, grad_norm=30.398, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.710e-04, train_time=0.907
+[ip-10-0-216-33:0/16] 2024-02-29 05:46:41,426 (trainer:762) INFO: 14epoch:train:9501-10000batch: iter_time=1.448e-04, forward_time=0.210, loss_ctc=46.686, loss_att=32.130, acc=0.803, loss=36.497, backward_time=0.229, grad_norm=29.878, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.707e-04, train_time=0.898
+[ip-10-0-216-33:0/16] 2024-02-29 05:46:46,299 (multiple_iter_factory:32) INFO: Building 2th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-29 05:47:23,462 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-29 05:47:31,422 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-29 05:47:31,423 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1,
+[ip-10-0-216-33:0/16] 2024-02-29 05:47:31,427 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-29 06:02:01,665 (trainer:762) INFO: 14epoch:train:10001-10500batch: iter_time=0.884, forward_time=0.210, loss_ctc=47.312, loss_att=32.392, acc=0.804, loss=36.868, backward_time=0.232, grad_norm=30.633, clip=100.000, loss_scale=3.017e+33, optim_step_time=0.075, optim0_lr0=2.703e-04, train_time=1.840
+[ip-10-0-216-33:0/16] 2024-02-29 06:09:35,811 (trainer:762) INFO: 14epoch:train:10501-11000batch: iter_time=1.532e-04, forward_time=0.209, loss_ctc=47.094, loss_att=32.303, acc=0.798, loss=36.740, backward_time=0.229, grad_norm=30.746, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.700e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-02-29 06:17:09,889 (trainer:762) INFO: 14epoch:train:11001-11500batch: iter_time=1.539e-04, forward_time=0.210, loss_ctc=46.141, loss_att=31.839, acc=0.799, loss=36.130, backward_time=0.234, grad_norm=29.979, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.697e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-02-29 06:24:41,430 (trainer:762) INFO: 14epoch:train:11501-12000batch: iter_time=1.490e-04, forward_time=0.210, loss_ctc=46.445, loss_att=32.172, acc=0.804, loss=36.454, backward_time=0.232, grad_norm=30.652, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.694e-04, train_time=0.903
+[ip-10-0-216-33:0/16] 2024-02-29 06:26:26,759 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-02-29 06:32:18,062 (trainer:762) INFO: 14epoch:train:12001-12500batch: iter_time=1.499e-04, forward_time=0.209, loss_ctc=47.285, loss_att=32.466, acc=0.798, loss=36.912, backward_time=0.237, grad_norm=30.464, clip=100.000, loss_scale=3.194e+33, optim_step_time=0.075, optim0_lr0=2.690e-04, train_time=0.913
+[ip-10-0-216-33:0/16] 2024-02-29 06:39:59,419 (trainer:762) INFO: 14epoch:train:12501-13000batch: iter_time=1.516e-04, forward_time=0.208, loss_ctc=48.607, loss_att=33.428, acc=0.796, loss=37.982, backward_time=0.234, grad_norm=32.923, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.687e-04, train_time=0.922
+[ip-10-0-216-33:0/16] 2024-02-29 06:47:41,398 (trainer:762) INFO: 14epoch:train:13001-13500batch: iter_time=1.539e-04, forward_time=0.208, loss_ctc=47.222, loss_att=32.356, acc=0.800, loss=36.816, backward_time=0.229, grad_norm=29.806, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.684e-04, train_time=0.924
+[ip-10-0-216-33:0/16] 2024-02-29 06:55:19,417 (trainer:762) INFO: 14epoch:train:13501-14000batch: iter_time=1.539e-04, forward_time=0.207, loss_ctc=47.426, loss_att=32.212, acc=0.801, loss=36.776, backward_time=0.231, grad_norm=31.068, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.681e-04, train_time=0.916
+[ip-10-0-216-33:0/16] 2024-02-29 07:02:58,079 (trainer:762) INFO: 14epoch:train:14001-14500batch: iter_time=1.534e-04, forward_time=0.208, loss_ctc=45.572, loss_att=31.395, acc=0.804, loss=35.648, backward_time=0.234, grad_norm=42.193, clip=100.000, loss_scale=4.590e+33, optim_step_time=0.075, optim0_lr0=2.677e-04, train_time=0.917
+[ip-10-0-216-33:0/16] 2024-02-29 07:10:36,438 (trainer:762) INFO: 14epoch:train:14501-15000batch: iter_time=1.503e-04, forward_time=0.209, loss_ctc=46.146, loss_att=31.907, acc=0.804, loss=36.179, backward_time=0.234, grad_norm=29.988, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.674e-04, train_time=0.916
+[ip-10-0-216-33:0/16] 2024-02-29 07:25:08,680 (trainer:361) INFO: 14epoch results: [train] iter_time=0.088, forward_time=0.209, loss_ctc=47.409, loss_att=32.548, acc=0.799, loss=37.006, backward_time=0.232, grad_norm=31.566, clip=100.000, loss_scale=4.209e+33, optim_step_time=0.075, optim0_lr0=2.722e-04, train_time=0.999, time=4 hours, 10 minutes and 6.22 seconds, total_count=210000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=46.186, cer_ctc=0.256, loss_att=35.478, acc=0.700, cer=0.337, wer=1.000, loss=38.690, time=14 minutes and 16.28 seconds, total_count=29862, gpu_max_cached_mem_GB=36.516
+[ip-10-0-216-33:0/16] 2024-02-29 07:25:18,556 (trainer:416) INFO: The best model has been updated: valid.acc, valid.total_count
+[ip-10-0-216-33:0/16] 2024-02-29 07:25:18,576 (trainer:470) INFO: The model files were removed: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/9epoch.pth
+[ip-10-0-216-33:0/16] 2024-02-29 07:25:18,576 (trainer:290) INFO: 15/45epoch started. Estimated time to finish: 5 days, 16 hours and 45 minutes
+[ip-10-0-216-33:0/16] 2024-02-29 07:25:18,584 (multiple_iter_factory:32) INFO: Building 0th iter-factory...
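Every `trainer:762` entry above is a flat list of `key=value` metrics, so the log doubles as a machine-readable training curve. A small hypothetical helper for extracting those metrics; the regex and the sample line simply follow the format visible in the log, and this is not part of ESPnet:

```python
import re

# key=value pairs with plain or scientific-notation numbers, e.g. iter_time=1.503e-04
METRIC_RE = re.compile(r"(\w+)=([0-9][0-9.eE+-]*)")

def parse_metrics(line: str) -> dict:
    """Extract numeric key=value metrics from one trainer INFO line."""
    metrics = {}
    for key, value in METRIC_RE.findall(line):
        try:
            metrics[key] = float(value)
        except ValueError:
            pass  # ignore fragments that are not numbers
    return metrics

sample = ("14epoch:train:14501-15000batch: iter_time=1.503e-04, "
          "loss_ctc=46.146, loss_att=31.907, acc=0.804, loss=36.179")
print(parse_metrics(sample)["acc"])  # -> 0.804
```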
+[ip-10-0-216-33:0/16] 2024-02-29 07:25:54,414 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-29 07:26:02,262 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-29 07:26:02,263 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0,
+[ip-10-0-216-33:0/16] 2024-02-29 07:26:02,267 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-29 07:39:56,183 (trainer:762) INFO: 15epoch:train:1-500batch: iter_time=0.839, forward_time=0.209, loss_ctc=46.119, loss_att=31.499, acc=0.796, loss=35.885, backward_time=0.237, grad_norm=31.479, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.671e-04, train_time=1.755
+[ip-10-0-216-33:0/16] 2024-02-29 07:40:33,891 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-02-29 07:47:33,496 (trainer:762) INFO: 15epoch:train:501-1000batch: iter_time=1.505e-04, forward_time=0.210, loss_ctc=47.063, loss_att=32.784, acc=0.793, loss=37.068, backward_time=0.235, grad_norm=32.957, clip=100.000, loss_scale=2.809e+33, optim_step_time=0.075, optim0_lr0=2.668e-04, train_time=0.914
+[ip-10-0-216-33:0/16] 2024-02-29 07:55:10,981 (trainer:762) INFO: 15epoch:train:1001-1500batch: iter_time=1.530e-04, forward_time=0.210, loss_ctc=47.556, loss_att=32.834, acc=0.803, loss=37.250, backward_time=0.239, grad_norm=30.602, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.665e-04, train_time=0.915
+[ip-10-0-216-33:0/16] 2024-02-29 08:02:52,919 (trainer:762) INFO: 15epoch:train:1501-2000batch: iter_time=1.544e-04, forward_time=0.207, loss_ctc=48.309, loss_att=33.155, acc=0.799, loss=37.701, backward_time=0.232, grad_norm=31.081, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.662e-04, train_time=0.924
+[ip-10-0-216-33:0/16] 2024-02-29 08:10:34,465 (trainer:762) INFO: 15epoch:train:2001-2500batch: iter_time=1.493e-04, forward_time=0.208, loss_ctc=45.815, loss_att=31.394, acc=0.799, loss=35.720, backward_time=0.231, grad_norm=30.358, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.658e-04, train_time=0.923
+[ip-10-0-216-33:0/16] 2024-02-29 08:18:19,091 (trainer:762) INFO: 15epoch:train:2501-3000batch: iter_time=1.484e-04, forward_time=0.206, loss_ctc=48.081, loss_att=32.840, acc=0.796, loss=37.412, backward_time=0.231, grad_norm=34.974, clip=100.000, loss_scale=4.974e+33, optim_step_time=0.075, optim0_lr0=2.655e-04, train_time=0.929
+[ip-10-0-216-33:0/16] 2024-02-29 08:25:59,129 (trainer:762) INFO: 15epoch:train:3001-3500batch: iter_time=1.488e-04, forward_time=0.208, loss_ctc=46.593, loss_att=32.330, acc=0.791, loss=36.609, backward_time=0.239, grad_norm=33.152, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.652e-04, train_time=0.920
+[ip-10-0-216-33:0/16] 2024-02-29 08:29:26,323 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-02-29 08:33:39,376 (trainer:762) INFO: 15epoch:train:3501-4000batch: iter_time=1.512e-04, forward_time=0.209, loss_ctc=46.609, loss_att=32.244, acc=0.796, loss=36.553, backward_time=0.238, grad_norm=32.874, clip=100.000, loss_scale=3.751e+33, optim_step_time=0.075, optim0_lr0=2.649e-04, train_time=0.920
+[ip-10-0-216-33:0/16] 2024-02-29 08:41:23,546 (trainer:762) INFO: 15epoch:train:4001-4500batch: iter_time=1.524e-04, forward_time=0.207, loss_ctc=46.331, loss_att=32.137, acc=0.798, loss=36.395, backward_time=0.236, grad_norm=31.982, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.646e-04, train_time=0.928
+[ip-10-0-216-33:0/16] 2024-02-29 08:48:58,356 (trainer:762) INFO: 15epoch:train:4501-5000batch: iter_time=1.482e-04, forward_time=0.209, loss_ctc=45.081, loss_att=31.207, acc=0.800, loss=35.369, backward_time=0.235, grad_norm=31.673, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.643e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-02-29 08:49:03,071 (multiple_iter_factory:32) INFO: Building 1th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-29 08:49:39,637 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-29 08:49:47,631 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-29 08:49:47,631 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1,
+[ip-10-0-216-33:0/16] 2024-02-29 08:49:47,636 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-29 09:06:14,753 (trainer:762) INFO: 15epoch:train:5001-5500batch: iter_time=1.159, forward_time=0.209, loss_ctc=45.306, loss_att=30.955, acc=0.799, loss=35.260, backward_time=0.229, grad_norm=30.401, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.640e-04, train_time=2.073
+[ip-10-0-216-33:0/16] 2024-02-29 09:13:52,781 (trainer:762) INFO: 15epoch:train:5501-6000batch: iter_time=1.435e-04, forward_time=0.209, loss_ctc=46.330, loss_att=32.165, acc=0.796, loss=36.415, backward_time=0.235, grad_norm=32.285, clip=100.000, loss_scale=4.034e+33, optim_step_time=0.075, optim0_lr0=2.637e-04, train_time=0.916
+[ip-10-0-216-33:0/16] 2024-02-29 09:21:30,845 (trainer:762) INFO: 15epoch:train:6001-6500batch: iter_time=1.487e-04, forward_time=0.209, loss_ctc=47.084, loss_att=32.375, acc=0.805, loss=36.788, backward_time=0.239, grad_norm=30.458, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.634e-04, train_time=0.916
+[ip-10-0-216-33:0/16] 2024-02-29 09:29:09,795 (trainer:762) INFO: 15epoch:train:6501-7000batch: iter_time=1.454e-04, forward_time=0.209, loss_ctc=47.678, loss_att=32.734, acc=0.802, loss=37.217, backward_time=0.233, grad_norm=31.896, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.631e-04, train_time=0.918
+[ip-10-0-216-33:0/16] 2024-02-29 09:36:46,148 (trainer:762) INFO: 15epoch:train:7001-7500batch: iter_time=1.493e-04, forward_time=0.207, loss_ctc=45.469, loss_att=31.183, acc=0.801, loss=35.469, backward_time=0.234, grad_norm=30.086, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.628e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-02-29 09:42:09,803 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-02-29 09:44:27,449 (trainer:762) INFO: 15epoch:train:7501-8000batch: iter_time=1.503e-04, forward_time=0.208, loss_ctc=47.592, loss_att=32.550, acc=0.797, loss=37.062, backward_time=0.235, grad_norm=32.658, clip=100.000, loss_scale=6.503e+33, optim_step_time=0.075, optim0_lr0=2.625e-04, train_time=0.922
+[ip-10-0-216-33:0/16] 2024-02-29 09:51:58,898 (trainer:762) INFO: 15epoch:train:8001-8500batch: iter_time=1.503e-04, forward_time=0.208, loss_ctc=45.755, loss_att=31.793, acc=0.794, loss=35.982, backward_time=0.233, grad_norm=31.877, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.622e-04, train_time=0.903
+[ip-10-0-216-33:0/16] 2024-02-29 09:59:31,436 (trainer:762) INFO: 15epoch:train:8501-9000batch: iter_time=1.505e-04, forward_time=0.210, loss_ctc=46.400, loss_att=32.045, acc=0.798, loss=36.352, backward_time=0.242, grad_norm=33.589, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.619e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-02-29 10:00:17,495 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-02-29 10:07:07,011 (trainer:762) INFO: 15epoch:train:9001-9500batch: iter_time=1.486e-04, forward_time=0.208, loss_ctc=45.676, loss_att=31.652, acc=0.801, loss=35.859, backward_time=0.240, grad_norm=32.166, clip=100.000, loss_scale=2.851e+33, optim_step_time=0.075, optim0_lr0=2.616e-04, train_time=0.911
+[ip-10-0-216-33:0/16] 2024-02-29 10:14:36,779 (trainer:762) INFO: 15epoch:train:9501-10000batch: iter_time=1.450e-04, forward_time=0.210, loss_ctc=44.660, loss_att=30.909, acc=0.801, loss=35.035, backward_time=0.234, grad_norm=36.789, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.613e-04, train_time=0.899
+[ip-10-0-216-33:0/16] 2024-02-29 10:14:41,613 (multiple_iter_factory:32) INFO: Building 2th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-29 10:15:18,993 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-29 10:15:26,935 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-29 10:15:26,936 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2,
+[ip-10-0-216-33:0/16] 2024-02-29 10:15:26,940 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-29 10:29:51,771 (trainer:762) INFO: 15epoch:train:10001-10500batch: iter_time=0.868, forward_time=0.208, loss_ctc=45.340, loss_att=30.977, acc=0.800, loss=35.286, backward_time=0.235, grad_norm=31.875, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.610e-04, train_time=1.830
+[ip-10-0-216-33:0/16] 2024-02-29 10:37:28,110 (trainer:762) INFO: 15epoch:train:10501-11000batch: iter_time=1.483e-04, forward_time=0.208, loss_ctc=45.994, loss_att=32.062, acc=0.797, loss=36.241, backward_time=0.239, grad_norm=33.527, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.607e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-02-29 10:45:00,899 (trainer:762) INFO: 15epoch:train:11001-11500batch: iter_time=1.511e-04, forward_time=0.209, loss_ctc=46.931, loss_att=32.299, acc=0.806, loss=36.689, backward_time=0.231, grad_norm=33.846, clip=100.000, loss_scale=4.933e+33, optim_step_time=0.075, optim0_lr0=2.604e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-02-29 10:52:40,701 (trainer:762) INFO: 15epoch:train:11501-12000batch: iter_time=1.520e-04, forward_time=0.208, loss_ctc=47.390, loss_att=32.516, acc=0.803, loss=36.978, backward_time=0.231, grad_norm=31.445, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.601e-04, train_time=0.919
+[ip-10-0-216-33:0/16] 2024-02-29 11:00:17,876 (trainer:762) INFO: 15epoch:train:12001-12500batch: iter_time=1.450e-04, forward_time=0.209, loss_ctc=45.182, loss_att=30.923, acc=0.802, loss=35.201, backward_time=0.232, grad_norm=30.653, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.598e-04, train_time=0.914
+[ip-10-0-216-33:0/16] 2024-02-29 11:02:44,731 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-02-29 11:07:55,294 (trainer:762) INFO: 15epoch:train:12501-13000batch: iter_time=1.454e-04, forward_time=0.209, loss_ctc=47.292, loss_att=32.283, acc=0.798, loss=36.785, backward_time=0.236, grad_norm=33.023, clip=100.000, loss_scale=3.423e+33, optim_step_time=0.075, optim0_lr0=2.595e-04, train_time=0.915
+[ip-10-0-216-33:0/16] 2024-02-29 11:15:33,221 (trainer:762) INFO: 15epoch:train:13001-13500batch: iter_time=1.460e-04, forward_time=0.207, loss_ctc=45.603, loss_att=31.726, acc=0.795, loss=35.889, backward_time=0.231, grad_norm=32.876, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.592e-04, train_time=0.916
+[ip-10-0-216-33:0/16] 2024-02-29 11:23:08,668 (trainer:762) INFO: 15epoch:train:13501-14000batch: iter_time=1.504e-04, forward_time=0.209, loss_ctc=45.787, loss_att=31.705, acc=0.799, loss=35.930, backward_time=0.231, grad_norm=32.341, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.589e-04, train_time=0.911
+[ip-10-0-216-33:0/16] 2024-02-29 11:30:45,005 (trainer:762) INFO: 15epoch:train:14001-14500batch: iter_time=1.467e-04, forward_time=0.209, loss_ctc=45.690, loss_att=31.580, acc=0.802, loss=35.813, backward_time=0.234, grad_norm=31.903, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.586e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-02-29 11:38:19,611 (trainer:762) INFO: 15epoch:train:14501-15000batch: iter_time=1.441e-04, forward_time=0.207, loss_ctc=44.266, loss_att=30.735, acc=0.802, loss=34.794, backward_time=0.235, grad_norm=31.053, clip=100.000, loss_scale=4.362e+33, optim_step_time=0.075, optim0_lr0=2.583e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-02-29 11:52:35,968 (trainer:361) INFO: 15epoch results: [train] iter_time=0.096, forward_time=0.208, loss_ctc=46.299, loss_att=31.920, acc=0.799, loss=36.234, backward_time=0.235, grad_norm=32.196, clip=100.000, loss_scale=3.851e+33, optim_step_time=0.075, optim0_lr0=2.627e-04, train_time=1.012, time=4 hours, 13 minutes and 15.81 seconds, total_count=225000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=46.031, cer_ctc=0.254, loss_att=35.781, acc=0.703, cer=0.341, wer=1.000, loss=38.856, time=14 minutes and 1.24 seconds, total_count=31995, gpu_max_cached_mem_GB=36.516
+[ip-10-0-216-33:0/16] 2024-02-29 11:52:45,582 (trainer:416) INFO: The best model has been updated: valid.acc, valid.total_count
+[ip-10-0-216-33:0/16] 2024-02-29 11:52:45,601 (trainer:470) INFO: The model files were removed: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/10epoch.pth
+[ip-10-0-216-33:0/16] 2024-02-29 11:52:45,601 (trainer:290) INFO: 16/45epoch started. Estimated time to finish: 5 days, 12 hours and 26 minutes
+[ip-10-0-216-33:0/16] 2024-02-29 11:52:45,608 (multiple_iter_factory:32) INFO: Building 0th iter-factory...
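The `Estimated time to finish` figure printed at each epoch boundary is, to a first approximation, the remaining epoch count times the measured wall time per epoch (for epoch 15, roughly 4 h 13 min of training plus about 14 min of validation). A back-of-the-envelope check of that arithmetic; this is a hypothetical helper, not the trainer's actual estimator:

```python
from datetime import timedelta

def eta(next_epoch: int, total_epochs: int, epoch_seconds: float) -> timedelta:
    """Remaining epochs (inclusive of the one starting) times epoch duration."""
    return timedelta(seconds=(total_epochs - next_epoch + 1) * epoch_seconds)

# Epoch 15 took ~4 h 27 min end to end (train + valid).
print(eta(16, 45, 4 * 3600 + 27 * 60))  # 5 days, 13:30:00 -- near the logged estimate
```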
+[ip-10-0-216-33:0/16] 2024-02-29 11:53:21,588 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-29 11:53:29,872 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-29 11:53:29,872 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2,
+[ip-10-0-216-33:0/16] 2024-02-29 11:53:29,877 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-29 12:07:23,652 (trainer:762) INFO: 16epoch:train:1-500batch: iter_time=0.833, forward_time=0.210, loss_ctc=45.586, loss_att=31.106, acc=0.808, loss=35.450, backward_time=0.233, grad_norm=32.273, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.581e-04, train_time=1.756
+[ip-10-0-216-33:0/16] 2024-02-29 12:15:02,054 (trainer:762) INFO: 16epoch:train:501-1000batch: iter_time=1.477e-04, forward_time=0.208, loss_ctc=44.590, loss_att=30.760, acc=0.798, loss=34.909, backward_time=0.229, grad_norm=33.143, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.578e-04, train_time=0.917
+[ip-10-0-216-33:0/16] 2024-02-29 12:15:32,222 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-02-29 12:22:40,227 (trainer:762) INFO: 16epoch:train:1001-1500batch: iter_time=1.483e-04, forward_time=0.209, loss_ctc=49.477, loss_att=34.189, acc=0.794, loss=38.776, backward_time=0.227, grad_norm=35.199, clip=100.000, loss_scale=2.763e+33, optim_step_time=0.075, optim0_lr0=2.575e-04, train_time=0.916
+[ip-10-0-216-33:0/16] 2024-02-29 12:30:18,629 (trainer:762) INFO: 16epoch:train:1501-2000batch: iter_time=1.457e-04, forward_time=0.209, loss_ctc=46.816, loss_att=32.081, acc=0.803, loss=36.501, backward_time=0.240, grad_norm=33.325, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.572e-04, train_time=0.917
+[ip-10-0-216-33:0/16] 2024-02-29 12:37:53,278 (trainer:762) INFO: 16epoch:train:2001-2500batch: iter_time=1.485e-04, forward_time=0.209, loss_ctc=44.470, loss_att=30.456, acc=0.809, loss=34.660, backward_time=0.236, grad_norm=30.769, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.569e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-02-29 12:45:34,053 (trainer:762) INFO: 16epoch:train:2501-3000batch: iter_time=1.501e-04, forward_time=0.209, loss_ctc=48.605, loss_att=33.627, acc=0.801, loss=38.121, backward_time=0.238, grad_norm=35.051, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.566e-04, train_time=0.921
+[ip-10-0-216-33:0/16] 2024-02-29 12:49:14,658 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-02-29 12:53:11,106 (trainer:762) INFO: 16epoch:train:3001-3500batch: iter_time=1.452e-04, forward_time=0.210, loss_ctc=48.203, loss_att=33.129, acc=0.798, loss=37.651, backward_time=0.233, grad_norm=32.731, clip=100.000, loss_scale=3.684e+33, optim_step_time=0.075, optim0_lr0=2.564e-04, train_time=0.914
+[ip-10-0-216-33:0/16] 2024-02-29 13:00:51,916 (trainer:762) INFO: 16epoch:train:3501-4000batch: iter_time=1.462e-04, forward_time=0.206, loss_ctc=49.027, loss_att=32.877, acc=0.798, loss=37.722, backward_time=0.235, grad_norm=32.418, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.561e-04, train_time=0.921
+[ip-10-0-216-33:0/16] 2024-02-29 13:08:34,120 (trainer:762) INFO: 16epoch:train:4001-4500batch: iter_time=1.455e-04, forward_time=0.209, loss_ctc=45.388, loss_att=31.030, acc=0.808, loss=35.337, backward_time=0.231, grad_norm=30.972, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.558e-04, train_time=0.924
+[ip-10-0-216-33:0/16] 2024-02-29 13:16:11,658 (trainer:762) INFO: 16epoch:train:4501-5000batch: iter_time=1.478e-04, forward_time=0.208, loss_ctc=47.713, loss_att=32.581, acc=0.798, loss=37.120, backward_time=0.233, grad_norm=32.607, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.555e-04, train_time=0.915
+[ip-10-0-216-33:0/16] 2024-02-29 13:16:16,197 (multiple_iter_factory:32) INFO: Building 1th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-29 13:16:53,470 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-29 13:17:02,297 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-29 13:17:02,298 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1,
+[ip-10-0-216-33:0/16] 2024-02-29 13:17:02,302 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-29 13:31:20,942 (trainer:762) INFO: 16epoch:train:5001-5500batch: iter_time=0.876, forward_time=0.209, loss_ctc=44.777, loss_att=30.527, acc=0.810, loss=34.802, backward_time=0.231, grad_norm=30.894, clip=100.000, loss_scale=3.931e+33, optim_step_time=0.075, optim0_lr0=2.552e-04, train_time=1.818
+[ip-10-0-216-33:0/16] 2024-02-29 13:38:53,721 (trainer:762) INFO: 16epoch:train:5501-6000batch: iter_time=1.428e-04, forward_time=0.208, loss_ctc=43.873, loss_att=30.173, acc=0.800, loss=34.283, backward_time=0.237, grad_norm=33.859, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.550e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-02-29 13:46:23,381 (trainer:762) INFO: 16epoch:train:6001-6500batch: iter_time=1.462e-04, forward_time=0.210, loss_ctc=49.051, loss_att=33.798, acc=0.797, loss=38.374, backward_time=0.237, grad_norm=33.185, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.547e-04, train_time=0.899
+[ip-10-0-216-33:0/16] 2024-02-29 13:53:55,881 (trainer:762) INFO: 16epoch:train:6501-7000batch: iter_time=1.436e-04, forward_time=0.209, loss_ctc=46.206, loss_att=31.597, acc=0.805, loss=35.980, backward_time=0.238, grad_norm=31.791, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.544e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-02-29 14:01:25,819 (trainer:762) INFO: 16epoch:train:7001-7500batch: iter_time=1.398e-04, forward_time=0.210, loss_ctc=43.965, loss_att=30.172, acc=0.810, loss=34.310, backward_time=0.237, grad_norm=31.106, clip=100.000, loss_scale=7.861e+33, optim_step_time=0.075, optim0_lr0=2.541e-04, train_time=0.900
+[ip-10-0-216-33:0/16] 2024-02-29 14:01:35,131 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-02-29 14:08:57,749 (trainer:762) INFO: 16epoch:train:7501-8000batch: iter_time=1.427e-04, forward_time=0.209, loss_ctc=47.733, loss_att=33.123, acc=0.804, loss=37.506, backward_time=0.237, grad_norm=31.903, clip=100.000, loss_scale=5.286e+33, optim_step_time=0.075, optim0_lr0=2.539e-04, train_time=0.904
+[ip-10-0-216-33:0/16] 2024-02-29 14:16:33,066 (trainer:762) INFO: 16epoch:train:8001-8500batch: iter_time=1.457e-04, forward_time=0.210, loss_ctc=47.652, loss_att=32.681, acc=0.800, loss=37.172, backward_time=0.233, grad_norm=32.193, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.536e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-02-29 14:23:45,676 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-02-29 14:24:13,186 (trainer:762) INFO: 16epoch:train:8501-9000batch: iter_time=1.428e-04, forward_time=0.207, loss_ctc=48.539, loss_att=32.466, acc=0.801, loss=37.288, backward_time=0.232, grad_norm=32.641, clip=100.000, loss_scale=5.036e+33, optim_step_time=0.075, optim0_lr0=2.533e-04, train_time=0.920
+[ip-10-0-216-33:0/16] 2024-02-29 14:31:48,248 (trainer:762) INFO: 16epoch:train:9001-9500batch: iter_time=1.405e-04, forward_time=0.210, loss_ctc=44.877, loss_att=30.729, acc=0.809, loss=34.973, backward_time=0.229, grad_norm=30.372, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.531e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-02-29 14:39:27,489 (trainer:762) INFO: 16epoch:train:9501-10000batch: iter_time=1.403e-04, forward_time=0.206, loss_ctc=46.766, loss_att=32.006, acc=0.800, loss=36.434, backward_time=0.229, grad_norm=32.226, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.528e-04, train_time=0.918
+[ip-10-0-216-33:0/16] 2024-02-29 14:39:32,321 (multiple_iter_factory:32) INFO: Building 2th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-29 14:40:10,350 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-29 14:40:19,258 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-29 14:40:19,258 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0,
+[ip-10-0-216-33:0/16] 2024-02-29 14:40:19,263 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-29 14:50:48,562 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-02-29 14:58:23,585 (trainer:762) INFO: 16epoch:train:10001-10500batch: iter_time=1.313, forward_time=0.208, loss_ctc=44.460, loss_att=30.292, acc=0.811, loss=34.543, backward_time=0.233, grad_norm=31.395, clip=100.000, loss_scale=1.301e+33, optim_step_time=0.075, optim0_lr0=2.525e-04, train_time=2.272
+[ip-10-0-216-33:0/16] 2024-02-29 15:05:57,620 (trainer:762) INFO: 16epoch:train:10501-11000batch: iter_time=1.494e-04, forward_time=0.209, loss_ctc=43.599, loss_att=30.075, acc=0.801, loss=34.132, backward_time=0.230, grad_norm=33.606, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=2.523e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-02-29 15:13:36,355 (trainer:762) INFO: 16epoch:train:11001-11500batch: iter_time=1.501e-04, forward_time=0.207, loss_ctc=48.706, loss_att=33.602, acc=0.798, loss=38.133, backward_time=0.232, grad_norm=32.752, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=2.520e-04, train_time=0.917
+[ip-10-0-216-33:0/16] 2024-02-29 15:21:08,884 (trainer:762) INFO: 16epoch:train:11501-12000batch: iter_time=1.464e-04, forward_time=0.210, loss_ctc=46.103, loss_att=31.414, acc=0.806, loss=35.821, backward_time=0.232, grad_norm=32.872, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=2.517e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-02-29 15:28:40,086 (trainer:762) INFO: 16epoch:train:12001-12500batch: iter_time=1.467e-04, forward_time=0.209, loss_ctc=43.752, loss_att=30.075, acc=0.811, loss=34.178, backward_time=0.231, grad_norm=30.701, clip=100.000, loss_scale=2.591e+33, optim_step_time=0.075, optim0_lr0=2.515e-04, train_time=0.902
+[ip-10-0-216-33:0/16] 2024-02-29 15:36:16,253 (trainer:762) INFO: 16epoch:train:12501-13000batch: iter_time=1.487e-04, forward_time=0.210, loss_ctc=47.597, loss_att=33.026, acc=0.805, loss=37.397, backward_time=0.230, grad_norm=31.558, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.512e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-02-29 15:43:51,970 (trainer:762) INFO: 16epoch:train:13001-13500batch: iter_time=1.529e-04, forward_time=0.210, loss_ctc=47.430, loss_att=32.531, acc=0.802, loss=37.001, backward_time=0.236, grad_norm=31.637, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.509e-04, train_time=0.911
+[ip-10-0-216-33:0/16] 2024-02-29 15:51:28,310 (trainer:762) INFO: 16epoch:train:13501-14000batch: iter_time=1.479e-04, forward_time=0.209, loss_ctc=48.417, loss_att=32.452, acc=0.801, loss=37.242, backward_time=0.231, grad_norm=32.235, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.507e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-02-29 15:59:01,207 (trainer:762) INFO: 16epoch:train:14001-14500batch: iter_time=1.478e-04, forward_time=0.210, loss_ctc=44.262, loss_att=30.388, acc=0.812, loss=34.551, backward_time=0.229, grad_norm=30.453, clip=100.000, loss_scale=5.182e+33, optim_step_time=0.075, optim0_lr0=2.504e-04, train_time=0.906
+[ip-10-0-216-33:0/16] 2024-02-29 16:06:34,850 (trainer:762) INFO: 16epoch:train:14501-15000batch: iter_time=1.478e-04, forward_time=0.210, loss_ctc=46.758, loss_att=32.038, acc=0.801, loss=36.454, backward_time=0.233, grad_norm=32.396, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.501e-04, train_time=0.907
+[ip-10-0-216-33:0/16] 2024-02-29 16:20:48,058 (trainer:361) INFO: 16epoch results: [train] iter_time=0.101, forward_time=0.209, loss_ctc=46.480, loss_att=31.833, acc=0.803, loss=36.227, backward_time=0.233, grad_norm=32.275, clip=100.000, loss_scale=3.548e+33, optim_step_time=0.075, optim0_lr0=2.540e-04, train_time=1.015, time=4 hours, 14 minutes and 4.34 seconds, total_count=240000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=43.907, cer_ctc=0.245, loss_att=32.328, acc=0.727, cer=0.289, wer=0.998, loss=35.802, time=13 minutes and 57.8 seconds, total_count=34128, gpu_max_cached_mem_GB=36.516
+[ip-10-0-216-33:0/16] 2024-02-29 16:20:57,536 (trainer:416) INFO: The best model has been updated: valid.acc, valid.total_count
+[ip-10-0-216-33:0/16] 2024-02-29 16:20:57,559 (trainer:470) INFO: The model files were removed: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/11epoch.pth
+[ip-10-0-216-33:0/16] 2024-02-29 16:20:57,559 (trainer:290) INFO: 17/45epoch started. Estimated time to finish: 5 days, 8 hours and 8 minutes
+[ip-10-0-216-33:0/16] 2024-02-29 16:20:57,566 (multiple_iter_factory:32) INFO: Building 0th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-29 16:21:33,444 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-29 16:21:41,312 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-29 16:21:41,313 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2,
+[ip-10-0-216-33:0/16] 2024-02-29 16:21:41,317 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-29 16:32:28,123 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
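Each epoch close pairs a `The best model has been updated: valid.acc, valid.total_count` line with a `The model files were removed` line: the trainer keeps the checkpoints still needed (best under each criterion plus the most recent ones) and deletes the rest. A hypothetical sketch of that retention policy, not ESPnet's code:

```python
from pathlib import Path

def prune_checkpoints(exp_dir: Path, keep_epochs: set, current_epoch: int) -> list:
    """Remove {N}epoch.pth files that are neither 'best' nor recent."""
    removed = []
    for ckpt in sorted(exp_dir.glob("*epoch.pth")):
        epoch = int(ckpt.stem.replace("epoch", ""))
        if epoch not in keep_epochs and epoch < current_epoch:
            ckpt.unlink()          # e.g. the "11epoch.pth" removal logged above
            removed.append(ckpt.name)
    return removed
```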
+[ip-10-0-216-33:0/16] 2024-02-29 16:35:34,685 (trainer:762) INFO: 17epoch:train:1-500batch: iter_time=0.845, forward_time=0.210, loss_ctc=44.419, loss_att=30.623, acc=0.806, loss=34.762, backward_time=0.235, grad_norm=31.517, clip=100.000, loss_scale=4.121e+33, optim_step_time=0.075, optim0_lr0=2.499e-04, train_time=1.754
+[ip-10-0-216-33:0/16] 2024-02-29 16:43:10,549 (trainer:762) INFO: 17epoch:train:501-1000batch: iter_time=1.492e-04, forward_time=0.208, loss_ctc=43.193, loss_att=29.559, acc=0.807, loss=33.649, backward_time=0.233, grad_norm=30.523, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.076, optim0_lr0=2.496e-04, train_time=0.911
+[ip-10-0-216-33:0/16] 2024-02-29 16:50:43,142 (trainer:762) INFO: 17epoch:train:1001-1500batch: iter_time=1.521e-04, forward_time=0.209, loss_ctc=44.344, loss_att=30.363, acc=0.802, loss=34.557, backward_time=0.233, grad_norm=33.539, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.494e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-02-29 16:58:19,755 (trainer:762) INFO: 17epoch:train:1501-2000batch: iter_time=1.477e-04, forward_time=0.209, loss_ctc=45.780, loss_att=31.445, acc=0.803, loss=35.745, backward_time=0.238, grad_norm=32.079, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.491e-04, train_time=0.913
+[ip-10-0-216-33:0/16] 2024-02-29 17:05:52,452 (trainer:762) INFO: 17epoch:train:2001-2500batch: iter_time=1.446e-04, forward_time=0.208, loss_ctc=43.906, loss_att=30.219, acc=0.801, loss=34.325, backward_time=0.240, grad_norm=32.683, clip=100.000, loss_scale=3.666e+33, optim_step_time=0.076, optim0_lr0=2.488e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-02-29 17:13:26,772 (trainer:762) INFO: 17epoch:train:2501-3000batch: iter_time=1.516e-04, forward_time=0.210, loss_ctc=47.110, loss_att=32.326, acc=0.802, loss=36.761, backward_time=0.232, grad_norm=33.149, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.076, optim0_lr0=2.486e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-02-29 17:21:05,355 (trainer:762) INFO: 17epoch:train:3001-3500batch: iter_time=1.474e-04, forward_time=0.209, loss_ctc=47.253, loss_att=32.316, acc=0.805, loss=36.797, backward_time=0.236, grad_norm=34.024, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.076, optim0_lr0=2.483e-04, train_time=0.917
+[ip-10-0-216-33:0/16] 2024-02-29 17:28:40,213 (trainer:762) INFO: 17epoch:train:3501-4000batch: iter_time=1.448e-04, forward_time=0.209, loss_ctc=45.861, loss_att=31.239, acc=0.805, loss=35.626, backward_time=0.239, grad_norm=32.415, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.076, optim0_lr0=2.481e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-02-29 17:36:18,450 (trainer:762) INFO: 17epoch:train:4001-4500batch: iter_time=1.488e-04, forward_time=0.210, loss_ctc=45.194, loss_att=31.308, acc=0.804, loss=35.474, backward_time=0.235, grad_norm=37.064, clip=100.000, loss_scale=7.332e+33, optim_step_time=0.075, optim0_lr0=2.478e-04, train_time=0.916
+[ip-10-0-216-33:0/16] 2024-02-29 17:40:51,753 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-02-29 17:43:51,108 (trainer:762) INFO: 17epoch:train:4501-5000batch: iter_time=1.451e-04, forward_time=0.210, loss_ctc=44.455, loss_att=30.365, acc=0.805, loss=34.592, backward_time=0.234, grad_norm=33.685, clip=100.000, loss_scale=8.356e+33, optim_step_time=0.075, optim0_lr0=2.476e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-02-29 17:43:55,914 (multiple_iter_factory:32) INFO: Building 1th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-29 17:44:33,252 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-29 17:44:41,252 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-29 17:44:41,253 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0,
+[ip-10-0-216-33:0/16] 2024-02-29 17:44:41,258 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-29 17:59:10,998 (trainer:762) INFO: 17epoch:train:5001-5500batch: iter_time=0.881, forward_time=0.209, loss_ctc=44.000, loss_att=30.526, acc=0.807, loss=34.568, backward_time=0.230, grad_norm=31.533, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.473e-04, train_time=1.840
+[ip-10-0-216-33:0/16] 2024-02-29 18:06:49,657 (trainer:762) INFO: 17epoch:train:5501-6000batch: iter_time=1.436e-04, forward_time=0.206, loss_ctc=42.599, loss_att=29.250, acc=0.809, loss=33.255, backward_time=0.237, grad_norm=30.896, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.471e-04, train_time=0.917
+[ip-10-0-216-33:0/16] 2024-02-29 18:14:21,787 (trainer:762) INFO: 17epoch:train:6001-6500batch: iter_time=1.470e-04, forward_time=0.209, loss_ctc=44.240, loss_att=30.187, acc=0.803, loss=34.403, backward_time=0.235, grad_norm=33.668, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.468e-04, train_time=0.904
+[ip-10-0-216-33:0/16] 2024-02-29 18:21:55,853 (trainer:762) INFO: 17epoch:train:6501-7000batch: iter_time=1.514e-04, forward_time=0.207, loss_ctc=45.145, loss_att=31.001, acc=0.805, loss=35.245, backward_time=0.232, grad_norm=32.209, clip=100.000, loss_scale=7.217e+33, optim_step_time=0.075, optim0_lr0=2.466e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-02-29 18:29:24,866 (trainer:762) INFO: 17epoch:train:7001-7500batch: iter_time=1.522e-04, forward_time=0.208, loss_ctc=43.583, loss_att=30.092, acc=0.801, loss=34.139, backward_time=0.234, grad_norm=32.585, clip=100.000, loss_scale=1.038e+34, optim_step_time=0.075, optim0_lr0=2.463e-04, train_time=0.898
+[ip-10-0-216-33:0/16] 2024-02-29 18:33:34,561 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-02-29 18:36:56,755 (trainer:762) INFO: 17epoch:train:7501-8000batch: iter_time=1.534e-04, forward_time=0.209, loss_ctc=46.432, loss_att=31.922, acc=0.804, loss=36.275, backward_time=0.235, grad_norm=33.129, clip=100.000, loss_scale=8.043e+33, optim_step_time=0.075, optim0_lr0=2.461e-04, train_time=0.904
+[ip-10-0-216-33:0/16] 2024-02-29 18:44:30,967 (trainer:762) INFO: 17epoch:train:8001-8500batch: iter_time=1.503e-04, forward_time=0.210, loss_ctc=46.788, loss_att=32.068, acc=0.807, loss=36.484, backward_time=0.239, grad_norm=32.685, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.458e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-02-29 18:51:59,397 (trainer:762) INFO: 17epoch:train:8501-9000batch: iter_time=1.538e-04, forward_time=0.210, loss_ctc=45.428, loss_att=30.924, acc=0.807, loss=35.276, backward_time=0.234, grad_norm=31.077, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.456e-04, train_time=0.897
+[ip-10-0-216-33:0/16] 2024-02-29 18:57:41,054 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-02-29 18:59:31,788 (trainer:762) INFO: 17epoch:train:9001-9500batch: iter_time=1.490e-04, forward_time=0.209, loss_ctc=44.566, loss_att=30.966, acc=0.806, loss=35.046, backward_time=0.232, grad_norm=32.361, clip=100.000, loss_scale=4.558e+33, optim_step_time=0.075, optim0_lr0=2.453e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-02-29 19:07:04,356 (trainer:762) INFO: 17epoch:train:9501-10000batch: iter_time=1.437e-04, forward_time=0.209, loss_ctc=43.944, loss_att=30.080, acc=0.806, loss=34.239, backward_time=0.239, grad_norm=33.593, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.451e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-02-29 19:07:09,976 (multiple_iter_factory:32) INFO: Building 2th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-29 19:07:46,651 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-29 19:07:54,747 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-29 19:07:54,748 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1,
+[ip-10-0-216-33:0/16] 2024-02-29 19:07:54,752 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-29 19:22:17,782 (trainer:762) INFO: 17epoch:train:10001-10500batch: iter_time=0.873, forward_time=0.211, loss_ctc=43.669, loss_att=30.331, acc=0.808, loss=34.333, backward_time=0.237, grad_norm=31.640, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.448e-04, train_time=1.827
+[ip-10-0-216-33:0/16] 2024-02-29 19:29:50,337 (trainer:762) INFO: 17epoch:train:10501-11000batch: iter_time=1.503e-04, forward_time=0.208, loss_ctc=42.399, loss_att=29.104, acc=0.810, loss=33.093, backward_time=0.233, grad_norm=30.816, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.446e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-02-29 19:37:22,238 (trainer:762) INFO: 17epoch:train:11001-11500batch: iter_time=1.532e-04, forward_time=0.210, loss_ctc=43.426, loss_att=29.880, acc=0.805, loss=33.944, backward_time=0.237, grad_norm=31.936, clip=100.000, loss_scale=3.230e+33, optim_step_time=0.075, optim0_lr0=2.443e-04, train_time=0.904
+[ip-10-0-216-33:0/16] 2024-02-29 19:40:46,105 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-02-29 19:45:00,071 (trainer:762) INFO: 17epoch:train:11501-12000batch: iter_time=1.481e-04, forward_time=0.208, loss_ctc=44.807, loss_att=30.784, acc=0.806, loss=34.991, backward_time=0.232, grad_norm=32.041, clip=100.000, loss_scale=3.741e+33, optim_step_time=0.075, optim0_lr0=2.441e-04, train_time=0.915
+[ip-10-0-216-33:0/16] 2024-02-29 19:52:33,048 (trainer:762) INFO: 17epoch:train:12001-12500batch: iter_time=1.493e-04, forward_time=0.211, loss_ctc=43.122, loss_att=29.873, acc=0.802, loss=33.848, backward_time=0.230, grad_norm=31.579, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.439e-04, train_time=0.906
+[ip-10-0-216-33:0/16] 2024-02-29 20:00:08,317 (trainer:762) INFO: 17epoch:train:12501-13000batch: iter_time=1.464e-04, forward_time=0.209, loss_ctc=46.245, loss_att=31.869, acc=0.804, loss=36.182, backward_time=0.234, grad_norm=33.401, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.436e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-02-29 20:07:51,463 (trainer:762) INFO: 17epoch:train:13001-13500batch: iter_time=0.002, forward_time=0.209, loss_ctc=46.492, loss_att=31.844, acc=0.807, loss=36.238, backward_time=0.238, grad_norm=33.016, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.434e-04, train_time=0.926
+[ip-10-0-216-33:0/16] 2024-02-29 20:15:26,012 (trainer:762) INFO: 17epoch:train:13501-14000batch: iter_time=1.474e-04, forward_time=0.210, loss_ctc=45.072, loss_att=30.757, acc=0.807, loss=35.052, backward_time=0.226, grad_norm=31.618, clip=100.000, loss_scale=4.045e+33, optim_step_time=0.075, optim0_lr0=2.431e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-02-29 20:22:58,984 (trainer:762) INFO: 17epoch:train:14001-14500batch: iter_time=1.518e-04, forward_time=0.209, loss_ctc=44.353, loss_att=30.884, acc=0.806, loss=34.925, backward_time=0.235, grad_norm=32.295, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.429e-04, train_time=0.906
+[ip-10-0-216-33:0/16] 2024-02-29 20:26:25,221 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-02-29 20:30:34,484 (trainer:762) INFO: 17epoch:train:14501-15000batch: iter_time=1.480e-04, forward_time=0.210, loss_ctc=43.543, loss_att=29.897, acc=0.807, loss=33.991, backward_time=0.237, grad_norm=32.584, clip=100.000, loss_scale=3.777e+33, optim_step_time=0.075, optim0_lr0=2.427e-04, train_time=0.911
+[ip-10-0-216-33:0/16] 2024-02-29 20:45:00,576 (trainer:361) INFO: 17epoch results: [train] iter_time=0.087, forward_time=0.209, loss_ctc=44.712, loss_att=30.733, acc=0.805, loss=34.927, backward_time=0.235, grad_norm=32.511, clip=100.000, loss_scale=4.618e+33, optim_step_time=0.075, optim0_lr0=2.462e-04, train_time=0.998, time=4 hours, 9 minutes and 52.01 seconds, total_count=255000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=44.011, cer_ctc=0.240, loss_att=33.366, acc=0.721, cer=0.296, wer=0.998, loss=36.560, time=14 minutes and 10.68 seconds, total_count=36261, gpu_max_cached_mem_GB=36.516
+[ip-10-0-216-33:0/16] 2024-02-29 20:45:11,054 (trainer:416) INFO: The best model has been updated: valid.total_count
+[ip-10-0-216-33:0/16] 2024-02-29 20:45:11,077 (trainer:470) INFO: The model files were removed: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/12epoch.pth
+[ip-10-0-216-33:0/16] 2024-02-29 20:45:11,078 (trainer:290) INFO: 18/45epoch started. Estimated time to finish: 5 days, 3 hours and 41 minutes
+[ip-10-0-216-33:0/16] 2024-02-29 20:45:11,085 (multiple_iter_factory:32) INFO: Building 0th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-29 20:45:47,196 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-29 20:45:55,045 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-29 20:45:55,045 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0,
+[ip-10-0-216-33:0/16] 2024-02-29 20:45:55,049 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-29 20:59:53,250 (trainer:762) INFO: 18epoch:train:1-500batch: iter_time=0.843, forward_time=0.210, loss_ctc=47.330, loss_att=32.290, acc=0.813, loss=36.802, backward_time=0.229, grad_norm=32.370, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.424e-04, train_time=1.764
+[ip-10-0-216-33:0/16] 2024-02-29 21:07:28,982 (trainer:762) INFO: 18epoch:train:501-1000batch: iter_time=1.498e-04, forward_time=0.210, loss_ctc=46.655, loss_att=32.317, acc=0.801, loss=36.619, backward_time=0.236, grad_norm=35.183, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.422e-04, train_time=0.911
+[ip-10-0-216-33:0/16] 2024-02-29 21:15:00,755 (trainer:762) INFO: 18epoch:train:1001-1500batch: iter_time=1.500e-04, forward_time=0.209, loss_ctc=43.989, loss_att=30.015, acc=0.804, loss=34.207, backward_time=0.228, grad_norm=31.923, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.420e-04, train_time=0.903
+[ip-10-0-216-33:0/16] 2024-02-29 21:22:34,052 (trainer:762) INFO: 18epoch:train:1501-2000batch: iter_time=1.489e-04, forward_time=0.210, loss_ctc=43.761, loss_att=29.997, acc=0.810, loss=34.126, backward_time=0.242, grad_norm=32.475, clip=100.000, loss_scale=4.008e+33, optim_step_time=0.075, optim0_lr0=2.417e-04, train_time=0.906
+[ip-10-0-216-33:0/16] 2024-02-29 21:30:04,954 (trainer:762) INFO: 18epoch:train:2001-2500batch: iter_time=1.471e-04, forward_time=0.210, loss_ctc=45.856, loss_att=31.442, acc=0.801, loss=35.766, backward_time=0.235, grad_norm=32.977, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.415e-04, train_time=0.902
+[ip-10-0-216-33:0/16] 2024-02-29 21:33:39,735 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-02-29 21:37:36,524 (trainer:762) INFO: 18epoch:train:2501-3000batch: iter_time=1.473e-04, forward_time=0.209, loss_ctc=45.251, loss_att=30.738, acc=0.803, loss=35.092, backward_time=0.233, grad_norm=33.480, clip=100.000, loss_scale=3.829e+33, optim_step_time=0.075, optim0_lr0=2.412e-04, train_time=0.903
+[ip-10-0-216-33:0/16] 2024-02-29 21:45:11,838 (trainer:762) INFO: 18epoch:train:3001-3500batch: iter_time=1.520e-04, forward_time=0.209, loss_ctc=42.940, loss_att=29.202, acc=0.810, loss=33.324, backward_time=0.239, grad_norm=31.638, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.410e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-02-29 21:52:46,331 (trainer:762) INFO: 18epoch:train:3501-4000batch: iter_time=1.504e-04, forward_time=0.210, loss_ctc=43.931, loss_att=29.574, acc=0.813, loss=33.881, backward_time=0.237, grad_norm=31.627, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.408e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-02-29 22:00:20,226 (trainer:762) INFO: 18epoch:train:4001-4500batch: iter_time=1.573e-04, forward_time=0.209, loss_ctc=45.080, loss_att=31.655, acc=0.801, loss=35.683, backward_time=0.236, grad_norm=34.147, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.405e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-02-29 22:07:03,476 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-02-29 22:07:55,961 (trainer:762) INFO: 18epoch:train:4501-5000batch: iter_time=1.457e-04, forward_time=0.209, loss_ctc=44.952, loss_att=30.811, acc=0.806, loss=35.053, backward_time=0.233, grad_norm=30.845, clip=100.000, loss_scale=3.652e+33, optim_step_time=0.075, optim0_lr0=2.403e-04, train_time=0.911
+[ip-10-0-216-33:0/16] 2024-02-29 22:08:00,736 (multiple_iter_factory:32) INFO: Building 1th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-29 22:08:37,629 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-29 22:08:45,603 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-29 22:08:45,603 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2,
+[ip-10-0-216-33:0/16] 2024-02-29 22:08:45,607 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-29 22:23:12,430 (trainer:762) INFO: 18epoch:train:5001-5500batch: iter_time=0.895, forward_time=0.208, loss_ctc=46.883, loss_att=31.989, acc=0.814, loss=36.457, backward_time=0.233, grad_norm=38.814, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.401e-04, train_time=1.833
+[ip-10-0-216-33:0/16] 2024-02-29 22:30:45,701 (trainer:762) INFO: 18epoch:train:5501-6000batch: iter_time=1.472e-04, forward_time=0.209, loss_ctc=46.410, loss_att=32.141, acc=0.803, loss=36.422, backward_time=0.235, grad_norm=35.189, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.399e-04, train_time=0.906
+[ip-10-0-216-33:0/16] 2024-02-29 22:38:26,629 (trainer:762) INFO: 18epoch:train:6001-6500batch: iter_time=1.487e-04, forward_time=0.205, loss_ctc=43.634, loss_att=29.830, acc=0.805, loss=33.971, backward_time=0.235, grad_norm=30.855, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.396e-04, train_time=0.922
+[ip-10-0-216-33:0/16] 2024-02-29 22:46:05,623 (trainer:762) INFO: 18epoch:train:6501-7000batch: iter_time=1.494e-04, forward_time=0.207, loss_ctc=43.538, loss_att=29.796, acc=0.811, loss=33.919, backward_time=0.236, grad_norm=30.094, clip=100.000, loss_scale=2.897e+33, optim_step_time=0.075, optim0_lr0=2.394e-04, train_time=0.918
+[ip-10-0-216-33:0/16] 2024-02-29 22:49:35,811 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-02-29 22:53:35,414 (trainer:762) INFO: 18epoch:train:7001-7500batch: iter_time=1.448e-04, forward_time=0.208, loss_ctc=45.660, loss_att=31.290, acc=0.802, loss=35.601, backward_time=0.235, grad_norm=32.817, clip=100.000, loss_scale=3.808e+33, optim_step_time=0.075, optim0_lr0=2.392e-04, train_time=0.899
+[ip-10-0-216-33:0/16] 2024-02-29 23:01:08,127 (trainer:762) INFO: 18epoch:train:7501-8000batch: iter_time=1.490e-04, forward_time=0.209, loss_ctc=44.630, loss_att=30.403, acc=0.804, loss=34.671, backward_time=0.238, grad_norm=33.680, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.389e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-02-29 23:08:38,263 (trainer:762) INFO: 18epoch:train:8001-8500batch: iter_time=1.513e-04, forward_time=0.209, loss_ctc=42.376, loss_att=28.937, acc=0.811, loss=32.969, backward_time=0.236, grad_norm=31.290, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.387e-04, train_time=0.900
+[ip-10-0-216-33:0/16] 2024-02-29 23:16:09,443 (trainer:762) INFO: 18epoch:train:8501-9000batch: iter_time=1.453e-04, forward_time=0.209, loss_ctc=43.470, loss_att=29.291, acc=0.813, loss=33.545, backward_time=0.230, grad_norm=30.921, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.385e-04, train_time=0.902
+[ip-10-0-216-33:0/16] 2024-02-29 23:23:45,484 (trainer:762) INFO: 18epoch:train:9001-9500batch: iter_time=1.424e-04, forward_time=0.208, loss_ctc=44.365, loss_att=31.367, acc=0.803, loss=35.267, backward_time=0.231, grad_norm=34.324, clip=100.000, loss_scale=3.977e+33, optim_step_time=0.075, optim0_lr0=2.383e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-02-29 23:31:21,850 (trainer:762) INFO: 18epoch:train:9501-10000batch: iter_time=1.427e-04, forward_time=0.207, loss_ctc=44.449, loss_att=30.581, acc=0.807, loss=34.741, backward_time=0.234, grad_norm=30.266, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.380e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-02-29 23:31:26,963 (multiple_iter_factory:32) INFO: Building 2th iter-factory...
+[ip-10-0-216-33:0/16] 2024-02-29 23:32:04,772 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-02-29 23:32:12,757 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-02-29 23:32:12,757 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1,
+[ip-10-0-216-33:0/16] 2024-02-29 23:32:12,762 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-02-29 23:46:40,245 (trainer:762) INFO: 18epoch:train:10001-10500batch: iter_time=0.875, forward_time=0.210, loss_ctc=46.471, loss_att=31.565, acc=0.816, loss=36.037, backward_time=0.239, grad_norm=32.170, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.378e-04, train_time=1.837
+[ip-10-0-216-33:0/16] 2024-02-29 23:54:18,540 (trainer:762) INFO: 18epoch:train:10501-11000batch: iter_time=1.478e-04, forward_time=0.209, loss_ctc=45.830, loss_att=31.864, acc=0.804, loss=36.054, backward_time=0.237, grad_norm=34.804, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.376e-04, train_time=0.916
+[ip-10-0-216-33:0/16] 2024-03-01 00:01:57,221 (trainer:762) INFO: 18epoch:train:11001-11500batch: iter_time=1.538e-04, forward_time=0.208, loss_ctc=43.288, loss_att=29.656, acc=0.806, loss=33.746, backward_time=0.231, grad_norm=31.101, clip=100.000, loss_scale=7.955e+33, optim_step_time=0.075, optim0_lr0=2.374e-04, train_time=0.917
+[ip-10-0-216-33:0/16] 2024-03-01 00:04:43,175 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-01 00:09:35,989 (trainer:762) INFO: 18epoch:train:11501-12000batch: iter_time=1.462e-04, forward_time=0.208, loss_ctc=43.130, loss_att=29.614, acc=0.812, loss=33.669, backward_time=0.233, grad_norm=30.614, clip=100.000, loss_scale=7.055e+33, optim_step_time=0.075, optim0_lr0=2.371e-04, train_time=0.917
+[ip-10-0-216-33:0/16] 2024-03-01 00:17:06,427 (trainer:762) INFO: 18epoch:train:12001-12500batch: iter_time=1.475e-04, forward_time=0.210, loss_ctc=45.291, loss_att=31.066, acc=0.803, loss=35.334, backward_time=0.233, grad_norm=33.412, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.369e-04, train_time=0.901
+[ip-10-0-216-33:0/16] 2024-03-01 00:24:37,867 (trainer:762) INFO: 18epoch:train:12501-13000batch: iter_time=1.498e-04, forward_time=0.210, loss_ctc=44.527, loss_att=30.416, acc=0.805, loss=34.649, backward_time=0.232, grad_norm=33.792, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.367e-04, train_time=0.903
+[ip-10-0-216-33:0/16] 2024-03-01 00:32:13,113 (trainer:762) INFO: 18epoch:train:13001-13500batch: iter_time=1.503e-04, forward_time=0.209, loss_ctc=42.184, loss_att=28.849, acc=0.812, loss=32.849, backward_time=0.231, grad_norm=33.365, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.365e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-03-01 00:35:23,133 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-01 00:35:34,701 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-01 00:39:45,189 (trainer:762) INFO: 18epoch:train:13501-14000batch: iter_time=1.507e-04, forward_time=0.208, loss_ctc=43.061, loss_att=29.144, acc=0.814, loss=33.319, backward_time=0.232, grad_norm=30.780, clip=100.000, loss_scale=4.019e+33, optim_step_time=0.075, optim0_lr0=2.363e-04, train_time=0.904
+[ip-10-0-216-33:0/16] 2024-03-01 00:47:16,528 (trainer:762) INFO: 18epoch:train:14001-14500batch: iter_time=1.496e-04, forward_time=0.210, loss_ctc=44.281, loss_att=31.251, acc=0.803, loss=35.160, backward_time=0.233, grad_norm=34.667, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.360e-04, train_time=0.902
+[ip-10-0-216-33:0/16] 2024-03-01 00:54:53,359 (trainer:762) INFO: 18epoch:train:14501-15000batch: iter_time=1.496e-04, forward_time=0.207, loss_ctc=44.169, loss_att=30.375, acc=0.808, loss=34.513, backward_time=0.236, grad_norm=31.594, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.358e-04, train_time=0.913
+[ip-10-0-216-33:0/16] 2024-03-01 01:08:57,988 (trainer:361) INFO: 18epoch results: [train] iter_time=0.087, forward_time=0.209, loss_ctc=44.580, loss_att=30.582, acc=0.807, loss=34.781, backward_time=0.234, grad_norm=32.708, clip=100.000, loss_scale=3.796e+33, optim_step_time=0.075, optim0_lr0=2.391e-04, train_time=0.999, time=4 hours, 9 minutes and 57.68 seconds, total_count=270000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=42.581, cer_ctc=0.236, loss_att=31.416, acc=0.742, cer=0.229, wer=0.993, loss=34.765, time=13 minutes and 48.91 seconds, total_count=38394, gpu_max_cached_mem_GB=36.516
+[ip-10-0-216-33:0/16] 2024-03-01 01:09:07,824 (trainer:416) INFO: The best model has been updated: valid.acc, valid.total_count
+[ip-10-0-216-33:0/16] 2024-03-01 01:09:07,850 (trainer:470) INFO: The model files were removed: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/13epoch.pth
+[ip-10-0-216-33:0/16] 2024-03-01 01:09:07,851 (trainer:290) INFO: 19/45epoch started. Estimated time to finish: 4 days, 23 hours and 14 minutes
+[ip-10-0-216-33:0/16] 2024-03-01 01:09:07,858 (multiple_iter_factory:32) INFO: Building 0th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-01 01:09:43,763 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-01 01:09:52,075 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-01 01:09:52,075 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1,
+[ip-10-0-216-33:0/16] 2024-03-01 01:09:52,080 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-01 01:23:47,886 (trainer:762) INFO: 19epoch:train:1-500batch: iter_time=0.842, forward_time=0.209, loss_ctc=45.516, loss_att=31.230, acc=0.804, loss=35.516, backward_time=0.227, grad_norm=34.222, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.356e-04, train_time=1.760
+[ip-10-0-216-33:0/16] 2024-03-01 01:31:22,351 (trainer:762) INFO: 19epoch:train:501-1000batch: iter_time=1.464e-04, forward_time=0.206, loss_ctc=41.128, loss_att=28.366, acc=0.804, loss=32.195, backward_time=0.229, grad_norm=32.023, clip=100.000, loss_scale=4.045e+33, optim_step_time=0.075, optim0_lr0=2.354e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-03-01 01:38:55,477 (trainer:762) INFO: 19epoch:train:1001-1500batch: iter_time=1.471e-04, forward_time=0.210, loss_ctc=47.559, loss_att=32.597, acc=0.806, loss=37.085, backward_time=0.236, grad_norm=34.930, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.352e-04, train_time=0.906
+[ip-10-0-216-33:0/16] 2024-03-01 01:40:23,792 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-01 01:46:30,260 (trainer:762) INFO: 19epoch:train:1501-2000batch: iter_time=1.432e-04, forward_time=0.210, loss_ctc=45.119, loss_att=30.828, acc=0.810, loss=35.116, backward_time=0.235, grad_norm=33.248, clip=100.000, loss_scale=3.090e+33, optim_step_time=0.075, optim0_lr0=2.350e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-03-01 01:54:04,625 (trainer:762) INFO: 19epoch:train:2001-2500batch: iter_time=1.472e-04, forward_time=0.209, loss_ctc=45.629, loss_att=31.403, acc=0.801, loss=35.671, backward_time=0.233, grad_norm=35.751, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.347e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-03-01 02:01:42,312 (trainer:762) INFO: 19epoch:train:2501-3000batch: iter_time=1.449e-04, forward_time=0.209, loss_ctc=45.530, loss_att=30.895, acc=0.812, loss=35.285, backward_time=0.230, grad_norm=32.773, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.345e-04, train_time=0.915
+[ip-10-0-216-33:0/16] 2024-03-01 02:09:16,643 (trainer:762) INFO: 19epoch:train:3001-3500batch: iter_time=1.450e-04, forward_time=0.209, loss_ctc=44.924, loss_att=30.688, acc=0.805, loss=34.959, backward_time=0.230, grad_norm=33.373, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.343e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-03-01 02:16:47,546 (trainer:762) INFO: 19epoch:train:3501-4000batch: iter_time=1.490e-04, forward_time=0.208, loss_ctc=44.148, loss_att=30.167, acc=0.814, loss=34.361, backward_time=0.237, grad_norm=32.178, clip=100.000, loss_scale=4.694e+33, optim_step_time=0.075, optim0_lr0=2.341e-04, train_time=0.902
+[ip-10-0-216-33:0/16] 2024-03-01 02:24:25,604 (trainer:762) INFO: 19epoch:train:4001-4500batch: iter_time=1.466e-04, forward_time=0.208, loss_ctc=44.755, loss_att=30.444, acc=0.809, loss=34.738, backward_time=0.234, grad_norm=31.797, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.339e-04, train_time=0.916
+[ip-10-0-216-33:0/16] 2024-03-01 02:26:19,873 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-01 02:32:06,375 (trainer:762) INFO: 19epoch:train:4501-5000batch: iter_time=1.476e-04, forward_time=0.207, loss_ctc=43.818, loss_att=30.469, acc=0.812, loss=34.474, backward_time=0.234, grad_norm=33.045, clip=100.000, loss_scale=3.236e+33, optim_step_time=0.075, optim0_lr0=2.337e-04, train_time=0.921
+[ip-10-0-216-33:0/16] 2024-03-01 02:32:11,031 (multiple_iter_factory:32) INFO: Building 1th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-01 02:32:47,607 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-01 02:32:56,335 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-01 02:32:56,335 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0,
+[ip-10-0-216-33:0/16] 2024-03-01 02:32:56,340 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-01 02:47:11,995 (trainer:762) INFO: 19epoch:train:5001-5500batch: iter_time=0.882, forward_time=0.207, loss_ctc=44.935, loss_att=30.840, acc=0.805, loss=35.068, backward_time=0.233, grad_norm=33.424, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.335e-04, train_time=1.811
+[ip-10-0-216-33:0/16] 2024-03-01 02:54:49,564 (trainer:762) INFO: 19epoch:train:5501-6000batch: iter_time=1.460e-04, forward_time=0.208, loss_ctc=41.009, loss_att=28.261, acc=0.805, loss=32.086, backward_time=0.233, grad_norm=32.371, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.332e-04, train_time=0.915
+[ip-10-0-216-33:0/16] 2024-03-01 03:02:20,378 (trainer:762) INFO: 19epoch:train:6001-6500batch: iter_time=1.452e-04, forward_time=0.210, loss_ctc=47.030, loss_att=32.305, acc=0.808, loss=36.723, backward_time=0.237, grad_norm=35.404, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.330e-04, train_time=0.901
+[ip-10-0-216-33:0/16] 2024-03-01 03:09:57,591 (trainer:762) INFO: 19epoch:train:6501-7000batch: iter_time=1.411e-04, forward_time=0.207, loss_ctc=44.571, loss_att=30.493, acc=0.812, loss=34.716, backward_time=0.233, grad_norm=33.316, clip=100.000, loss_scale=4.548e+33, optim_step_time=0.075, optim0_lr0=2.328e-04, train_time=0.914
+[ip-10-0-216-33:0/16] 2024-03-01 03:17:34,556 (trainer:762) INFO: 19epoch:train:7001-7500batch: iter_time=1.407e-04, forward_time=0.207, loss_ctc=45.102, loss_att=31.048, acc=0.803, loss=35.265, backward_time=0.239, grad_norm=34.342, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.326e-04, train_time=0.914
+[ip-10-0-216-33:0/16] 2024-03-01 03:20:50,921 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-01 03:25:12,113 (trainer:762) INFO: 19epoch:train:7501-8000batch: iter_time=1.402e-04, forward_time=0.207, loss_ctc=45.073, loss_att=30.689, acc=0.814, loss=35.004, backward_time=0.235, grad_norm=33.272, clip=100.000, loss_scale=3.694e+33, optim_step_time=0.075, optim0_lr0=2.324e-04, train_time=0.915
+[ip-10-0-216-33:0/16] 2024-03-01 03:32:49,444 (trainer:762) INFO: 19epoch:train:8001-8500batch: iter_time=1.460e-04, forward_time=0.207, loss_ctc=44.638, loss_att=30.382, acc=0.807, loss=34.659, backward_time=0.231, grad_norm=34.361, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.322e-04, train_time=0.914
+[ip-10-0-216-33:0/16] 2024-03-01 03:40:24,069 (trainer:762) INFO: 19epoch:train:8501-9000batch: iter_time=1.428e-04, forward_time=0.207, loss_ctc=43.750, loss_att=29.870, acc=0.815, loss=34.034, backward_time=0.234, grad_norm=33.019, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.320e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-03-01 03:48:01,315 (trainer:762) INFO: 19epoch:train:9001-9500batch: iter_time=1.433e-04, forward_time=0.209, loss_ctc=44.477, loss_att=30.292, acc=0.811, loss=34.547, backward_time=0.229, grad_norm=32.102, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.318e-04, train_time=0.914
+[ip-10-0-216-33:0/16] 2024-03-01 03:51:21,183 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-01 03:55:33,770 (trainer:762) INFO: 19epoch:train:9501-10000batch: iter_time=1.448e-04, forward_time=0.207, loss_ctc=43.573, loss_att=30.305, acc=0.813, loss=34.285, backward_time=0.234, grad_norm=31.924, clip=100.000, loss_scale=2.627e+33, optim_step_time=0.075, optim0_lr0=2.316e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-03-01 03:55:38,835 (multiple_iter_factory:32) INFO: Building 2th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-01 03:56:15,813 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-01 03:56:24,417 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-01 03:56:24,417 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2,
+[ip-10-0-216-33:0/16] 2024-03-01 03:56:24,422 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-01 04:10:49,143 (trainer:762) INFO: 19epoch:train:10001-10500batch: iter_time=0.905, forward_time=0.208, loss_ctc=44.730, loss_att=30.763, acc=0.807, loss=34.953, backward_time=0.232, grad_norm=33.258, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.314e-04, train_time=1.831
+[ip-10-0-216-33:0/16] 2024-03-01 04:18:19,204 (trainer:762) INFO: 19epoch:train:10501-11000batch: iter_time=1.492e-04, forward_time=0.210, loss_ctc=40.639, loss_att=28.032, acc=0.807, loss=31.814, backward_time=0.232, grad_norm=31.855, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.312e-04, train_time=0.900
+[ip-10-0-216-33:0/16] 2024-03-01 04:25:53,125 (trainer:762) INFO: 19epoch:train:11001-11500batch: iter_time=1.487e-04, forward_time=0.208, loss_ctc=46.971, loss_att=32.277, acc=0.808, loss=36.685, backward_time=0.230, grad_norm=35.247, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.310e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-03-01 04:33:31,572 (trainer:762) INFO: 19epoch:train:11501-12000batch: iter_time=1.567e-04, forward_time=0.208, loss_ctc=44.411, loss_att=30.300, acc=0.813, loss=34.533, backward_time=0.241, grad_norm=37.587, clip=100.000, loss_scale=4.055e+33, optim_step_time=0.075, optim0_lr0=2.307e-04, train_time=0.917
+[ip-10-0-216-33:0/16] 2024-03-01 04:41:06,242 (trainer:762) INFO: 19epoch:train:12001-12500batch: iter_time=1.536e-04, forward_time=0.209, loss_ctc=44.763, loss_att=30.920, acc=0.804, loss=35.073, backward_time=0.236, grad_norm=34.622, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.305e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-03-01 04:42:57,320 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-01 04:48:37,451 (trainer:762) INFO: 19epoch:train:12501-13000batch: iter_time=1.520e-04, forward_time=0.210, loss_ctc=44.891, loss_att=30.617, acc=0.814, loss=34.899, backward_time=0.228, grad_norm=67.651, clip=100.000, loss_scale=3.231e+33, optim_step_time=0.075, optim0_lr0=2.303e-04, train_time=0.902
+[ip-10-0-216-33:0/16] 2024-03-01 04:56:21,407 (trainer:762) INFO: 19epoch:train:13001-13500batch: iter_time=1.511e-04, forward_time=0.205, loss_ctc=44.232, loss_att=30.187, acc=0.808, loss=34.400, backward_time=0.234, grad_norm=33.744, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.301e-04, train_time=0.928
+[ip-10-0-216-33:0/16] 2024-03-01 05:04:02,640 (trainer:762) INFO: 19epoch:train:13501-14000batch: iter_time=1.510e-04, forward_time=0.209, loss_ctc=43.423, loss_att=29.780, acc=0.816, loss=33.873, backward_time=0.233, grad_norm=32.423, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.299e-04, train_time=0.922
+[ip-10-0-216-33:0/16] 2024-03-01 05:11:43,749 (trainer:762) INFO: 19epoch:train:14001-14500batch: iter_time=1.478e-04, forward_time=0.207, loss_ctc=44.195, loss_att=30.163, acc=0.811, loss=34.373, backward_time=0.230, grad_norm=31.813, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.297e-04, train_time=0.922
+[ip-10-0-216-33:0/16] 2024-03-01 05:16:13,277 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-01 05:19:23,989 (trainer:762) INFO: 19epoch:train:14501-15000batch: iter_time=1.444e-04, forward_time=0.209, loss_ctc=43.482, loss_att=30.229, acc=0.814, loss=34.205, backward_time=0.230, grad_norm=31.818, clip=100.000, loss_scale=3.481e+33, optim_step_time=0.075, optim0_lr0=2.295e-04, train_time=0.920
+[ip-10-0-216-33:0/16] 2024-03-01 05:33:24,715 (trainer:361) INFO: 19epoch results: [train] iter_time=0.088, forward_time=0.208, loss_ctc=44.467, loss_att=30.495, acc=0.809, loss=34.686, backward_time=0.233, grad_norm=34.561, clip=100.000, loss_scale=3.300e+33, optim_step_time=0.075, optim0_lr0=2.325e-04, train_time=1.001, time=4 hours, 10 minutes and 30.84 seconds, total_count=285000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=42.117, cer_ctc=0.231, loss_att=30.659, acc=0.752, cer=0.216, wer=0.993, loss=34.096, time=13 minutes and 45.74 seconds, total_count=40527, gpu_max_cached_mem_GB=36.516
+[ip-10-0-216-33:0/16] 2024-03-01 05:33:34,377 (trainer:416) INFO: The best model has been updated: valid.acc, valid.total_count
+[ip-10-0-216-33:0/16] 2024-03-01 05:33:34,403 (trainer:470) INFO: The model files were removed: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/14epoch.pth
+[ip-10-0-216-33:0/16] 2024-03-01 05:33:34,403 (trainer:290) INFO: 20/45epoch started. Estimated time to finish: 4 days, 18 hours and 49 minutes
+[ip-10-0-216-33:0/16] 2024-03-01 05:33:34,411 (multiple_iter_factory:32) INFO: Building 0th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-01 05:34:10,647 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-01 05:34:18,490 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-01 05:34:18,490 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1,
+[ip-10-0-216-33:0/16] 2024-03-01 05:34:18,495 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-01 05:48:17,526 (trainer:762) INFO: 20epoch:train:1-500batch: iter_time=0.842, forward_time=0.210, loss_ctc=43.136, loss_att=29.619, acc=0.815, loss=33.674, backward_time=0.229, grad_norm=47.671, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.293e-04, train_time=1.766
+[ip-10-0-216-33:0/16] 2024-03-01 05:55:55,272 (trainer:762) INFO: 20epoch:train:501-1000batch: iter_time=1.499e-04, forward_time=0.209, loss_ctc=46.054, loss_att=31.667, acc=0.803, loss=35.983, backward_time=0.234, grad_norm=37.233, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.291e-04, train_time=0.915
+[ip-10-0-216-33:0/16] 2024-03-01 06:03:30,938 (trainer:762) INFO: 20epoch:train:1001-1500batch: iter_time=1.510e-04, forward_time=0.208, loss_ctc=41.999, loss_att=28.686, acc=0.813, loss=32.679, backward_time=0.232, grad_norm=32.327, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.289e-04, train_time=0.911
+[ip-10-0-216-33:0/16] 2024-03-01 06:11:05,643 (trainer:762) INFO: 20epoch:train:1501-2000batch: iter_time=1.475e-04, forward_time=0.211, loss_ctc=45.496, loss_att=31.746, acc=0.813, loss=35.871, backward_time=0.236, grad_norm=34.218, clip=100.000, loss_scale=3.666e+33, optim_step_time=0.075, optim0_lr0=2.287e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-03-01 06:18:44,244 (trainer:762) INFO: 20epoch:train:2001-2500batch: iter_time=1.474e-04, forward_time=0.208, loss_ctc=44.122, loss_att=30.763, acc=0.803, loss=34.771, backward_time=0.232, grad_norm=35.002, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.285e-04, train_time=0.917
+[ip-10-0-216-33:0/16] 2024-03-01 06:26:23,149 (trainer:762) INFO: 20epoch:train:2501-3000batch: iter_time=1.468e-04, forward_time=0.208, loss_ctc=45.401, loss_att=31.259, acc=0.809, loss=35.502, backward_time=0.236, grad_norm=33.877, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.283e-04, train_time=0.918
+[ip-10-0-216-33:0/16] 2024-03-01 06:33:58,045 (trainer:762) INFO: 20epoch:train:3001-3500batch: iter_time=1.475e-04, forward_time=0.209, loss_ctc=41.596, loss_att=28.716, acc=0.810, loss=32.580, backward_time=0.233, grad_norm=32.857, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.281e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-03-01 06:38:48,294 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-01 06:41:30,376 (trainer:762) INFO: 20epoch:train:3501-4000batch: iter_time=1.495e-04, forward_time=0.209, loss_ctc=44.551, loss_att=31.107, acc=0.813, loss=35.140, backward_time=0.238, grad_norm=34.681, clip=100.000, loss_scale=5.452e+33, optim_step_time=0.075, optim0_lr0=2.279e-04, train_time=0.904
+[ip-10-0-216-33:0/16] 2024-03-01 06:49:09,535 (trainer:762) INFO: 20epoch:train:4001-4500batch: iter_time=1.459e-04, forward_time=0.210, loss_ctc=47.142, loss_att=32.305, acc=0.812, loss=36.756, backward_time=0.231, grad_norm=34.142, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.277e-04, train_time=0.918
+[ip-10-0-216-33:0/16] 2024-03-01 06:56:47,057 (trainer:762) INFO: 20epoch:train:4501-5000batch: iter_time=1.469e-04, forward_time=0.209, loss_ctc=45.155, loss_att=30.767, acc=0.812, loss=35.083, backward_time=0.234, grad_norm=34.070, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.275e-04, train_time=0.915
+[ip-10-0-216-33:0/16] 2024-03-01 06:56:53,045 (multiple_iter_factory:32) INFO: Building 1th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-01 06:57:30,657 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-01 06:57:38,619 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-01 06:57:38,620 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0,
+[ip-10-0-216-33:0/16] 2024-03-01 06:57:38,624 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-01 07:12:22,412 (trainer:762) INFO: 20epoch:train:5001-5500batch: iter_time=0.905, forward_time=0.210, loss_ctc=42.843, loss_att=29.381, acc=0.816, loss=33.420, backward_time=0.237, grad_norm=33.705, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.273e-04, train_time=1.870
+[ip-10-0-216-33:0/16] 2024-03-01 07:18:48,221 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-01 07:19:56,410 (trainer:762) INFO: 20epoch:train:5501-6000batch: iter_time=1.569e-04, forward_time=0.209, loss_ctc=45.671, loss_att=31.476, acc=0.804, loss=35.735, backward_time=0.237, grad_norm=36.264, clip=100.000, loss_scale=6.274e+33, optim_step_time=0.075, optim0_lr0=2.271e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-03-01 07:27:28,906 (trainer:762) INFO: 20epoch:train:6001-6500batch: iter_time=1.502e-04, forward_time=0.209, loss_ctc=41.765, loss_att=28.529, acc=0.814, loss=32.500, backward_time=0.241, grad_norm=33.145, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.270e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-03-01 07:35:09,364 (trainer:762) INFO: 20epoch:train:6501-7000batch: iter_time=1.505e-04, forward_time=0.209, loss_ctc=45.150, loss_att=31.472, acc=0.814, loss=35.575, backward_time=0.228, grad_norm=33.955, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.268e-04, train_time=0.921
+[ip-10-0-216-33:0/16] 2024-03-01 07:42:45,615 (trainer:762) INFO: 20epoch:train:7001-7500batch: iter_time=1.442e-04, forward_time=0.209, loss_ctc=43.691, loss_att=30.489, acc=0.804, loss=34.450, backward_time=0.233, grad_norm=41.300, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.266e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-03-01 07:47:39,422 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-01 07:50:21,581 (trainer:762) INFO: 20epoch:train:7501-8000batch: iter_time=1.486e-04, forward_time=0.209, loss_ctc=45.045, loss_att=31.053, acc=0.810, loss=35.251, backward_time=0.231, grad_norm=35.404, clip=100.000, loss_scale=4.261e+33, optim_step_time=0.075, optim0_lr0=2.264e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-03-01 07:57:56,309 (trainer:762) INFO: 20epoch:train:8001-8500batch: iter_time=1.497e-04, forward_time=0.210, loss_ctc=41.123, loss_att=28.471, acc=0.811, loss=32.266, backward_time=0.237, grad_norm=32.847, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.262e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-03-01 08:05:33,841 (trainer:762) INFO: 20epoch:train:8501-9000batch: iter_time=1.457e-04, forward_time=0.210, loss_ctc=44.002, loss_att=30.813, acc=0.815, loss=34.769, backward_time=0.234, grad_norm=35.165, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.260e-04, train_time=0.915
+[ip-10-0-216-33:0/16] 2024-03-01 08:13:13,104 (trainer:762) INFO: 20epoch:train:9001-9500batch: iter_time=1.493e-04, forward_time=0.209, loss_ctc=46.841, loss_att=32.142, acc=0.813, loss=36.552, backward_time=0.235, grad_norm=33.449, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.258e-04, train_time=0.918
+[ip-10-0-216-33:0/16] 2024-03-01 08:20:47,927 (trainer:762) INFO: 20epoch:train:9501-10000batch: iter_time=1.437e-04, forward_time=0.210, loss_ctc=44.438, loss_att=30.458, acc=0.814, loss=34.652, backward_time=0.234, grad_norm=32.589, clip=100.000, loss_scale=3.526e+33, optim_step_time=0.075, optim0_lr0=2.256e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-03-01 08:20:53,473 (multiple_iter_factory:32) INFO: Building 2th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-01 08:21:32,705 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-01 08:21:41,330 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-01 08:21:41,330 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2,
+[ip-10-0-216-33:0/16] 2024-03-01 08:21:41,335 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-01 08:29:56,357 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-01 08:35:58,946 (trainer:762) INFO: 20epoch:train:10001-10500batch: iter_time=0.853, forward_time=0.210, loss_ctc=42.489, loss_att=29.146, acc=0.816, loss=33.149, backward_time=0.235, grad_norm=32.878, clip=100.000, loss_scale=3.106e+33, optim_step_time=0.075, optim0_lr0=2.254e-04, train_time=1.822
+[ip-10-0-216-33:0/16] 2024-03-01 08:43:32,344 (trainer:762) INFO: 20epoch:train:10501-11000batch: iter_time=1.473e-04, forward_time=0.210, loss_ctc=45.596, loss_att=31.414, acc=0.805, loss=35.669, backward_time=0.235, grad_norm=35.892, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.252e-04, train_time=0.907
+[ip-10-0-216-33:0/16] 2024-03-01 08:51:02,059 (trainer:762) INFO: 20epoch:train:11001-11500batch: iter_time=1.497e-04, forward_time=0.211, loss_ctc=41.460, loss_att=28.263, acc=0.816, loss=32.222, backward_time=0.239, grad_norm=32.967, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.250e-04, train_time=0.899
+[ip-10-0-216-33:0/16] 2024-03-01 08:55:00,304 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-01 08:58:36,433 (trainer:762) INFO: 20epoch:train:11501-12000batch: iter_time=1.480e-04, forward_time=0.211, loss_ctc=44.589, loss_att=31.173, acc=0.816, loss=35.198, backward_time=0.233, grad_norm=34.535, clip=100.000, loss_scale=1.974e+33, optim_step_time=0.075, optim0_lr0=2.248e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-03-01 09:06:06,913 (trainer:762) INFO: 20epoch:train:12001-12500batch: iter_time=1.512e-04, forward_time=0.211, loss_ctc=43.329, loss_att=30.401, acc=0.805, loss=34.279, backward_time=0.234, grad_norm=34.105, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=2.247e-04, train_time=0.901
+[ip-10-0-216-33:0/16] 2024-03-01 09:13:38,967 (trainer:762) INFO: 20epoch:train:12501-13000batch: iter_time=1.506e-04, forward_time=0.210, loss_ctc=44.759, loss_att=30.949, acc=0.810, loss=35.092, backward_time=0.232, grad_norm=33.217, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=2.245e-04, train_time=0.904
+[ip-10-0-216-33:0/16] 2024-03-01 09:21:17,320 (trainer:762) INFO: 20epoch:train:13001-13500batch: iter_time=1.501e-04, forward_time=0.210, loss_ctc=40.985, loss_att=28.365, acc=0.812, loss=32.151, backward_time=0.236, grad_norm=32.906, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=2.243e-04, train_time=0.916
+[ip-10-0-216-33:0/16] 2024-03-01 09:28:54,779 (trainer:762) INFO: 20epoch:train:13501-14000batch: iter_time=1.467e-04, forward_time=0.210, loss_ctc=43.672, loss_att=30.676, acc=0.815, loss=34.575, backward_time=0.235, grad_norm=34.559, clip=100.000, loss_scale=1.919e+33, optim_step_time=0.075, optim0_lr0=2.241e-04, train_time=0.915
+[ip-10-0-216-33:0/16] 2024-03-01 09:36:26,460 (trainer:762) INFO: 20epoch:train:14001-14500batch: iter_time=1.483e-04, forward_time=0.210, loss_ctc=46.543, loss_att=31.893, acc=0.814, loss=36.288, backward_time=0.234, grad_norm=33.485, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.239e-04, train_time=0.903
+[ip-10-0-216-33:0/16] 2024-03-01 09:44:00,696 (trainer:762) INFO: 20epoch:train:14501-15000batch: iter_time=1.483e-04, forward_time=0.211, loss_ctc=44.275, loss_att=30.353, acc=0.814, loss=34.529, backward_time=0.236, grad_norm=33.140, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.237e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-03-01 09:58:11,012 (trainer:361) INFO: 20epoch results: [train] iter_time=0.087, forward_time=0.210, loss_ctc=44.097, loss_att=30.452, acc=0.811, loss=34.545, backward_time=0.234, grad_norm=34.719, clip=100.000, loss_scale=3.559e+33, optim_step_time=0.075, optim0_lr0=2.265e-04, train_time=1.002, time=4 hours, 10 minutes and 41.83 seconds, total_count=300000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=41.410, cer_ctc=0.230, loss_att=32.884, acc=0.719, cer=0.306, wer=1.000, loss=35.442, time=13 minutes and 54.47 seconds, total_count=42660, gpu_max_cached_mem_GB=36.516
+[ip-10-0-216-33:0/16] 2024-03-01 09:58:20,918 (trainer:416) INFO: The best model has been updated: valid.total_count
+[ip-10-0-216-33:0/16] 2024-03-01 09:58:20,942 (trainer:470) INFO: The model files were removed: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/15epoch.pth
+[ip-10-0-216-33:0/16] 2024-03-01 09:58:20,943 (trainer:290) INFO: 21/45epoch started. Estimated time to finish: 4 days, 14 hours and 23 minutes
+[ip-10-0-216-33:0/16] 2024-03-01 09:58:20,950 (multiple_iter_factory:32) INFO: Building 0th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-01 09:58:56,977 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-01 09:59:04,942 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-01 09:59:04,942 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1, +[ip-10-0-216-33:0/16] 2024-03-01 09:59:04,947 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-01 10:13:06,397 (trainer:762) INFO: 21epoch:train:1-500batch: iter_time=0.852, forward_time=0.208, loss_ctc=43.374, loss_att=29.769, acc=0.808, loss=33.850, backward_time=0.231, grad_norm=34.850, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.235e-04, train_time=1.771 +[ip-10-0-216-33:0/16] 2024-03-01 10:20:40,991 (trainer:762) INFO: 21epoch:train:501-1000batch: iter_time=1.468e-04, forward_time=0.209, loss_ctc=44.792, loss_att=30.507, acc=0.812, loss=34.793, backward_time=0.232, grad_norm=33.114, clip=100.000, loss_scale=3.837e+33, optim_step_time=0.075, optim0_lr0=2.233e-04, train_time=0.909 +[ip-10-0-216-33:0/16] 2024-03-01 10:28:15,060 (trainer:762) INFO: 21epoch:train:1001-1500batch: iter_time=1.505e-04, forward_time=0.208, loss_ctc=44.538, loss_att=30.690, acc=0.807, loss=34.844, backward_time=0.233, grad_norm=37.574, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.232e-04, train_time=0.908 +[ip-10-0-216-33:0/16] 2024-03-01 10:33:57,055 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
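The recurring `The grad norm is nan. Skipping updating the model.` warnings are the expected behavior of mixed-precision training with dynamic loss scaling: when the scaled gradients overflow, the update is skipped and `loss_scale` is backed off, then grown again after a stretch of stable steps. The logged scales sit on powers of two, consistent with that scheme:

```python
# The loss_scale values in the log are powers of two, as expected from a
# dynamic loss scaler that halves on overflow and doubles back afterwards.
print(2.0 ** 111)  # ~2.596e+33, the scale seen before a nan warning above
print(2.0 ** 110)  # ~1.298e+33, the scale in the chunks right after it
```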
+[ip-10-0-216-33:0/16] 2024-03-01 10:35:47,931 (trainer:762) INFO: 21epoch:train:1501-2000batch: iter_time=1.484e-04, forward_time=0.209, loss_ctc=42.962, loss_att=29.423, acc=0.810, loss=33.485, backward_time=0.235, grad_norm=35.017, clip=100.000, loss_scale=4.547e+33, optim_step_time=0.075, optim0_lr0=2.230e-04, train_time=0.905 +[ip-10-0-216-33:0/16] 2024-03-01 10:43:25,255 (trainer:762) INFO: 21epoch:train:2001-2500batch: iter_time=1.476e-04, forward_time=0.210, loss_ctc=45.490, loss_att=31.792, acc=0.813, loss=35.901, backward_time=0.232, grad_norm=35.118, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.228e-04, train_time=0.914 +[ip-10-0-216-33:0/16] 2024-03-01 10:51:01,788 (trainer:762) INFO: 21epoch:train:2501-3000batch: iter_time=1.495e-04, forward_time=0.207, loss_ctc=41.313, loss_att=28.305, acc=0.811, loss=32.207, backward_time=0.233, grad_norm=33.014, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.226e-04, train_time=0.913 +[ip-10-0-216-33:0/16] 2024-03-01 10:58:39,050 (trainer:762) INFO: 21epoch:train:3001-3500batch: iter_time=1.493e-04, forward_time=0.209, loss_ctc=42.651, loss_att=29.777, acc=0.812, loss=33.639, backward_time=0.235, grad_norm=34.566, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.224e-04, train_time=0.914 +[ip-10-0-216-33:0/16] 2024-03-01 11:06:19,663 (trainer:762) INFO: 21epoch:train:3501-4000batch: iter_time=1.497e-04, forward_time=0.208, loss_ctc=41.539, loss_att=28.515, acc=0.813, loss=32.422, backward_time=0.240, grad_norm=33.685, clip=100.000, loss_scale=3.240e+33, optim_step_time=0.075, optim0_lr0=2.222e-04, train_time=0.921 +[ip-10-0-216-33:0/16] 2024-03-01 11:14:00,053 (trainer:762) INFO: 21epoch:train:4001-4500batch: iter_time=1.511e-04, forward_time=0.208, loss_ctc=42.932, loss_att=29.791, acc=0.814, loss=33.734, backward_time=0.236, grad_norm=34.703, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.221e-04, train_time=0.921 +[ip-10-0-216-33:0/16] 2024-03-01 11:20:17,641 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-01 11:21:40,272 (trainer:762) INFO: 21epoch:train:4501-5000batch: iter_time=1.461e-04, forward_time=0.208, loss_ctc=47.452, loss_att=32.643, acc=0.806, loss=37.086, backward_time=0.234, grad_norm=36.686, clip=100.000, loss_scale=4.729e+33, optim_step_time=0.075, optim0_lr0=2.219e-04, train_time=0.920 +[ip-10-0-216-33:0/16] 2024-03-01 11:21:44,814 (multiple_iter_factory:32) INFO: Building 1th iter-factory... 
+[ip-10-0-216-33:0/16] 2024-03-01 11:22:21,444 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-01 11:22:29,490 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-01 11:22:29,490 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2, +[ip-10-0-216-33:0/16] 2024-03-01 11:22:29,495 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-01 11:36:48,003 (trainer:762) INFO: 21epoch:train:5001-5500batch: iter_time=0.900, forward_time=0.210, loss_ctc=42.944, loss_att=29.513, acc=0.810, loss=33.542, backward_time=0.237, grad_norm=34.851, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.217e-04, train_time=1.815 +[ip-10-0-216-33:0/16] 2024-03-01 11:44:20,415 (trainer:762) INFO: 21epoch:train:5501-6000batch: iter_time=1.689e-04, forward_time=0.209, loss_ctc=44.215, loss_att=30.142, acc=0.815, loss=34.364, backward_time=0.235, grad_norm=32.395, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.215e-04, train_time=0.905 +[ip-10-0-216-33:0/16] 2024-03-01 11:51:53,692 (trainer:762) INFO: 21epoch:train:6001-6500batch: iter_time=1.709e-04, forward_time=0.208, loss_ctc=44.024, loss_att=30.397, acc=0.808, loss=34.485, backward_time=0.229, grad_norm=35.663, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.213e-04, train_time=0.906 +[ip-10-0-216-33:0/16] 2024-03-01 11:59:32,900 (trainer:762) INFO: 21epoch:train:6501-7000batch: iter_time=1.686e-04, forward_time=0.208, loss_ctc=42.760, loss_att=29.361, acc=0.810, loss=33.381, backward_time=0.231, grad_norm=35.378, clip=100.000, loss_scale=3.058e+33, optim_step_time=0.075, optim0_lr0=2.211e-04, train_time=0.918 +[ip-10-0-216-33:0/16] 2024-03-01 12:07:09,380 (trainer:762) INFO: 21epoch:train:7001-7500batch: iter_time=1.648e-04, forward_time=0.208, loss_ctc=45.031, loss_att=31.541, acc=0.814, loss=35.588, backward_time=0.232, grad_norm=35.932, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.210e-04, train_time=0.913 +[ip-10-0-216-33:0/16] 2024-03-01 12:14:42,262 (trainer:762) INFO: 21epoch:train:7501-8000batch: iter_time=1.535e-04, forward_time=0.209, loss_ctc=41.122, loss_att=28.104, acc=0.812, loss=32.009, backward_time=0.231, grad_norm=33.456, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.208e-04, train_time=0.906 +[ip-10-0-216-33:0/16] 2024-03-01 12:15:59,722 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-01 12:19:21,043 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
+[ip-10-0-216-33:0/16] 2024-03-01 12:22:18,653 (trainer:762) INFO: 21epoch:train:8001-8500batch: iter_time=1.590e-04, forward_time=0.207, loss_ctc=42.036, loss_att=29.544, acc=0.813, loss=33.292, backward_time=0.232, grad_norm=37.626, clip=100.000, loss_scale=2.526e+33, optim_step_time=0.075, optim0_lr0=2.206e-04, train_time=0.913 +[ip-10-0-216-33:0/16] 2024-03-01 12:29:52,488 (trainer:762) INFO: 21epoch:train:8501-9000batch: iter_time=1.536e-04, forward_time=0.208, loss_ctc=41.196, loss_att=28.304, acc=0.814, loss=32.171, backward_time=0.234, grad_norm=34.740, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=2.204e-04, train_time=0.907 +[ip-10-0-216-33:0/16] 2024-03-01 12:37:28,374 (trainer:762) INFO: 21epoch:train:9001-9500batch: iter_time=1.606e-04, forward_time=0.208, loss_ctc=42.819, loss_att=29.846, acc=0.814, loss=33.738, backward_time=0.237, grad_norm=34.222, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=2.203e-04, train_time=0.912 +[ip-10-0-216-33:0/16] 2024-03-01 12:45:03,095 (trainer:762) INFO: 21epoch:train:9501-10000batch: iter_time=1.522e-04, forward_time=0.208, loss_ctc=47.111, loss_att=32.457, acc=0.806, loss=36.853, backward_time=0.236, grad_norm=36.486, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=2.201e-04, train_time=0.909 +[ip-10-0-216-33:0/16] 2024-03-01 12:45:08,361 (multiple_iter_factory:32) INFO: Building 2th iter-factory... +[ip-10-0-216-33:0/16] 2024-03-01 12:45:45,990 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-01 12:45:53,915 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-01 12:45:53,916 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0, +[ip-10-0-216-33:0/16] 2024-03-01 12:45:53,920 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-01 13:00:23,429 (trainer:762) INFO: 21epoch:train:10001-10500batch: iter_time=0.884, forward_time=0.208, loss_ctc=42.686, loss_att=29.332, acc=0.811, loss=33.338, backward_time=0.237, grad_norm=34.913, clip=100.000, loss_scale=1.804e+33, optim_step_time=0.075, optim0_lr0=2.199e-04, train_time=1.840 +[ip-10-0-216-33:0/16] 2024-03-01 13:08:03,259 (trainer:762) INFO: 21epoch:train:10501-11000batch: iter_time=1.450e-04, forward_time=0.208, loss_ctc=43.923, loss_att=30.056, acc=0.815, loss=34.216, backward_time=0.235, grad_norm=33.003, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.197e-04, train_time=0.919 +[ip-10-0-216-33:0/16] 2024-03-01 13:15:36,398 (trainer:762) INFO: 21epoch:train:11001-11500batch: iter_time=1.499e-04, forward_time=0.209, loss_ctc=43.854, loss_att=30.370, acc=0.809, loss=34.415, backward_time=0.229, grad_norm=36.607, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.195e-04, train_time=0.906 +[ip-10-0-216-33:0/16] 2024-03-01 13:23:11,048 (trainer:762) INFO: 21epoch:train:11501-12000batch: iter_time=1.477e-04, 
forward_time=0.208, loss_ctc=42.297, loss_att=29.128, acc=0.812, loss=33.079, backward_time=0.241, grad_norm=34.725, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.194e-04, train_time=0.909 +[ip-10-0-216-33:0/16] 2024-03-01 13:30:45,072 (trainer:762) INFO: 21epoch:train:12001-12500batch: iter_time=1.494e-04, forward_time=0.210, loss_ctc=44.558, loss_att=31.237, acc=0.815, loss=35.233, backward_time=0.232, grad_norm=35.211, clip=100.000, loss_scale=3.609e+33, optim_step_time=0.075, optim0_lr0=2.192e-04, train_time=0.908 +[ip-10-0-216-33:0/16] 2024-03-01 13:38:19,315 (trainer:762) INFO: 21epoch:train:12501-13000batch: iter_time=1.457e-04, forward_time=0.208, loss_ctc=40.880, loss_att=27.994, acc=0.813, loss=31.860, backward_time=0.234, grad_norm=32.758, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.190e-04, train_time=0.908 +[ip-10-0-216-33:0/16] 2024-03-01 13:45:58,014 (trainer:762) INFO: 21epoch:train:13001-13500batch: iter_time=1.530e-04, forward_time=0.210, loss_ctc=41.936, loss_att=29.436, acc=0.813, loss=33.186, backward_time=0.235, grad_norm=35.512, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.188e-04, train_time=0.917 +[ip-10-0-216-33:0/16] 2024-03-01 13:53:33,177 (trainer:762) INFO: 21epoch:train:13501-14000batch: iter_time=1.533e-04, forward_time=0.208, loss_ctc=40.829, loss_att=28.129, acc=0.816, loss=31.939, backward_time=0.236, grad_norm=33.601, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.187e-04, train_time=0.910 +[ip-10-0-216-33:0/16] 2024-03-01 13:59:50,859 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-01 14:01:08,752 (trainer:762) INFO: 21epoch:train:14001-14500batch: iter_time=1.460e-04, forward_time=0.211, loss_ctc=42.510, loss_att=29.578, acc=0.815, loss=33.458, backward_time=0.237, grad_norm=34.850, clip=100.000, loss_scale=6.306e+33, optim_step_time=0.075, optim0_lr0=2.185e-04, train_time=0.911 +[ip-10-0-216-33:0/16] 2024-03-01 14:08:41,344 (trainer:762) INFO: 21epoch:train:14501-15000batch: iter_time=1.459e-04, forward_time=0.208, loss_ctc=46.613, loss_att=32.154, acc=0.807, loss=36.492, backward_time=0.231, grad_norm=35.899, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.183e-04, train_time=0.905 +[ip-10-0-216-33:0/16] 2024-03-01 14:23:02,570 (trainer:361) INFO: 21epoch results: [train] iter_time=0.088, forward_time=0.209, loss_ctc=43.346, loss_att=29.928, acc=0.812, loss=33.953, backward_time=0.234, grad_norm=34.871, clip=100.000, loss_scale=3.501e+33, optim_step_time=0.075, optim0_lr0=2.209e-04, train_time=1.001, time=4 hours, 10 minutes and 35.41 seconds, total_count=315000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=40.808, cer_ctc=0.223, loss_att=31.526, acc=0.731, cer=0.295, wer=0.997, loss=34.311, time=14 minutes and 5.9 seconds, total_count=44793, gpu_max_cached_mem_GB=36.516 +[ip-10-0-216-33:0/16] 2024-03-01 14:23:12,310 (trainer:416) INFO: The best model has been updated: valid.total_count +[ip-10-0-216-33:0/16] 2024-03-01 14:23:12,328 (trainer:290) INFO: 22/45epoch started. Estimated time to finish: 4 days, 9 hours and 58 minutes +[ip-10-0-216-33:0/16] 2024-03-01 14:23:12,335 (multiple_iter_factory:32) INFO: Building 0th iter-factory... 
+[ip-10-0-216-33:0/16] 2024-03-01 14:23:48,213 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-01 14:23:56,587 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-01 14:23:56,587 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1, +[ip-10-0-216-33:0/16] 2024-03-01 14:23:56,592 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-01 14:34:12,124 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-01 14:37:50,143 (trainer:762) INFO: 22epoch:train:1-500batch: iter_time=0.838, forward_time=0.209, loss_ctc=45.001, loss_att=30.605, acc=0.809, loss=34.924, backward_time=0.235, grad_norm=35.681, clip=100.000, loss_scale=3.959e+33, optim_step_time=0.075, optim0_lr0=2.181e-04, train_time=1.755 +[ip-10-0-216-33:0/16] 2024-03-01 14:45:24,982 (trainer:762) INFO: 22epoch:train:501-1000batch: iter_time=1.462e-04, forward_time=0.209, loss_ctc=43.417, loss_att=29.453, acc=0.817, loss=33.642, backward_time=0.236, grad_norm=34.314, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.180e-04, train_time=0.909 +[ip-10-0-216-33:0/16] 2024-03-01 14:53:04,706 (trainer:762) INFO: 22epoch:train:1001-1500batch: iter_time=1.468e-04, forward_time=0.209, loss_ctc=44.080, loss_att=30.687, acc=0.809, loss=34.705, backward_time=0.234, grad_norm=36.154, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.178e-04, train_time=0.919 +[ip-10-0-216-33:0/16] 2024-03-01 15:00:41,013 (trainer:762) INFO: 22epoch:train:1501-2000batch: iter_time=1.516e-04, forward_time=0.209, loss_ctc=45.417, loss_att=31.232, acc=0.807, loss=35.487, backward_time=0.240, grad_norm=35.705, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.176e-04, train_time=0.912 +[ip-10-0-216-33:0/16] 2024-03-01 15:08:14,877 (trainer:762) INFO: 22epoch:train:2001-2500batch: iter_time=1.486e-04, forward_time=0.208, loss_ctc=42.517, loss_att=28.884, acc=0.808, loss=32.974, backward_time=0.230, grad_norm=34.454, clip=100.000, loss_scale=3.827e+33, optim_step_time=0.075, optim0_lr0=2.175e-04, train_time=0.907 +[ip-10-0-216-33:0/16] 2024-03-01 15:15:50,322 (trainer:762) INFO: 22epoch:train:2501-3000batch: iter_time=1.427e-04, forward_time=0.209, loss_ctc=42.102, loss_att=28.834, acc=0.820, loss=32.814, backward_time=0.232, grad_norm=34.805, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.173e-04, train_time=0.911 +[ip-10-0-216-33:0/16] 2024-03-01 15:23:25,578 (trainer:762) INFO: 22epoch:train:3001-3500batch: iter_time=1.445e-04, forward_time=0.209, loss_ctc=42.410, loss_att=29.268, acc=0.811, loss=33.211, backward_time=0.237, grad_norm=34.025, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.171e-04, train_time=0.910 +[ip-10-0-216-33:0/16] 2024-03-01 15:30:56,034 (trainer:762) INFO: 22epoch:train:3501-4000batch: iter_time=1.471e-04, 
forward_time=0.209, loss_ctc=45.141, loss_att=31.336, acc=0.807, loss=35.478, backward_time=0.236, grad_norm=36.174, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.169e-04, train_time=0.901 +[ip-10-0-216-33:0/16] 2024-03-01 15:37:05,663 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-01 15:38:34,667 (trainer:762) INFO: 22epoch:train:4001-4500batch: iter_time=1.474e-04, forward_time=0.210, loss_ctc=43.619, loss_att=30.381, acc=0.811, loss=34.352, backward_time=0.234, grad_norm=36.427, clip=100.000, loss_scale=6.628e+33, optim_step_time=0.075, optim0_lr0=2.168e-04, train_time=0.917 +[ip-10-0-216-33:0/16] 2024-03-01 15:46:08,665 (trainer:762) INFO: 22epoch:train:4501-5000batch: iter_time=1.492e-04, forward_time=0.209, loss_ctc=42.232, loss_att=28.798, acc=0.813, loss=32.828, backward_time=0.232, grad_norm=33.771, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.166e-04, train_time=0.908 +[ip-10-0-216-33:0/16] 2024-03-01 15:46:13,147 (multiple_iter_factory:32) INFO: Building 1th iter-factory... +[ip-10-0-216-33:0/16] 2024-03-01 15:46:49,934 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-01 15:46:58,744 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-01 15:46:58,745 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2, +[ip-10-0-216-33:0/16] 2024-03-01 15:46:58,749 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-01 16:00:21,522 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
+[ip-10-0-216-33:0/16] 2024-03-01 16:01:24,768 (trainer:762) INFO: 22epoch:train:5001-5500batch: iter_time=0.894, forward_time=0.208, loss_ctc=44.607, loss_att=30.292, acc=0.810, loss=34.587, backward_time=0.228, grad_norm=35.440, clip=100.000, loss_scale=4.828e+33, optim_step_time=0.075, optim0_lr0=2.164e-04, train_time=1.832 +[ip-10-0-216-33:0/16] 2024-03-01 16:09:00,335 (trainer:762) INFO: 22epoch:train:5501-6000batch: iter_time=1.452e-04, forward_time=0.209, loss_ctc=42.978, loss_att=29.192, acc=0.818, loss=33.328, backward_time=0.235, grad_norm=33.522, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.163e-04, train_time=0.911 +[ip-10-0-216-33:0/16] 2024-03-01 16:16:33,822 (trainer:762) INFO: 22epoch:train:6001-6500batch: iter_time=1.460e-04, forward_time=0.208, loss_ctc=43.873, loss_att=30.516, acc=0.810, loss=34.523, backward_time=0.229, grad_norm=35.857, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.161e-04, train_time=0.907 +[ip-10-0-216-33:0/16] 2024-03-01 16:24:12,974 (trainer:762) INFO: 22epoch:train:6501-7000batch: iter_time=1.452e-04, forward_time=0.206, loss_ctc=44.818, loss_att=30.893, acc=0.808, loss=35.070, backward_time=0.235, grad_norm=35.469, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.159e-04, train_time=0.918 +[ip-10-0-216-33:0/16] 2024-03-01 16:31:43,641 (trainer:762) INFO: 22epoch:train:7001-7500batch: iter_time=1.472e-04, forward_time=0.210, loss_ctc=42.333, loss_att=28.701, acc=0.809, loss=32.791, backward_time=0.234, grad_norm=35.400, clip=100.000, loss_scale=2.960e+33, optim_step_time=0.075, optim0_lr0=2.158e-04, train_time=0.901 +[ip-10-0-216-33:0/16] 2024-03-01 16:39:13,856 (trainer:762) INFO: 22epoch:train:7501-8000batch: iter_time=1.488e-04, forward_time=0.209, loss_ctc=41.850, loss_att=28.705, acc=0.821, loss=32.648, backward_time=0.235, grad_norm=35.777, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.156e-04, train_time=0.900 +[ip-10-0-216-33:0/16] 2024-03-01 16:46:47,108 (trainer:762) INFO: 22epoch:train:8001-8500batch: iter_time=1.471e-04, forward_time=0.210, loss_ctc=41.521, loss_att=28.767, acc=0.814, loss=32.593, backward_time=0.238, grad_norm=33.931, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.154e-04, train_time=0.906 +[ip-10-0-216-33:0/16] 2024-03-01 16:54:15,051 (trainer:762) INFO: 22epoch:train:8501-9000batch: iter_time=1.464e-04, forward_time=0.209, loss_ctc=44.667, loss_att=31.124, acc=0.809, loss=35.187, backward_time=0.236, grad_norm=36.039, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.153e-04, train_time=0.896 +[ip-10-0-216-33:0/16] 2024-03-01 17:01:42,168 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
+[ip-10-0-216-33:0/16] 2024-03-01 17:01:49,412 (trainer:762) INFO: 22epoch:train:9001-9500batch: iter_time=1.523e-04, forward_time=0.210, loss_ctc=43.351, loss_att=30.362, acc=0.812, loss=34.258, backward_time=0.233, grad_norm=37.006, clip=100.000, loss_scale=5.827e+33, optim_step_time=0.075, optim0_lr0=2.151e-04, train_time=0.908 +[ip-10-0-216-33:0/16] 2024-03-01 17:09:18,638 (trainer:762) INFO: 22epoch:train:9501-10000batch: iter_time=1.482e-04, forward_time=0.209, loss_ctc=41.897, loss_att=28.623, acc=0.814, loss=32.605, backward_time=0.236, grad_norm=33.740, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.149e-04, train_time=0.898 +[ip-10-0-216-33:0/16] 2024-03-01 17:09:24,021 (multiple_iter_factory:32) INFO: Building 2th iter-factory... +[ip-10-0-216-33:0/16] 2024-03-01 17:10:02,098 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-01 17:10:10,852 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-01 17:10:10,852 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0, +[ip-10-0-216-33:0/16] 2024-03-01 17:10:10,857 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-01 17:24:31,549 (trainer:762) INFO: 22epoch:train:10001-10500batch: iter_time=0.866, forward_time=0.209, loss_ctc=44.346, loss_att=30.124, acc=0.811, loss=34.391, backward_time=0.239, grad_norm=35.458, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.148e-04, train_time=1.826 +[ip-10-0-216-33:0/16] 2024-03-01 17:29:52,576 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
+[ip-10-0-216-33:0/16] 2024-03-01 17:32:11,589 (trainer:762) INFO: 22epoch:train:10501-11000batch: iter_time=1.473e-04, forward_time=0.209, loss_ctc=42.521, loss_att=28.982, acc=0.819, loss=33.044, backward_time=0.230, grad_norm=34.098, clip=100.000, loss_scale=4.396e+33, optim_step_time=0.075, optim0_lr0=2.146e-04, train_time=0.920 +[ip-10-0-216-33:0/16] 2024-03-01 17:39:50,123 (trainer:762) INFO: 22epoch:train:11001-11500batch: iter_time=1.549e-04, forward_time=0.207, loss_ctc=43.566, loss_att=30.319, acc=0.811, loss=34.293, backward_time=0.235, grad_norm=36.184, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.144e-04, train_time=0.917 +[ip-10-0-216-33:0/16] 2024-03-01 17:47:20,569 (trainer:762) INFO: 22epoch:train:11501-12000batch: iter_time=1.529e-04, forward_time=0.211, loss_ctc=44.283, loss_att=30.541, acc=0.810, loss=34.663, backward_time=0.234, grad_norm=36.515, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.143e-04, train_time=0.901 +[ip-10-0-216-33:0/16] 2024-03-01 17:54:54,476 (trainer:762) INFO: 22epoch:train:12001-12500batch: iter_time=1.537e-04, forward_time=0.209, loss_ctc=42.024, loss_att=28.574, acc=0.810, loss=32.609, backward_time=0.229, grad_norm=35.484, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.141e-04, train_time=0.908 +[ip-10-0-216-33:0/16] 2024-03-01 18:02:24,464 (trainer:762) INFO: 22epoch:train:12501-13000batch: iter_time=1.480e-04, forward_time=0.211, loss_ctc=41.468, loss_att=28.546, acc=0.822, loss=32.423, backward_time=0.238, grad_norm=36.815, clip=100.000, loss_scale=3.391e+33, optim_step_time=0.075, optim0_lr0=2.139e-04, train_time=0.900 +[ip-10-0-216-33:0/16] 2024-03-01 18:09:58,574 (trainer:762) INFO: 22epoch:train:13001-13500batch: iter_time=1.494e-04, forward_time=0.209, loss_ctc=41.543, loss_att=28.859, acc=0.813, loss=32.664, backward_time=0.238, grad_norm=34.534, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.138e-04, train_time=0.908 +[ip-10-0-216-33:0/16] 2024-03-01 18:17:37,850 (trainer:762) INFO: 22epoch:train:13501-14000batch: iter_time=1.462e-04, forward_time=0.207, loss_ctc=44.414, loss_att=30.943, acc=0.809, loss=34.984, backward_time=0.234, grad_norm=36.757, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.136e-04, train_time=0.918 +[ip-10-0-216-33:0/16] 2024-03-01 18:25:19,351 (trainer:762) INFO: 22epoch:train:14001-14500batch: iter_time=1.516e-04, forward_time=0.207, loss_ctc=43.254, loss_att=30.011, acc=0.813, loss=33.984, backward_time=0.230, grad_norm=36.419, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.135e-04, train_time=0.923 +[ip-10-0-216-33:0/16] 2024-03-01 18:32:52,872 (trainer:762) INFO: 22epoch:train:14501-15000batch: iter_time=1.492e-04, forward_time=0.208, loss_ctc=41.711, loss_att=28.558, acc=0.814, loss=32.504, backward_time=0.230, grad_norm=33.574, clip=100.000, loss_scale=6.781e+33, optim_step_time=0.075, optim0_lr0=2.133e-04, train_time=0.907 +[ip-10-0-216-33:0/16] 2024-03-01 18:47:24,682 (trainer:361) INFO: 22epoch results: [train] iter_time=0.087, forward_time=0.209, loss_ctc=43.232, loss_att=29.737, acc=0.812, loss=33.786, backward_time=0.234, grad_norm=35.318, clip=100.000, loss_scale=4.275e+33, optim_step_time=0.075, optim0_lr0=2.157e-04, train_time=0.998, time=4 hours, 9 minutes and 57.4 seconds, total_count=330000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=40.064, cer_ctc=0.219, loss_att=30.057, acc=0.756, cer=0.218, wer=0.993, loss=33.059, time=14 minutes and 
14.64 seconds, total_count=46926, gpu_max_cached_mem_GB=36.516 +[ip-10-0-216-33:0/16] 2024-03-01 18:47:34,259 (trainer:416) INFO: The best model has been updated: valid.acc, valid.total_count +[ip-10-0-216-33:0/16] 2024-03-01 18:47:34,284 (trainer:470) INFO: The model files were removed: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/17epoch.pth +[ip-10-0-216-33:0/16] 2024-03-01 18:47:34,284 (trainer:290) INFO: 23/45epoch started. Estimated time to finish: 4 days, 5 hours and 33 minutes +[ip-10-0-216-33:0/16] 2024-03-01 18:47:34,291 (multiple_iter_factory:32) INFO: Building 0th iter-factory... +[ip-10-0-216-33:0/16] 2024-03-01 18:48:10,000 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-01 18:48:17,849 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-01 18:48:17,849 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0, +[ip-10-0-216-33:0/16] 2024-03-01 18:48:17,854 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-01 18:57:56,100 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-01 19:02:23,464 (trainer:762) INFO: 23epoch:train:1-500batch: iter_time=0.839, forward_time=0.211, loss_ctc=44.181, loss_att=30.158, acc=0.819, loss=34.365, backward_time=0.233, grad_norm=35.276, clip=100.000, loss_scale=7.346e+33, optim_step_time=0.075, optim0_lr0=2.131e-04, train_time=1.778 +[ip-10-0-216-33:0/16] 2024-03-01 19:10:02,321 (trainer:762) INFO: 23epoch:train:501-1000batch: iter_time=1.487e-04, forward_time=0.208, loss_ctc=43.363, loss_att=29.459, acc=0.821, loss=33.630, backward_time=0.233, grad_norm=33.634, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.130e-04, train_time=0.917 +[ip-10-0-216-33:0/16] 2024-03-01 19:17:40,099 (trainer:762) INFO: 23epoch:train:1001-1500batch: iter_time=1.511e-04, forward_time=0.209, loss_ctc=44.458, loss_att=30.425, acc=0.810, loss=34.635, backward_time=0.235, grad_norm=35.138, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.128e-04, train_time=0.915 +[ip-10-0-216-33:0/16] 2024-03-01 19:25:19,497 (trainer:762) INFO: 23epoch:train:1501-2000batch: iter_time=1.479e-04, forward_time=0.209, loss_ctc=43.793, loss_att=30.212, acc=0.814, loss=34.286, backward_time=0.239, grad_norm=34.466, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.127e-04, train_time=0.919 +[ip-10-0-216-33:0/16] 2024-03-01 19:32:56,508 (trainer:762) INFO: 23epoch:train:2001-2500batch: iter_time=1.482e-04, forward_time=0.208, loss_ctc=40.844, loss_att=28.145, acc=0.819, loss=31.955, backward_time=0.237, grad_norm=35.066, clip=100.000, loss_scale=8.225e+33, optim_step_time=0.075, optim0_lr0=2.125e-04, train_time=0.914 +[ip-10-0-216-33:0/16] 2024-03-01 19:36:04,036 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
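For monitoring a run of this length, the `Nepoch results:` summary lines carry the useful signal. A minimal sketch (a hypothetical helper, not part of the recipe) that extracts the per-epoch validation accuracy, i.e. the `valid.acc` criterion, from such a log:

```python
import re

# Collect per-epoch validation accuracy from an ESPnet train.log whose summary
# lines look like: "... 22epoch results: [train] ... [valid] ... acc=0.756 ..."
PATTERN = re.compile(r"(\d+)epoch results:.*\[valid\].*?acc=([\d.]+)")

def valid_acc_by_epoch(log_path: str) -> dict[int, float]:
    accs: dict[int, float] = {}
    with open(log_path) as f:
        for line in f:
            m = PATTERN.search(line)
            if m:
                accs[int(m.group(1))] = float(m.group(2))
    return accs

# On the excerpt above this yields {20: 0.719, 21: 0.731, 22: 0.756, ...},
# the quantity behind "The best model has been updated: valid.acc".
```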
+[ip-10-0-216-33:0/16] 2024-03-01 19:40:36,786 (trainer:762) INFO: 23epoch:train:2501-3000batch: iter_time=1.498e-04, forward_time=0.210, loss_ctc=44.833, loss_att=30.675, acc=0.819, loss=34.922, backward_time=0.234, grad_norm=35.044, clip=100.000, loss_scale=7.315e+33, optim_step_time=0.075, optim0_lr0=2.123e-04, train_time=0.920 +[ip-10-0-216-33:0/16] 2024-03-01 19:48:11,480 (trainer:762) INFO: 23epoch:train:3001-3500batch: iter_time=1.512e-04, forward_time=0.209, loss_ctc=41.355, loss_att=28.605, acc=0.813, loss=32.430, backward_time=0.238, grad_norm=34.551, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.122e-04, train_time=0.909 +[ip-10-0-216-33:0/16] 2024-03-01 19:55:44,713 (trainer:762) INFO: 23epoch:train:3501-4000batch: iter_time=1.512e-04, forward_time=0.209, loss_ctc=44.607, loss_att=30.263, acc=0.816, loss=34.566, backward_time=0.234, grad_norm=35.242, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.120e-04, train_time=0.906 +[ip-10-0-216-33:0/16] 2024-03-01 20:03:24,188 (trainer:762) INFO: 23epoch:train:4001-4500batch: iter_time=1.517e-04, forward_time=0.209, loss_ctc=43.262, loss_att=29.765, acc=0.814, loss=33.814, backward_time=0.235, grad_norm=36.185, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.119e-04, train_time=0.919 +[ip-10-0-216-33:0/16] 2024-03-01 20:10:11,933 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-01 20:11:01,795 (trainer:762) INFO: 23epoch:train:4501-5000batch: iter_time=1.508e-04, forward_time=0.209, loss_ctc=42.728, loss_att=29.255, acc=0.818, loss=33.297, backward_time=0.234, grad_norm=33.813, clip=100.000, loss_scale=7.690e+33, optim_step_time=0.075, optim0_lr0=2.117e-04, train_time=0.915 +[ip-10-0-216-33:0/16] 2024-03-01 20:11:06,582 (multiple_iter_factory:32) INFO: Building 1th iter-factory... 
+[ip-10-0-216-33:0/16] 2024-03-01 20:11:43,205 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-01 20:11:51,207 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-01 20:11:51,208 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1, +[ip-10-0-216-33:0/16] 2024-03-01 20:11:51,212 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-01 20:26:15,675 (trainer:762) INFO: 23epoch:train:5001-5500batch: iter_time=0.897, forward_time=0.209, loss_ctc=43.249, loss_att=29.564, acc=0.822, loss=33.669, backward_time=0.229, grad_norm=35.007, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.115e-04, train_time=1.828 +[ip-10-0-216-33:0/16] 2024-03-01 20:33:53,178 (trainer:762) INFO: 23epoch:train:5501-6000batch: iter_time=1.479e-04, forward_time=0.208, loss_ctc=42.800, loss_att=29.084, acc=0.822, loss=33.199, backward_time=0.236, grad_norm=32.996, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.114e-04, train_time=0.915 +[ip-10-0-216-33:0/16] 2024-03-01 20:41:27,561 (trainer:762) INFO: 23epoch:train:6001-6500batch: iter_time=1.589e-04, forward_time=0.209, loss_ctc=43.952, loss_att=30.038, acc=0.812, loss=34.212, backward_time=0.237, grad_norm=34.124, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.112e-04, train_time=0.909 +[ip-10-0-216-33:0/16] 2024-03-01 20:49:01,042 (trainer:762) INFO: 23epoch:train:6501-7000batch: iter_time=1.465e-04, forward_time=0.208, loss_ctc=43.513, loss_att=29.946, acc=0.814, loss=34.016, backward_time=0.229, grad_norm=35.219, clip=100.000, loss_scale=5.753e+33, optim_step_time=0.075, optim0_lr0=2.111e-04, train_time=0.907 +[ip-10-0-216-33:0/16] 2024-03-01 20:50:40,437 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
+[ip-10-0-216-33:0/16] 2024-03-01 20:56:38,344 (trainer:762) INFO: 23epoch:train:7001-7500batch: iter_time=1.451e-04, forward_time=0.207, loss_ctc=40.742, loss_att=28.011, acc=0.820, loss=31.830, backward_time=0.233, grad_norm=56.709, clip=100.000, loss_scale=6.306e+33, optim_step_time=0.075, optim0_lr0=2.109e-04, train_time=0.914 +[ip-10-0-216-33:0/16] 2024-03-01 21:04:10,823 (trainer:762) INFO: 23epoch:train:7501-8000batch: iter_time=1.431e-04, forward_time=0.209, loss_ctc=44.597, loss_att=30.549, acc=0.819, loss=34.764, backward_time=0.235, grad_norm=35.454, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.108e-04, train_time=0.905 +[ip-10-0-216-33:0/16] 2024-03-01 21:11:45,360 (trainer:762) INFO: 23epoch:train:8001-8500batch: iter_time=1.423e-04, forward_time=0.208, loss_ctc=41.032, loss_att=28.346, acc=0.814, loss=32.152, backward_time=0.233, grad_norm=35.337, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.106e-04, train_time=0.909 +[ip-10-0-216-33:0/16] 2024-03-01 21:19:19,642 (trainer:762) INFO: 23epoch:train:8501-9000batch: iter_time=1.445e-04, forward_time=0.210, loss_ctc=44.194, loss_att=29.960, acc=0.817, loss=34.230, backward_time=0.232, grad_norm=34.246, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.104e-04, train_time=0.908 +[ip-10-0-216-33:0/16] 2024-03-01 21:26:50,787 (trainer:762) INFO: 23epoch:train:9001-9500batch: iter_time=1.486e-04, forward_time=0.209, loss_ctc=43.179, loss_att=29.743, acc=0.814, loss=33.774, backward_time=0.230, grad_norm=35.402, clip=100.000, loss_scale=9.263e+33, optim_step_time=0.075, optim0_lr0=2.103e-04, train_time=0.902 +[ip-10-0-216-33:0/16] 2024-03-01 21:27:02,724 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-01 21:33:02,094 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-01 21:34:22,062 (trainer:762) INFO: 23epoch:train:9501-10000batch: iter_time=1.479e-04, forward_time=0.210, loss_ctc=42.367, loss_att=28.998, acc=0.819, loss=33.009, backward_time=0.233, grad_norm=33.506, clip=100.000, loss_scale=4.859e+33, optim_step_time=0.075, optim0_lr0=2.101e-04, train_time=0.902 +[ip-10-0-216-33:0/16] 2024-03-01 21:34:26,726 (multiple_iter_factory:32) INFO: Building 2th iter-factory... 
+[ip-10-0-216-33:0/16] 2024-03-01 21:35:04,092 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-01 21:35:12,102 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-01 21:35:12,103 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2, +[ip-10-0-216-33:0/16] 2024-03-01 21:35:12,107 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-01 21:49:41,024 (trainer:762) INFO: 23epoch:train:10001-10500batch: iter_time=0.898, forward_time=0.209, loss_ctc=43.192, loss_att=29.519, acc=0.822, loss=33.621, backward_time=0.231, grad_norm=34.880, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.100e-04, train_time=1.838 +[ip-10-0-216-33:0/16] 2024-03-01 21:57:17,908 (trainer:762) INFO: 23epoch:train:10501-11000batch: iter_time=1.530e-04, forward_time=0.209, loss_ctc=42.607, loss_att=29.037, acc=0.823, loss=33.108, backward_time=0.232, grad_norm=33.000, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.098e-04, train_time=0.914 +[ip-10-0-216-33:0/16] 2024-03-01 22:04:58,933 (trainer:762) INFO: 23epoch:train:11001-11500batch: iter_time=1.657e-04, forward_time=0.208, loss_ctc=43.632, loss_att=29.842, acc=0.813, loss=33.979, backward_time=0.232, grad_norm=34.437, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.097e-04, train_time=0.922 +[ip-10-0-216-33:0/16] 2024-03-01 22:12:36,998 (trainer:762) INFO: 23epoch:train:11501-12000batch: iter_time=1.536e-04, forward_time=0.209, loss_ctc=43.184, loss_att=29.847, acc=0.815, loss=33.848, backward_time=0.235, grad_norm=35.094, clip=100.000, loss_scale=3.053e+33, optim_step_time=0.075, optim0_lr0=2.095e-04, train_time=0.916 +[ip-10-0-216-33:0/16] 2024-03-01 22:20:06,973 (trainer:762) INFO: 23epoch:train:12001-12500batch: iter_time=1.457e-04, forward_time=0.210, loss_ctc=40.497, loss_att=27.851, acc=0.820, loss=31.645, backward_time=0.227, grad_norm=33.437, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.094e-04, train_time=0.900 +[ip-10-0-216-33:0/16] 2024-03-01 22:27:42,894 (trainer:762) INFO: 23epoch:train:12501-13000batch: iter_time=1.471e-04, forward_time=0.209, loss_ctc=44.674, loss_att=30.570, acc=0.819, loss=34.801, backward_time=0.234, grad_norm=35.020, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.092e-04, train_time=0.912 +[ip-10-0-216-33:0/16] 2024-03-01 22:35:19,722 (trainer:762) INFO: 23epoch:train:13001-13500batch: iter_time=1.508e-04, forward_time=0.207, loss_ctc=40.803, loss_att=28.248, acc=0.815, loss=32.015, backward_time=0.236, grad_norm=34.732, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.091e-04, train_time=0.913 +[ip-10-0-216-33:0/16] 2024-03-01 22:42:52,842 (trainer:762) INFO: 23epoch:train:13501-14000batch: iter_time=1.520e-04, forward_time=0.211, loss_ctc=43.912, loss_att=29.885, acc=0.818, loss=34.093, backward_time=0.231, 
grad_norm=34.886, clip=100.000, loss_scale=6.106e+33, optim_step_time=0.075, optim0_lr0=2.089e-04, train_time=0.906 +[ip-10-0-216-33:0/16] 2024-03-01 22:43:52,184 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-01 22:50:24,740 (trainer:762) INFO: 23epoch:train:14001-14500batch: iter_time=1.509e-04, forward_time=0.209, loss_ctc=42.713, loss_att=29.374, acc=0.815, loss=33.376, backward_time=0.230, grad_norm=35.018, clip=100.000, loss_scale=5.858e+33, optim_step_time=0.075, optim0_lr0=2.088e-04, train_time=0.904 +[ip-10-0-216-33:0/16] 2024-03-01 22:51:37,062 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-01 22:57:57,190 (trainer:762) INFO: 23epoch:train:14501-15000batch: iter_time=1.487e-04, forward_time=0.211, loss_ctc=42.386, loss_att=28.975, acc=0.820, loss=32.999, backward_time=0.233, grad_norm=38.102, clip=100.000, loss_scale=3.002e+33, optim_step_time=0.075, optim0_lr0=2.086e-04, train_time=0.905 +[ip-10-0-216-33:0/16] 2024-03-01 23:12:27,584 (trainer:361) INFO: 23epoch results: [train] iter_time=0.088, forward_time=0.209, loss_ctc=43.022, loss_att=29.478, acc=0.817, loss=33.541, backward_time=0.233, grad_norm=35.500, clip=100.000, loss_scale=5.348e+33, optim_step_time=0.075, optim0_lr0=2.108e-04, train_time=1.001, time=4 hours, 10 minutes and 39.33 seconds, total_count=345000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=40.329, cer_ctc=0.224, loss_att=30.798, acc=0.736, cer=0.276, wer=0.999, loss=33.657, time=14 minutes and 13.66 seconds, total_count=49059, gpu_max_cached_mem_GB=36.516 +[ip-10-0-216-33:0/16] 2024-03-01 23:12:37,368 (trainer:416) INFO: The best model has been updated: valid.total_count +[ip-10-0-216-33:0/16] 2024-03-01 23:12:37,398 (trainer:470) INFO: The model files were removed: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/16epoch.pth +[ip-10-0-216-33:0/16] 2024-03-01 23:12:37,398 (trainer:290) INFO: 24/45epoch started. Estimated time to finish: 4 days, 1 hour and 8 minutes +[ip-10-0-216-33:0/16] 2024-03-01 23:12:37,405 (multiple_iter_factory:32) INFO: Building 0th iter-factory... 
+[ip-10-0-216-33:0/16] 2024-03-01 23:13:13,087 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-01 23:13:20,925 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-01 23:13:20,926 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1, +[ip-10-0-216-33:0/16] 2024-03-01 23:13:20,930 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-01 23:27:15,722 (trainer:762) INFO: 24epoch:train:1-500batch: iter_time=0.845, forward_time=0.210, loss_ctc=41.223, loss_att=28.462, acc=0.819, loss=32.290, backward_time=0.233, grad_norm=34.700, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.085e-04, train_time=1.756 +[ip-10-0-216-33:0/16] 2024-03-01 23:34:53,811 (trainer:762) INFO: 24epoch:train:501-1000batch: iter_time=1.538e-04, forward_time=0.209, loss_ctc=43.082, loss_att=29.547, acc=0.814, loss=33.607, backward_time=0.233, grad_norm=38.783, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.083e-04, train_time=0.916 +[ip-10-0-216-33:0/16] 2024-03-01 23:42:31,650 (trainer:762) INFO: 24epoch:train:1001-1500batch: iter_time=1.526e-04, forward_time=0.210, loss_ctc=41.477, loss_att=28.825, acc=0.817, loss=32.621, backward_time=0.233, grad_norm=35.025, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.082e-04, train_time=0.915 +[ip-10-0-216-33:0/16] 2024-03-01 23:50:07,661 (trainer:762) INFO: 24epoch:train:1501-2000batch: iter_time=1.537e-04, forward_time=0.209, loss_ctc=42.428, loss_att=29.581, acc=0.814, loss=33.435, backward_time=0.233, grad_norm=35.639, clip=100.000, loss_scale=4.782e+33, optim_step_time=0.075, optim0_lr0=2.080e-04, train_time=0.912 +[ip-10-0-216-33:0/16] 2024-03-01 23:53:57,976 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
+[ip-10-0-216-33:0/16] 2024-03-01 23:57:45,949 (trainer:762) INFO: 24epoch:train:2001-2500batch: iter_time=1.519e-04, forward_time=0.210, loss_ctc=42.458, loss_att=29.215, acc=0.816, loss=33.188, backward_time=0.234, grad_norm=34.283, clip=100.000, loss_scale=3.897e+33, optim_step_time=0.075, optim0_lr0=2.079e-04, train_time=0.916 +[ip-10-0-216-33:0/16] 2024-03-02 00:05:31,638 (trainer:762) INFO: 24epoch:train:2501-3000batch: iter_time=1.543e-04, forward_time=0.208, loss_ctc=42.655, loss_att=29.231, acc=0.814, loss=33.259, backward_time=0.234, grad_norm=34.957, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.077e-04, train_time=0.931 +[ip-10-0-216-33:0/16] 2024-03-02 00:13:08,235 (trainer:762) INFO: 24epoch:train:3001-3500batch: iter_time=1.534e-04, forward_time=0.210, loss_ctc=41.740, loss_att=28.402, acc=0.816, loss=32.403, backward_time=0.235, grad_norm=35.128, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.076e-04, train_time=0.913 +[ip-10-0-216-33:0/16] 2024-03-02 00:20:45,072 (trainer:762) INFO: 24epoch:train:3501-4000batch: iter_time=1.492e-04, forward_time=0.211, loss_ctc=43.602, loss_att=30.107, acc=0.815, loss=34.156, backward_time=0.238, grad_norm=35.209, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.074e-04, train_time=0.913 +[ip-10-0-216-33:0/16] 2024-03-02 00:28:23,311 (trainer:762) INFO: 24epoch:train:4001-4500batch: iter_time=1.505e-04, forward_time=0.208, loss_ctc=41.162, loss_att=28.078, acc=0.824, loss=32.003, backward_time=0.232, grad_norm=33.399, clip=100.000, loss_scale=3.889e+33, optim_step_time=0.075, optim0_lr0=2.073e-04, train_time=0.916 +[ip-10-0-216-33:0/16] 2024-03-02 00:36:07,182 (trainer:762) INFO: 24epoch:train:4501-5000batch: iter_time=1.511e-04, forward_time=0.207, loss_ctc=43.105, loss_att=30.297, acc=0.812, loss=34.140, backward_time=0.233, grad_norm=36.459, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.071e-04, train_time=0.927 +[ip-10-0-216-33:0/16] 2024-03-02 00:36:11,913 (multiple_iter_factory:32) INFO: Building 1th iter-factory... +[ip-10-0-216-33:0/16] 2024-03-02 00:36:48,514 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-02 00:36:56,533 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-02 00:36:56,534 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2, +[ip-10-0-216-33:0/16] 2024-03-02 00:36:56,538 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-02 00:47:35,911 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
+[ip-10-0-216-33:0/16] 2024-03-02 00:51:24,267 (trainer:762) INFO: 24epoch:train:5001-5500batch: iter_time=0.904, forward_time=0.209, loss_ctc=40.911, loss_att=28.427, acc=0.819, loss=32.172, backward_time=0.235, grad_norm=35.218, clip=100.000, loss_scale=3.907e+33, optim_step_time=0.075, optim0_lr0=2.070e-04, train_time=1.834 +[ip-10-0-216-33:0/16] 2024-03-02 00:59:03,027 (trainer:762) INFO: 24epoch:train:5501-6000batch: iter_time=1.468e-04, forward_time=0.209, loss_ctc=42.563, loss_att=29.058, acc=0.815, loss=33.110, backward_time=0.236, grad_norm=35.821, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.068e-04, train_time=0.917 +[ip-10-0-216-33:0/16] 2024-03-02 01:06:34,218 (trainer:762) INFO: 24epoch:train:6001-6500batch: iter_time=1.495e-04, forward_time=0.209, loss_ctc=41.363, loss_att=28.729, acc=0.819, loss=32.519, backward_time=0.230, grad_norm=34.484, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.067e-04, train_time=0.902 +[ip-10-0-216-33:0/16] 2024-03-02 01:14:11,798 (trainer:762) INFO: 24epoch:train:6501-7000batch: iter_time=1.455e-04, forward_time=0.209, loss_ctc=42.188, loss_att=29.455, acc=0.815, loss=33.275, backward_time=0.232, grad_norm=35.513, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.065e-04, train_time=0.915 +[ip-10-0-216-33:0/16] 2024-03-02 01:21:50,655 (trainer:762) INFO: 24epoch:train:7001-7500batch: iter_time=1.600e-04, forward_time=0.207, loss_ctc=42.347, loss_att=29.154, acc=0.817, loss=33.112, backward_time=0.233, grad_norm=34.605, clip=100.000, loss_scale=3.879e+33, optim_step_time=0.075, optim0_lr0=2.064e-04, train_time=0.917 +[ip-10-0-216-33:0/16] 2024-03-02 01:29:25,713 (trainer:762) INFO: 24epoch:train:7501-8000batch: iter_time=1.458e-04, forward_time=0.209, loss_ctc=42.514, loss_att=29.203, acc=0.815, loss=33.197, backward_time=0.234, grad_norm=35.301, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.062e-04, train_time=0.910 +[ip-10-0-216-33:0/16] 2024-03-02 01:36:58,351 (trainer:762) INFO: 24epoch:train:8001-8500batch: iter_time=1.527e-04, forward_time=0.208, loss_ctc=41.544, loss_att=28.296, acc=0.816, loss=32.271, backward_time=0.233, grad_norm=34.374, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.061e-04, train_time=0.905 +[ip-10-0-216-33:0/16] 2024-03-02 01:44:29,525 (trainer:762) INFO: 24epoch:train:8501-9000batch: iter_time=1.484e-04, forward_time=0.211, loss_ctc=43.132, loss_att=29.911, acc=0.816, loss=33.877, backward_time=0.228, grad_norm=34.803, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.059e-04, train_time=0.902 +[ip-10-0-216-33:0/16] 2024-03-02 01:48:37,816 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
+[ip-10-0-216-33:0/16] 2024-03-02 01:52:07,446 (trainer:762) INFO: 24epoch:train:9001-9500batch: iter_time=1.453e-04, forward_time=0.207, loss_ctc=40.759, loss_att=27.848, acc=0.825, loss=31.721, backward_time=0.232, grad_norm=32.131, clip=100.000, loss_scale=5.369e+33, optim_step_time=0.075, optim0_lr0=2.058e-04, train_time=0.916 +[ip-10-0-216-33:0/16] 2024-03-02 01:59:50,176 (trainer:762) INFO: 24epoch:train:9501-10000batch: iter_time=1.487e-04, forward_time=0.208, loss_ctc=42.871, loss_att=30.096, acc=0.813, loss=33.929, backward_time=0.234, grad_norm=35.983, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.056e-04, train_time=0.925 +[ip-10-0-216-33:0/16] 2024-03-02 01:59:54,833 (multiple_iter_factory:32) INFO: Building 2th iter-factory... +[ip-10-0-216-33:0/16] 2024-03-02 02:00:32,303 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-02 02:00:40,326 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-02 02:00:40,326 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0, +[ip-10-0-216-33:0/16] 2024-03-02 02:00:40,331 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-02 02:18:10,212 (trainer:762) INFO: 24epoch:train:10001-10500batch: iter_time=1.246, forward_time=0.210, loss_ctc=40.723, loss_att=28.347, acc=0.819, loss=32.060, backward_time=0.235, grad_norm=34.448, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.055e-04, train_time=2.200 +[ip-10-0-216-33:0/16] 2024-03-02 02:25:43,968 (trainer:762) INFO: 24epoch:train:10501-11000batch: iter_time=1.523e-04, forward_time=0.210, loss_ctc=42.420, loss_att=29.076, acc=0.816, loss=33.079, backward_time=0.234, grad_norm=35.618, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.054e-04, train_time=0.907 +[ip-10-0-216-33:0/16] 2024-03-02 02:33:22,362 (trainer:762) INFO: 24epoch:train:11001-11500batch: iter_time=1.480e-04, forward_time=0.210, loss_ctc=41.117, loss_att=28.501, acc=0.819, loss=32.286, backward_time=0.236, grad_norm=35.322, clip=100.000, loss_scale=7.570e+33, optim_step_time=0.075, optim0_lr0=2.052e-04, train_time=0.917 +[ip-10-0-216-33:0/16] 2024-03-02 02:41:05,608 (trainer:762) INFO: 24epoch:train:11501-12000batch: iter_time=1.529e-04, forward_time=0.208, loss_ctc=42.206, loss_att=29.416, acc=0.815, loss=33.253, backward_time=0.234, grad_norm=35.275, clip=100.000, loss_scale=1.038e+34, optim_step_time=0.075, optim0_lr0=2.051e-04, train_time=0.926 +[ip-10-0-216-33:0/16] 2024-03-02 02:42:26,244 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
+[ip-10-0-216-33:0/16] 2024-03-02 02:48:46,456 (trainer:762) INFO: 24epoch:train:12001-12500batch: iter_time=1.520e-04, forward_time=0.210, loss_ctc=41.903, loss_att=29.012, acc=0.818, loss=32.879, backward_time=0.233, grad_norm=34.657, clip=100.000, loss_scale=6.087e+33, optim_step_time=0.075, optim0_lr0=2.049e-04, train_time=0.921 +[ip-10-0-216-33:0/16] 2024-03-02 02:56:22,007 (trainer:762) INFO: 24epoch:train:12501-13000batch: iter_time=1.507e-04, forward_time=0.208, loss_ctc=42.406, loss_att=29.091, acc=0.816, loss=33.085, backward_time=0.231, grad_norm=35.100, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.048e-04, train_time=0.911 +[ip-10-0-216-33:0/16] 2024-03-02 03:04:01,502 (trainer:762) INFO: 24epoch:train:13001-13500batch: iter_time=1.489e-04, forward_time=0.208, loss_ctc=41.379, loss_att=28.189, acc=0.817, loss=32.146, backward_time=0.239, grad_norm=34.781, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.046e-04, train_time=0.919 +[ip-10-0-216-33:0/16] 2024-03-02 03:11:42,808 (trainer:762) INFO: 24epoch:train:13501-14000batch: iter_time=1.517e-04, forward_time=0.208, loss_ctc=43.262, loss_att=30.009, acc=0.816, loss=33.985, backward_time=0.235, grad_norm=34.088, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.045e-04, train_time=0.922 +[ip-10-0-216-33:0/16] 2024-03-02 03:15:21,539 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-02 03:18:46,196 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-02 03:19:24,277 (trainer:762) INFO: 24epoch:train:14001-14500batch: iter_time=1.596e-04, forward_time=0.209, loss_ctc=40.528, loss_att=27.716, acc=0.826, loss=31.560, backward_time=0.237, grad_norm=32.885, clip=100.000, loss_scale=6.522e+33, optim_step_time=0.075, optim0_lr0=2.044e-04, train_time=0.923 +[ip-10-0-216-33:0/16] 2024-03-02 03:27:05,945 (trainer:762) INFO: 24epoch:train:14501-15000batch: iter_time=1.507e-04, forward_time=0.207, loss_ctc=42.478, loss_att=29.827, acc=0.814, loss=33.622, backward_time=0.235, grad_norm=36.510, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.042e-04, train_time=0.923 +[ip-10-0-216-33:0/16] 2024-03-02 03:41:30,117 (trainer:361) INFO: 24epoch results: [train] iter_time=0.100, forward_time=0.209, loss_ctc=42.051, loss_att=29.037, acc=0.817, loss=32.941, backward_time=0.234, grad_norm=35.017, clip=100.000, loss_scale=4.472e+33, optim_step_time=0.075, optim0_lr0=2.063e-04, train_time=1.018, time=4 hours, 14 minutes and 44.2 seconds, total_count=360000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=39.640, cer_ctc=0.217, loss_att=31.657, acc=0.728, cer=0.313, wer=1.000, loss=34.052, time=14 minutes and 8.21 seconds, total_count=51192, gpu_max_cached_mem_GB=36.516 +[ip-10-0-216-33:0/16] 2024-03-02 03:41:39,958 (trainer:416) INFO: The best model has been updated: valid.total_count +[ip-10-0-216-33:0/16] 2024-03-02 03:41:39,979 (trainer:290) INFO: 25/45epoch started. Estimated time to finish: 3 days, 20 hours and 47 minutes +[ip-10-0-216-33:0/16] 2024-03-02 03:41:39,986 (multiple_iter_factory:32) INFO: Building 0th iter-factory... 
+[ip-10-0-216-33:0/16] 2024-03-02 03:42:16,359 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-02 03:42:24,653 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-02 03:42:24,654 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2, +[ip-10-0-216-33:0/16] 2024-03-02 03:42:24,658 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-02 03:56:16,256 (trainer:762) INFO: 25epoch:train:1-500batch: iter_time=0.841, forward_time=0.210, loss_ctc=42.746, loss_att=29.057, acc=0.817, loss=33.164, backward_time=0.231, grad_norm=37.279, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.041e-04, train_time=1.752 +[ip-10-0-216-33:0/16] 2024-03-02 04:03:54,854 (trainer:762) INFO: 25epoch:train:501-1000batch: iter_time=1.530e-04, forward_time=0.209, loss_ctc=41.092, loss_att=28.391, acc=0.812, loss=32.202, backward_time=0.233, grad_norm=36.933, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.039e-04, train_time=0.917 +[ip-10-0-216-33:0/16] 2024-03-02 04:11:33,832 (trainer:762) INFO: 25epoch:train:1001-1500batch: iter_time=1.514e-04, forward_time=0.208, loss_ctc=42.296, loss_att=28.685, acc=0.814, loss=32.769, backward_time=0.237, grad_norm=35.672, clip=100.000, loss_scale=2.809e+33, optim_step_time=0.075, optim0_lr0=2.038e-04, train_time=0.918 +[ip-10-0-216-33:0/16] 2024-03-02 04:19:07,664 (trainer:762) INFO: 25epoch:train:1501-2000batch: iter_time=1.496e-04, forward_time=0.210, loss_ctc=43.309, loss_att=29.958, acc=0.815, loss=33.964, backward_time=0.241, grad_norm=38.219, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.036e-04, train_time=0.907 +[ip-10-0-216-33:0/16] 2024-03-02 04:26:43,671 (trainer:762) INFO: 25epoch:train:2001-2500batch: iter_time=1.477e-04, forward_time=0.208, loss_ctc=42.798, loss_att=29.283, acc=0.816, loss=33.338, backward_time=0.233, grad_norm=36.222, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.035e-04, train_time=0.912 +[ip-10-0-216-33:0/16] 2024-03-02 04:27:42,055 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
+[ip-10-0-216-33:0/16] 2024-03-02 04:34:25,162 (trainer:762) INFO: 25epoch:train:2501-3000batch: iter_time=1.497e-04, forward_time=0.209, loss_ctc=46.561, loss_att=31.675, acc=0.817, loss=36.141, backward_time=0.232, grad_norm=38.708, clip=100.000, loss_scale=2.924e+33, optim_step_time=0.075, optim0_lr0=2.034e-04, train_time=0.923 +[ip-10-0-216-33:0/16] 2024-03-02 04:41:59,302 (trainer:762) INFO: 25epoch:train:3001-3500batch: iter_time=1.514e-04, forward_time=0.209, loss_ctc=42.037, loss_att=28.308, acc=0.818, loss=32.427, backward_time=0.235, grad_norm=34.461, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.032e-04, train_time=0.908 +[ip-10-0-216-33:0/16] 2024-03-02 04:49:37,773 (trainer:762) INFO: 25epoch:train:3501-4000batch: iter_time=1.484e-04, forward_time=0.207, loss_ctc=42.857, loss_att=29.159, acc=0.810, loss=33.269, backward_time=0.235, grad_norm=36.050, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.031e-04, train_time=0.917 +[ip-10-0-216-33:0/16] 2024-03-02 04:57:14,107 (trainer:762) INFO: 25epoch:train:4001-4500batch: iter_time=1.527e-04, forward_time=0.210, loss_ctc=45.147, loss_att=30.658, acc=0.821, loss=35.005, backward_time=0.238, grad_norm=36.313, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.029e-04, train_time=0.912 +[ip-10-0-216-33:0/16] 2024-03-02 05:04:56,011 (trainer:762) INFO: 25epoch:train:4501-5000batch: iter_time=1.472e-04, forward_time=0.206, loss_ctc=42.938, loss_att=29.189, acc=0.808, loss=33.314, backward_time=0.230, grad_norm=36.517, clip=100.000, loss_scale=4.860e+33, optim_step_time=0.075, optim0_lr0=2.028e-04, train_time=0.924 +[ip-10-0-216-33:0/16] 2024-03-02 05:05:00,731 (multiple_iter_factory:32) INFO: Building 1th iter-factory... +[ip-10-0-216-33:0/16] 2024-03-02 05:05:37,625 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-02 05:05:45,795 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-02 05:05:45,795 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0, +[ip-10-0-216-33:0/16] 2024-03-02 05:05:45,799 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-02 05:20:15,490 (trainer:762) INFO: 25epoch:train:5001-5500batch: iter_time=0.893, forward_time=0.207, loss_ctc=42.282, loss_att=28.683, acc=0.820, loss=32.763, backward_time=0.232, grad_norm=37.224, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.027e-04, train_time=1.839 +[ip-10-0-216-33:0/16] 2024-03-02 05:21:38,656 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
+[ip-10-0-216-33:0/16] 2024-03-02 05:27:55,901 (trainer:762) INFO: 25epoch:train:5501-6000batch: iter_time=1.465e-04, forward_time=0.206, loss_ctc=40.653, loss_att=28.206, acc=0.813, loss=31.940, backward_time=0.231, grad_norm=36.338, clip=100.000, loss_scale=3.064e+33, optim_step_time=0.075, optim0_lr0=2.025e-04, train_time=0.921 +[ip-10-0-216-33:0/16] 2024-03-02 05:35:30,396 (trainer:762) INFO: 25epoch:train:6001-6500batch: iter_time=1.482e-04, forward_time=0.207, loss_ctc=41.812, loss_att=28.395, acc=0.815, loss=32.420, backward_time=0.239, grad_norm=35.894, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.024e-04, train_time=0.909 +[ip-10-0-216-33:0/16] 2024-03-02 05:43:07,089 (trainer:762) INFO: 25epoch:train:6501-7000batch: iter_time=1.450e-04, forward_time=0.207, loss_ctc=43.082, loss_att=29.813, acc=0.816, loss=33.794, backward_time=0.233, grad_norm=37.756, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.023e-04, train_time=0.913 +[ip-10-0-216-33:0/16] 2024-03-02 05:50:46,119 (trainer:762) INFO: 25epoch:train:7001-7500batch: iter_time=1.468e-04, forward_time=0.208, loss_ctc=41.928, loss_att=28.923, acc=0.817, loss=32.825, backward_time=0.236, grad_norm=34.661, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.021e-04, train_time=0.918 +[ip-10-0-216-33:0/16] 2024-03-02 05:58:21,186 (trainer:762) INFO: 25epoch:train:7501-8000batch: iter_time=1.475e-04, forward_time=0.209, loss_ctc=46.083, loss_att=31.396, acc=0.818, loss=35.803, backward_time=0.237, grad_norm=38.502, clip=100.000, loss_scale=4.720e+33, optim_step_time=0.075, optim0_lr0=2.020e-04, train_time=0.910 +[ip-10-0-216-33:0/16] 2024-03-02 06:05:56,153 (trainer:762) INFO: 25epoch:train:8001-8500batch: iter_time=1.469e-04, forward_time=0.209, loss_ctc=41.792, loss_att=28.192, acc=0.820, loss=32.272, backward_time=0.232, grad_norm=47.345, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.018e-04, train_time=0.910 +[ip-10-0-216-33:0/16] 2024-03-02 06:13:27,910 (trainer:762) INFO: 25epoch:train:8501-9000batch: iter_time=1.497e-04, forward_time=0.209, loss_ctc=42.461, loss_att=28.965, acc=0.811, loss=33.014, backward_time=0.234, grad_norm=35.801, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.017e-04, train_time=0.903 +[ip-10-0-216-33:0/16] 2024-03-02 06:21:02,125 (trainer:762) INFO: 25epoch:train:9001-9500batch: iter_time=1.488e-04, forward_time=0.209, loss_ctc=44.873, loss_att=30.484, acc=0.822, loss=34.800, backward_time=0.233, grad_norm=36.406, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.016e-04, train_time=0.908 +[ip-10-0-216-33:0/16] 2024-03-02 06:28:33,482 (trainer:762) INFO: 25epoch:train:9501-10000batch: iter_time=1.502e-04, forward_time=0.210, loss_ctc=42.623, loss_att=29.009, acc=0.809, loss=33.094, backward_time=0.229, grad_norm=36.064, clip=100.000, loss_scale=9.440e+33, optim_step_time=0.075, optim0_lr0=2.014e-04, train_time=0.902 +[ip-10-0-216-33:0/16] 2024-03-02 06:28:38,108 (multiple_iter_factory:32) INFO: Building 2th iter-factory... 
+[ip-10-0-216-33:0/16] 2024-03-02 06:29:15,606 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-02 06:29:23,577 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-02 06:29:23,577 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1, +[ip-10-0-216-33:0/16] 2024-03-02 06:29:23,582 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-02 06:40:19,855 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-02 06:43:45,994 (trainer:762) INFO: 25epoch:train:10001-10500batch: iter_time=0.897, forward_time=0.211, loss_ctc=42.013, loss_att=28.557, acc=0.820, loss=32.594, backward_time=0.230, grad_norm=35.737, clip=100.000, loss_scale=8.033e+33, optim_step_time=0.075, optim0_lr0=2.013e-04, train_time=1.825 +[ip-10-0-216-33:0/16] 2024-03-02 06:51:21,167 (trainer:762) INFO: 25epoch:train:10501-11000batch: iter_time=1.531e-04, forward_time=0.210, loss_ctc=40.231, loss_att=27.954, acc=0.814, loss=31.637, backward_time=0.235, grad_norm=36.584, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.012e-04, train_time=0.910 +[ip-10-0-216-33:0/16] 2024-03-02 06:58:54,083 (trainer:762) INFO: 25epoch:train:11001-11500batch: iter_time=1.529e-04, forward_time=0.211, loss_ctc=41.600, loss_att=28.229, acc=0.815, loss=32.241, backward_time=0.238, grad_norm=35.730, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.010e-04, train_time=0.906 +[ip-10-0-216-33:0/16] 2024-03-02 07:06:30,835 (trainer:762) INFO: 25epoch:train:11501-12000batch: iter_time=1.535e-04, forward_time=0.208, loss_ctc=42.881, loss_att=29.755, acc=0.817, loss=33.693, backward_time=0.232, grad_norm=36.970, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.009e-04, train_time=0.913 +[ip-10-0-216-33:0/16] 2024-03-02 07:10:53,554 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-02 07:14:08,329 (trainer:762) INFO: 25epoch:train:12001-12500batch: iter_time=1.484e-04, forward_time=0.210, loss_ctc=41.874, loss_att=28.794, acc=0.818, loss=32.718, backward_time=0.236, grad_norm=35.070, clip=100.000, loss_scale=5.328e+33, optim_step_time=0.075, optim0_lr0=2.008e-04, train_time=0.915 +[ip-10-0-216-33:0/16] 2024-03-02 07:21:45,553 (trainer:762) INFO: 25epoch:train:12501-13000batch: iter_time=1.494e-04, forward_time=0.209, loss_ctc=46.130, loss_att=31.400, acc=0.819, loss=35.819, backward_time=0.232, grad_norm=38.817, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=2.006e-04, train_time=0.914 +[ip-10-0-216-33:0/16] 2024-03-02 07:26:51,614 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
+[ip-10-0-216-33:0/16] 2024-03-02 07:29:22,889 (trainer:762) INFO: 25epoch:train:13001-13500batch: iter_time=1.529e-04, forward_time=0.208, loss_ctc=41.715, loss_att=28.139, acc=0.820, loss=32.212, backward_time=0.230, grad_norm=50.256, clip=100.000, loss_scale=4.339e+33, optim_step_time=0.075, optim0_lr0=2.005e-04, train_time=0.914 +[ip-10-0-216-33:0/16] 2024-03-02 07:37:02,429 (trainer:762) INFO: 25epoch:train:13501-14000batch: iter_time=1.494e-04, forward_time=0.206, loss_ctc=42.183, loss_att=28.853, acc=0.812, loss=32.852, backward_time=0.226, grad_norm=35.612, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.004e-04, train_time=0.919 +[ip-10-0-216-33:0/16] 2024-03-02 07:44:40,249 (trainer:762) INFO: 25epoch:train:14001-14500batch: iter_time=1.465e-04, forward_time=0.209, loss_ctc=44.722, loss_att=30.339, acc=0.823, loss=34.654, backward_time=0.236, grad_norm=36.695, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.002e-04, train_time=0.915 +[ip-10-0-216-33:0/16] 2024-03-02 07:52:14,045 (trainer:762) INFO: 25epoch:train:14501-15000batch: iter_time=1.503e-04, forward_time=0.210, loss_ctc=42.443, loss_att=28.928, acc=0.809, loss=32.983, backward_time=0.238, grad_norm=36.293, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=2.001e-04, train_time=0.907 +[ip-10-0-216-33:0/16] 2024-03-02 08:09:40,900 (trainer:361) INFO: 25epoch results: [train] iter_time=0.088, forward_time=0.209, loss_ctc=42.839, loss_att=29.246, acc=0.816, loss=33.324, backward_time=0.234, grad_norm=37.337, clip=100.000, loss_scale=4.200e+33, optim_step_time=0.075, optim0_lr0=2.021e-04, train_time=1.002, time=4 hours, 10 minutes and 51.85 seconds, total_count=375000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=38.967, cer_ctc=0.216, loss_att=29.761, acc=0.749, cer=0.242, wer=0.996, loss=32.522, time=17 minutes and 8.74 seconds, total_count=53325, gpu_max_cached_mem_GB=36.516 +[ip-10-0-216-33:0/16] 2024-03-02 08:09:50,801 (trainer:416) INFO: The best model has been updated: valid.total_count +[ip-10-0-216-33:0/16] 2024-03-02 08:09:50,839 (trainer:470) INFO: The model files were removed: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/20epoch.pth +[ip-10-0-216-33:0/16] 2024-03-02 08:09:50,840 (trainer:290) INFO: 26/45epoch started. Estimated time to finish: 3 days, 16 hours and 24 minutes +[ip-10-0-216-33:0/16] 2024-03-02 08:09:50,847 (multiple_iter_factory:32) INFO: Building 0th iter-factory... 
+[ip-10-0-216-33:0/16] 2024-03-02 08:10:27,220 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-02 08:10:35,261 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-02 08:10:35,261 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0, +[ip-10-0-216-33:0/16] 2024-03-02 08:10:35,265 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-02 08:24:38,774 (trainer:762) INFO: 26epoch:train:1-500batch: iter_time=0.843, forward_time=0.209, loss_ctc=41.124, loss_att=28.048, acc=0.823, loss=31.971, backward_time=0.236, grad_norm=35.770, clip=100.000, loss_scale=3.448e+33, optim_step_time=0.075, optim0_lr0=2.000e-04, train_time=1.776 +[ip-10-0-216-33:0/16] 2024-03-02 08:32:14,535 (trainer:762) INFO: 26epoch:train:501-1000batch: iter_time=1.490e-04, forward_time=0.210, loss_ctc=44.775, loss_att=30.626, acc=0.820, loss=34.871, backward_time=0.234, grad_norm=37.231, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.998e-04, train_time=0.911 +[ip-10-0-216-33:0/16] 2024-03-02 08:39:49,814 (trainer:762) INFO: 26epoch:train:1001-1500batch: iter_time=1.487e-04, forward_time=0.209, loss_ctc=40.701, loss_att=28.244, acc=0.822, loss=31.981, backward_time=0.238, grad_norm=35.664, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.997e-04, train_time=0.910 +[ip-10-0-216-33:0/16] 2024-03-02 08:47:27,086 (trainer:762) INFO: 26epoch:train:1501-2000batch: iter_time=1.488e-04, forward_time=0.209, loss_ctc=43.070, loss_att=29.471, acc=0.827, loss=33.551, backward_time=0.236, grad_norm=35.857, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.996e-04, train_time=0.914 +[ip-10-0-216-33:0/16] 2024-03-02 08:53:21,534 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-02 08:54:19,539 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
+[ip-10-0-216-33:0/16] 2024-03-02 08:55:01,013 (trainer:762) INFO: 26epoch:train:2001-2500batch: iter_time=1.508e-04, forward_time=0.210, loss_ctc=42.708, loss_att=29.606, acc=0.822, loss=33.537, backward_time=0.235, grad_norm=36.909, clip=100.000, loss_scale=5.505e+33, optim_step_time=0.075, optim0_lr0=1.994e-04, train_time=0.908 +[ip-10-0-216-33:0/16] 2024-03-02 09:02:35,605 (trainer:762) INFO: 26epoch:train:2501-3000batch: iter_time=1.506e-04, forward_time=0.209, loss_ctc=42.218, loss_att=29.131, acc=0.820, loss=33.058, backward_time=0.233, grad_norm=36.891, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.993e-04, train_time=0.909 +[ip-10-0-216-33:0/16] 2024-03-02 09:10:15,631 (trainer:762) INFO: 26epoch:train:3001-3500batch: iter_time=1.523e-04, forward_time=0.209, loss_ctc=40.255, loss_att=27.361, acc=0.823, loss=31.229, backward_time=0.236, grad_norm=35.531, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.992e-04, train_time=0.920 +[ip-10-0-216-33:0/16] 2024-03-02 09:17:58,883 (trainer:762) INFO: 26epoch:train:3501-4000batch: iter_time=1.477e-04, forward_time=0.209, loss_ctc=42.607, loss_att=29.586, acc=0.821, loss=33.492, backward_time=0.238, grad_norm=36.318, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.990e-04, train_time=0.926 +[ip-10-0-216-33:0/16] 2024-03-02 09:25:40,883 (trainer:762) INFO: 26epoch:train:4001-4500batch: iter_time=1.468e-04, forward_time=0.209, loss_ctc=42.280, loss_att=28.969, acc=0.823, loss=32.963, backward_time=0.232, grad_norm=34.822, clip=100.000, loss_scale=2.835e+33, optim_step_time=0.075, optim0_lr0=1.989e-04, train_time=0.924 +[ip-10-0-216-33:0/16] 2024-03-02 09:32:27,954 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-02 09:33:15,146 (trainer:762) INFO: 26epoch:train:4501-5000batch: iter_time=1.476e-04, forward_time=0.211, loss_ctc=43.168, loss_att=29.874, acc=0.817, loss=33.862, backward_time=0.235, grad_norm=37.870, clip=100.000, loss_scale=4.927e+33, optim_step_time=0.075, optim0_lr0=1.988e-04, train_time=0.908 +[ip-10-0-216-33:0/16] 2024-03-02 09:33:20,635 (multiple_iter_factory:32) INFO: Building 1th iter-factory... 
+[ip-10-0-216-33:0/16] 2024-03-02 09:33:56,937 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-02 09:34:04,853 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-02 09:34:04,853 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1, +[ip-10-0-216-33:0/16] 2024-03-02 09:34:04,858 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-02 09:48:25,853 (trainer:762) INFO: 26epoch:train:5001-5500batch: iter_time=0.887, forward_time=0.210, loss_ctc=40.932, loss_att=27.995, acc=0.823, loss=31.876, backward_time=0.235, grad_norm=35.834, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.986e-04, train_time=1.821 +[ip-10-0-216-33:0/16] 2024-03-02 09:55:59,670 (trainer:762) INFO: 26epoch:train:5501-6000batch: iter_time=1.434e-04, forward_time=0.210, loss_ctc=44.515, loss_att=30.665, acc=0.820, loss=34.820, backward_time=0.234, grad_norm=37.408, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.985e-04, train_time=0.907 +[ip-10-0-216-33:0/16] 2024-03-02 10:03:30,483 (trainer:762) INFO: 26epoch:train:6001-6500batch: iter_time=1.448e-04, forward_time=0.208, loss_ctc=40.413, loss_att=28.182, acc=0.822, loss=31.851, backward_time=0.235, grad_norm=35.045, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.984e-04, train_time=0.901 +[ip-10-0-216-33:0/16] 2024-03-02 10:11:07,890 (trainer:762) INFO: 26epoch:train:6501-7000batch: iter_time=1.409e-04, forward_time=0.210, loss_ctc=42.700, loss_att=29.266, acc=0.828, loss=33.297, backward_time=0.232, grad_norm=36.461, clip=100.000, loss_scale=2.861e+33, optim_step_time=0.075, optim0_lr0=1.982e-04, train_time=0.915 +[ip-10-0-216-33:0/16] 2024-03-02 10:18:42,176 (trainer:762) INFO: 26epoch:train:7001-7500batch: iter_time=1.415e-04, forward_time=0.209, loss_ctc=42.451, loss_att=29.495, acc=0.822, loss=33.382, backward_time=0.229, grad_norm=36.201, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.981e-04, train_time=0.908 +[ip-10-0-216-33:0/16] 2024-03-02 10:26:21,553 (trainer:762) INFO: 26epoch:train:7501-8000batch: iter_time=1.465e-04, forward_time=0.209, loss_ctc=41.989, loss_att=28.950, acc=0.821, loss=32.862, backward_time=0.233, grad_norm=36.293, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.980e-04, train_time=0.919 +[ip-10-0-216-33:0/16] 2024-03-02 10:33:58,001 (trainer:762) INFO: 26epoch:train:8001-8500batch: iter_time=1.453e-04, forward_time=0.207, loss_ctc=39.941, loss_att=27.146, acc=0.823, loss=30.985, backward_time=0.230, grad_norm=35.467, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.979e-04, train_time=0.913 +[ip-10-0-216-33:0/16] 2024-03-02 10:41:29,105 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
+[ip-10-0-216-33:0/16] 2024-03-02 10:41:32,604 (trainer:762) INFO: 26epoch:train:8501-9000batch: iter_time=1.395e-04, forward_time=0.209, loss_ctc=42.319, loss_att=29.752, acc=0.821, loss=33.522, backward_time=0.234, grad_norm=36.901, clip=100.000, loss_scale=5.671e+33, optim_step_time=0.075, optim0_lr0=1.977e-04, train_time=0.909 +[ip-10-0-216-33:0/16] 2024-03-02 10:49:10,053 (trainer:762) INFO: 26epoch:train:9001-9500batch: iter_time=1.510e-04, forward_time=0.209, loss_ctc=41.794, loss_att=28.682, acc=0.824, loss=32.616, backward_time=0.237, grad_norm=34.051, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.976e-04, train_time=0.915 +[ip-10-0-216-33:0/16] 2024-03-02 10:54:34,077 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-02 10:56:50,141 (trainer:762) INFO: 26epoch:train:9501-10000batch: iter_time=1.426e-04, forward_time=0.206, loss_ctc=43.157, loss_att=29.922, acc=0.817, loss=33.892, backward_time=0.234, grad_norm=36.970, clip=100.000, loss_scale=4.417e+33, optim_step_time=0.075, optim0_lr0=1.975e-04, train_time=0.920 +[ip-10-0-216-33:0/16] 2024-03-02 10:56:55,297 (multiple_iter_factory:32) INFO: Building 2th iter-factory... +[ip-10-0-216-33:0/16] 2024-03-02 10:57:32,349 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-02 10:57:40,457 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-02 10:57:40,458 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2, +[ip-10-0-216-33:0/16] 2024-03-02 10:57:40,462 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-02 11:12:10,763 (trainer:762) INFO: 26epoch:train:10001-10500batch: iter_time=0.912, forward_time=0.209, loss_ctc=40.718, loss_att=27.859, acc=0.824, loss=31.717, backward_time=0.231, grad_norm=35.258, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.973e-04, train_time=1.841 +[ip-10-0-216-33:0/16] 2024-03-02 11:19:44,519 (trainer:762) INFO: 26epoch:train:10501-11000batch: iter_time=1.460e-04, forward_time=0.210, loss_ctc=44.215, loss_att=30.528, acc=0.821, loss=34.634, backward_time=0.231, grad_norm=36.889, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.972e-04, train_time=0.907 +[ip-10-0-216-33:0/16] 2024-03-02 11:23:44,020 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
+[ip-10-0-216-33:0/16] 2024-03-02 11:27:21,486 (trainer:762) INFO: 26epoch:train:11001-11500batch: iter_time=1.532e-04, forward_time=0.209, loss_ctc=40.267, loss_att=28.107, acc=0.822, loss=31.755, backward_time=0.229, grad_norm=63.949, clip=100.000, loss_scale=1.987e+33, optim_step_time=0.075, optim0_lr0=1.971e-04, train_time=0.914
+[ip-10-0-216-33:0/16] 2024-03-02 11:35:04,406 (trainer:762) INFO: 26epoch:train:11501-12000batch: iter_time=1.538e-04, forward_time=0.207, loss_ctc=42.530, loss_att=29.190, acc=0.828, loss=33.192, backward_time=0.233, grad_norm=35.424, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.970e-04, train_time=0.926
+[ip-10-0-216-33:0/16] 2024-03-02 11:42:45,178 (trainer:762) INFO: 26epoch:train:12001-12500batch: iter_time=1.497e-04, forward_time=0.209, loss_ctc=42.062, loss_att=29.299, acc=0.823, loss=33.128, backward_time=0.232, grad_norm=35.527, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.968e-04, train_time=0.921
+[ip-10-0-216-33:0/16] 2024-03-02 11:50:23,322 (trainer:762) INFO: 26epoch:train:12501-13000batch: iter_time=1.473e-04, forward_time=0.208, loss_ctc=41.781, loss_att=28.867, acc=0.821, loss=32.741, backward_time=0.231, grad_norm=36.030, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.967e-04, train_time=0.916
+[ip-10-0-216-33:0/16] 2024-03-02 11:57:58,918 (trainer:762) INFO: 26epoch:train:13001-13500batch: iter_time=1.444e-04, forward_time=0.208, loss_ctc=39.734, loss_att=26.988, acc=0.824, loss=30.812, backward_time=0.232, grad_norm=34.628, clip=100.000, loss_scale=1.906e+33, optim_step_time=0.075, optim0_lr0=1.966e-04, train_time=0.911
+[ip-10-0-216-33:0/16] 2024-03-02 12:05:36,968 (trainer:762) INFO: 26epoch:train:13501-14000batch: iter_time=1.490e-04, forward_time=0.208, loss_ctc=42.177, loss_att=29.494, acc=0.822, loss=33.299, backward_time=0.233, grad_norm=39.726, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.964e-04, train_time=0.916
+[ip-10-0-216-33:0/16] 2024-03-02 12:13:14,593 (trainer:762) INFO: 26epoch:train:14001-14500batch: iter_time=1.507e-04, forward_time=0.209, loss_ctc=41.762, loss_att=28.630, acc=0.825, loss=32.569, backward_time=0.237, grad_norm=33.585, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.963e-04, train_time=0.915
+[ip-10-0-216-33:0/16] 2024-03-02 12:20:51,919 (trainer:762) INFO: 26epoch:train:14501-15000batch: iter_time=1.514e-04, forward_time=0.208, loss_ctc=42.747, loss_att=29.624, acc=0.819, loss=33.561, backward_time=0.232, grad_norm=37.135, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.962e-04, train_time=0.914
+[ip-10-0-216-33:0/16] 2024-03-02 12:35:02,489 (trainer:361) INFO: 26epoch results: [train] iter_time=0.088, forward_time=0.209, loss_ctc=42.037, loss_att=28.985, acc=0.822, loss=32.901, backward_time=0.234, grad_norm=37.053, clip=100.000, loss_scale=3.411e+33, optim_step_time=0.075, optim0_lr0=1.981e-04, train_time=1.004, time=4 hours, 11 minutes and 19.56 seconds, total_count=390000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=38.746, cer_ctc=0.211, loss_att=30.664, acc=0.736, cer=0.312, wer=1.000, loss=33.089, time=13 minutes and 51.8 seconds, total_count=55458, gpu_max_cached_mem_GB=36.516
+[ip-10-0-216-33:0/16] 2024-03-02 12:35:12,234 (trainer:416) INFO: The best model has been updated: valid.total_count
+[ip-10-0-216-33:0/16] 2024-03-02 12:35:12,270 (trainer:470) INFO: The model files were removed: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/21epoch.pth
+[ip-10-0-216-33:0/16] 2024-03-02 12:35:12,270 (trainer:290) INFO: 27/45epoch started. Estimated time to finish: 3 days, 11 hours and 59 minutes
+[ip-10-0-216-33:0/16] 2024-03-02 12:35:12,277 (multiple_iter_factory:32) INFO: Building 0th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-02 12:35:48,293 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-02 12:35:56,574 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+  speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"}
+  text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"}
+  text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"}
+  text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"}
+  preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-02 12:35:56,574 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0,
+[ip-10-0-216-33:0/16] 2024-03-02 12:35:56,579 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-02 12:49:51,500 (trainer:762) INFO: 27epoch:train:1-500batch: iter_time=0.843, forward_time=0.210, loss_ctc=43.093, loss_att=29.722, acc=0.820, loss=33.733, backward_time=0.236, grad_norm=38.367, clip=100.000, loss_scale=3.811e+33, optim_step_time=0.075, optim0_lr0=1.961e-04, train_time=1.758
+[ip-10-0-216-33:0/16] 2024-03-02 12:57:26,096 (trainer:762) INFO: 27epoch:train:501-1000batch: iter_time=1.500e-04, forward_time=0.209, loss_ctc=42.408, loss_att=29.334, acc=0.819, loss=33.256, backward_time=0.230, grad_norm=38.161, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.959e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-03-02 13:05:09,378 (trainer:762) INFO: 27epoch:train:1001-1500batch: iter_time=1.515e-04, forward_time=0.208, loss_ctc=41.784, loss_att=28.400, acc=0.821, loss=32.415, backward_time=0.236, grad_norm=37.123, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.958e-04, train_time=0.926
+[ip-10-0-216-33:0/16] 2024-03-02 13:12:53,125 (trainer:762) INFO: 27epoch:train:1501-2000batch: iter_time=1.487e-04, forward_time=0.208, loss_ctc=44.717, loss_att=30.929, acc=0.815, loss=35.065, backward_time=0.234, grad_norm=40.473, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.957e-04, train_time=0.927
+[ip-10-0-216-33:0/16] 2024-03-02 13:18:02,548 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-02 13:20:23,989 (trainer:762) INFO: 27epoch:train:2001-2500batch: iter_time=1.489e-04, forward_time=0.209, loss_ctc=40.644, loss_att=27.983, acc=0.816, loss=31.781, backward_time=0.231, grad_norm=37.460, clip=100.000, loss_scale=5.973e+33, optim_step_time=0.075, optim0_lr0=1.956e-04, train_time=0.901 +[ip-10-0-216-33:0/16] 2024-03-02 13:27:57,926 (trainer:762) INFO: 27epoch:train:2501-3000batch: iter_time=1.474e-04, forward_time=0.209, loss_ctc=44.070, loss_att=29.672, acc=0.817, loss=33.992, backward_time=0.232, grad_norm=38.546, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.954e-04, train_time=0.908 +[ip-10-0-216-33:0/16] 2024-03-02 13:35:32,185 (trainer:762) INFO: 27epoch:train:3001-3500batch: iter_time=1.496e-04, forward_time=0.210, loss_ctc=40.563, loss_att=27.688, acc=0.823, loss=31.551, backward_time=0.230, grad_norm=35.124, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.953e-04, train_time=0.908 +[ip-10-0-216-33:0/16] 2024-03-02 13:43:06,244 (trainer:762) INFO: 27epoch:train:3501-4000batch: iter_time=1.542e-04, forward_time=0.209, loss_ctc=39.792, loss_att=27.553, acc=0.827, loss=31.224, backward_time=0.236, grad_norm=34.627, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.952e-04, train_time=0.908 +[ip-10-0-216-33:0/16] 2024-03-02 13:50:43,498 (trainer:762) INFO: 27epoch:train:4001-4500batch: iter_time=1.472e-04, forward_time=0.209, loss_ctc=41.092, loss_att=29.742, acc=0.819, loss=33.147, backward_time=0.234, grad_norm=38.546, clip=100.000, loss_scale=6.833e+33, optim_step_time=0.075, optim0_lr0=1.951e-04, train_time=0.914 +[ip-10-0-216-33:0/16] 2024-03-02 13:55:17,364 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-02 13:58:17,185 (trainer:762) INFO: 27epoch:train:4501-5000batch: iter_time=1.535e-04, forward_time=0.209, loss_ctc=42.827, loss_att=29.389, acc=0.818, loss=33.421, backward_time=0.236, grad_norm=37.872, clip=100.000, loss_scale=8.356e+33, optim_step_time=0.075, optim0_lr0=1.950e-04, train_time=0.907 +[ip-10-0-216-33:0/16] 2024-03-02 13:58:21,529 (multiple_iter_factory:32) INFO: Building 1th iter-factory... 
+[ip-10-0-216-33:0/16] 2024-03-02 13:58:58,264 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-02 13:59:06,982 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-02 13:59:06,982 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1, +[ip-10-0-216-33:0/16] 2024-03-02 13:59:06,987 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-02 14:13:27,005 (trainer:762) INFO: 27epoch:train:5001-5500batch: iter_time=0.897, forward_time=0.210, loss_ctc=42.641, loss_att=29.337, acc=0.821, loss=33.328, backward_time=0.231, grad_norm=38.185, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.948e-04, train_time=1.819 +[ip-10-0-216-33:0/16] 2024-03-02 14:20:57,776 (trainer:762) INFO: 27epoch:train:5501-6000batch: iter_time=1.505e-04, forward_time=0.209, loss_ctc=42.016, loss_att=29.087, acc=0.820, loss=32.966, backward_time=0.227, grad_norm=37.145, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.947e-04, train_time=0.901 +[ip-10-0-216-33:0/16] 2024-03-02 14:28:35,196 (trainer:762) INFO: 27epoch:train:6001-6500batch: iter_time=1.480e-04, forward_time=0.209, loss_ctc=41.554, loss_att=28.230, acc=0.822, loss=32.228, backward_time=0.234, grad_norm=39.478, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.946e-04, train_time=0.915 +[ip-10-0-216-33:0/16] 2024-03-02 14:34:52,034 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-02 14:36:05,967 (trainer:762) INFO: 27epoch:train:6501-7000batch: iter_time=1.454e-04, forward_time=0.208, loss_ctc=44.197, loss_att=30.548, acc=0.816, loss=34.643, backward_time=0.223, grad_norm=39.443, clip=100.000, loss_scale=6.358e+33, optim_step_time=0.075, optim0_lr0=1.945e-04, train_time=0.901 +[ip-10-0-216-33:0/16] 2024-03-02 14:43:18,958 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
+[ip-10-0-216-33:0/16] 2024-03-02 14:43:33,194 (trainer:762) INFO: 27epoch:train:7001-7500batch: iter_time=1.483e-04, forward_time=0.209, loss_ctc=40.145, loss_att=27.563, acc=0.817, loss=31.337, backward_time=0.236, grad_norm=36.682, clip=100.000, loss_scale=5.109e+33, optim_step_time=0.075, optim0_lr0=1.943e-04, train_time=0.894 +[ip-10-0-216-33:0/16] 2024-03-02 14:51:07,900 (trainer:762) INFO: 27epoch:train:7501-8000batch: iter_time=1.467e-04, forward_time=0.209, loss_ctc=43.477, loss_att=29.485, acc=0.817, loss=33.683, backward_time=0.238, grad_norm=38.434, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.942e-04, train_time=0.909 +[ip-10-0-216-33:0/16] 2024-03-02 14:58:41,305 (trainer:762) INFO: 27epoch:train:8001-8500batch: iter_time=1.465e-04, forward_time=0.210, loss_ctc=40.241, loss_att=27.514, acc=0.824, loss=31.332, backward_time=0.236, grad_norm=36.393, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.941e-04, train_time=0.907 +[ip-10-0-216-33:0/16] 2024-03-02 15:06:14,053 (trainer:762) INFO: 27epoch:train:8501-9000batch: iter_time=1.459e-04, forward_time=0.208, loss_ctc=39.474, loss_att=27.383, acc=0.828, loss=31.010, backward_time=0.234, grad_norm=36.086, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.940e-04, train_time=0.905 +[ip-10-0-216-33:0/16] 2024-03-02 15:13:48,741 (trainer:762) INFO: 27epoch:train:9001-9500batch: iter_time=1.477e-04, forward_time=0.210, loss_ctc=41.084, loss_att=29.807, acc=0.819, loss=33.190, backward_time=0.229, grad_norm=36.699, clip=100.000, loss_scale=2.679e+33, optim_step_time=0.075, optim0_lr0=1.938e-04, train_time=0.909 +[ip-10-0-216-33:0/16] 2024-03-02 15:21:18,471 (trainer:762) INFO: 27epoch:train:9501-10000batch: iter_time=1.457e-04, forward_time=0.208, loss_ctc=42.629, loss_att=29.202, acc=0.820, loss=33.230, backward_time=0.237, grad_norm=37.755, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.937e-04, train_time=0.899 +[ip-10-0-216-33:0/16] 2024-03-02 15:21:23,018 (multiple_iter_factory:32) INFO: Building 2th iter-factory... 
+[ip-10-0-216-33:0/16] 2024-03-02 15:22:00,373 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-02 15:22:09,339 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-02 15:22:09,339 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2, +[ip-10-0-216-33:0/16] 2024-03-02 15:22:09,343 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-02 15:36:37,748 (trainer:762) INFO: 27epoch:train:10001-10500batch: iter_time=0.892, forward_time=0.208, loss_ctc=42.488, loss_att=29.301, acc=0.821, loss=33.257, backward_time=0.233, grad_norm=38.236, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.936e-04, train_time=1.838 +[ip-10-0-216-33:0/16] 2024-03-02 15:44:18,750 (trainer:762) INFO: 27epoch:train:10501-11000batch: iter_time=1.524e-04, forward_time=0.208, loss_ctc=42.162, loss_att=29.195, acc=0.820, loss=33.085, backward_time=0.233, grad_norm=37.698, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.935e-04, train_time=0.922 +[ip-10-0-216-33:0/16] 2024-03-02 15:44:47,438 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-02 15:51:57,464 (trainer:762) INFO: 27epoch:train:11001-11500batch: iter_time=1.499e-04, forward_time=0.207, loss_ctc=41.366, loss_att=28.130, acc=0.823, loss=32.101, backward_time=0.236, grad_norm=36.613, clip=100.000, loss_scale=2.752e+33, optim_step_time=0.075, optim0_lr0=1.934e-04, train_time=0.917 +[ip-10-0-216-33:0/16] 2024-03-02 15:59:36,699 (trainer:762) INFO: 27epoch:train:11501-12000batch: iter_time=1.528e-04, forward_time=0.208, loss_ctc=44.216, loss_att=30.707, acc=0.815, loss=34.759, backward_time=0.231, grad_norm=40.094, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.932e-04, train_time=0.918 +[ip-10-0-216-33:0/16] 2024-03-02 16:07:09,453 (trainer:762) INFO: 27epoch:train:12001-12500batch: iter_time=1.558e-04, forward_time=0.209, loss_ctc=40.100, loss_att=27.590, acc=0.817, loss=31.343, backward_time=0.232, grad_norm=37.320, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.931e-04, train_time=0.905 +[ip-10-0-216-33:0/16] 2024-03-02 16:14:43,711 (trainer:762) INFO: 27epoch:train:12501-13000batch: iter_time=1.472e-04, forward_time=0.209, loss_ctc=43.342, loss_att=29.399, acc=0.818, loss=33.582, backward_time=0.231, grad_norm=37.640, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.930e-04, train_time=0.908 +[ip-10-0-216-33:0/16] 2024-03-02 16:22:03,768 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
+[ip-10-0-216-33:0/16] 2024-03-02 16:22:17,998 (trainer:762) INFO: 27epoch:train:13001-13500batch: iter_time=1.522e-04, forward_time=0.209, loss_ctc=40.210, loss_att=27.445, acc=0.825, loss=31.274, backward_time=0.230, grad_norm=36.009, clip=100.000, loss_scale=4.948e+33, optim_step_time=0.075, optim0_lr0=1.929e-04, train_time=0.908 +[ip-10-0-216-33:0/16] 2024-03-02 16:29:50,651 (trainer:762) INFO: 27epoch:train:13501-14000batch: iter_time=1.548e-04, forward_time=0.210, loss_ctc=39.398, loss_att=27.314, acc=0.828, loss=30.939, backward_time=0.231, grad_norm=35.230, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.928e-04, train_time=0.905 +[ip-10-0-216-33:0/16] 2024-03-02 16:37:24,742 (trainer:762) INFO: 27epoch:train:14001-14500batch: iter_time=1.526e-04, forward_time=0.209, loss_ctc=40.980, loss_att=29.646, acc=0.819, loss=33.046, backward_time=0.232, grad_norm=35.930, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.926e-04, train_time=0.908 +[ip-10-0-216-33:0/16] 2024-03-02 16:45:02,268 (trainer:762) INFO: 27epoch:train:14501-15000batch: iter_time=1.511e-04, forward_time=0.208, loss_ctc=42.378, loss_att=29.149, acc=0.819, loss=33.118, backward_time=0.227, grad_norm=39.063, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.925e-04, train_time=0.915 +[ip-10-0-216-33:0/16] 2024-03-02 16:59:21,887 (trainer:361) INFO: 27epoch results: [train] iter_time=0.088, forward_time=0.209, loss_ctc=41.836, loss_att=28.881, acc=0.820, loss=32.768, backward_time=0.233, grad_norm=37.548, clip=100.000, loss_scale=4.416e+33, optim_step_time=0.075, optim0_lr0=1.943e-04, train_time=0.999, time=4 hours, 10 minutes and 5.44 seconds, total_count=405000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=38.325, cer_ctc=0.210, loss_att=29.635, acc=0.746, cer=0.279, wer=0.997, loss=32.242, time=14 minutes and 3.86 seconds, total_count=57591, gpu_max_cached_mem_GB=36.516 +[ip-10-0-216-33:0/16] 2024-03-02 16:59:31,962 (trainer:416) INFO: The best model has been updated: valid.total_count +[ip-10-0-216-33:0/16] 2024-03-02 16:59:31,985 (trainer:290) INFO: 28/45epoch started. Estimated time to finish: 3 days, 7 hours and 33 minutes +[ip-10-0-216-33:0/16] 2024-03-02 16:59:31,992 (multiple_iter_factory:32) INFO: Building 0th iter-factory... 
+[ip-10-0-216-33:0/16] 2024-03-02 17:00:07,955 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-02 17:00:15,824 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-02 17:00:15,824 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1, +[ip-10-0-216-33:0/16] 2024-03-02 17:00:15,829 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-02 17:14:16,462 (trainer:762) INFO: 28epoch:train:1-500batch: iter_time=0.815, forward_time=0.210, loss_ctc=41.010, loss_att=28.048, acc=0.819, loss=31.937, backward_time=0.231, grad_norm=39.636, clip=100.000, loss_scale=2.679e+33, optim_step_time=0.075, optim0_lr0=1.924e-04, train_time=1.769 +[ip-10-0-216-33:0/16] 2024-03-02 17:21:57,966 (trainer:762) INFO: 28epoch:train:501-1000batch: iter_time=1.532e-04, forward_time=0.209, loss_ctc=42.447, loss_att=28.749, acc=0.819, loss=32.858, backward_time=0.233, grad_norm=39.133, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.923e-04, train_time=0.923 +[ip-10-0-216-33:0/16] 2024-03-02 17:29:35,058 (trainer:762) INFO: 28epoch:train:1001-1500batch: iter_time=1.564e-04, forward_time=0.209, loss_ctc=40.563, loss_att=28.451, acc=0.822, loss=32.085, backward_time=0.233, grad_norm=38.689, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.076, optim0_lr0=1.922e-04, train_time=0.914 +[ip-10-0-216-33:0/16] 2024-03-02 17:37:08,174 (trainer:762) INFO: 28epoch:train:1501-2000batch: iter_time=1.515e-04, forward_time=0.209, loss_ctc=40.257, loss_att=27.946, acc=0.819, loss=31.640, backward_time=0.235, grad_norm=37.286, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.921e-04, train_time=0.906 +[ip-10-0-216-33:0/16] 2024-03-02 17:44:46,106 (trainer:762) INFO: 28epoch:train:2001-2500batch: iter_time=1.506e-04, forward_time=0.209, loss_ctc=44.270, loss_att=30.216, acc=0.811, loss=34.432, backward_time=0.233, grad_norm=39.508, clip=100.000, loss_scale=5.358e+33, optim_step_time=0.075, optim0_lr0=1.919e-04, train_time=0.916 +[ip-10-0-216-33:0/16] 2024-03-02 17:48:00,331 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-02 17:51:30,985 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
+[ip-10-0-216-33:0/16] 2024-03-02 17:52:19,866 (trainer:762) INFO: 28epoch:train:2501-3000batch: iter_time=1.574e-04, forward_time=0.208, loss_ctc=41.356, loss_att=28.431, acc=0.817, loss=32.308, backward_time=0.230, grad_norm=38.191, clip=100.000, loss_scale=7.121e+33, optim_step_time=0.075, optim0_lr0=1.918e-04, train_time=0.907
+[ip-10-0-216-33:0/16] 2024-03-02 17:59:54,803 (trainer:762) INFO: 28epoch:train:3001-3500batch: iter_time=1.486e-04, forward_time=0.209, loss_ctc=40.606, loss_att=28.211, acc=0.816, loss=31.930, backward_time=0.232, grad_norm=38.288, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.917e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-03-02 18:07:32,896 (trainer:762) INFO: 28epoch:train:3501-4000batch: iter_time=1.497e-04, forward_time=0.208, loss_ctc=42.258, loss_att=29.132, acc=0.816, loss=33.069, backward_time=0.230, grad_norm=39.642, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.916e-04, train_time=0.916
+[ip-10-0-216-33:0/16] 2024-03-02 18:15:07,943 (trainer:762) INFO: 28epoch:train:4001-4500batch: iter_time=1.474e-04, forward_time=0.209, loss_ctc=42.033, loss_att=28.977, acc=0.822, loss=32.894, backward_time=0.237, grad_norm=39.158, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.915e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-03-02 18:22:44,537 (trainer:762) INFO: 28epoch:train:4501-5000batch: iter_time=1.473e-04, forward_time=0.210, loss_ctc=40.594, loss_att=28.007, acc=0.825, loss=31.783, backward_time=0.234, grad_norm=35.749, clip=100.000, loss_scale=2.877e+33, optim_step_time=0.075, optim0_lr0=1.914e-04, train_time=0.913
+[ip-10-0-216-33:0/16] 2024-03-02 18:22:49,171 (multiple_iter_factory:32) INFO: Building 1th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-02 18:23:25,724 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-02 18:23:33,695 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+  speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"}
+  text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"}
+  text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"}
+  text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"}
+  preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-02 18:23:33,695 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0,
+[ip-10-0-216-33:0/16] 2024-03-02 18:23:33,700 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-02 18:37:58,111 (trainer:762) INFO: 28epoch:train:5001-5500batch: iter_time=0.907, forward_time=0.210, loss_ctc=40.574, loss_att=27.889, acc=0.821, loss=31.694, backward_time=0.234, grad_norm=38.630, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.912e-04, train_time=1.827
+[ip-10-0-216-33:0/16] 2024-03-02 18:45:25,631 (trainer:762) INFO: 28epoch:train:5501-6000batch: iter_time=1.475e-04, forward_time=0.209, loss_ctc=42.180, loss_att=28.559, acc=0.820, loss=32.645, backward_time=0.232, grad_norm=39.034, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.911e-04, train_time=0.895
+[ip-10-0-216-33:0/16] 2024-03-02 18:52:54,639 (trainer:762) INFO: 28epoch:train:6001-6500batch: iter_time=1.472e-04, forward_time=0.210, loss_ctc=40.108, loss_att=28.072, acc=0.824, loss=31.683, backward_time=0.234, grad_norm=38.502, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.910e-04, train_time=0.898
+[ip-10-0-216-33:0/16] 2024-03-02 19:00:23,796 (trainer:762) INFO: 28epoch:train:6501-7000batch: iter_time=1.423e-04, forward_time=0.209, loss_ctc=39.958, loss_att=27.671, acc=0.820, loss=31.357, backward_time=0.229, grad_norm=37.414, clip=100.000, loss_scale=5.753e+33, optim_step_time=0.075, optim0_lr0=1.909e-04, train_time=0.898
+[ip-10-0-216-33:0/16] 2024-03-02 19:03:41,740 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-02 19:07:57,411 (trainer:762) INFO: 28epoch:train:7001-7500batch: iter_time=1.457e-04, forward_time=0.208, loss_ctc=44.022, loss_att=30.053, acc=0.812, loss=34.244, backward_time=0.233, grad_norm=39.863, clip=100.000, loss_scale=7.450e+33, optim_step_time=0.075, optim0_lr0=1.908e-04, train_time=0.907
+[ip-10-0-216-33:0/16] 2024-03-02 19:15:28,088 (trainer:762) INFO: 28epoch:train:7501-8000batch: iter_time=1.469e-04, forward_time=0.210, loss_ctc=41.047, loss_att=28.174, acc=0.819, loss=32.036, backward_time=0.237, grad_norm=38.304, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.907e-04, train_time=0.901
+[ip-10-0-216-33:0/16] 2024-03-02 19:22:58,369 (trainer:762) INFO: 28epoch:train:8001-8500batch: iter_time=1.453e-04, forward_time=0.209, loss_ctc=40.154, loss_att=27.981, acc=0.816, loss=31.633, backward_time=0.233, grad_norm=36.969, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.905e-04, train_time=0.900
+[ip-10-0-216-33:0/16] 2024-03-02 19:30:27,336 (trainer:762) INFO: 28epoch:train:8501-9000batch: iter_time=1.439e-04, forward_time=0.210, loss_ctc=42.132, loss_att=29.001, acc=0.817, loss=32.941, backward_time=0.231, grad_norm=39.239, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.904e-04, train_time=0.898
+[ip-10-0-216-33:0/16] 2024-03-02 19:37:57,311 (trainer:762) INFO: 28epoch:train:9001-9500batch: iter_time=1.434e-04, forward_time=0.209, loss_ctc=41.687, loss_att=28.791, acc=0.823, loss=32.660, backward_time=0.233, grad_norm=37.382, clip=100.000, loss_scale=8.121e+33, optim_step_time=0.075, optim0_lr0=1.903e-04, train_time=0.900
+[ip-10-0-216-33:0/16] 2024-03-02 19:40:33,368 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-02 19:45:26,943 (trainer:762) INFO: 28epoch:train:9501-10000batch: iter_time=1.421e-04, forward_time=0.210, loss_ctc=40.471, loss_att=28.000, acc=0.826, loss=31.741, backward_time=0.233, grad_norm=36.619, clip=100.000, loss_scale=6.972e+33, optim_step_time=0.075, optim0_lr0=1.902e-04, train_time=0.899
+[ip-10-0-216-33:0/16] 2024-03-02 19:45:31,724 (multiple_iter_factory:32) INFO: Building 2th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-02 19:46:08,833 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-02 19:46:16,852 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-02 19:46:16,852 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2, +[ip-10-0-216-33:0/16] 2024-03-02 19:46:16,856 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-02 20:00:44,645 (trainer:762) INFO: 28epoch:train:10001-10500batch: iter_time=0.883, forward_time=0.208, loss_ctc=40.403, loss_att=27.616, acc=0.822, loss=31.452, backward_time=0.233, grad_norm=38.118, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.901e-04, train_time=1.835 +[ip-10-0-216-33:0/16] 2024-03-02 20:08:24,185 (trainer:762) INFO: 28epoch:train:10501-11000batch: iter_time=1.480e-04, forward_time=0.207, loss_ctc=41.943, loss_att=28.497, acc=0.820, loss=32.531, backward_time=0.231, grad_norm=39.578, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.900e-04, train_time=0.919 +[ip-10-0-216-33:0/16] 2024-03-02 20:16:06,097 (trainer:762) INFO: 28epoch:train:11001-11500batch: iter_time=1.489e-04, forward_time=0.205, loss_ctc=40.069, loss_att=28.066, acc=0.824, loss=31.667, backward_time=0.227, grad_norm=39.250, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.899e-04, train_time=0.924 +[ip-10-0-216-33:0/16] 2024-03-02 20:23:43,572 (trainer:762) INFO: 28epoch:train:11501-12000batch: iter_time=1.534e-04, forward_time=0.206, loss_ctc=39.679, loss_att=27.641, acc=0.821, loss=31.253, backward_time=0.234, grad_norm=51.599, clip=100.000, loss_scale=8.598e+33, optim_step_time=0.075, optim0_lr0=1.897e-04, train_time=0.915 +[ip-10-0-216-33:0/16] 2024-03-02 20:30:40,852 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
+[ip-10-0-216-33:0/16] 2024-03-02 20:31:16,440 (trainer:762) INFO: 28epoch:train:12001-12500batch: iter_time=1.459e-04, forward_time=0.211, loss_ctc=43.932, loss_att=30.003, acc=0.813, loss=34.181, backward_time=0.237, grad_norm=38.954, clip=100.000, loss_scale=9.989e+33, optim_step_time=0.075, optim0_lr0=1.896e-04, train_time=0.905 +[ip-10-0-216-33:0/16] 2024-03-02 20:38:55,430 (trainer:762) INFO: 28epoch:train:12501-13000batch: iter_time=1.472e-04, forward_time=0.206, loss_ctc=40.507, loss_att=27.901, acc=0.820, loss=31.682, backward_time=0.233, grad_norm=37.499, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.076, optim0_lr0=1.895e-04, train_time=0.918 +[ip-10-0-216-33:0/16] 2024-03-02 20:46:33,535 (trainer:762) INFO: 28epoch:train:13001-13500batch: iter_time=1.464e-04, forward_time=0.208, loss_ctc=39.956, loss_att=27.822, acc=0.818, loss=31.462, backward_time=0.233, grad_norm=38.028, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.894e-04, train_time=0.916 +[ip-10-0-216-33:0/16] 2024-03-02 20:54:05,727 (trainer:762) INFO: 28epoch:train:13501-14000batch: iter_time=1.476e-04, forward_time=0.209, loss_ctc=41.893, loss_att=28.805, acc=0.818, loss=32.731, backward_time=0.233, grad_norm=37.900, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.893e-04, train_time=0.904 +[ip-10-0-216-33:0/16] 2024-03-02 21:01:37,594 (trainer:762) INFO: 28epoch:train:14001-14500batch: iter_time=1.463e-04, forward_time=0.210, loss_ctc=41.586, loss_att=28.754, acc=0.823, loss=32.604, backward_time=0.231, grad_norm=37.912, clip=100.000, loss_scale=5.587e+33, optim_step_time=0.076, optim0_lr0=1.892e-04, train_time=0.903 +[ip-10-0-216-33:0/16] 2024-03-02 21:04:07,295 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-02 21:09:12,444 (trainer:762) INFO: 28epoch:train:14501-15000batch: iter_time=1.467e-04, forward_time=0.209, loss_ctc=40.252, loss_att=27.920, acc=0.826, loss=31.620, backward_time=0.233, grad_norm=35.870, clip=100.000, loss_scale=6.878e+33, optim_step_time=0.076, optim0_lr0=1.891e-04, train_time=0.909 +[ip-10-0-216-33:0/16] 2024-03-02 21:23:25,328 (trainer:361) INFO: 28epoch results: [train] iter_time=0.087, forward_time=0.209, loss_ctc=41.265, loss_att=28.446, acc=0.820, loss=32.292, backward_time=0.233, grad_norm=38.732, clip=100.000, loss_scale=5.434e+33, optim_step_time=0.075, optim0_lr0=1.907e-04, train_time=0.998, time=4 hours, 9 minutes and 57.56 seconds, total_count=420000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=38.202, cer_ctc=0.207, loss_att=30.050, acc=0.759, cer=0.222, wer=0.989, loss=32.496, time=13 minutes and 55.47 seconds, total_count=59724, gpu_max_cached_mem_GB=36.516 +[ip-10-0-216-33:0/16] 2024-03-02 21:23:35,268 (trainer:416) INFO: The best model has been updated: valid.acc, valid.total_count +[ip-10-0-216-33:0/16] 2024-03-02 21:23:35,304 (trainer:470) INFO: The model files were removed: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/18epoch.pth, exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/23epoch.pth +[ip-10-0-216-33:0/16] 2024-03-02 21:23:35,305 (trainer:290) INFO: 29/45epoch started. Estimated time to finish: 3 days, 3 hours and 7 minutes +[ip-10-0-216-33:0/16] 2024-03-02 21:23:35,312 (multiple_iter_factory:32) INFO: Building 0th iter-factory... 
+[ip-10-0-216-33:0/16] 2024-03-02 21:24:11,252 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-02 21:24:19,092 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+  speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"}
+  text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"}
+  text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"}
+  text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"}
+  preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-02 21:24:19,093 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0,
+[ip-10-0-216-33:0/16] 2024-03-02 21:24:19,097 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-02 21:38:09,667 (trainer:762) INFO: 29epoch:train:1-500batch: iter_time=0.826, forward_time=0.209, loss_ctc=40.974, loss_att=28.423, acc=0.820, loss=32.188, backward_time=0.232, grad_norm=43.213, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.076, optim0_lr0=1.889e-04, train_time=1.748
+[ip-10-0-216-33:0/16] 2024-03-02 21:45:47,572 (trainer:762) INFO: 29epoch:train:501-1000batch: iter_time=1.500e-04, forward_time=0.209, loss_ctc=41.458, loss_att=28.301, acc=0.824, loss=32.248, backward_time=0.231, grad_norm=38.420, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.888e-04, train_time=0.916
+[ip-10-0-216-33:0/16] 2024-03-02 21:53:22,331 (trainer:762) INFO: 29epoch:train:1001-1500batch: iter_time=1.558e-04, forward_time=0.210, loss_ctc=43.009, loss_att=29.966, acc=0.820, loss=33.879, backward_time=0.232, grad_norm=40.358, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.887e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-03-02 21:56:50,327 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-02 22:00:57,296 (trainer:762) INFO: 29epoch:train:1501-2000batch: iter_time=1.490e-04, forward_time=0.209, loss_ctc=40.310, loss_att=27.663, acc=0.823, loss=31.457, backward_time=0.230, grad_norm=37.586, clip=100.000, loss_scale=5.858e+33, optim_step_time=0.075, optim0_lr0=1.886e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-03-02 22:08:32,638 (trainer:762) INFO: 29epoch:train:2001-2500batch: iter_time=1.471e-04, forward_time=0.210, loss_ctc=41.161, loss_att=29.108, acc=0.821, loss=32.724, backward_time=0.232, grad_norm=39.255, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.885e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-03-02 22:16:07,574 (trainer:762) INFO: 29epoch:train:2501-3000batch: iter_time=1.487e-04, forward_time=0.209, loss_ctc=41.001, loss_att=27.796, acc=0.819, loss=31.758, backward_time=0.235, grad_norm=38.371, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.884e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-03-02 22:18:19,514 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-02 22:23:42,169 (trainer:762) INFO: 29epoch:train:3001-3500batch: iter_time=1.495e-04, forward_time=0.209, loss_ctc=42.031, loss_att=28.616, acc=0.831, loss=32.640, backward_time=0.232, grad_norm=34.893, clip=100.000, loss_scale=3.345e+33, optim_step_time=0.075, optim0_lr0=1.883e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-03-02 22:31:16,747 (trainer:762) INFO: 29epoch:train:3501-4000batch: iter_time=1.482e-04, forward_time=0.209, loss_ctc=39.694, loss_att=27.773, acc=0.822, loss=31.350, backward_time=0.232, grad_norm=42.499, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.882e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-03-02 22:38:57,542 (trainer:762) INFO: 29epoch:train:4001-4500batch: iter_time=1.464e-04, forward_time=0.208, loss_ctc=40.521, loss_att=27.829, acc=0.827, loss=31.637, backward_time=0.237, grad_norm=37.836, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.881e-04, train_time=0.921
+[ip-10-0-216-33:0/16] 2024-03-02 22:46:36,020 (trainer:762) INFO: 29epoch:train:4501-5000batch: iter_time=1.491e-04, forward_time=0.208, loss_ctc=39.527, loss_att=27.274, acc=0.826, loss=30.950, backward_time=0.233, grad_norm=36.904, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.879e-04, train_time=0.917
+[ip-10-0-216-33:0/16] 2024-03-02 22:46:40,313 (multiple_iter_factory:32) INFO: Building 1th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-02 22:47:17,312 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-02 22:47:25,346 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+  speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"}
+  text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"}
+  text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"}
+  text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"}
+  preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-02 22:47:25,347 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2,
+[ip-10-0-216-33:0/16] 2024-03-02 22:47:25,351 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-02 22:56:04,638 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-02 23:01:47,524 (trainer:762) INFO: 29epoch:train:5001-5500batch: iter_time=0.898, forward_time=0.207, loss_ctc=40.649, loss_att=28.026, acc=0.822, loss=31.813, backward_time=0.235, grad_norm=38.983, clip=100.000, loss_scale=1.631e+33, optim_step_time=0.075, optim0_lr0=1.878e-04, train_time=1.823
+[ip-10-0-216-33:0/16] 2024-03-02 23:09:25,628 (trainer:762) INFO: 29epoch:train:5501-6000batch: iter_time=1.456e-04, forward_time=0.208, loss_ctc=41.222, loss_att=28.163, acc=0.824, loss=32.081, backward_time=0.235, grad_norm=39.282, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.877e-04, train_time=0.916
+[ip-10-0-216-33:0/16] 2024-03-02 23:16:59,621 (trainer:762) INFO: 29epoch:train:6001-6500batch: iter_time=1.480e-04, forward_time=0.207, loss_ctc=42.537, loss_att=29.615, acc=0.821, loss=33.492, backward_time=0.234, grad_norm=40.508, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.876e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-03-02 23:24:45,176 (trainer:762) INFO: 29epoch:train:6501-7000batch: iter_time=1.468e-04, forward_time=0.205, loss_ctc=40.122, loss_att=27.564, acc=0.824, loss=31.331, backward_time=0.236, grad_norm=37.654, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.875e-04, train_time=0.931
+[ip-10-0-216-33:0/16] 2024-03-02 23:32:22,580 (trainer:762) INFO: 29epoch:train:7001-7500batch: iter_time=1.474e-04, forward_time=0.207, loss_ctc=40.996, loss_att=28.898, acc=0.822, loss=32.527, backward_time=0.235, grad_norm=38.903, clip=100.000, loss_scale=2.261e+33, optim_step_time=0.075, optim0_lr0=1.874e-04, train_time=0.915
+[ip-10-0-216-33:0/16] 2024-03-02 23:40:00,894 (trainer:762) INFO: 29epoch:train:7501-8000batch: iter_time=1.454e-04, forward_time=0.207, loss_ctc=40.607, loss_att=27.589, acc=0.819, loss=31.494, backward_time=0.232, grad_norm=37.849, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.873e-04, train_time=0.916
+[ip-10-0-216-33:0/16] 2024-03-02 23:47:43,286 (trainer:762) INFO: 29epoch:train:8001-8500batch: iter_time=1.472e-04, forward_time=0.208, loss_ctc=41.710, loss_att=28.442, acc=0.832, loss=32.422, backward_time=0.230, grad_norm=35.289, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.872e-04, train_time=0.925
+[ip-10-0-216-33:0/16] 2024-03-02 23:55:22,500 (trainer:762) INFO: 29epoch:train:8501-9000batch: iter_time=1.529e-04, forward_time=0.206, loss_ctc=39.161, loss_att=27.539, acc=0.822, loss=31.026, backward_time=0.232, grad_norm=36.382, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.871e-04, train_time=0.918
+[ip-10-0-216-33:0/16] 2024-03-03 00:03:02,936 (trainer:762) INFO: 29epoch:train:9001-9500batch: iter_time=1.500e-04, forward_time=0.208, loss_ctc=40.175, loss_att=27.621, acc=0.828, loss=31.388, backward_time=0.229, grad_norm=37.610, clip=100.000, loss_scale=4.522e+33, optim_step_time=0.075, optim0_lr0=1.870e-04, train_time=0.921
+[ip-10-0-216-33:0/16] 2024-03-03 00:10:34,501 (trainer:762) INFO: 29epoch:train:9501-10000batch: iter_time=1.460e-04, forward_time=0.208, loss_ctc=39.213, loss_att=27.067, acc=0.828, loss=30.711, backward_time=0.231, grad_norm=36.409, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.868e-04, train_time=0.903
+[ip-10-0-216-33:0/16] 2024-03-03 00:10:39,116 (multiple_iter_factory:32) INFO: Building 2th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-03 00:11:16,395 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-03 00:11:24,637 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+  speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"}
+  text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"}
+  text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"}
+  text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"}
+  preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-03 00:11:24,638 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1,
+[ip-10-0-216-33:0/16] 2024-03-03 00:11:24,642 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-03 00:25:40,141 (trainer:762) INFO: 29epoch:train:10001-10500batch: iter_time=0.885, forward_time=0.210, loss_ctc=40.219, loss_att=27.736, acc=0.823, loss=31.481, backward_time=0.231, grad_norm=39.406, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.867e-04, train_time=1.811
+[ip-10-0-216-33:0/16] 2024-03-03 00:31:40,997 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-03 00:33:19,353 (trainer:762) INFO: 29epoch:train:10501-11000batch: iter_time=1.499e-04, forward_time=0.210, loss_ctc=41.129, loss_att=28.100, acc=0.825, loss=32.009, backward_time=0.228, grad_norm=37.372, clip=100.000, loss_scale=4.641e+33, optim_step_time=0.075, optim0_lr0=1.866e-04, train_time=0.918
+[ip-10-0-216-33:0/16] 2024-03-03 00:40:59,353 (trainer:762) INFO: 29epoch:train:11001-11500batch: iter_time=1.514e-04, forward_time=0.207, loss_ctc=42.686, loss_att=29.652, acc=0.821, loss=33.562, backward_time=0.235, grad_norm=40.965, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.865e-04, train_time=0.920
+[ip-10-0-216-33:0/16] 2024-03-03 00:48:38,280 (trainer:762) INFO: 29epoch:train:11501-12000batch: iter_time=1.502e-04, forward_time=0.209, loss_ctc=39.732, loss_att=27.309, acc=0.825, loss=31.036, backward_time=0.234, grad_norm=37.600, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.864e-04, train_time=0.918
+[ip-10-0-216-33:0/16] 2024-03-03 00:56:16,470 (trainer:762) INFO: 29epoch:train:12001-12500batch: iter_time=1.579e-04, forward_time=0.208, loss_ctc=41.125, loss_att=28.886, acc=0.822, loss=32.557, backward_time=0.234, grad_norm=40.483, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.863e-04, train_time=0.916
+[ip-10-0-216-33:0/16] 2024-03-03 01:03:55,722 (trainer:762) INFO: 29epoch:train:12501-13000batch: iter_time=1.534e-04, forward_time=0.206, loss_ctc=40.345, loss_att=27.467, acc=0.820, loss=31.330, backward_time=0.232, grad_norm=38.036, clip=100.000, loss_scale=3.147e+33, optim_step_time=0.075, optim0_lr0=1.862e-04, train_time=0.918
+[ip-10-0-216-33:0/16] 2024-03-03 01:11:38,138 (trainer:762) INFO: 29epoch:train:13001-13500batch: iter_time=1.518e-04, forward_time=0.209, loss_ctc=41.714, loss_att=28.391, acc=0.832, loss=32.388, backward_time=0.234, grad_norm=35.500, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.861e-04, train_time=0.925
+[ip-10-0-216-33:0/16] 2024-03-03 01:13:13,653 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-03 01:13:39,082 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-03 01:19:18,154 (trainer:762) INFO: 29epoch:train:13501-14000batch: iter_time=1.484e-04, forward_time=0.206, loss_ctc=39.168, loss_att=27.504, acc=0.822, loss=31.003, backward_time=0.233, grad_norm=38.452, clip=100.000, loss_scale=2.182e+33, optim_step_time=0.075, optim0_lr0=1.860e-04, train_time=0.920
+[ip-10-0-216-33:0/16] 2024-03-03 01:26:50,530 (trainer:762) INFO: 29epoch:train:14001-14500batch: iter_time=1.475e-04, forward_time=0.211, loss_ctc=40.002, loss_att=27.558, acc=0.828, loss=31.291, backward_time=0.231, grad_norm=37.301, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.859e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-03-03 01:34:21,570 (trainer:762) INFO: 29epoch:train:14501-15000batch: iter_time=1.469e-04, forward_time=0.209, loss_ctc=39.152, loss_att=27.052, acc=0.828, loss=30.682, backward_time=0.229, grad_norm=36.543, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.858e-04, train_time=0.902
+[ip-10-0-216-33:0/16] 2024-03-03 01:48:24,698 (trainer:361) INFO: 29epoch results: [train] iter_time=0.087, forward_time=0.208, loss_ctc=40.712, loss_att=28.098, acc=0.824, loss=31.882, backward_time=0.233, grad_norm=38.329, clip=100.000, loss_scale=3.299e+33, optim_step_time=0.075, optim0_lr0=1.873e-04, train_time=1.003, time=4 hours, 11 minutes and 0.99 seconds, total_count=435000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=38.279, cer_ctc=0.207, loss_att=29.394, acc=0.764, cer=0.216, wer=0.990, loss=32.059, time=13 minutes and 48.1 seconds, total_count=61857, gpu_max_cached_mem_GB=36.516
+[ip-10-0-216-33:0/16] 2024-03-03 01:48:34,651 (trainer:416) INFO: The best model has been updated: valid.acc, valid.total_count
+[ip-10-0-216-33:0/16] 2024-03-03 01:48:34,686 (trainer:470) INFO: The model files were removed: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/24epoch.pth
+[ip-10-0-216-33:0/16] 2024-03-03 01:48:34,687 (trainer:290) INFO: 30/45epoch started. Estimated time to finish: 2 days, 22 hours and 42 minutes
+[ip-10-0-216-33:0/16] 2024-03-03 01:48:34,694 (multiple_iter_factory:32) INFO: Building 0th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-03 01:49:10,398 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-03 01:49:18,177 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+  speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"}
+  text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"}
+  text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"}
+  text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"}
+  preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-03 01:49:18,177 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2,
+[ip-10-0-216-33:0/16] 2024-03-03 01:49:18,182 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-03 02:03:17,206 (trainer:762) INFO: 30epoch:train:1-500batch: iter_time=0.840, forward_time=0.210, loss_ctc=41.313, loss_att=28.340, acc=0.826, loss=32.232, backward_time=0.235, grad_norm=38.196, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.857e-04, train_time=1.765
+[ip-10-0-216-33:0/16] 2024-03-03 02:10:56,516 (trainer:762) INFO: 30epoch:train:501-1000batch: iter_time=1.529e-04, forward_time=0.208, loss_ctc=41.256, loss_att=28.139, acc=0.819, loss=32.074, backward_time=0.240, grad_norm=39.072, clip=100.000, loss_scale=2.251e+33, optim_step_time=0.075, optim0_lr0=1.856e-04, train_time=0.918
+[ip-10-0-216-33:0/16] 2024-03-03 02:18:30,786 (trainer:762) INFO: 30epoch:train:1001-1500batch: iter_time=1.506e-04, forward_time=0.209, loss_ctc=40.954, loss_att=28.134, acc=0.818, loss=31.980, backward_time=0.232, grad_norm=41.803, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.854e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-03-03 02:26:08,659 (trainer:762) INFO: 30epoch:train:1501-2000batch: iter_time=1.486e-04, forward_time=0.208, loss_ctc=38.754, loss_att=26.550, acc=0.824, loss=30.211, backward_time=0.231, grad_norm=38.183, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.853e-04, train_time=0.915
+[ip-10-0-216-33:0/16] 2024-03-03 02:33:50,310 (trainer:762) INFO: 30epoch:train:2001-2500batch: iter_time=1.532e-04, forward_time=0.207, loss_ctc=42.048, loss_att=28.539, acc=0.826, loss=32.592, backward_time=0.234, grad_norm=38.210, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.852e-04, train_time=0.923
+[ip-10-0-216-33:0/16] 2024-03-03 02:41:27,286 (trainer:762) INFO: 30epoch:train:2501-3000batch: iter_time=1.530e-04, forward_time=0.208, loss_ctc=40.212, loss_att=28.100, acc=0.817, loss=31.734, backward_time=0.235, grad_norm=38.410, clip=100.000, loss_scale=4.502e+33, optim_step_time=0.075, optim0_lr0=1.851e-04, train_time=0.914
+[ip-10-0-216-33:0/16] 2024-03-03 02:43:19,218 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-03 02:49:04,265 (trainer:762) INFO: 30epoch:train:3001-3500batch: iter_time=1.494e-04, forward_time=0.209, loss_ctc=41.643, loss_att=28.824, acc=0.821, loss=32.670, backward_time=0.237, grad_norm=38.326, clip=100.000, loss_scale=3.220e+33, optim_step_time=0.075, optim0_lr0=1.850e-04, train_time=0.914
+[ip-10-0-216-33:0/16] 2024-03-03 02:56:49,170 (trainer:762) INFO: 30epoch:train:3501-4000batch: iter_time=1.493e-04, forward_time=0.206, loss_ctc=42.214, loss_att=29.037, acc=0.826, loss=32.990, backward_time=0.230, grad_norm=38.155, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.849e-04, train_time=0.930
+[ip-10-0-216-33:0/16] 2024-03-03 03:04:28,718 (trainer:762) INFO: 30epoch:train:4001-4500batch: iter_time=1.518e-04, forward_time=0.209, loss_ctc=42.681, loss_att=29.105, acc=0.826, loss=33.178, backward_time=0.234, grad_norm=37.125, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.848e-04, train_time=0.919
+[ip-10-0-216-33:0/16] 2024-03-03 03:12:04,629 (trainer:762) INFO: 30epoch:train:4501-5000batch: iter_time=1.503e-04, forward_time=0.208, loss_ctc=42.075, loss_att=28.626, acc=0.820, loss=32.661, backward_time=0.233, grad_norm=39.715, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.847e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-03-03 03:12:10,027 (multiple_iter_factory:32) INFO: Building 1th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-03 03:12:46,380 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-03 03:12:54,355 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+  speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"}
+  text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"}
+  text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"}
+  text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"}
+  preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-03 03:12:54,355 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1,
+[ip-10-0-216-33:0/16] 2024-03-03 03:12:54,360 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-03 03:27:18,459 (trainer:762) INFO: 30epoch:train:5001-5500batch: iter_time=0.904, forward_time=0.208, loss_ctc=40.600, loss_att=27.932, acc=0.828, loss=31.732, backward_time=0.232, grad_norm=37.070, clip=100.000, loss_scale=4.564e+33, optim_step_time=0.075, optim0_lr0=1.846e-04, train_time=1.827
+[ip-10-0-216-33:0/16] 2024-03-03 03:34:52,032 (trainer:762) INFO: 30epoch:train:5501-6000batch: iter_time=1.442e-04, forward_time=0.207, loss_ctc=40.817, loss_att=27.860, acc=0.821, loss=31.747, backward_time=0.227, grad_norm=38.221, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.845e-04, train_time=0.907
+[ip-10-0-216-33:0/16] 2024-03-03 03:42:31,550 (trainer:762) INFO: 30epoch:train:6001-6500batch: iter_time=1.447e-04, forward_time=0.208, loss_ctc=40.479, loss_att=27.864, acc=0.820, loss=31.648, backward_time=0.240, grad_norm=39.147, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.844e-04, train_time=0.919
+[ip-10-0-216-33:0/16] 2024-03-03 03:50:05,915 (trainer:762) INFO: 30epoch:train:6501-7000batch: iter_time=1.441e-04, forward_time=0.206, loss_ctc=38.380, loss_att=26.473, acc=0.824, loss=30.045, backward_time=0.234, grad_norm=38.637, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.843e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-03-03 03:57:42,845 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-03 03:57:42,875 (trainer:762) INFO: 30epoch:train:7001-7500batch: iter_time=1.459e-04, forward_time=0.208, loss_ctc=41.606, loss_att=28.350, acc=0.826, loss=32.327, backward_time=0.235, grad_norm=36.766, clip=100.000, loss_scale=9.126e+33, optim_step_time=0.075, optim0_lr0=1.842e-04, train_time=0.914
+[ip-10-0-216-33:0/16] 2024-03-03 04:05:13,048 (trainer:762) INFO: 30epoch:train:7501-8000batch: iter_time=1.450e-04, forward_time=0.208, loss_ctc=40.141, loss_att=28.020, acc=0.818, loss=31.656, backward_time=0.237, grad_norm=37.561, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.841e-04, train_time=0.900
+[ip-10-0-216-33:0/16] 2024-03-03 04:12:48,238 (trainer:762) INFO: 30epoch:train:8001-8500batch: iter_time=1.483e-04, forward_time=0.210, loss_ctc=41.549, loss_att=28.787, acc=0.821, loss=32.615, backward_time=0.229, grad_norm=37.902, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.840e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-03-03 04:18:22,486 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-03 04:20:21,605 (trainer:762) INFO: 30epoch:train:8501-9000batch: iter_time=1.457e-04, forward_time=0.211, loss_ctc=41.699, loss_att=28.811, acc=0.827, loss=32.678, backward_time=0.233, grad_norm=37.298, clip=100.000, loss_scale=4.516e+33, optim_step_time=0.075, optim0_lr0=1.839e-04, train_time=0.906
+[ip-10-0-216-33:0/16] 2024-03-03 04:27:54,525 (trainer:762) INFO: 30epoch:train:9001-9500batch: iter_time=1.440e-04, forward_time=0.209, loss_ctc=42.344, loss_att=28.830, acc=0.827, loss=32.885, backward_time=0.231, grad_norm=36.375, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.838e-04, train_time=0.906
+[ip-10-0-216-33:0/16] 2024-03-03 04:31:46,580 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-03 04:35:35,865 (trainer:762) INFO: 30epoch:train:9501-10000batch: iter_time=1.423e-04, forward_time=0.209, loss_ctc=41.706, loss_att=28.370, acc=0.822, loss=32.371, backward_time=0.239, grad_norm=39.958, clip=100.000, loss_scale=1.956e+33, optim_step_time=0.075, optim0_lr0=1.837e-04, train_time=0.922
+[ip-10-0-216-33:0/16] 2024-03-03 04:35:41,013 (multiple_iter_factory:32) INFO: Building 2th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-03 04:36:18,199 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-03 04:36:26,161 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+  speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"}
+  text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"}
+  text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"}
+  text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"}
+  preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-03 04:36:26,161 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0,
+[ip-10-0-216-33:0/16] 2024-03-03 04:36:26,166 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-03 04:51:14,901 (trainer:762) INFO: 30epoch:train:10001-10500batch: iter_time=0.885, forward_time=0.211, loss_ctc=40.596, loss_att=27.862, acc=0.828, loss=31.682, backward_time=0.236, grad_norm=36.706, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.836e-04, train_time=1.878
+[ip-10-0-216-33:0/16] 2024-03-03 04:58:50,792 (trainer:762) INFO: 30epoch:train:10501-11000batch: iter_time=1.462e-04, forward_time=0.207, loss_ctc=40.700, loss_att=27.769, acc=0.822, loss=31.649, backward_time=0.238, grad_norm=38.218, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.835e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-03-03 05:06:23,474 (trainer:762) INFO: 30epoch:train:11001-11500batch: iter_time=1.472e-04, forward_time=0.209, loss_ctc=40.511, loss_att=27.944, acc=0.819, loss=31.714, backward_time=0.234, grad_norm=39.051, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.834e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-03-03 05:13:59,188 (trainer:762) INFO: 30epoch:train:11501-12000batch: iter_time=1.490e-04, forward_time=0.206, loss_ctc=38.356, loss_att=26.418, acc=0.824, loss=30.000, backward_time=0.236, grad_norm=38.283, clip=100.000, loss_scale=1.937e+33, optim_step_time=0.075, optim0_lr0=1.833e-04, train_time=0.911
+[ip-10-0-216-33:0/16] 2024-03-03 05:21:35,492 (trainer:762) INFO: 30epoch:train:12001-12500batch: iter_time=1.478e-04, forward_time=0.209, loss_ctc=41.527, loss_att=28.303, acc=0.827, loss=32.270, backward_time=0.226, grad_norm=37.355, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.832e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-03-03 05:29:11,466 (trainer:762) INFO: 30epoch:train:12501-13000batch: iter_time=1.483e-04, forward_time=0.208, loss_ctc=39.872, loss_att=27.901, acc=0.818, loss=31.492, backward_time=0.235, grad_norm=52.096, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.076, optim0_lr0=1.831e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-03-03 05:36:45,983 (trainer:762) INFO: 30epoch:train:13001-13500batch: iter_time=1.504e-04, forward_time=0.208, loss_ctc=41.480, loss_att=28.737, acc=0.821, loss=32.560, backward_time=0.238, grad_norm=39.214, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.829e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-03-03 05:44:23,314 (trainer:762) INFO: 30epoch:train:13501-14000batch: iter_time=1.469e-04, forward_time=0.210, loss_ctc=41.505, loss_att=28.654, acc=0.828, loss=32.509, backward_time=0.235, grad_norm=37.506, clip=100.000, loss_scale=3.873e+33, optim_step_time=0.075, optim0_lr0=1.828e-04, train_time=0.914
+[ip-10-0-216-33:0/16] 2024-03-03 05:51:54,841 (trainer:762) INFO: 30epoch:train:14001-14500batch: iter_time=1.485e-04, forward_time=0.209, loss_ctc=42.279, loss_att=28.886, acc=0.827, loss=32.904, backward_time=0.239, grad_norm=37.177, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.827e-04, train_time=0.903
+[ip-10-0-216-33:0/16] 2024-03-03 05:56:00,071 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-03 05:59:28,687 (trainer:762) INFO: 30epoch:train:14501-15000batch: iter_time=1.456e-04, forward_time=0.210, loss_ctc=41.300, loss_att=28.260, acc=0.823, loss=32.172, backward_time=0.231, grad_norm=39.646, clip=100.000, loss_scale=3.990e+33, optim_step_time=0.075, optim0_lr0=1.826e-04, train_time=0.907
+[ip-10-0-216-33:0/16] 2024-03-03 06:13:56,928 (trainer:361) INFO: 30epoch results: [train] iter_time=0.088, forward_time=0.209, loss_ctc=41.020, loss_att=28.181, acc=0.823, loss=32.033, backward_time=0.234, grad_norm=38.713, clip=100.000, loss_scale=3.408e+33, optim_step_time=0.075, optim0_lr0=1.841e-04, train_time=1.003, time=4 hours, 11 minutes and 11.24 seconds, total_count=450000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=37.104, cer_ctc=0.203, loss_att=28.175, acc=0.768, cer=0.234, wer=0.989, loss=30.854, time=14 minutes and 10.66 seconds, total_count=63990, gpu_max_cached_mem_GB=36.516
+[ip-10-0-216-33:0/16] 2024-03-03 06:14:06,744 (trainer:416) INFO: The best model has been updated: valid.acc, valid.total_count
+[ip-10-0-216-33:0/16] 2024-03-03 06:14:06,793 (trainer:470) INFO: The model files were removed: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/25epoch.pth
+[ip-10-0-216-33:0/16] 2024-03-03 06:14:06,794 (trainer:290) INFO: 31/45epoch started. Estimated time to finish: 2 days, 18 hours and 17 minutes
+[ip-10-0-216-33:0/16] 2024-03-03 06:14:06,802 (multiple_iter_factory:32) INFO: Building 0th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-03 06:14:42,717 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-03 06:14:50,521 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+  speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"}
+  text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"}
+  text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"}
+  text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"}
+  preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-03 06:14:50,522 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1,
+[ip-10-0-216-33:0/16] 2024-03-03 06:14:50,526 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-03 06:28:56,913 (trainer:762) INFO: 31epoch:train:1-500batch: iter_time=0.845, forward_time=0.208, loss_ctc=42.269, loss_att=28.605, acc=0.820, loss=32.704, backward_time=0.232, grad_norm=39.446, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.825e-04, train_time=1.780
+[ip-10-0-216-33:0/16] 2024-03-03 06:36:39,722 (trainer:762) INFO: 31epoch:train:501-1000batch: iter_time=1.477e-04, forward_time=0.209, loss_ctc=42.524, loss_att=28.792, acc=0.823, loss=32.911, backward_time=0.240, grad_norm=38.575, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.824e-04, train_time=0.925
+[ip-10-0-216-33:0/16] 2024-03-03 06:44:15,604 (trainer:762) INFO: 31epoch:train:1001-1500batch: iter_time=1.466e-04, forward_time=0.209, loss_ctc=40.872, loss_att=28.563, acc=0.822, loss=32.256, backward_time=0.233, grad_norm=40.638, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.823e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-03-03 06:51:48,617 (trainer:762) INFO: 31epoch:train:1501-2000batch: iter_time=1.459e-04, forward_time=0.210, loss_ctc=43.299, loss_att=29.689, acc=0.823, loss=33.772, backward_time=0.229, grad_norm=57.345, clip=100.000, loss_scale=3.796e+33, optim_step_time=0.075, optim0_lr0=1.822e-04, train_time=0.906
+[ip-10-0-216-33:0/16] 2024-03-03 06:52:02,079 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-03 06:59:28,190 (trainer:762) INFO: 31epoch:train:2001-2500batch: iter_time=1.463e-04, forward_time=0.208, loss_ctc=41.613, loss_att=28.451, acc=0.823, loss=32.400, backward_time=0.238, grad_norm=41.343, clip=100.000, loss_scale=2.669e+33, optim_step_time=0.075, optim0_lr0=1.821e-04, train_time=0.919
+[ip-10-0-216-33:0/16] 2024-03-03 07:07:10,678 (trainer:762) INFO: 31epoch:train:2501-3000batch: iter_time=1.444e-04, forward_time=0.207, loss_ctc=41.194, loss_att=27.845, acc=0.826, loss=31.850, backward_time=0.235, grad_norm=38.151, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.820e-04, train_time=0.925
+[ip-10-0-216-33:0/16] 2024-03-03 07:14:48,052 (trainer:762) INFO: 31epoch:train:3001-3500batch: iter_time=1.481e-04, forward_time=0.208, loss_ctc=41.509, loss_att=27.986, acc=0.825, loss=32.042, backward_time=0.233, grad_norm=39.179, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.819e-04, train_time=0.914
+[ip-10-0-216-33:0/16] 2024-03-03 07:22:26,010 (trainer:762) INFO: 31epoch:train:3501-4000batch: iter_time=1.466e-04, forward_time=0.208, loss_ctc=40.262, loss_att=27.868, acc=0.826, loss=31.586, backward_time=0.233, grad_norm=37.011, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.818e-04, train_time=0.916
+[ip-10-0-216-33:0/16] 2024-03-03 07:30:04,325 (trainer:762) INFO: 31epoch:train:4001-4500batch: iter_time=1.450e-04, forward_time=0.207, loss_ctc=39.813, loss_att=27.888, acc=0.819, loss=31.466, backward_time=0.232, grad_norm=39.174, clip=100.000, loss_scale=5.114e+33, optim_step_time=0.075, optim0_lr0=1.817e-04, train_time=0.916
+[ip-10-0-216-33:0/16] 2024-03-03 07:37:42,331 (trainer:762) INFO: 31epoch:train:4501-5000batch: iter_time=1.475e-04, forward_time=0.208, loss_ctc=39.743, loss_att=27.032, acc=0.823, loss=30.845, backward_time=0.240, grad_norm=37.544, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.816e-04, train_time=0.916
+[ip-10-0-216-33:0/16] 2024-03-03 07:37:46,937 (multiple_iter_factory:32) INFO: Building 1th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-03 07:38:23,709 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-03 07:38:31,734 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+  speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"}
+  text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"}
+  text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"}
+  text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"}
+  preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-03 07:38:31,735 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0,
+[ip-10-0-216-33:0/16] 2024-03-03 07:38:31,740 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-03 07:52:01,289 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-03 07:53:00,344 (trainer:762) INFO: 31epoch:train:5001-5500batch: iter_time=0.890, forward_time=0.207, loss_ctc=41.992, loss_att=28.857, acc=0.820, loss=32.798, backward_time=0.236, grad_norm=40.332, clip=100.000, loss_scale=4.854e+33, optim_step_time=0.075, optim0_lr0=1.815e-04, train_time=1.836
+[ip-10-0-216-33:0/16] 2024-03-03 08:00:40,626 (trainer:762) INFO: 31epoch:train:5501-6000batch: iter_time=1.461e-04, forward_time=0.207, loss_ctc=42.007, loss_att=28.562, acc=0.824, loss=32.595, backward_time=0.235, grad_norm=39.122, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.814e-04, train_time=0.920
+[ip-10-0-216-33:0/16] 2024-03-03 08:08:09,936 (trainer:762) INFO: 31epoch:train:6001-6500batch: iter_time=1.491e-04, forward_time=0.209, loss_ctc=40.649, loss_att=28.421, acc=0.823, loss=32.089, backward_time=0.232, grad_norm=40.138, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.813e-04, train_time=0.898
+[ip-10-0-216-33:0/16] 2024-03-03 08:15:50,522 (trainer:762) INFO: 31epoch:train:6501-7000batch: iter_time=1.441e-04, forward_time=0.208, loss_ctc=43.118, loss_att=29.529, acc=0.825, loss=33.606, backward_time=0.233, grad_norm=40.858, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.812e-04, train_time=0.921
+[ip-10-0-216-33:0/16] 2024-03-03 08:23:21,486 (trainer:762) INFO: 31epoch:train:7001-7500batch: iter_time=1.454e-04, forward_time=0.208, loss_ctc=41.495, loss_att=28.448, acc=0.823, loss=32.362, backward_time=0.231, grad_norm=39.681, clip=100.000, loss_scale=2.934e+33, optim_step_time=0.075, optim0_lr0=1.811e-04, train_time=0.902
+[ip-10-0-216-33:0/16] 2024-03-03 08:28:02,671 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-03 08:31:00,436 (trainer:762) INFO: 31epoch:train:7501-8000batch: iter_time=1.482e-04, forward_time=0.209, loss_ctc=41.003, loss_att=27.754, acc=0.827, loss=31.729, backward_time=0.238, grad_norm=37.941, clip=100.000, loss_scale=4.167e+33, optim_step_time=0.075, optim0_lr0=1.810e-04, train_time=0.918
+[ip-10-0-216-33:0/16] 2024-03-03 08:38:32,864 (trainer:762) INFO: 31epoch:train:8001-8500batch: iter_time=1.453e-04, forward_time=0.207, loss_ctc=40.954, loss_att=27.688, acc=0.826, loss=31.668, backward_time=0.232, grad_norm=39.204, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.809e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-03-03 08:46:06,807 (trainer:762) INFO: 31epoch:train:8501-9000batch: iter_time=1.439e-04, forward_time=0.208, loss_ctc=40.197, loss_att=27.861, acc=0.826, loss=31.561, backward_time=0.237, grad_norm=37.131, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.808e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-03-03 08:53:44,180 (trainer:762) INFO: 31epoch:train:9001-9500batch: iter_time=1.439e-04, forward_time=0.207, loss_ctc=39.771, loss_att=27.835, acc=0.819, loss=31.416, backward_time=0.233, grad_norm=40.043, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.807e-04, train_time=0.915
+[ip-10-0-216-33:0/16] 2024-03-03 08:59:00,218 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-03 09:01:16,990 (trainer:762) INFO: 31epoch:train:9501-10000batch: iter_time=1.428e-04, forward_time=0.208, loss_ctc=39.423, loss_att=26.780, acc=0.824, loss=30.573, backward_time=0.232, grad_norm=37.628, clip=100.000, loss_scale=2.841e+33, optim_step_time=0.075, optim0_lr0=1.806e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-03-03 09:01:21,436 (multiple_iter_factory:32) INFO: Building 2th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-03 09:01:58,734 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-03 09:02:06,745 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+  speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"}
+  text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"}
+  text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"}
+  text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"}
+  preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-03 09:02:06,745 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2,
+[ip-10-0-216-33:0/16] 2024-03-03 09:02:06,750 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-03 09:16:29,909 (trainer:762) INFO: 31epoch:train:10001-10500batch: iter_time=0.904, forward_time=0.208, loss_ctc=41.689, loss_att=28.658, acc=0.821, loss=32.567, backward_time=0.232, grad_norm=41.050, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.805e-04, train_time=1.826
+[ip-10-0-216-33:0/16] 2024-03-03 09:24:13,338 (trainer:762) INFO: 31epoch:train:10501-11000batch: iter_time=1.475e-04, forward_time=0.208, loss_ctc=42.072, loss_att=28.620, acc=0.824, loss=32.655, backward_time=0.231, grad_norm=38.183, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.805e-04, train_time=0.927
+[ip-10-0-216-33:0/16] 2024-03-03 09:31:49,395 (trainer:762) INFO: 31epoch:train:11001-11500batch: iter_time=1.480e-04, forward_time=0.209, loss_ctc=40.724, loss_att=28.503, acc=0.823, loss=32.169, backward_time=0.231, grad_norm=41.403, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.804e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-03-03 09:39:12,637 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-03 09:39:29,992 (trainer:762) INFO: 31epoch:train:11501-12000batch: iter_time=1.444e-04, forward_time=0.208, loss_ctc=42.645, loss_att=29.309, acc=0.825, loss=33.310, backward_time=0.232, grad_norm=63.531, clip=100.000, loss_scale=3.267e+33, optim_step_time=0.075, optim0_lr0=1.803e-04, train_time=0.921
+[ip-10-0-216-33:0/16] 2024-03-03 09:47:08,690 (trainer:762) INFO: 31epoch:train:12001-12500batch: iter_time=1.459e-04, forward_time=0.209, loss_ctc=41.121, loss_att=28.245, acc=0.824, loss=32.107, backward_time=0.235, grad_norm=40.139, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.802e-04, train_time=0.917
+[ip-10-0-216-33:0/16] 2024-03-03 09:54:44,707 (trainer:762) INFO: 31epoch:train:12501-13000batch: iter_time=1.502e-04, forward_time=0.209, loss_ctc=40.833, loss_att=27.606, acc=0.828, loss=31.574, backward_time=0.237, grad_norm=37.213, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.076, optim0_lr0=1.801e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-03-03 10:02:17,027 (trainer:762) INFO: 31epoch:train:13001-13500batch: iter_time=1.505e-04, forward_time=0.210, loss_ctc=40.940, loss_att=27.658, acc=0.826, loss=31.642, backward_time=0.238, grad_norm=38.486, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.800e-04, train_time=0.904
+[ip-10-0-216-33:0/16] 2024-03-03 10:09:50,634 (trainer:762) INFO: 31epoch:train:13501-14000batch: iter_time=1.477e-04, forward_time=0.209, loss_ctc=39.922, loss_att=27.685, acc=0.827, loss=31.356, backward_time=0.235, grad_norm=36.730, clip=100.000, loss_scale=2.695e+33, optim_step_time=0.075, optim0_lr0=1.799e-04, train_time=0.907
+[ip-10-0-216-33:0/16] 2024-03-03 10:12:13,173 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-03 10:17:24,068 (trainer:762) INFO: 31epoch:train:14001-14500batch: iter_time=1.445e-04, forward_time=0.210, loss_ctc=39.287, loss_att=27.684, acc=0.820, loss=31.165, backward_time=0.238, grad_norm=39.681, clip=100.000, loss_scale=3.403e+33, optim_step_time=0.075, optim0_lr0=1.798e-04, train_time=0.907
+[ip-10-0-216-33:0/16] 2024-03-03 10:24:54,986 (trainer:762) INFO: 31epoch:train:14501-15000batch: iter_time=1.498e-04, forward_time=0.209, loss_ctc=39.494, loss_att=26.941, acc=0.823, loss=30.707, backward_time=0.231, grad_norm=37.742, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.797e-04, train_time=0.902
+[ip-10-0-216-33:0/16] 2024-03-03 10:39:01,975 (trainer:361) INFO: 31epoch results: [train] iter_time=0.088, forward_time=0.208, loss_ctc=41.081, loss_att=28.179, acc=0.824, loss=32.049, backward_time=0.234, grad_norm=40.487, clip=100.000, loss_scale=3.008e+33, optim_step_time=0.075, optim0_lr0=1.811e-04, train_time=1.003, time=4 hours, 11 minutes and 2.64 seconds, total_count=465000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=37.380, cer_ctc=0.204, loss_att=29.390, acc=0.762, cer=0.228, wer=0.990, loss=31.787, time=13 minutes and 52.22 seconds, total_count=66123, gpu_max_cached_mem_GB=36.516
+[ip-10-0-216-33:0/16] 2024-03-03 10:39:11,741 (trainer:416) INFO: The best model has been updated: valid.total_count
+[ip-10-0-216-33:0/16] 2024-03-03 10:39:11,791 (trainer:470) INFO: The model files were removed: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/19epoch.pth, exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/26epoch.pth
+[ip-10-0-216-33:0/16] 2024-03-03 10:39:11,791 (trainer:290) INFO: 32/45epoch started. Estimated time to finish: 2 days, 13 hours and 52 minutes
+[ip-10-0-216-33:0/16] 2024-03-03 10:39:11,798 (multiple_iter_factory:32) INFO: Building 0th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-03 10:39:47,827 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-03 10:39:56,124 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+  speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"}
+  text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"}
+  text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"}
+  text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"}
+  preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-03 10:39:56,125 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0,
+[ip-10-0-216-33:0/16] 2024-03-03 10:39:56,129 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-03 10:53:54,565 (trainer:762) INFO: 32epoch:train:1-500batch: iter_time=0.835, forward_time=0.210, loss_ctc=43.046, loss_att=29.726, acc=0.821, loss=33.722, backward_time=0.234, grad_norm=42.162, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.796e-04, train_time=1.765
+[ip-10-0-216-33:0/16] 2024-03-03 11:01:37,546 (trainer:762) INFO: 32epoch:train:501-1000batch: iter_time=1.461e-04, forward_time=0.206, loss_ctc=40.153, loss_att=27.299, acc=0.826, loss=31.156, backward_time=0.230, grad_norm=39.054, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.795e-04, train_time=0.926
+[ip-10-0-216-33:0/16] 2024-03-03 11:06:41,325 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-03 11:09:16,965 (trainer:762) INFO: 32epoch:train:1001-1500batch: iter_time=1.481e-04, forward_time=0.209, loss_ctc=41.503, loss_att=28.397, acc=0.820, loss=32.329, backward_time=0.230, grad_norm=38.902, clip=100.000, loss_scale=3.496e+33, optim_step_time=0.075, optim0_lr0=1.794e-04, train_time=0.919
+[ip-10-0-216-33:0/16] 2024-03-03 11:16:53,989 (trainer:762) INFO: 32epoch:train:1501-2000batch: iter_time=1.480e-04, forward_time=0.210, loss_ctc=41.385, loss_att=28.354, acc=0.827, loss=32.264, backward_time=0.230, grad_norm=39.036, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.793e-04, train_time=0.914
+[ip-10-0-216-33:0/16] 2024-03-03 11:24:31,549 (trainer:762) INFO: 32epoch:train:2001-2500batch: iter_time=1.474e-04, forward_time=0.207, loss_ctc=39.289, loss_att=26.751, acc=0.823, loss=30.512, backward_time=0.237, grad_norm=40.407, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.792e-04, train_time=0.915
+[ip-10-0-216-33:0/16] 2024-03-03 11:32:10,486 (trainer:762) INFO: 32epoch:train:2501-3000batch: iter_time=1.493e-04, forward_time=0.209, loss_ctc=40.938, loss_att=28.444, acc=0.826, loss=32.192, backward_time=0.234, grad_norm=38.480, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.791e-04, train_time=0.918
+[ip-10-0-216-33:0/16] 2024-03-03 11:39:21,179 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-03 11:39:51,326 (trainer:762) INFO: 32epoch:train:3001-3500batch: iter_time=1.475e-04, forward_time=0.209, loss_ctc=39.596, loss_att=27.670, acc=0.826, loss=31.248, backward_time=0.235, grad_norm=39.719, clip=100.000, loss_scale=3.309e+33, optim_step_time=0.075, optim0_lr0=1.790e-04, train_time=0.921
+[ip-10-0-216-33:0/16] 2024-03-03 11:47:26,913 (trainer:762) INFO: 32epoch:train:3501-4000batch: iter_time=1.508e-04, forward_time=0.210, loss_ctc=43.119, loss_att=29.599, acc=0.828, loss=33.655, backward_time=0.232, grad_norm=39.290, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.789e-04, train_time=0.911
+[ip-10-0-216-33:0/16] 2024-03-03 11:51:40,436 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-03 11:55:06,588 (trainer:762) INFO: 32epoch:train:4001-4500batch: iter_time=1.519e-04, forward_time=0.209, loss_ctc=42.989, loss_att=29.666, acc=0.825, loss=33.663, backward_time=0.234, grad_norm=41.599, clip=100.000, loss_scale=2.008e+33, optim_step_time=0.075, optim0_lr0=1.788e-04, train_time=0.919
+[ip-10-0-216-33:0/16] 2024-03-03 12:02:45,512 (trainer:762) INFO: 32epoch:train:4501-5000batch: iter_time=1.476e-04, forward_time=0.209, loss_ctc=39.931, loss_att=27.283, acc=0.825, loss=31.077, backward_time=0.232, grad_norm=40.842, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.787e-04, train_time=0.918
+[ip-10-0-216-33:0/16] 2024-03-03 12:02:50,553 (multiple_iter_factory:32) INFO: Building 1th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-03 12:03:26,820 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-03 12:03:35,464 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+  speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"}
+  text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"}
+  text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"}
+  text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"}
+  preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-03 12:03:35,465 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1,
+[ip-10-0-216-33:0/16] 2024-03-03 12:03:35,469 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-03 12:17:58,321 (trainer:762) INFO: 32epoch:train:5001-5500batch: iter_time=0.886, forward_time=0.207, loss_ctc=42.712, loss_att=29.325, acc=0.822, loss=33.341, backward_time=0.230, grad_norm=41.867, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.786e-04, train_time=1.825
+[ip-10-0-216-33:0/16] 2024-03-03 12:25:37,480 (trainer:762) INFO: 32epoch:train:5501-6000batch: iter_time=1.480e-04, forward_time=0.210, loss_ctc=40.032, loss_att=27.244, acc=0.826, loss=31.081, backward_time=0.237, grad_norm=38.595, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.785e-04, train_time=0.918
+[ip-10-0-216-33:0/16] 2024-03-03 12:31:33,431 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-03 12:33:10,116 (trainer:762) INFO: 32epoch:train:6001-6500batch: iter_time=1.485e-04, forward_time=0.209, loss_ctc=40.781, loss_att=28.041, acc=0.821, loss=31.863, backward_time=0.235, grad_norm=38.413, clip=100.000, loss_scale=1.608e+33, optim_step_time=0.075, optim0_lr0=1.784e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-03-03 12:40:50,954 (trainer:762) INFO: 32epoch:train:6501-7000batch: iter_time=1.465e-04, forward_time=0.207, loss_ctc=40.784, loss_att=27.995, acc=0.829, loss=31.832, backward_time=0.233, grad_norm=37.484, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.783e-04, train_time=0.921
+[ip-10-0-216-33:0/16] 2024-03-03 12:48:27,764 (trainer:762) INFO: 32epoch:train:7001-7500batch: iter_time=1.466e-04, forward_time=0.208, loss_ctc=39.155, loss_att=26.679, acc=0.823, loss=30.422, backward_time=0.232, grad_norm=40.399, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.782e-04, train_time=0.913
+[ip-10-0-216-33:0/16] 2024-03-03 12:56:10,348 (trainer:762) INFO: 32epoch:train:7501-8000batch: iter_time=1.490e-04, forward_time=0.206, loss_ctc=40.611, loss_att=28.273, acc=0.827, loss=31.974, backward_time=0.235, grad_norm=37.705, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.781e-04, train_time=0.925
+[ip-10-0-216-33:0/16] 2024-03-03 13:03:49,671 (trainer:762) INFO: 32epoch:train:8001-8500batch: iter_time=1.443e-04, forward_time=0.209, loss_ctc=39.202, loss_att=27.502, acc=0.827, loss=31.012, backward_time=0.232, grad_norm=39.641, clip=100.000, loss_scale=1.573e+33, optim_step_time=0.075, optim0_lr0=1.781e-04, train_time=0.918
+[ip-10-0-216-33:0/16] 2024-03-03 13:06:56,009 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-03 13:11:21,724 (trainer:762) INFO: 32epoch:train:8501-9000batch: iter_time=1.508e-04, forward_time=0.209, loss_ctc=42.714, loss_att=29.436, acc=0.828, loss=33.419, backward_time=0.234, grad_norm=39.996, clip=100.000, loss_scale=1.831e+33, optim_step_time=0.075, optim0_lr0=1.780e-04, train_time=0.904
+[ip-10-0-216-33:0/16] 2024-03-03 13:18:57,089 (trainer:762) INFO: 32epoch:train:9001-9500batch: iter_time=1.490e-04, forward_time=0.210, loss_ctc=42.577, loss_att=29.470, acc=0.825, loss=33.402, backward_time=0.230, grad_norm=40.813, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.076, optim0_lr0=1.779e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-03-03 13:26:28,155 (trainer:762) INFO: 32epoch:train:9501-10000batch: iter_time=1.430e-04, forward_time=0.209, loss_ctc=39.624, loss_att=27.019, acc=0.826, loss=30.800, backward_time=0.231, grad_norm=38.215, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.778e-04, train_time=0.902
+[ip-10-0-216-33:0/16] 2024-03-03 13:26:34,382 (multiple_iter_factory:32) INFO: Building 2th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-03 13:27:11,764 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-03 13:27:20,891 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+  speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"}
+  text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"}
+  text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"}
+  text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"}
+  preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-03 13:27:20,891 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2,
+[ip-10-0-216-33:0/16] 2024-03-03 13:27:20,895 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-03 13:41:41,765 (trainer:762) INFO: 32epoch:train:10001-10500batch: iter_time=0.879, forward_time=0.210, loss_ctc=42.614, loss_att=29.274, acc=0.823, loss=33.276, backward_time=0.233, grad_norm=41.873, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.777e-04, train_time=1.827
+[ip-10-0-216-33:0/16] 2024-03-03 13:49:16,434 (trainer:762) INFO: 32epoch:train:10501-11000batch: iter_time=1.472e-04, forward_time=0.210, loss_ctc=39.900, loss_att=27.183, acc=0.826, loss=30.998, backward_time=0.234, grad_norm=39.653, clip=100.000, loss_scale=2.061e+33, optim_step_time=0.075, optim0_lr0=1.776e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-03-03 13:56:53,497 (trainer:762) INFO: 32epoch:train:11001-11500batch: iter_time=1.475e-04, forward_time=0.208, loss_ctc=40.802, loss_att=28.070, acc=0.822, loss=31.890, backward_time=0.233, grad_norm=38.655, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.775e-04, train_time=0.914
+[ip-10-0-216-33:0/16] 2024-03-03 14:04:29,364 (trainer:762) INFO: 32epoch:train:11501-12000batch: iter_time=1.463e-04, forward_time=0.209, loss_ctc=40.576, loss_att=27.884, acc=0.829, loss=31.691, backward_time=0.233, grad_norm=38.712, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.774e-04, train_time=0.911
+[ip-10-0-216-33:0/16] 2024-03-03 14:12:01,300 (trainer:762) INFO: 32epoch:train:12001-12500batch: iter_time=1.529e-04, forward_time=0.208, loss_ctc=38.882, loss_att=26.582, acc=0.824, loss=30.272, backward_time=0.232, grad_norm=39.376, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.773e-04, train_time=0.904
+[ip-10-0-216-33:0/16] 2024-03-03 14:15:21,580 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-03 14:19:32,904 (trainer:762) INFO: 32epoch:train:12501-13000batch: iter_time=1.456e-04, forward_time=0.211, loss_ctc=40.547, loss_att=28.250, acc=0.827, loss=31.939, backward_time=0.231, grad_norm=38.182, clip=100.000, loss_scale=2.664e+33, optim_step_time=0.075, optim0_lr0=1.772e-04, train_time=0.903
+[ip-10-0-216-33:0/16] 2024-03-03 14:27:06,243 (trainer:762) INFO: 32epoch:train:13001-13500batch: iter_time=1.464e-04, forward_time=0.210, loss_ctc=39.046, loss_att=27.422, acc=0.827, loss=30.909, backward_time=0.234, grad_norm=42.302, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.771e-04, train_time=0.906
+[ip-10-0-216-33:0/16] 2024-03-03 14:34:41,501 (trainer:762) INFO: 32epoch:train:13501-14000batch: iter_time=1.427e-04, forward_time=0.210, loss_ctc=42.734, loss_att=29.525, acc=0.828, loss=33.488, backward_time=0.233, grad_norm=39.680, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.770e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-03-03 14:39:49,305 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-03 14:42:23,554 (trainer:762) INFO: 32epoch:train:14001-14500batch: iter_time=1.445e-04, forward_time=0.207, loss_ctc=42.393, loss_att=29.285, acc=0.826, loss=33.218, backward_time=0.231, grad_norm=39.554, clip=100.000, loss_scale=2.157e+33, optim_step_time=0.075, optim0_lr0=1.769e-04, train_time=0.924
+[ip-10-0-216-33:0/16] 2024-03-03 14:49:53,274 (trainer:762) INFO: 32epoch:train:14501-15000batch: iter_time=1.436e-04, forward_time=0.210, loss_ctc=39.823, loss_att=27.112, acc=0.826, loss=30.925, backward_time=0.228, grad_norm=38.523, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.768e-04, train_time=0.899
+[ip-10-0-216-33:0/16] 2024-03-03 15:04:04,981 (trainer:361) INFO: 32epoch results: [train] iter_time=0.087, forward_time=0.209, loss_ctc=40.915, loss_att=28.159, acc=0.825, loss=31.986, backward_time=0.233, grad_norm=39.638, clip=100.000, loss_scale=2.075e+33, optim_step_time=0.075, optim0_lr0=1.782e-04, train_time=1.003, time=4 hours, 11 minutes and 0.77 seconds, total_count=480000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=36.626, cer_ctc=0.199, loss_att=28.425, acc=0.767, cer=0.223, wer=0.991, loss=30.886, time=13 minutes and 52.11 seconds, total_count=68256, gpu_max_cached_mem_GB=36.516
+[ip-10-0-216-33:0/16] 2024-03-03 15:04:14,921 (trainer:416) INFO: The best model has been updated: valid.total_count
+[ip-10-0-216-33:0/16] 2024-03-03 15:04:14,975 (trainer:470) INFO: The model files were removed: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/22epoch.pth, exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/27epoch.pth
+[ip-10-0-216-33:0/16] 2024-03-03 15:04:14,976 (trainer:290) INFO: 33/45epoch started. Estimated time to finish: 2 days, 9 hours and 27 minutes
+[ip-10-0-216-33:0/16] 2024-03-03 15:04:14,983 (multiple_iter_factory:32) INFO: Building 0th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-03 15:04:51,026 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-03 15:04:59,256 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-03 15:04:59,256 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1,
+[ip-10-0-216-33:0/16] 2024-03-03 15:04:59,260 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-03 15:18:53,719 (trainer:762) INFO: 33epoch:train:1-500batch: iter_time=0.844, forward_time=0.210, loss_ctc=42.744, loss_att=29.097, acc=0.822, loss=33.191, backward_time=0.235, grad_norm=41.769, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.768e-04, train_time=1.757
+[ip-10-0-216-33:0/16] 2024-03-03 15:26:33,036 (trainer:762) INFO: 33epoch:train:501-1000batch: iter_time=1.453e-04, forward_time=0.208, loss_ctc=42.111, loss_att=28.531, acc=0.822, loss=32.605, backward_time=0.237, grad_norm=40.171, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.767e-04, train_time=0.918
+[ip-10-0-216-33:0/16] 2024-03-03 15:34:10,132 (trainer:762) INFO: 33epoch:train:1001-1500batch: iter_time=1.492e-04, forward_time=0.210, loss_ctc=40.221, loss_att=27.917, acc=0.824, loss=31.608, backward_time=0.238, grad_norm=40.364, clip=100.000, loss_scale=1.737e+33, optim_step_time=0.075, optim0_lr0=1.766e-04, train_time=0.914
+[ip-10-0-216-33:0/16] 2024-03-03 15:36:13,830 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-03 15:41:53,533 (trainer:762) INFO: 33epoch:train:1501-2000batch: iter_time=1.484e-04, forward_time=0.209, loss_ctc=40.875, loss_att=28.060, acc=0.828, loss=31.904, backward_time=0.236, grad_norm=40.042, clip=100.000, loss_scale=1.644e+33, optim_step_time=0.075, optim0_lr0=1.765e-04, train_time=0.927
+[ip-10-0-216-33:0/16] 2024-03-03 15:49:28,341 (trainer:762) INFO: 33epoch:train:2001-2500batch: iter_time=1.473e-04, forward_time=0.209, loss_ctc=40.983, loss_att=28.325, acc=0.818, loss=32.123, backward_time=0.233, grad_norm=41.387, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.764e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-03-03 15:57:02,198 (trainer:762) INFO: 33epoch:train:2501-3000batch: iter_time=1.471e-04, forward_time=0.210, loss_ctc=39.172, loss_att=26.796, acc=0.822, loss=30.509, backward_time=0.234, grad_norm=38.852, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.763e-04, train_time=0.907
+[ip-10-0-216-33:0/16] 2024-03-03 16:04:40,870 (trainer:762) INFO: 33epoch:train:3001-3500batch: iter_time=1.474e-04, forward_time=0.210, loss_ctc=42.983, loss_att=29.196, acc=0.823, loss=33.332, backward_time=0.235, grad_norm=46.472, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.762e-04, train_time=0.917
+[ip-10-0-216-33:0/16] 2024-03-03 16:12:20,804 (trainer:762) INFO: 33epoch:train:3501-4000batch: iter_time=1.501e-04, forward_time=0.206, loss_ctc=39.218, loss_att=27.593, acc=0.824, loss=31.080, backward_time=0.235, grad_norm=39.254, clip=100.000, loss_scale=2.248e+33, optim_step_time=0.075, optim0_lr0=1.761e-04, train_time=0.920
+[ip-10-0-216-33:0/16] 2024-03-03 16:18:50,602 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-03 16:20:00,621 (trainer:762) INFO: 33epoch:train:4001-4500batch: iter_time=1.454e-04, forward_time=0.209, loss_ctc=40.459, loss_att=28.005, acc=0.830, loss=31.741, backward_time=0.232, grad_norm=40.473, clip=100.000, loss_scale=2.393e+33, optim_step_time=0.076, optim0_lr0=1.760e-04, train_time=0.919
+[ip-10-0-216-33:0/16] 2024-03-03 16:27:36,605 (trainer:762) INFO: 33epoch:train:4501-5000batch: iter_time=1.450e-04, forward_time=0.209, loss_ctc=40.105, loss_att=27.844, acc=0.814, loss=31.522, backward_time=0.228, grad_norm=42.147, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.759e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-03-03 16:27:42,631 (multiple_iter_factory:32) INFO: Building 1th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-03 16:28:18,758 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-03 16:28:27,619 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-03 16:28:27,620 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2,
+[ip-10-0-216-33:0/16] 2024-03-03 16:28:27,624 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-03 16:42:51,307 (trainer:762) INFO: 33epoch:train:5001-5500batch: iter_time=0.866, forward_time=0.210, loss_ctc=42.306, loss_att=29.067, acc=0.823, loss=33.038, backward_time=0.231, grad_norm=41.537, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.758e-04, train_time=1.829
+[ip-10-0-216-33:0/16] 2024-03-03 16:50:33,257 (trainer:762) INFO: 33epoch:train:5501-6000batch: iter_time=1.453e-04, forward_time=0.208, loss_ctc=41.630, loss_att=28.210, acc=0.824, loss=32.236, backward_time=0.239, grad_norm=40.480, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.757e-04, train_time=0.924
+[ip-10-0-216-33:0/16] 2024-03-03 16:58:06,170 (trainer:762) INFO: 33epoch:train:6001-6500batch: iter_time=1.433e-04, forward_time=0.209, loss_ctc=39.956, loss_att=27.959, acc=0.824, loss=31.558, backward_time=0.233, grad_norm=39.125, clip=100.000, loss_scale=1.501e+33, optim_step_time=0.075, optim0_lr0=1.757e-04, train_time=0.906
+[ip-10-0-216-33:0/16] 2024-03-03 17:05:39,566 (trainer:762) INFO: 33epoch:train:6501-7000batch: iter_time=1.436e-04, forward_time=0.209, loss_ctc=40.696, loss_att=27.964, acc=0.829, loss=31.784, backward_time=0.231, grad_norm=39.028, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.756e-04, train_time=0.907
+[ip-10-0-216-33:0/16] 2024-03-03 17:13:11,145 (trainer:762) INFO: 33epoch:train:7001-7500batch: iter_time=1.437e-04, forward_time=0.209, loss_ctc=40.793, loss_att=28.268, acc=0.819, loss=32.025, backward_time=0.233, grad_norm=41.215, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.755e-04, train_time=0.903
+[ip-10-0-216-33:0/16] 2024-03-03 17:20:48,205 (trainer:762) INFO: 33epoch:train:7501-8000batch: iter_time=1.446e-04, forward_time=0.207, loss_ctc=38.964, loss_att=26.679, acc=0.822, loss=30.364, backward_time=0.234, grad_norm=38.362, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.754e-04, train_time=0.914
+[ip-10-0-216-33:0/16] 2024-03-03 17:28:27,658 (trainer:762) INFO: 33epoch:train:8001-8500batch: iter_time=1.437e-04, forward_time=0.210, loss_ctc=42.675, loss_att=29.083, acc=0.824, loss=33.160, backward_time=0.235, grad_norm=41.421, clip=100.000, loss_scale=3.001e+33, optim_step_time=0.075, optim0_lr0=1.753e-04, train_time=0.919
+[ip-10-0-216-33:0/16] 2024-03-03 17:36:02,127 (trainer:762) INFO: 33epoch:train:8501-9000batch: iter_time=1.457e-04, forward_time=0.208, loss_ctc=38.832, loss_att=27.387, acc=0.825, loss=30.820, backward_time=0.231, grad_norm=39.025, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.752e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-03-03 17:43:38,475 (trainer:762) INFO: 33epoch:train:9001-9500batch: iter_time=1.424e-04, forward_time=0.209, loss_ctc=40.392, loss_att=28.013, acc=0.830, loss=31.727, backward_time=0.233, grad_norm=40.656, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.751e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-03-03 17:51:07,147 (trainer:762) INFO: 33epoch:train:9501-10000batch: iter_time=1.471e-04, forward_time=0.209, loss_ctc=39.695, loss_att=27.550, acc=0.815, loss=31.194, backward_time=0.233, grad_norm=40.774, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.750e-04, train_time=0.897
+[ip-10-0-216-33:0/16] 2024-03-03 17:51:13,165 (multiple_iter_factory:32) INFO: Building 2th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-03 17:51:50,720 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-03 17:51:59,381 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-03 17:51:59,381 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0,
+[ip-10-0-216-33:0/16] 2024-03-03 17:51:59,385 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-03 18:06:08,509 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-03 18:06:29,278 (trainer:762) INFO: 33epoch:train:10001-10500batch: iter_time=0.900, forward_time=0.209, loss_ctc=41.984, loss_att=28.924, acc=0.823, loss=32.842, backward_time=0.231, grad_norm=42.229, clip=100.000, loss_scale=5.754e+33, optim_step_time=0.075, optim0_lr0=1.749e-04, train_time=1.844
+[ip-10-0-216-33:0/16] 2024-03-03 18:09:49,711 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-03 18:14:10,558 (trainer:762) INFO: 33epoch:train:10501-11000batch: iter_time=1.481e-04, forward_time=0.210, loss_ctc=41.094, loss_att=28.044, acc=0.824, loss=31.959, backward_time=0.237, grad_norm=40.703, clip=100.000, loss_scale=3.715e+33, optim_step_time=0.075, optim0_lr0=1.748e-04, train_time=0.922
+[ip-10-0-216-33:0/16] 2024-03-03 18:21:49,809 (trainer:762) INFO: 33epoch:train:11001-11500batch: iter_time=1.487e-04, forward_time=0.209, loss_ctc=39.867, loss_att=27.755, acc=0.825, loss=31.389, backward_time=0.231, grad_norm=39.928, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.748e-04, train_time=0.918
+[ip-10-0-216-33:0/16] 2024-03-03 18:29:23,754 (trainer:762) INFO: 33epoch:train:11501-12000batch: iter_time=1.470e-04, forward_time=0.211, loss_ctc=40.472, loss_att=27.818, acc=0.830, loss=31.614, backward_time=0.237, grad_norm=38.902, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.747e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-03-03 18:37:03,052 (trainer:762) INFO: 33epoch:train:12001-12500batch: iter_time=1.491e-04, forward_time=0.208, loss_ctc=40.536, loss_att=28.154, acc=0.820, loss=31.869, backward_time=0.235, grad_norm=40.508, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.746e-04, train_time=0.918
+[ip-10-0-216-33:0/16] 2024-03-03 18:44:46,829 (trainer:762) INFO: 33epoch:train:12501-13000batch: iter_time=1.539e-04, forward_time=0.209, loss_ctc=38.912, loss_att=26.595, acc=0.823, loss=30.290, backward_time=0.235, grad_norm=38.388, clip=100.000, loss_scale=4.071e+33, optim_step_time=0.075, optim0_lr0=1.745e-04, train_time=0.927
+[ip-10-0-216-33:0/16] 2024-03-03 18:52:22,756 (trainer:762) INFO: 33epoch:train:13001-13500batch: iter_time=1.522e-04, forward_time=0.210, loss_ctc=42.653, loss_att=28.924, acc=0.825, loss=33.043, backward_time=0.237, grad_norm=40.972, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.744e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-03-03 18:55:19,670 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-03 18:59:56,649 (trainer:762) INFO: 33epoch:train:13501-14000batch: iter_time=1.480e-04, forward_time=0.210, loss_ctc=38.737, loss_att=27.324, acc=0.825, loss=30.748, backward_time=0.232, grad_norm=39.641, clip=100.000, loss_scale=3.595e+33, optim_step_time=0.075, optim0_lr0=1.743e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-03-03 19:07:37,258 (trainer:762) INFO: 33epoch:train:14001-14500batch: iter_time=1.501e-04, forward_time=0.209, loss_ctc=39.862, loss_att=27.686, acc=0.831, loss=31.339, backward_time=0.236, grad_norm=39.844, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.742e-04, train_time=0.921
+[ip-10-0-216-33:0/16] 2024-03-03 19:15:09,547 (trainer:762) INFO: 33epoch:train:14501-15000batch: iter_time=1.457e-04, forward_time=0.210, loss_ctc=39.328, loss_att=27.377, acc=0.816, loss=30.962, backward_time=0.227, grad_norm=42.940, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.741e-04, train_time=0.904
+[ip-10-0-216-33:0/16] 2024-03-03 19:29:33,252 (trainer:361) INFO: 33epoch results: [train] iter_time=0.087, forward_time=0.209, loss_ctc=40.608, loss_att=28.005, acc=0.823, loss=31.786, backward_time=0.234, grad_norm=40.554, clip=100.000, loss_scale=2.719e+33, optim_step_time=0.075, optim0_lr0=1.754e-04, train_time=1.003, time=4 hours, 11 minutes and 9.93 seconds, total_count=495000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=36.512, cer_ctc=0.199, loss_att=30.865, acc=0.762, cer=0.210, wer=0.988, loss=32.559, time=14 minutes and 8 seconds, total_count=70389, gpu_max_cached_mem_GB=36.516
+[ip-10-0-216-33:0/16] 2024-03-03 19:29:43,186 (trainer:416) INFO: The best model has been updated: valid.total_count
+[ip-10-0-216-33:0/16] 2024-03-03 19:29:43,221 (trainer:470) INFO: The model files were removed: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/28epoch.pth
+[ip-10-0-216-33:0/16] 2024-03-03 19:29:43,222 (trainer:290) INFO: 34/45epoch started. Estimated time to finish: 2 days, 5 hours and 2 minutes
+[ip-10-0-216-33:0/16] 2024-03-03 19:29:43,229 (multiple_iter_factory:32) INFO: Building 0th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-03 19:30:18,952 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-03 19:30:26,786 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-03 19:30:26,786 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0,
+[ip-10-0-216-33:0/16] 2024-03-03 19:30:26,791 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-03 19:44:26,115 (trainer:762) INFO: 34epoch:train:1-500batch: iter_time=0.834, forward_time=0.211, loss_ctc=39.809, loss_att=27.547, acc=0.822, loss=31.226, backward_time=0.239, grad_norm=41.163, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.741e-04, train_time=1.765
+[ip-10-0-216-33:0/16] 2024-03-03 19:47:45,869 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-03 19:52:00,629 (trainer:762) INFO: 34epoch:train:501-1000batch: iter_time=1.477e-04, forward_time=0.211, loss_ctc=42.462, loss_att=28.830, acc=0.826, loss=32.920, backward_time=0.235, grad_norm=41.132, clip=100.000, loss_scale=2.731e+33, optim_step_time=0.075, optim0_lr0=1.740e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-03-03 19:59:35,872 (trainer:762) INFO: 34epoch:train:1001-1500batch: iter_time=1.468e-04, forward_time=0.209, loss_ctc=41.930, loss_att=28.627, acc=0.817, loss=32.618, backward_time=0.236, grad_norm=43.495, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.739e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-03-03 20:07:11,719 (trainer:762) INFO: 34epoch:train:1501-2000batch: iter_time=1.471e-04, forward_time=0.209, loss_ctc=40.045, loss_att=27.434, acc=0.826, loss=31.218, backward_time=0.239, grad_norm=40.295, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.738e-04, train_time=0.911
+[ip-10-0-216-33:0/16] 2024-03-03 20:14:45,248 (trainer:762) INFO: 34epoch:train:2001-2500batch: iter_time=1.451e-04, forward_time=0.211, loss_ctc=41.022, loss_att=27.741, acc=0.827, loss=31.725, backward_time=0.234, grad_norm=40.689, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.737e-04, train_time=0.907
+[ip-10-0-216-33:0/16] 2024-03-03 20:22:20,274 (trainer:762) INFO: 34epoch:train:2501-3000batch: iter_time=1.449e-04, forward_time=0.209, loss_ctc=39.688, loss_att=27.198, acc=0.829, loss=30.945, backward_time=0.231, grad_norm=38.777, clip=100.000, loss_scale=4.050e+33, optim_step_time=0.075, optim0_lr0=1.736e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-03-03 20:22:36,144 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-03 20:29:59,797 (trainer:762) INFO: 34epoch:train:3001-3500batch: iter_time=1.479e-04, forward_time=0.209, loss_ctc=40.179, loss_att=28.077, acc=0.823, loss=31.708, backward_time=0.235, grad_norm=39.084, clip=100.000, loss_scale=2.679e+33, optim_step_time=0.075, optim0_lr0=1.735e-04, train_time=0.919
+[ip-10-0-216-33:0/16] 2024-03-03 20:37:29,927 (trainer:762) INFO: 34epoch:train:3501-4000batch: iter_time=1.473e-04, forward_time=0.209, loss_ctc=41.488, loss_att=28.521, acc=0.825, loss=32.411, backward_time=0.232, grad_norm=41.500, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.734e-04, train_time=0.900
+[ip-10-0-216-33:0/16] 2024-03-03 20:45:08,439 (trainer:762) INFO: 34epoch:train:4001-4500batch: iter_time=1.438e-04, forward_time=0.209, loss_ctc=41.205, loss_att=28.263, acc=0.825, loss=32.145, backward_time=0.227, grad_norm=40.747, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.734e-04, train_time=0.917
+[ip-10-0-216-33:0/16] 2024-03-03 20:48:40,262 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-03 20:52:44,961 (trainer:762) INFO: 34epoch:train:4501-5000batch: iter_time=1.454e-04, forward_time=0.209, loss_ctc=40.669, loss_att=28.064, acc=0.825, loss=31.845, backward_time=0.232, grad_norm=39.550, clip=100.000, loss_scale=1.907e+33, optim_step_time=0.075, optim0_lr0=1.733e-04, train_time=0.913
+[ip-10-0-216-33:0/16] 2024-03-03 20:52:51,137 (multiple_iter_factory:32) INFO: Building 1th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-03 20:53:27,785 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-03 20:53:35,750 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-03 20:53:35,751 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2,
+[ip-10-0-216-33:0/16] 2024-03-03 20:53:35,755 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-03 21:10:22,937 (trainer:762) INFO: 34epoch:train:5001-5500batch: iter_time=1.177, forward_time=0.208, loss_ctc=39.390, loss_att=27.277, acc=0.824, loss=30.911, backward_time=0.232, grad_norm=41.589, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.732e-04, train_time=2.116
+[ip-10-0-216-33:0/16] 2024-03-03 21:18:05,209 (trainer:762) INFO: 34epoch:train:5501-6000batch: iter_time=1.462e-04, forward_time=0.207, loss_ctc=42.130, loss_att=28.527, acc=0.828, loss=32.608, backward_time=0.232, grad_norm=40.000, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.731e-04, train_time=0.924
+[ip-10-0-216-33:0/16] 2024-03-03 21:25:47,992 (trainer:762) INFO: 34epoch:train:6001-6500batch: iter_time=1.489e-04, forward_time=0.208, loss_ctc=41.798, loss_att=28.563, acc=0.818, loss=32.533, backward_time=0.233, grad_norm=42.901, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.730e-04, train_time=0.925
+[ip-10-0-216-33:0/16] 2024-03-03 21:33:29,953 (trainer:762) INFO: 34epoch:train:6501-7000batch: iter_time=1.467e-04, forward_time=0.207, loss_ctc=39.968, loss_att=27.370, acc=0.826, loss=31.149, backward_time=0.234, grad_norm=40.647, clip=100.000, loss_scale=1.986e+33, optim_step_time=0.075, optim0_lr0=1.729e-04, train_time=0.924
+[ip-10-0-216-33:0/16] 2024-03-03 21:37:05,032 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-03 21:41:08,146 (trainer:762) INFO: 34epoch:train:7001-7500batch: iter_time=1.479e-04, forward_time=0.209, loss_ctc=40.719, loss_att=27.555, acc=0.827, loss=31.504, backward_time=0.236, grad_norm=40.711, clip=100.000, loss_scale=1.902e+33, optim_step_time=0.075, optim0_lr0=1.728e-04, train_time=0.916
+[ip-10-0-216-33:0/16] 2024-03-03 21:48:44,465 (trainer:762) INFO: 34epoch:train:7501-8000batch: iter_time=1.524e-04, forward_time=0.210, loss_ctc=39.396, loss_att=27.039, acc=0.830, loss=30.747, backward_time=0.237, grad_norm=39.760, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.728e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-03-03 21:56:30,871 (trainer:762) INFO: 34epoch:train:8001-8500batch: iter_time=1.470e-04, forward_time=0.205, loss_ctc=39.853, loss_att=27.890, acc=0.824, loss=31.479, backward_time=0.236, grad_norm=38.518, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.727e-04, train_time=0.933
+[ip-10-0-216-33:0/16] 2024-03-03 22:04:08,051 (trainer:762) INFO: 34epoch:train:8501-9000batch: iter_time=1.473e-04, forward_time=0.208, loss_ctc=41.048, loss_att=28.232, acc=0.827, loss=32.077, backward_time=0.236, grad_norm=40.574, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.726e-04, train_time=0.914
+[ip-10-0-216-33:0/16] 2024-03-03 22:11:45,713 (trainer:762) INFO: 34epoch:train:9001-9500batch: iter_time=1.443e-04, forward_time=0.208, loss_ctc=41.150, loss_att=28.177, acc=0.825, loss=32.069, backward_time=0.237, grad_norm=40.668, clip=100.000, loss_scale=1.991e+33, optim_step_time=0.075, optim0_lr0=1.725e-04, train_time=0.915
+[ip-10-0-216-33:0/16] 2024-03-03 22:19:22,221 (trainer:762) INFO: 34epoch:train:9501-10000batch: iter_time=1.475e-04, forward_time=0.209, loss_ctc=40.449, loss_att=27.921, acc=0.826, loss=31.679, backward_time=0.231, grad_norm=39.666, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.724e-04, train_time=0.913
+[ip-10-0-216-33:0/16] 2024-03-03 22:19:28,103 (multiple_iter_factory:32) INFO: Building 2th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-03 22:20:05,418 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-03 22:20:13,404 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-03 22:20:13,405 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1,
+[ip-10-0-216-33:0/16] 2024-03-03 22:20:13,409 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-03 22:34:39,914 (trainer:762) INFO: 34epoch:train:10001-10500batch: iter_time=0.908, forward_time=0.213, loss_ctc=39.202, loss_att=27.253, acc=0.824, loss=30.837, backward_time=0.230, grad_norm=41.255, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.076, optim0_lr0=1.723e-04, train_time=1.835
+[ip-10-0-216-33:0/16] 2024-03-03 22:42:17,034 (trainer:762) INFO: 34epoch:train:10501-11000batch: iter_time=1.842e-04, forward_time=0.214, loss_ctc=41.753, loss_att=28.370, acc=0.828, loss=32.385, backward_time=0.223, grad_norm=40.902, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.076, optim0_lr0=1.722e-04, train_time=0.914
+[ip-10-0-216-33:0/16] 2024-03-03 22:49:47,385 (trainer:762) INFO: 34epoch:train:11001-11500batch: iter_time=1.863e-04, forward_time=0.214, loss_ctc=41.670, loss_att=28.497, acc=0.818, loss=32.449, backward_time=0.226, grad_norm=42.598, clip=100.000, loss_scale=3.982e+33, optim_step_time=0.073, optim0_lr0=1.722e-04, train_time=0.900
+[ip-10-0-216-33:0/16] 2024-03-03 22:57:25,915 (trainer:762) INFO: 34epoch:train:11501-12000batch: iter_time=1.947e-04, forward_time=0.215, loss_ctc=39.813, loss_att=27.312, acc=0.827, loss=31.062, backward_time=0.223, grad_norm=39.934, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.081, optim0_lr0=1.721e-04, train_time=0.917
+[ip-10-0-216-33:0/16] 2024-03-03 23:04:57,943 (trainer:762) INFO: 34epoch:train:12001-12500batch: iter_time=1.722e-04, forward_time=0.215, loss_ctc=40.411, loss_att=27.429, acc=0.828, loss=31.323, backward_time=0.222, grad_norm=40.086, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.076, optim0_lr0=1.720e-04, train_time=0.904
+[ip-10-0-216-33:0/16] 2024-03-03 23:12:32,306 (trainer:762) INFO: 34epoch:train:12501-13000batch: iter_time=1.710e-04, forward_time=0.214, loss_ctc=39.077, loss_att=26.918, acc=0.831, loss=30.566, backward_time=0.222, grad_norm=37.539, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.076, optim0_lr0=1.719e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-03-03 23:18:45,716 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-03 23:20:03,856 (trainer:762) INFO: 34epoch:train:13001-13500batch: iter_time=1.743e-04, forward_time=0.214, loss_ctc=39.649, loss_att=27.714, acc=0.825, loss=31.295, backward_time=0.222, grad_norm=37.864, clip=100.000, loss_scale=7.055e+33, optim_step_time=0.075, optim0_lr0=1.718e-04, train_time=0.903
+[ip-10-0-216-33:0/16] 2024-03-03 23:27:39,566 (trainer:762) INFO: 34epoch:train:13501-14000batch: iter_time=1.730e-04, forward_time=0.215, loss_ctc=41.085, loss_att=28.304, acc=0.827, loss=32.138, backward_time=0.223, grad_norm=39.122, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.079, optim0_lr0=1.717e-04, train_time=0.911
+[ip-10-0-216-33:0/16] 2024-03-03 23:35:15,384 (trainer:762) INFO: 34epoch:train:14001-14500batch: iter_time=1.566e-04, forward_time=0.213, loss_ctc=40.882, loss_att=28.049, acc=0.826, loss=31.899, backward_time=0.222, grad_norm=42.366, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.073, optim0_lr0=1.716e-04, train_time=0.911
+[ip-10-0-216-33:0/16] 2024-03-03 23:36:54,395 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-03 23:42:52,639 (trainer:762) INFO: 34epoch:train:14501-15000batch: iter_time=1.539e-04, forward_time=0.212, loss_ctc=40.325, loss_att=27.880, acc=0.826, loss=31.614, backward_time=0.222, grad_norm=39.326, clip=100.000, loss_scale=3.142e+33, optim_step_time=0.072, optim0_lr0=1.716e-04, train_time=0.914
+[ip-10-0-216-33:0/16] 2024-03-03 23:57:40,387 (trainer:361) INFO: 34epoch results: [train] iter_time=0.097, forward_time=0.211, loss_ctc=40.609, loss_att=27.886, acc=0.825, loss=31.703, backward_time=0.231, grad_norm=40.416, clip=100.000, loss_scale=2.951e+33, optim_step_time=0.075, optim0_lr0=1.728e-04, train_time=1.012, time=4 hours, 13 minutes and 25.91 seconds, total_count=510000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=36.440, cer_ctc=0.197, loss_att=28.467, acc=0.778, cer=0.192, wer=0.986, loss=30.859, time=14 minutes and 30.95 seconds, total_count=72522, gpu_max_cached_mem_GB=36.516
+[ip-10-0-216-33:0/16] 2024-03-03 23:57:50,008 (trainer:416) INFO: The best model has been updated: valid.acc, valid.total_count
+[ip-10-0-216-33:0/16] 2024-03-03 23:57:50,041 (trainer:290) INFO: 35/45epoch started. Estimated time to finish: 2 days, 37 minutes and 52.59 seconds
+[ip-10-0-216-33:0/16] 2024-03-03 23:57:50,049 (multiple_iter_factory:32) INFO: Building 0th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-03 23:58:26,429 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-03 23:58:34,306 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-03 23:58:34,307 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1,
+[ip-10-0-216-33:0/16] 2024-03-03 23:58:34,311 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-04 00:12:39,636 (trainer:762) INFO: 35epoch:train:1-500batch: iter_time=0.859, forward_time=0.213, loss_ctc=40.779, loss_att=27.818, acc=0.823, loss=31.706, backward_time=0.222, grad_norm=41.463, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.073, optim0_lr0=1.715e-04, train_time=1.779
+[ip-10-0-216-33:0/16] 2024-03-04 00:20:14,949 (trainer:762) INFO: 35epoch:train:501-1000batch: iter_time=1.607e-04, forward_time=0.213, loss_ctc=36.925, loss_att=25.542, acc=0.833, loss=28.957, backward_time=0.222, grad_norm=38.171, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.073, optim0_lr0=1.714e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-03-04 00:27:46,893 (trainer:762) INFO: 35epoch:train:1001-1500batch: iter_time=1.608e-04, forward_time=0.213, loss_ctc=39.956, loss_att=27.685, acc=0.832, loss=31.366, backward_time=0.223, grad_norm=38.732, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.072, optim0_lr0=1.713e-04, train_time=0.904
+[ip-10-0-216-33:0/16] 2024-03-04 00:35:16,626 (trainer:762) INFO: 35epoch:train:1501-2000batch: iter_time=1.585e-04, forward_time=0.212, loss_ctc=39.247, loss_att=26.890, acc=0.824, loss=30.597, backward_time=0.222, grad_norm=39.880, clip=100.000, loss_scale=4.642e+33, optim_step_time=0.072, optim0_lr0=1.712e-04, train_time=0.899
+[ip-10-0-216-33:0/16] 2024-03-04 00:42:49,482 (trainer:762) INFO: 35epoch:train:2001-2500batch: iter_time=1.680e-04, forward_time=0.213, loss_ctc=40.386, loss_att=27.812, acc=0.827, loss=31.584, backward_time=0.222, grad_norm=40.588, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.073, optim0_lr0=1.711e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-03-04 00:50:30,104 (trainer:762) INFO: 35epoch:train:2501-3000batch: iter_time=1.703e-04, forward_time=0.214, loss_ctc=43.786, loss_att=29.990, acc=0.826, loss=34.129, backward_time=0.223, grad_norm=41.730, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.072, optim0_lr0=1.711e-04, train_time=0.921
+[ip-10-0-216-33:0/16] 2024-03-04 00:54:08,429 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-04 00:58:11,734 (trainer:762) INFO: 35epoch:train:3001-3500batch: iter_time=1.689e-04, forward_time=0.212, loss_ctc=38.653, loss_att=26.460, acc=0.825, loss=30.117, backward_time=0.222, grad_norm=38.845, clip=100.000, loss_scale=3.819e+33, optim_step_time=0.072, optim0_lr0=1.710e-04, train_time=0.923
+[ip-10-0-216-33:0/16] 2024-03-04 01:05:48,228 (trainer:762) INFO: 35epoch:train:3501-4000batch: iter_time=1.706e-04, forward_time=0.213, loss_ctc=40.788, loss_att=27.713, acc=0.824, loss=31.635, backward_time=0.222, grad_norm=41.398, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.072, optim0_lr0=1.709e-04, train_time=0.913
+[ip-10-0-216-33:0/16] 2024-03-04 01:13:30,536 (trainer:762) INFO: 35epoch:train:4001-4500batch: iter_time=1.899e-04, forward_time=0.217, loss_ctc=39.885, loss_att=27.520, acc=0.833, loss=31.229, backward_time=0.223, grad_norm=38.965, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.083, optim0_lr0=1.708e-04, train_time=0.924
+[ip-10-0-216-33:0/16] 2024-03-04 01:21:10,662 (trainer:762) INFO: 35epoch:train:4501-5000batch: iter_time=1.901e-04, forward_time=0.215, loss_ctc=39.877, loss_att=27.630, acc=0.824, loss=31.304, backward_time=0.222, grad_norm=40.600, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.083, optim0_lr0=1.707e-04, train_time=0.920
+[ip-10-0-216-33:0/16] 2024-03-04 01:21:15,644 (multiple_iter_factory:32) INFO: Building 1th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-04 01:21:52,152 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-04 01:22:00,216 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-04 01:22:00,217 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2,
+[ip-10-0-216-33:0/16] 2024-03-04 01:22:00,221 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-04 01:36:22,573 (trainer:762) INFO: 35epoch:train:5001-5500batch: iter_time=0.899, forward_time=0.214, loss_ctc=40.568, loss_att=27.810, acc=0.824, loss=31.637, backward_time=0.222, grad_norm=40.456, clip=100.000, loss_scale=3.967e+33, optim_step_time=0.078, optim0_lr0=1.706e-04, train_time=1.824
+[ip-10-0-216-33:0/16] 2024-03-04 01:38:57,845 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-04 01:43:54,672 (trainer:762) INFO: 35epoch:train:5501-6000batch: iter_time=1.588e-04, forward_time=0.213, loss_ctc=36.712, loss_att=25.436, acc=0.833, loss=28.819, backward_time=0.222, grad_norm=43.356, clip=100.000, loss_scale=3.491e+33, optim_step_time=0.072, optim0_lr0=1.706e-04, train_time=0.904
+[ip-10-0-216-33:0/16] 2024-03-04 01:51:23,797 (trainer:762) INFO: 35epoch:train:6001-6500batch: iter_time=1.643e-04, forward_time=0.212, loss_ctc=39.867, loss_att=27.564, acc=0.833, loss=31.255, backward_time=0.222, grad_norm=40.016, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.072, optim0_lr0=1.705e-04, train_time=0.898
+[ip-10-0-216-33:0/16] 2024-03-04 01:58:10,608 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-04 01:58:57,327 (trainer:762) INFO: 35epoch:train:6501-7000batch: iter_time=1.621e-04, forward_time=0.213, loss_ctc=39.101, loss_att=26.934, acc=0.823, loss=30.584, backward_time=0.222, grad_norm=39.794, clip=100.000, loss_scale=2.461e+33, optim_step_time=0.072, optim0_lr0=1.704e-04, train_time=0.907
+[ip-10-0-216-33:0/16] 2024-03-04 02:06:26,141 (trainer:762) INFO: 35epoch:train:7001-7500batch: iter_time=1.576e-04, forward_time=0.212, loss_ctc=40.088, loss_att=27.741, acc=0.827, loss=31.445, backward_time=0.223, grad_norm=40.562, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.072, optim0_lr0=1.703e-04, train_time=0.897
+[ip-10-0-216-33:0/16] 2024-03-04 02:14:04,916 (trainer:762) INFO: 35epoch:train:7501-8000batch: iter_time=1.631e-04, forward_time=0.213, loss_ctc=43.305, loss_att=29.777, acc=0.826, loss=33.836, backward_time=0.223, grad_norm=41.759, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.072, optim0_lr0=1.702e-04, train_time=0.917
+[ip-10-0-216-33:0/16] 2024-03-04 02:21:40,058 (trainer:762) INFO: 35epoch:train:8001-8500batch: iter_time=1.591e-04, forward_time=0.211, loss_ctc=38.570, loss_att=26.472, acc=0.825, loss=30.101, backward_time=0.222, grad_norm=40.548, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.073, optim0_lr0=1.701e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-03-04 02:29:15,854 (trainer:762) INFO: 35epoch:train:8501-9000batch: iter_time=1.637e-04, forward_time=0.213, loss_ctc=40.543, loss_att=27.525, acc=0.825, loss=31.431, backward_time=0.222, grad_norm=41.083, clip=100.000, loss_scale=1.433e+33, optim_step_time=0.072, optim0_lr0=1.701e-04, train_time=0.911
+[ip-10-0-216-33:0/16] 2024-03-04 02:36:47,925 (trainer:762) INFO: 35epoch:train:9001-9500batch: iter_time=1.638e-04, forward_time=0.213, loss_ctc=39.706, loss_att=27.439, acc=0.833, loss=31.119, backward_time=0.222, grad_norm=40.340, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.072, optim0_lr0=1.700e-04, train_time=0.904
+[ip-10-0-216-33:0/16] 2024-03-04 02:44:18,294 (trainer:762) INFO: 35epoch:train:9501-10000batch: iter_time=1.651e-04, forward_time=0.212, loss_ctc=39.981, loss_att=27.658, acc=0.824, loss=31.355, backward_time=0.222, grad_norm=40.987, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.072, optim0_lr0=1.699e-04, train_time=0.900
+[ip-10-0-216-33:0/16] 2024-03-04 02:44:26,198 (multiple_iter_factory:32) INFO: Building 2th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-04 02:45:03,156 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-04 02:45:11,310 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-04 02:45:11,310 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0,
+[ip-10-0-216-33:0/16] 2024-03-04 02:45:11,315 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-04 03:03:30,019 (trainer:762) INFO: 35epoch:train:10001-10500batch: iter_time=1.363, forward_time=0.213, loss_ctc=40.621, loss_att=27.833, acc=0.824, loss=31.669, backward_time=0.222, grad_norm=40.822, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.072, optim0_lr0=1.698e-04, train_time=2.303
+[ip-10-0-216-33:0/16] 2024-03-04 03:11:06,734 (trainer:762) INFO: 35epoch:train:10501-11000batch: iter_time=1.630e-04, forward_time=0.213, loss_ctc=36.671, loss_att=25.405, acc=0.833, loss=28.785, backward_time=0.223, grad_norm=37.641, clip=100.000, loss_scale=2.866e+33, optim_step_time=0.072, optim0_lr0=1.697e-04, train_time=0.913
+[ip-10-0-216-33:0/16] 2024-03-04 03:14:13,279 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-04 03:18:37,248 (trainer:762) INFO: 35epoch:train:11001-11500batch: iter_time=1.666e-04, forward_time=0.213, loss_ctc=39.673, loss_att=27.508, acc=0.833, loss=31.158, backward_time=0.223, grad_norm=39.700, clip=100.000, loss_scale=3.668e+33, optim_step_time=0.073, optim0_lr0=1.697e-04, train_time=0.901
+[ip-10-0-216-33:0/16] 2024-03-04 03:26:10,679 (trainer:762) INFO: 35epoch:train:11501-12000batch: iter_time=1.604e-04, forward_time=0.213, loss_ctc=38.812, loss_att=26.741, acc=0.825, loss=30.362, backward_time=0.222, grad_norm=39.442, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.073, optim0_lr0=1.696e-04, train_time=0.907
+[ip-10-0-216-33:0/16] 2024-03-04 03:33:41,991 (trainer:762) INFO: 35epoch:train:12001-12500batch: iter_time=1.645e-04, forward_time=0.213, loss_ctc=40.191, loss_att=27.679, acc=0.827, loss=31.432, backward_time=0.222, grad_norm=55.790, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.072, optim0_lr0=1.695e-04, train_time=0.902
+[ip-10-0-216-33:0/16] 2024-03-04 03:41:12,082 (trainer:762) INFO: 35epoch:train:12501-13000batch: iter_time=1.616e-04, forward_time=0.213, loss_ctc=43.220, loss_att=29.712, acc=0.827, loss=33.764, backward_time=0.223, grad_norm=40.560, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.073, optim0_lr0=1.694e-04, train_time=0.900
+[ip-10-0-216-33:0/16] 2024-03-04 03:48:43,706 (trainer:762) INFO: 35epoch:train:13001-13500batch: iter_time=1.580e-04, forward_time=0.213, loss_ctc=38.482, loss_att=26.391, acc=0.825, loss=30.019, backward_time=0.223, grad_norm=41.023, clip=100.000, loss_scale=4.117e+33, optim_step_time=0.072, optim0_lr0=1.693e-04, train_time=0.903
+[ip-10-0-216-33:0/16] 2024-03-04 03:52:15,224 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-04 03:53:22,563 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-04 03:56:10,680 (trainer:762) INFO: 35epoch:train:13501-14000batch: iter_time=1.636e-04, forward_time=0.212, loss_ctc=40.289, loss_att=27.378, acc=0.826, loss=31.252, backward_time=0.222, grad_norm=42.334, clip=100.000, loss_scale=3.336e+33, optim_step_time=0.072, optim0_lr0=1.693e-04, train_time=0.894
+[ip-10-0-216-33:0/16] 2024-03-04 04:03:46,342 (trainer:762) INFO: 35epoch:train:14001-14500batch: iter_time=1.605e-04, forward_time=0.214, loss_ctc=39.531, loss_att=27.375, acc=0.834, loss=31.022, backward_time=0.222, grad_norm=39.430, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.072, optim0_lr0=1.692e-04, train_time=0.911
+[ip-10-0-216-33:0/16] 2024-03-04 04:11:21,267 (trainer:762) INFO: 35epoch:train:14501-15000batch: iter_time=1.649e-04, forward_time=0.212, loss_ctc=39.551, loss_att=27.382, acc=0.824, loss=31.033, backward_time=0.222, grad_norm=41.090, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.072, optim0_lr0=1.691e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-03-04 04:25:48,149 (trainer:361) INFO: 35epoch results: [train] iter_time=0.104, forward_time=0.213, loss_ctc=39.859, loss_att=27.427, acc=0.827, loss=31.157, backward_time=0.222, grad_norm=40.903, clip=100.000, loss_scale=2.814e+33, optim_step_time=0.073, optim0_lr0=1.703e-04, train_time=1.014, time=4 hours, 13 minutes and 46.1 seconds, total_count=525000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=36.507, cer_ctc=0.198, loss_att=28.798, acc=0.764, cer=0.236, wer=0.991, loss=31.111, time=14 minutes and 11.65 seconds, total_count=74655, gpu_max_cached_mem_GB=36.516
+[ip-10-0-216-33:0/16] 2024-03-04 04:25:57,932 (trainer:416) INFO: The best model has been updated: valid.total_count
+[ip-10-0-216-33:0/16] 2024-03-04 04:25:57,963 (trainer:290) INFO: 36/45epoch started. Estimated time to finish: 1 day, 20 hours and 13 minutes
+[ip-10-0-216-33:0/16] 2024-03-04 04:25:57,970 (multiple_iter_factory:32) INFO: Building 0th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-04 04:26:34,129 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-04 04:26:42,010 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-04 04:26:42,011 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2,
+[ip-10-0-216-33:0/16] 2024-03-04 04:26:42,015 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-04 04:40:35,501 (trainer:762) INFO: 36epoch:train:1-500batch: iter_time=0.842, forward_time=0.214, loss_ctc=42.433, loss_att=28.876, acc=0.821, loss=32.943, backward_time=0.222, grad_norm=44.437, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.690e-04, train_time=1.755
+[ip-10-0-216-33:0/16] 2024-03-04 04:48:11,942 (trainer:762) INFO: 36epoch:train:501-1000batch: iter_time=1.501e-04, forward_time=0.215, loss_ctc=38.738, loss_att=26.661, acc=0.826, loss=30.284, backward_time=0.223, grad_norm=39.887, clip=100.000, loss_scale=1.786e+33, optim_step_time=0.077, optim0_lr0=1.689e-04, train_time=0.913
+[ip-10-0-216-33:0/16] 2024-03-04 04:55:47,622 (trainer:762) INFO: 36epoch:train:1001-1500batch: iter_time=1.599e-04, forward_time=0.212, loss_ctc=39.304, loss_att=26.632, acc=0.831, loss=30.434, backward_time=0.223, grad_norm=39.217, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.073, optim0_lr0=1.689e-04, train_time=0.911
+[ip-10-0-216-33:0/16] 2024-03-04 04:57:45,836 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-04 05:03:24,831 (trainer:762) INFO: 36epoch:train:1501-2000batch: iter_time=1.700e-04, forward_time=0.214, loss_ctc=43.428, loss_att=30.244, acc=0.820, loss=34.199, backward_time=0.222, grad_norm=43.416, clip=100.000, loss_scale=1.631e+33, optim_step_time=0.073, optim0_lr0=1.688e-04, train_time=0.914
+[ip-10-0-216-33:0/16] 2024-03-04 05:10:57,309 (trainer:762) INFO: 36epoch:train:2001-2500batch: iter_time=1.599e-04, forward_time=0.213, loss_ctc=39.282, loss_att=27.054, acc=0.827, loss=30.723, backward_time=0.222, grad_norm=38.415, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.072, optim0_lr0=1.687e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-03-04 05:18:36,646 (trainer:762) INFO: 36epoch:train:2501-3000batch: iter_time=1.408e-04, forward_time=0.213, loss_ctc=39.528, loss_att=26.944, acc=0.828, loss=30.719, backward_time=0.222, grad_norm=37.815, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.076, optim0_lr0=1.686e-04, train_time=0.918
+[ip-10-0-216-33:0/16] 2024-03-04 05:26:13,295 (trainer:762) INFO: 36epoch:train:3001-3500batch: iter_time=1.363e-04, forward_time=0.213, loss_ctc=42.939, loss_att=30.051, acc=0.823, loss=33.917, backward_time=0.223, grad_norm=45.075, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.073, optim0_lr0=1.685e-04, train_time=0.913
+[ip-10-0-216-33:0/16] 2024-03-04 05:33:44,669 (trainer:762) INFO: 36epoch:train:3501-4000batch: iter_time=1.507e-04, forward_time=0.212, loss_ctc=40.505, loss_att=27.659, acc=0.821, loss=31.513, backward_time=0.222, grad_norm=40.392, clip=100.000, loss_scale=2.261e+33, optim_step_time=0.072, optim0_lr0=1.685e-04, train_time=0.903
+[ip-10-0-216-33:0/16] 2024-03-04 05:41:17,084 (trainer:762) INFO: 36epoch:train:4001-4500batch: iter_time=1.393e-04, forward_time=0.213, loss_ctc=39.745, loss_att=27.095, acc=0.825, loss=30.890, backward_time=0.223, grad_norm=40.147, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.074, optim0_lr0=1.684e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-03-04 05:48:48,981 (trainer:762) INFO: 36epoch:train:4501-5000batch: iter_time=1.432e-04, forward_time=0.213, loss_ctc=40.841, loss_att=27.738, acc=0.833, loss=31.669, backward_time=0.222, grad_norm=39.595, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.072, optim0_lr0=1.683e-04, train_time=0.904
+[ip-10-0-216-33:0/16] 2024-03-04 05:48:53,470 (multiple_iter_factory:32) INFO: Building 1th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-04 05:49:31,390 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-04 05:49:39,490 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-04 05:49:39,490 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0,
+[ip-10-0-216-33:0/16] 2024-03-04 05:49:39,495 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-04 06:04:00,027 (trainer:762) INFO: 36epoch:train:5001-5500batch: iter_time=0.879, forward_time=0.213, loss_ctc=41.748, loss_att=28.557, acc=0.823, loss=32.514, backward_time=0.222, grad_norm=43.542, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.073, optim0_lr0=1.682e-04, train_time=1.822
+[ip-10-0-216-33:0/16] 2024-03-04 06:11:40,124 (trainer:762) INFO: 36epoch:train:5501-6000batch: iter_time=1.625e-04, forward_time=0.211, loss_ctc=38.610, loss_att=26.525, acc=0.826, loss=30.150, backward_time=0.222, grad_norm=40.421, clip=100.000, loss_scale=4.522e+33, optim_step_time=0.073, optim0_lr0=1.681e-04, train_time=0.920
+[ip-10-0-216-33:0/16] 2024-03-04 06:19:16,210 (trainer:762) INFO: 36epoch:train:6001-6500batch: iter_time=1.728e-04, forward_time=0.213, loss_ctc=38.842, loss_att=26.445, acc=0.832, loss=30.164, backward_time=0.223, grad_norm=38.550, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.073, optim0_lr0=1.681e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-03-04 06:26:34,664 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-04 06:26:52,103 (trainer:762) INFO: 36epoch:train:6501-7000batch: iter_time=1.629e-04, forward_time=0.211, loss_ctc=42.970, loss_att=29.970, acc=0.821, loss=33.870, backward_time=0.224, grad_norm=42.902, clip=100.000, loss_scale=5.093e+33, optim_step_time=0.073, optim0_lr0=1.680e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-03-04 06:34:21,791 (trainer:762) INFO: 36epoch:train:7001-7500batch: iter_time=1.718e-04, forward_time=0.214, loss_ctc=38.648, loss_att=26.737, acc=0.828, loss=30.311, backward_time=0.223, grad_norm=38.056, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.074, optim0_lr0=1.679e-04, train_time=0.899
+[ip-10-0-216-33:0/16] 2024-03-04 06:42:01,270 (trainer:762) INFO: 36epoch:train:7501-8000batch: iter_time=1.687e-04, forward_time=0.212, loss_ctc=39.450, loss_att=26.910, acc=0.828, loss=30.672, backward_time=0.222, grad_norm=36.970, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.073, optim0_lr0=1.678e-04, train_time=0.919
+[ip-10-0-216-33:0/16] 2024-03-04 06:49:40,589 (trainer:762) INFO: 36epoch:train:8001-8500batch: iter_time=1.620e-04, forward_time=0.212, loss_ctc=42.565, loss_att=29.810, acc=0.824, loss=33.637, backward_time=0.223, grad_norm=43.570, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.073, optim0_lr0=1.677e-04, train_time=0.918
+[ip-10-0-216-33:0/16] 2024-03-04 06:57:17,035 (trainer:762) INFO: 36epoch:train:8501-9000batch: iter_time=1.670e-04, forward_time=0.212, loss_ctc=40.005, loss_att=27.509, acc=0.823, loss=31.258, backward_time=0.222, grad_norm=39.325, clip=100.000, loss_scale=2.695e+33, optim_step_time=0.072, optim0_lr0=1.677e-04, train_time=0.913
+[ip-10-0-216-33:0/16] 2024-03-04 07:01:59,781 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-04 07:04:45,772 (trainer:762) INFO: 36epoch:train:9001-9500batch: iter_time=1.600e-04, forward_time=0.212, loss_ctc=39.597, loss_att=27.050, acc=0.826, loss=30.814, backward_time=0.223, grad_norm=40.304, clip=100.000, loss_scale=4.219e+33, optim_step_time=0.072, optim0_lr0=1.676e-04, train_time=0.897
+[ip-10-0-216-33:0/16] 2024-03-04 07:12:16,201 (trainer:762) INFO: 36epoch:train:9501-10000batch: iter_time=1.627e-04, forward_time=0.213, loss_ctc=40.447, loss_att=27.579, acc=0.834, loss=31.439, backward_time=0.223, grad_norm=39.651, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.072, optim0_lr0=1.675e-04, train_time=0.901
+[ip-10-0-216-33:0/16] 2024-03-04 07:12:21,076 (multiple_iter_factory:32) INFO: Building 2th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-04 07:12:58,136 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-04 07:13:06,253 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-04 07:13:06,253 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1,
+[ip-10-0-216-33:0/16] 2024-03-04 07:13:06,257 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-04 07:26:15,503 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-04 07:27:26,110 (trainer:762) INFO: 36epoch:train:10001-10500batch: iter_time=0.900, forward_time=0.214, loss_ctc=41.340, loss_att=28.360, acc=0.824, loss=32.254, backward_time=0.222, grad_norm=43.361, clip=100.000, loss_scale=2.391e+33, optim_step_time=0.072, optim0_lr0=1.674e-04, train_time=1.820
+[ip-10-0-216-33:0/16] 2024-03-04 07:35:05,361 (trainer:762) INFO: 36epoch:train:10501-11000batch: iter_time=1.666e-04, forward_time=0.213, loss_ctc=38.373, loss_att=26.453, acc=0.826, loss=30.029, backward_time=0.222, grad_norm=39.765, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.073, optim0_lr0=1.673e-04, train_time=0.918
+[ip-10-0-216-33:0/16] 2024-03-04 07:42:37,832 (trainer:762) INFO: 36epoch:train:11001-11500batch: iter_time=1.790e-04, forward_time=0.216, loss_ctc=38.689, loss_att=26.397, acc=0.832, loss=30.085, backward_time=0.223, grad_norm=39.076, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.077, optim0_lr0=1.673e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-03-04 07:50:13,905 (trainer:762) INFO: 36epoch:train:11501-12000batch: iter_time=1.708e-04, forward_time=0.213, loss_ctc=42.683, loss_att=29.862, acc=0.822, loss=33.708, backward_time=0.222, grad_norm=45.698, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.073, optim0_lr0=1.672e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-03-04 07:57:51,548 (trainer:762) INFO: 36epoch:train:12001-12500batch: iter_time=1.613e-04, forward_time=0.213, loss_ctc=38.628, loss_att=26.620, acc=0.828, loss=30.223, backward_time=0.221, grad_norm=38.351, clip=100.000, loss_scale=1.503e+33, optim_step_time=0.073, optim0_lr0=1.671e-04, train_time=0.915
+[ip-10-0-216-33:0/16] 2024-03-04 08:05:27,594 (trainer:762) INFO: 36epoch:train:12501-13000batch: iter_time=1.595e-04, forward_time=0.212, loss_ctc=39.347, loss_att=26.840, acc=0.829, loss=30.592, backward_time=0.222, grad_norm=37.230, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.072, optim0_lr0=1.670e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-03-04 08:13:07,847 (trainer:762) INFO: 36epoch:train:13001-13500batch: iter_time=1.685e-04, forward_time=0.214, loss_ctc=42.461, loss_att=29.816, acc=0.824, loss=33.609, backward_time=0.224, grad_norm=43.471, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.073, optim0_lr0=1.670e-04, train_time=0.920
+[ip-10-0-216-33:0/16] 2024-03-04 08:14:14,214 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-04 08:20:37,442 (trainer:762) INFO: 36epoch:train:13501-14000batch: iter_time=1.694e-04, forward_time=0.212, loss_ctc=39.916, loss_att=27.375, acc=0.823, loss=31.137, backward_time=0.223, grad_norm=40.815, clip=100.000, loss_scale=1.485e+33, optim_step_time=0.073, optim0_lr0=1.669e-04, train_time=0.899
+[ip-10-0-216-33:0/16] 2024-03-04 08:28:09,422 (trainer:762) INFO: 36epoch:train:14001-14500batch: iter_time=1.751e-04, forward_time=0.214, loss_ctc=39.337, loss_att=26.898, acc=0.826, loss=30.630, backward_time=0.222, grad_norm=40.434, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.073, optim0_lr0=1.668e-04, train_time=0.904
+[ip-10-0-216-33:0/16] 2024-03-04 08:35:40,866 (trainer:762) INFO: 36epoch:train:14501-15000batch: iter_time=1.688e-04, forward_time=0.213, loss_ctc=40.284, loss_att=27.450, acc=0.834, loss=31.300, backward_time=0.223, grad_norm=40.001, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.072, optim0_lr0=1.667e-04, train_time=0.903
+[ip-10-0-216-33:0/16] 2024-03-04 08:50:08,297 (trainer:361) INFO: 36epoch results: [train] iter_time=0.088, forward_time=0.213, loss_ctc=40.356, loss_att=27.737, acc=0.826, loss=31.523, backward_time=0.222, grad_norm=40.662, clip=100.000, loss_scale=2.347e+33, optim_step_time=0.073, optim0_lr0=1.679e-04, train_time=0.999, time=4 hours, 9 minutes and 57.8 seconds, total_count=540000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=35.410, cer_ctc=0.192, loss_att=28.720, acc=0.757, cer=0.267, wer=0.997, loss=30.727, time=14 minutes and 12.22 seconds, total_count=76788, gpu_max_cached_mem_GB=36.516
+[ip-10-0-216-33:0/16] 2024-03-04 08:50:18,313 (trainer:416) INFO: The best model has been updated: valid.total_count
+[ip-10-0-216-33:0/16] 2024-03-04 08:50:18,358 (trainer:470) INFO: The model files were removed: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/31epoch.pth
+[ip-10-0-216-33:0/16] 2024-03-04 08:50:18,359 (trainer:290) INFO: 37/45epoch started. Estimated time to finish: 1 day, 15 hours and 47 minutes
+[ip-10-0-216-33:0/16] 2024-03-04 08:50:18,367 (multiple_iter_factory:32) INFO: Building 0th iter-factory...
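
The recurring `(trainer:693) WARNING: The grad norm is nan. Skipping updating the model.` entries above are the expected behavior of dynamic loss scaling in mixed-precision training rather than a sign of divergence: when the scaled gradients overflow, the update is skipped and `loss_scale` is reduced, then grown back over time (the halving/doubling chain 6.490e+32 → 1.298e+33 → 2.596e+33 → 5.192e+33 is visible throughout the log). A minimal sketch of that control flow in generic PyTorch AMP terms, as an illustration only (ESPnet's trainer implements its own variant):

```python
# Minimal sketch of the skip-on-NaN pattern seen in this log
# (generic PyTorch AMP, not the actual ESPnet trainer code).
import torch

scaler = torch.cuda.amp.GradScaler()  # maintains the dynamic loss_scale

def train_step(model, optimizer, batch, max_norm=100.0):
    optimizer.zero_grad()
    with torch.autocast(device_type="cuda"):
        loss = model(**batch)             # mixed-precision forward
    scaler.scale(loss).backward()         # backward on the scaled loss
    scaler.unscale_(optimizer)            # recover the true gradients
    grad_norm = torch.nn.utils.clip_grad_norm_(model.parameters(), max_norm)
    if torch.isfinite(grad_norm):
        scaler.step(optimizer)            # apply the parameter update
    else:
        optimizer.zero_grad()             # "The grad norm is nan. Skipping updating the model."
    scaler.update()                       # shrink/grow loss_scale accordingly
    return float(loss), float(grad_norm)
```
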
+[ip-10-0-216-33:0/16] 2024-03-04 08:50:54,764 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-04 08:51:02,759 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-04 08:51:02,759 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2,
+[ip-10-0-216-33:0/16] 2024-03-04 08:51:02,764 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-04 09:04:54,220 (trainer:762) INFO: 37epoch:train:1-500batch: iter_time=0.837, forward_time=0.213, loss_ctc=39.833, loss_att=28.094, acc=0.826, loss=31.616, backward_time=0.228, grad_norm=43.256, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.666e-04, train_time=1.751
+[ip-10-0-216-33:0/16] 2024-03-04 09:12:27,045 (trainer:762) INFO: 37epoch:train:501-1000batch: iter_time=1.584e-04, forward_time=0.213, loss_ctc=39.236, loss_att=26.760, acc=0.825, loss=30.503, backward_time=0.227, grad_norm=42.666, clip=100.000, loss_scale=2.407e+33, optim_step_time=0.074, optim0_lr0=1.666e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-03-04 09:20:04,876 (trainer:762) INFO: 37epoch:train:1001-1500batch: iter_time=1.745e-04, forward_time=0.216, loss_ctc=40.789, loss_att=27.725, acc=0.828, loss=31.644, backward_time=0.226, grad_norm=40.046, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.665e-04, train_time=0.915
+[ip-10-0-216-33:0/16] 2024-03-04 09:27:37,968 (trainer:762) INFO: 37epoch:train:1501-2000batch: iter_time=1.551e-04, forward_time=0.209, loss_ctc=39.878, loss_att=27.329, acc=0.831, loss=31.094, backward_time=0.232, grad_norm=42.008, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.664e-04, train_time=0.906
+[ip-10-0-216-33:0/16] 2024-03-04 09:35:09,818 (trainer:762) INFO: 37epoch:train:2001-2500batch: iter_time=1.511e-04, forward_time=0.211, loss_ctc=38.591, loss_att=26.595, acc=0.830, loss=30.193, backward_time=0.236, grad_norm=39.372, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.663e-04, train_time=0.903
+[ip-10-0-216-33:0/16] 2024-03-04 09:36:26,753 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-04 09:39:52,924 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-04 09:42:48,472 (trainer:762) INFO: 37epoch:train:2501-3000batch: iter_time=1.523e-04, forward_time=0.210, loss_ctc=40.897, loss_att=27.933, acc=0.831, loss=31.822, backward_time=0.237, grad_norm=40.653, clip=100.000, loss_scale=2.135e+33, optim_step_time=0.075, optim0_lr0=1.663e-04, train_time=0.917
+[ip-10-0-216-33:0/16] 2024-03-04 09:46:53,172 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-04 09:50:23,106 (trainer:762) INFO: 37epoch:train:3001-3500batch: iter_time=1.451e-04, forward_time=0.209, loss_ctc=40.388, loss_att=27.682, acc=0.824, loss=31.494, backward_time=0.243, grad_norm=41.884, clip=100.000, loss_scale=9.989e+32, optim_step_time=0.075, optim0_lr0=1.662e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-03-04 09:57:57,202 (trainer:762) INFO: 37epoch:train:3501-4000batch: iter_time=1.507e-04, forward_time=0.210, loss_ctc=40.008, loss_att=27.337, acc=0.830, loss=31.138, backward_time=0.236, grad_norm=39.153, clip=100.000, loss_scale=6.490e+32, optim_step_time=0.075, optim0_lr0=1.661e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-03-04 10:05:36,784 (trainer:762) INFO: 37epoch:train:4001-4500batch: iter_time=1.493e-04, forward_time=0.209, loss_ctc=42.677, loss_att=28.807, acc=0.826, loss=32.968, backward_time=0.241, grad_norm=42.185, clip=100.000, loss_scale=6.490e+32, optim_step_time=0.075, optim0_lr0=1.660e-04, train_time=0.919
+[ip-10-0-216-33:0/16] 2024-03-04 10:13:11,411 (trainer:762) INFO: 37epoch:train:4501-5000batch: iter_time=1.463e-04, forward_time=0.210, loss_ctc=39.151, loss_att=26.990, acc=0.836, loss=30.638, backward_time=0.232, grad_norm=41.049, clip=100.000, loss_scale=6.490e+32, optim_step_time=0.075, optim0_lr0=1.660e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-03-04 10:13:16,135 (multiple_iter_factory:32) INFO: Building 1th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-04 10:13:52,571 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-04 10:14:00,598 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-04 10:14:00,598 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0,
+[ip-10-0-216-33:0/16] 2024-03-04 10:14:00,603 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-04 10:28:20,379 (trainer:762) INFO: 37epoch:train:5001-5500batch: iter_time=0.901, forward_time=0.209, loss_ctc=39.623, loss_att=27.912, acc=0.826, loss=31.425, backward_time=0.235, grad_norm=42.430, clip=100.000, loss_scale=9.476e+32, optim_step_time=0.075, optim0_lr0=1.659e-04, train_time=1.818
+[ip-10-0-216-33:0/16] 2024-03-04 10:35:53,868 (trainer:762) INFO: 37epoch:train:5501-6000batch: iter_time=1.530e-04, forward_time=0.210, loss_ctc=38.993, loss_att=26.669, acc=0.826, loss=30.366, backward_time=0.231, grad_norm=41.917, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.658e-04, train_time=0.907
+[ip-10-0-216-33:0/16] 2024-03-04 10:43:28,037 (trainer:762) INFO: 37epoch:train:6001-6500batch: iter_time=1.612e-04, forward_time=0.212, loss_ctc=40.470, loss_att=27.617, acc=0.828, loss=31.473, backward_time=0.223, grad_norm=47.501, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.074, optim0_lr0=1.657e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-03-04 10:51:09,684 (trainer:762) INFO: 37epoch:train:6501-7000batch: iter_time=1.602e-04, forward_time=0.213, loss_ctc=39.764, loss_att=27.255, acc=0.831, loss=31.008, backward_time=0.224, grad_norm=41.927, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.073, optim0_lr0=1.657e-04, train_time=0.923
+[ip-10-0-216-33:0/16] 2024-03-04 10:57:17,970 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-04 10:58:47,283 (trainer:762) INFO: 37epoch:train:7001-7500batch: iter_time=1.528e-04, forward_time=0.211, loss_ctc=38.828, loss_att=26.658, acc=0.830, loss=30.309, backward_time=0.222, grad_norm=39.031, clip=100.000, loss_scale=1.644e+33, optim_step_time=0.072, optim0_lr0=1.656e-04, train_time=0.915
+[ip-10-0-216-33:0/16] 2024-03-04 11:06:23,789 (trainer:762) INFO: 37epoch:train:7501-8000batch: iter_time=1.477e-04, forward_time=0.215, loss_ctc=40.548, loss_att=27.837, acc=0.833, loss=31.650, backward_time=0.222, grad_norm=40.102, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.074, optim0_lr0=1.655e-04, train_time=0.913
+[ip-10-0-216-33:0/16] 2024-03-04 11:13:55,304 (trainer:762) INFO: 37epoch:train:8001-8500batch: iter_time=1.729e-04, forward_time=0.212, loss_ctc=39.941, loss_att=27.413, acc=0.825, loss=31.172, backward_time=0.222, grad_norm=41.597, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.074, optim0_lr0=1.654e-04, train_time=0.903
+[ip-10-0-216-33:0/16] 2024-03-04 11:21:30,938 (trainer:762) INFO: 37epoch:train:8501-9000batch: iter_time=1.695e-04, forward_time=0.214, loss_ctc=39.829, loss_att=27.217, acc=0.830, loss=31.001, backward_time=0.222, grad_norm=40.428, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.654e-04, train_time=0.911
+[ip-10-0-216-33:0/16] 2024-03-04 11:29:10,950 (trainer:762) INFO: 37epoch:train:9001-9500batch: iter_time=1.673e-04, forward_time=0.212, loss_ctc=42.608, loss_att=28.800, acc=0.826, loss=32.942, backward_time=0.222, grad_norm=42.423, clip=100.000, loss_scale=1.547e+33, optim_step_time=0.072, optim0_lr0=1.653e-04, train_time=0.920
+[ip-10-0-216-33:0/16] 2024-03-04 11:33:56,055 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-04 11:36:49,859 (trainer:762) INFO: 37epoch:train:9501-10000batch: iter_time=1.633e-04, forward_time=0.212, loss_ctc=39.103, loss_att=26.951, acc=0.837, loss=30.597, backward_time=0.223, grad_norm=39.650, clip=100.000, loss_scale=2.104e+33, optim_step_time=0.072, optim0_lr0=1.652e-04, train_time=0.918
+[ip-10-0-216-33:0/16] 2024-03-04 11:36:56,575 (multiple_iter_factory:32) INFO: Building 2th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-04 11:37:33,870 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-04 11:37:42,858 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-04 11:37:42,858 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1,
+[ip-10-0-216-33:0/16] 2024-03-04 11:37:42,863 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-04 11:52:13,801 (trainer:762) INFO: 37epoch:train:10001-10500batch: iter_time=0.913, forward_time=0.211, loss_ctc=39.381, loss_att=27.755, acc=0.827, loss=31.243, backward_time=0.231, grad_norm=43.140, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.651e-04, train_time=1.848
+[ip-10-0-216-33:0/16] 2024-03-04 11:59:52,640 (trainer:762) INFO: 37epoch:train:10501-11000batch: iter_time=1.466e-04, forward_time=0.208, loss_ctc=38.656, loss_att=26.432, acc=0.827, loss=30.099, backward_time=0.232, grad_norm=42.003, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.651e-04, train_time=0.917
+[ip-10-0-216-33:0/16] 2024-03-04 12:07:34,669 (trainer:762) INFO: 37epoch:train:11001-11500batch: iter_time=1.488e-04, forward_time=0.210, loss_ctc=40.316, loss_att=27.519, acc=0.828, loss=31.358, backward_time=0.234, grad_norm=39.564, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.650e-04, train_time=0.924
+[ip-10-0-216-33:0/16] 2024-03-04 12:15:17,613 (trainer:762) INFO: 37epoch:train:11501-12000batch: iter_time=1.455e-04, forward_time=0.207, loss_ctc=39.501, loss_att=27.135, acc=0.832, loss=30.845, backward_time=0.233, grad_norm=40.581, clip=100.000, loss_scale=1.789e+33, optim_step_time=0.075, optim0_lr0=1.649e-04, train_time=0.926
+[ip-10-0-216-33:0/16] 2024-03-04 12:23:01,727 (trainer:762) INFO: 37epoch:train:12001-12500batch: iter_time=1.438e-04, forward_time=0.208, loss_ctc=38.353, loss_att=26.425, acc=0.831, loss=30.003, backward_time=0.232, grad_norm=40.067, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.648e-04, train_time=0.928
+[ip-10-0-216-33:0/16] 2024-03-04 12:30:36,713 (trainer:762) INFO: 37epoch:train:12501-13000batch: iter_time=1.433e-04, forward_time=0.209, loss_ctc=40.259, loss_att=27.692, acc=0.833, loss=31.463, backward_time=0.233, grad_norm=39.672, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.648e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-03-04 12:38:20,082 (trainer:762) INFO: 37epoch:train:13001-13500batch: iter_time=1.457e-04, forward_time=0.208, loss_ctc=40.053, loss_att=27.488, acc=0.825, loss=31.258, backward_time=0.236, grad_norm=40.705, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.647e-04, train_time=0.926
+[ip-10-0-216-33:0/16] 2024-03-04 12:42:21,069 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-04 12:46:00,013 (trainer:762) INFO: 37epoch:train:13501-14000batch: iter_time=1.487e-04, forward_time=0.207, loss_ctc=39.627, loss_att=27.071, acc=0.830, loss=30.838, backward_time=0.236, grad_norm=39.517, clip=100.000, loss_scale=1.974e+33, optim_step_time=0.075, optim0_lr0=1.646e-04, train_time=0.920
+[ip-10-0-216-33:0/16] 2024-03-04 12:53:43,543 (trainer:762) INFO: 37epoch:train:14001-14500batch: iter_time=1.448e-04, forward_time=0.209, loss_ctc=42.371, loss_att=28.626, acc=0.827, loss=32.750, backward_time=0.235, grad_norm=42.051, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.645e-04, train_time=0.927
+[ip-10-0-216-33:0/16] 2024-03-04 13:01:27,335 (trainer:762) INFO: 37epoch:train:14501-15000batch: iter_time=1.424e-04, forward_time=0.208, loss_ctc=38.783, loss_att=26.750, acc=0.837, loss=30.360, backward_time=0.233, grad_norm=40.196, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.645e-04, train_time=0.927
+[ip-10-0-216-33:0/16] 2024-03-04 13:15:39,098 (trainer:361) INFO: 37epoch results: [train] iter_time=0.089, forward_time=0.210, loss_ctc=39.949, loss_att=27.416, acc=0.829, loss=31.176, backward_time=0.231, grad_norm=41.226, clip=100.000, loss_scale=1.622e+33, optim_step_time=0.075, optim0_lr0=1.655e-04, train_time=1.004, time=4 hours, 11 minutes and 25.3 seconds, total_count=555000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=35.974, cer_ctc=0.197, loss_att=30.290, acc=0.752, cer=0.293, wer=0.995, loss=31.995, time=13 minutes and 55.1 seconds, total_count=78921, gpu_max_cached_mem_GB=36.516
+[ip-10-0-216-33:0/16] 2024-03-04 13:15:49,177 (trainer:416) INFO: The best model has been updated: valid.total_count
+[ip-10-0-216-33:0/16] 2024-03-04 13:15:49,207 (trainer:290) INFO: 38/45epoch started. Estimated time to finish: 1 day, 11 hours and 22 minutes
+[ip-10-0-216-33:0/16] 2024-03-04 13:15:49,217 (multiple_iter_factory:32) INFO: Building 0th iter-factory...
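
Each `(trainer:762)` entry above is a flat list of `key=value` statistics for one 500-batch window, so training curves (`loss_ctc`, `loss_att`, `acc`, `optim0_lr0`, ...) can be recovered from this log with a few lines of parsing. A hypothetical helper, assuming the log is saved as `train.log` (the file name and regexes are illustrative, not part of the recipe):

```python
# Hypothetical parser for the per-500-batch progress lines in this log.
import re

PAIR = re.compile(r"(\w+)=([0-9][0-9.eE+-]*)")
HEADER = re.compile(r"(\d+)epoch:train:(\d+)-(\d+)batch:")

def parse_progress(path="train.log"):
    records = []
    with open(path) as f:
        for line in f:
            m = HEADER.search(line)
            if m is None or "(trainer:762)" not in line:
                continue  # keep only the per-500-batch progress entries
            stats = {k: float(v) for k, v in PAIR.findall(line[m.end():])}
            stats["epoch"], stats["batch_end"] = int(m.group(1)), int(m.group(3))
            records.append(stats)
    return records

# e.g. [r["loss_att"] for r in parse_progress()] gives the attention-loss trend
```
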
+[ip-10-0-216-33:0/16] 2024-03-04 13:16:26,159 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-04 13:16:34,690 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-04 13:16:34,690 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1,
+[ip-10-0-216-33:0/16] 2024-03-04 13:16:34,695 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-04 13:30:28,796 (trainer:762) INFO: 38epoch:train:1-500batch: iter_time=0.840, forward_time=0.210, loss_ctc=38.871, loss_att=27.312, acc=0.829, loss=30.780, backward_time=0.234, grad_norm=41.337, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.644e-04, train_time=1.759
+[ip-10-0-216-33:0/16] 2024-03-04 13:38:05,175 (trainer:762) INFO: 38epoch:train:501-1000batch: iter_time=1.482e-04, forward_time=0.209, loss_ctc=40.672, loss_att=27.534, acc=0.827, loss=31.475, backward_time=0.233, grad_norm=42.438, clip=100.000, loss_scale=1.919e+33, optim_step_time=0.075, optim0_lr0=1.643e-04, train_time=0.913
+[ip-10-0-216-33:0/16] 2024-03-04 13:44:13,978 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-04 13:45:50,592 (trainer:762) INFO: 38epoch:train:1001-1500batch: iter_time=1.473e-04, forward_time=0.207, loss_ctc=42.723, loss_att=29.375, acc=0.821, loss=33.380, backward_time=0.231, grad_norm=44.545, clip=100.000, loss_scale=2.326e+33, optim_step_time=0.075, optim0_lr0=1.642e-04, train_time=0.931
+[ip-10-0-216-33:0/16] 2024-03-04 13:48:18,352 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-04 13:53:31,093 (trainer:762) INFO: 38epoch:train:1501-2000batch: iter_time=1.440e-04, forward_time=0.209, loss_ctc=38.999, loss_att=27.358, acc=0.822, loss=30.850, backward_time=0.235, grad_norm=42.788, clip=100.000, loss_scale=8.545e+32, optim_step_time=0.075, optim0_lr0=1.642e-04, train_time=0.921
+[ip-10-0-216-33:0/16] 2024-03-04 14:01:10,182 (trainer:762) INFO: 38epoch:train:2001-2500batch: iter_time=1.445e-04, forward_time=0.209, loss_ctc=39.047, loss_att=26.688, acc=0.833, loss=30.396, backward_time=0.233, grad_norm=41.273, clip=100.000, loss_scale=6.490e+32, optim_step_time=0.075, optim0_lr0=1.641e-04, train_time=0.918
+[ip-10-0-216-33:0/16] 2024-03-04 14:08:52,635 (trainer:762) INFO: 38epoch:train:2501-3000batch: iter_time=1.442e-04, forward_time=0.210, loss_ctc=40.311, loss_att=28.063, acc=0.831, loss=31.737, backward_time=0.235, grad_norm=41.777, clip=100.000, loss_scale=6.490e+32, optim_step_time=0.075, optim0_lr0=1.640e-04, train_time=0.925
+[ip-10-0-216-33:0/16] 2024-03-04 14:16:29,727 (trainer:762) INFO: 38epoch:train:3001-3500batch: iter_time=1.450e-04, forward_time=0.210, loss_ctc=40.087, loss_att=27.576, acc=0.829, loss=31.329, backward_time=0.236, grad_norm=40.299, clip=100.000, loss_scale=6.490e+32, optim_step_time=0.075, optim0_lr0=1.639e-04, train_time=0.914
+[ip-10-0-216-33:0/16] 2024-03-04 14:24:05,794 (trainer:762) INFO: 38epoch:train:3501-4000batch: iter_time=1.446e-04, forward_time=0.209, loss_ctc=38.647, loss_att=26.372, acc=0.830, loss=30.055, backward_time=0.231, grad_norm=39.148, clip=100.000, loss_scale=1.092e+33, optim_step_time=0.075, optim0_lr0=1.639e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-03-04 14:31:50,816 (trainer:762) INFO: 38epoch:train:4001-4500batch: iter_time=1.451e-04, forward_time=0.210, loss_ctc=41.204, loss_att=28.346, acc=0.825, loss=32.204, backward_time=0.239, grad_norm=43.758, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.638e-04, train_time=0.930
+[ip-10-0-216-33:0/16] 2024-03-04 14:39:34,610 (trainer:762) INFO: 38epoch:train:4501-5000batch: iter_time=1.382e-04, forward_time=0.209, loss_ctc=38.825, loss_att=26.795, acc=0.830, loss=30.404, backward_time=0.229, grad_norm=38.939, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.637e-04, train_time=0.927
+[ip-10-0-216-33:0/16] 2024-03-04 14:39:40,565 (multiple_iter_factory:32) INFO: Building 1th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-04 14:40:17,154 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-04 14:40:25,079 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-04 14:40:25,080 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2,
+[ip-10-0-216-33:0/16] 2024-03-04 14:40:25,084 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-04 14:54:49,542 (trainer:762) INFO: 38epoch:train:5001-5500batch: iter_time=0.894, forward_time=0.208, loss_ctc=38.578, loss_att=26.979, acc=0.831, loss=30.459, backward_time=0.234, grad_norm=40.801, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.636e-04, train_time=1.830
+[ip-10-0-216-33:0/16] 2024-03-04 15:02:28,335 (trainer:762) INFO: 38epoch:train:5501-6000batch: iter_time=1.471e-04, forward_time=0.208, loss_ctc=40.192, loss_att=27.339, acc=0.828, loss=31.195, backward_time=0.232, grad_norm=41.893, clip=100.000, loss_scale=2.183e+33, optim_step_time=0.075, optim0_lr0=1.636e-04, train_time=0.917
+[ip-10-0-216-33:0/16] 2024-03-04 15:08:58,028 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-04 15:10:02,118 (trainer:762) INFO: 38epoch:train:6001-6500batch: iter_time=1.462e-04, forward_time=0.210, loss_ctc=42.717, loss_att=29.326, acc=0.822, loss=33.343, backward_time=0.236, grad_norm=44.476, clip=100.000, loss_scale=2.411e+33, optim_step_time=0.075, optim0_lr0=1.635e-04, train_time=0.907
+[ip-10-0-216-33:0/16] 2024-03-04 15:17:37,264 (trainer:762) INFO: 38epoch:train:6501-7000batch: iter_time=1.457e-04, forward_time=0.208, loss_ctc=38.736, loss_att=27.152, acc=0.823, loss=30.627, backward_time=0.235, grad_norm=42.243, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.634e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-03-04 15:25:12,428 (trainer:762) INFO: 38epoch:train:7001-7500batch: iter_time=1.448e-04, forward_time=0.208, loss_ctc=38.862, loss_att=26.619, acc=0.834, loss=30.292, backward_time=0.234, grad_norm=41.739, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.634e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-03-04 15:32:48,088 (trainer:762) INFO: 38epoch:train:7501-8000batch: iter_time=1.473e-04, forward_time=0.209, loss_ctc=39.829, loss_att=27.907, acc=0.831, loss=31.483, backward_time=0.233, grad_norm=42.086, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.633e-04, train_time=0.911
+[ip-10-0-216-33:0/16] 2024-03-04 15:40:30,124 (trainer:762) INFO: 38epoch:train:8001-8500batch: iter_time=1.477e-04, forward_time=0.209, loss_ctc=39.747, loss_att=27.427, acc=0.830, loss=31.123, backward_time=0.234, grad_norm=39.590, clip=100.000, loss_scale=1.482e+33, optim_step_time=0.075, optim0_lr0=1.632e-04, train_time=0.924
+[ip-10-0-216-33:0/16] 2024-03-04 15:47:40,864 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-04 15:48:10,668 (trainer:762) INFO: 38epoch:train:8501-9000batch: iter_time=1.457e-04, forward_time=0.209, loss_ctc=38.397, loss_att=26.219, acc=0.831, loss=29.872, backward_time=0.231, grad_norm=39.544, clip=100.000, loss_scale=2.513e+33, optim_step_time=0.075, optim0_lr0=1.631e-04, train_time=0.921
+[ip-10-0-216-33:0/16] 2024-03-04 15:55:54,356 (trainer:762) INFO: 38epoch:train:9001-9500batch: iter_time=1.428e-04, forward_time=0.207, loss_ctc=40.931, loss_att=28.233, acc=0.825, loss=32.042, backward_time=0.237, grad_norm=43.748, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.631e-04, train_time=0.927
+[ip-10-0-216-33:0/16] 2024-03-04 16:03:27,308 (trainer:762) INFO: 38epoch:train:9501-10000batch: iter_time=1.429e-04, forward_time=0.209, loss_ctc=38.546, loss_att=26.783, acc=0.830, loss=30.312, backward_time=0.231, grad_norm=40.551, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.630e-04, train_time=0.906
+[ip-10-0-216-33:0/16] 2024-03-04 16:03:33,282 (multiple_iter_factory:32) INFO: Building 2th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-04 16:04:10,646 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-04 16:04:18,441 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-04 16:04:18,441 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0,
+[ip-10-0-216-33:0/16] 2024-03-04 16:04:18,445 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-04 16:18:47,906 (trainer:762) INFO: 38epoch:train:10001-10500batch: iter_time=0.912, forward_time=0.210, loss_ctc=38.546, loss_att=26.878, acc=0.831, loss=30.379, backward_time=0.235, grad_norm=42.156, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.629e-04, train_time=1.841
+[ip-10-0-216-33:0/16] 2024-03-04 16:26:32,491 (trainer:762) INFO: 38epoch:train:10501-11000batch: iter_time=1.492e-04, forward_time=0.208, loss_ctc=40.097, loss_att=27.359, acc=0.828, loss=31.180, backward_time=0.234, grad_norm=48.920, clip=100.000, loss_scale=1.381e+33, optim_step_time=0.075, optim0_lr0=1.628e-04, train_time=0.929
+[ip-10-0-216-33:0/16] 2024-03-04 16:34:05,420 (trainer:762) INFO: 38epoch:train:11001-11500batch: iter_time=1.498e-04, forward_time=0.210, loss_ctc=42.268, loss_att=29.089, acc=0.823, loss=33.043, backward_time=0.234, grad_norm=43.688, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.628e-04, train_time=0.906
+[ip-10-0-216-33:0/16] 2024-03-04 16:41:41,735 (trainer:762) INFO: 38epoch:train:11501-12000batch: iter_time=1.506e-04, forward_time=0.209, loss_ctc=38.730, loss_att=27.157, acc=0.823, loss=30.629, backward_time=0.234, grad_norm=41.837, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.627e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-03-04 16:45:38,620 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-04 16:49:19,441 (trainer:762) INFO: 38epoch:train:12001-12500batch: iter_time=1.521e-04, forward_time=0.208, loss_ctc=38.661, loss_att=26.524, acc=0.834, loss=30.165, backward_time=0.233, grad_norm=40.833, clip=100.000, loss_scale=1.969e+33, optim_step_time=0.075, optim0_lr0=1.626e-04, train_time=0.915
+[ip-10-0-216-33:0/16] 2024-03-04 16:57:00,948 (trainer:762) INFO: 38epoch:train:12501-13000batch: iter_time=1.525e-04, forward_time=0.210, loss_ctc=39.987, loss_att=27.904, acc=0.831, loss=31.529, backward_time=0.234, grad_norm=42.071, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.626e-04, train_time=0.923
+[ip-10-0-216-33:0/16] 2024-03-04 16:59:57,585 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-04 17:04:39,355 (trainer:762) INFO: 38epoch:train:13001-13500batch: iter_time=1.513e-04, forward_time=0.209, loss_ctc=39.630, loss_att=27.505, acc=0.830, loss=31.142, backward_time=0.235, grad_norm=40.659, clip=100.000, loss_scale=8.975e+32, optim_step_time=0.075, optim0_lr0=1.625e-04, train_time=0.917
+[ip-10-0-216-33:0/16] 2024-03-04 17:12:14,655 (trainer:762) INFO: 38epoch:train:13501-14000batch: iter_time=1.480e-04, forward_time=0.211, loss_ctc=38.323, loss_att=26.203, acc=0.831, loss=29.839, backward_time=0.233, grad_norm=40.144, clip=100.000, loss_scale=6.490e+32, optim_step_time=0.075, optim0_lr0=1.624e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-03-04 17:19:52,934 (trainer:762) INFO: 38epoch:train:14001-14500batch: iter_time=1.546e-04, forward_time=0.210, loss_ctc=41.058, loss_att=28.305, acc=0.824, loss=32.131, backward_time=0.234, grad_norm=44.927, clip=100.000, loss_scale=6.490e+32, optim_step_time=0.075, optim0_lr0=1.623e-04, train_time=0.916
+[ip-10-0-216-33:0/16] 2024-03-04 17:26:12,397 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-04 17:27:23,821 (trainer:762) INFO: 38epoch:train:14501-15000batch: iter_time=1.505e-04, forward_time=0.211, loss_ctc=38.660, loss_att=26.744, acc=0.830, loss=30.319, backward_time=0.234, grad_norm=39.275, clip=100.000, loss_scale=5.970e+32, optim_step_time=0.075, optim0_lr0=1.623e-04, train_time=0.902
+[ip-10-0-216-33:0/16] 2024-03-04 17:41:34,477 (trainer:361) INFO: 38epoch results: [train] iter_time=0.088, forward_time=0.209, loss_ctc=39.729, loss_att=27.436, acc=0.828, loss=31.124, backward_time=0.234, grad_norm=41.916, clip=100.000, loss_scale=1.411e+33, optim_step_time=0.075, optim0_lr0=1.633e-04, train_time=1.006, time=4 hours, 11 minutes and 50.12 seconds, total_count=570000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=36.125, cer_ctc=0.195, loss_att=29.014, acc=0.761, cer=0.258, wer=0.995, loss=31.147, time=13 minutes and 54.82 seconds, total_count=81054, gpu_max_cached_mem_GB=36.516
+[ip-10-0-216-33:0/16] 2024-03-04 17:41:44,558 (trainer:416) INFO: The best model has been updated: valid.total_count
+[ip-10-0-216-33:0/16] 2024-03-04 17:41:44,640 (trainer:470) INFO: The model files were removed: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/33epoch.pth
+[ip-10-0-216-33:0/16] 2024-03-04 17:41:44,641 (trainer:290) INFO: 39/45epoch started. Estimated time to finish: 1 day, 6 hours and 57 minutes
+[ip-10-0-216-33:0/16] 2024-03-04 17:41:44,649 (multiple_iter_factory:32) INFO: Building 0th iter-factory...
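
The `(trainer:470) INFO: The model files were removed: ...` entries show the trainer pruning old epoch checkpoints, keeping only the best/most recent ones for each monitored criterion (here `valid.total_count`). Released ESPnet models are then typically built by averaging the retained checkpoints; a plain state-dict average looks roughly like the sketch below (the paths are placeholders, and this is the common recipe step in generic form, not code from this repository):

```python
# Sketch: average retained epoch checkpoints into a single model.
import torch

paths = ["43epoch.pth", "44epoch.pth", "45epoch.pth"]  # hypothetical retained checkpoints
states = [torch.load(p, map_location="cpu") for p in paths]

avg = {}
for key, first in states[0].items():
    stacked = torch.stack([s[key].double() for s in states])
    avg[key] = stacked.mean(dim=0).to(first.dtype)  # average, then restore original dtype
torch.save(avg, "averaged.pth")
```
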
+[ip-10-0-216-33:0/16] 2024-03-04 17:42:21,234 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-04 17:42:29,170 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-04 17:42:29,170 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2,
+[ip-10-0-216-33:0/16] 2024-03-04 17:42:29,175 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-04 17:56:34,116 (trainer:762) INFO: 39epoch:train:1-500batch: iter_time=0.858, forward_time=0.210, loss_ctc=39.459, loss_att=27.234, acc=0.823, loss=30.901, backward_time=0.233, grad_norm=42.404, clip=100.000, loss_scale=3.245e+32, optim_step_time=0.075, optim0_lr0=1.622e-04, train_time=1.779
+[ip-10-0-216-33:0/16] 2024-03-04 18:04:11,025 (trainer:762) INFO: 39epoch:train:501-1000batch: iter_time=1.496e-04, forward_time=0.210, loss_ctc=41.569, loss_att=28.377, acc=0.827, loss=32.335, backward_time=0.232, grad_norm=41.558, clip=100.000, loss_scale=3.245e+32, optim_step_time=0.075, optim0_lr0=1.621e-04, train_time=0.914
+[ip-10-0-216-33:0/16] 2024-03-04 18:11:43,703 (trainer:762) INFO: 39epoch:train:1001-1500batch: iter_time=1.503e-04, forward_time=0.208, loss_ctc=41.115, loss_att=28.294, acc=0.824, loss=32.140, backward_time=0.235, grad_norm=42.230, clip=100.000, loss_scale=3.245e+32, optim_step_time=0.075, optim0_lr0=1.621e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-03-04 18:19:17,605 (trainer:762) INFO: 39epoch:train:1501-2000batch: iter_time=1.496e-04, forward_time=0.210, loss_ctc=40.173, loss_att=27.255, acc=0.832, loss=31.131, backward_time=0.234, grad_norm=39.595, clip=100.000, loss_scale=3.764e+32, optim_step_time=0.075, optim0_lr0=1.620e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-03-04 18:26:53,451 (trainer:762) INFO: 39epoch:train:2001-2500batch: iter_time=1.466e-04, forward_time=0.210, loss_ctc=39.764, loss_att=27.471, acc=0.831, loss=31.159, backward_time=0.234, grad_norm=40.816, clip=100.000, loss_scale=6.490e+32, optim_step_time=0.075, optim0_lr0=1.619e-04, train_time=0.911
+[ip-10-0-216-33:0/16] 2024-03-04 18:34:30,326 (trainer:762) INFO: 39epoch:train:2501-3000batch: iter_time=1.463e-04, forward_time=0.209, loss_ctc=40.532, loss_att=27.573, acc=0.833, loss=31.461, backward_time=0.235, grad_norm=41.264, clip=100.000, loss_scale=6.490e+32, optim_step_time=0.075, optim0_lr0=1.619e-04, train_time=0.913
+[ip-10-0-216-33:0/16] 2024-03-04 18:42:04,250 (trainer:762) INFO: 39epoch:train:3001-3500batch: iter_time=1.477e-04, forward_time=0.211, loss_ctc=40.270, loss_att=27.160, acc=0.836, loss=31.093, backward_time=0.229, grad_norm=41.483, clip=100.000, loss_scale=6.490e+32, optim_step_time=0.075, optim0_lr0=1.618e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-03-04 18:49:41,841 (trainer:762) INFO: 39epoch:train:3501-4000batch: iter_time=1.433e-04, forward_time=0.209, loss_ctc=40.729, loss_att=27.488, acc=0.831, loss=31.460, backward_time=0.233, grad_norm=39.480, clip=100.000, loss_scale=7.529e+32, optim_step_time=0.075, optim0_lr0=1.617e-04, train_time=0.915
+[ip-10-0-216-33:0/16] 2024-03-04 18:57:15,158 (trainer:762) INFO: 39epoch:train:4001-4500batch: iter_time=1.449e-04, forward_time=0.209, loss_ctc=39.207, loss_att=27.210, acc=0.832, loss=30.809, backward_time=0.230, grad_norm=41.640, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.616e-04, train_time=0.906
+[ip-10-0-216-33:0/16] 2024-03-04 19:04:52,611 (trainer:762) INFO: 39epoch:train:4501-5000batch: iter_time=1.451e-04, forward_time=0.209, loss_ctc=38.002, loss_att=25.938, acc=0.827, loss=29.557, backward_time=0.242, grad_norm=40.422, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.616e-04, train_time=0.915
+[ip-10-0-216-33:0/16] 2024-03-04 19:04:58,940 (multiple_iter_factory:32) INFO: Building 1th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-04 19:05:35,157 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-04 19:05:43,270 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-04 19:05:43,270 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0,
+[ip-10-0-216-33:0/16] 2024-03-04 19:05:43,275 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-04 19:20:14,572 (trainer:762) INFO: 39epoch:train:5001-5500batch: iter_time=0.887, forward_time=0.208, loss_ctc=39.225, loss_att=27.219, acc=0.824, loss=30.821, backward_time=0.233, grad_norm=42.591, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.615e-04, train_time=1.844
+[ip-10-0-216-33:0/16] 2024-03-04 19:27:46,722 (trainer:762) INFO: 39epoch:train:5501-6000batch: iter_time=1.449e-04, forward_time=0.210, loss_ctc=41.163, loss_att=28.139, acc=0.827, loss=32.046, backward_time=0.231, grad_norm=41.204, clip=100.000, loss_scale=1.506e+33, optim_step_time=0.075, optim0_lr0=1.614e-04, train_time=0.904
+[ip-10-0-216-33:0/16] 2024-03-04 19:35:19,795 (trainer:762) INFO: 39epoch:train:6001-6500batch: iter_time=1.478e-04, forward_time=0.208, loss_ctc=41.068, loss_att=28.190, acc=0.823, loss=32.054, backward_time=0.232, grad_norm=42.628, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.614e-04, train_time=0.906
+[ip-10-0-216-33:0/16] 2024-03-04 19:42:52,508 (trainer:762) INFO: 39epoch:train:6501-7000batch: iter_time=1.436e-04, forward_time=0.210, loss_ctc=39.964, loss_att=27.142, acc=0.833, loss=30.989, backward_time=0.232, grad_norm=40.359, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.613e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-03-04 19:50:23,838 (trainer:762) INFO: 39epoch:train:7001-7500batch: iter_time=1.458e-04, forward_time=0.209, loss_ctc=39.603, loss_att=27.334, acc=0.831, loss=31.014, backward_time=0.233, grad_norm=40.170, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.612e-04, train_time=0.902
+[ip-10-0-216-33:0/16] 2024-03-04 19:57:56,652 (trainer:762) INFO: 39epoch:train:7501-8000batch: iter_time=1.459e-04, forward_time=0.210, loss_ctc=40.508, loss_att=27.519, acc=0.833, loss=31.416, backward_time=0.235, grad_norm=41.032, clip=100.000, loss_scale=3.012e+33, optim_step_time=0.075, optim0_lr0=1.611e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-03-04 20:05:33,207 (trainer:762) INFO: 39epoch:train:8001-8500batch: iter_time=1.423e-04, forward_time=0.208, loss_ctc=40.224, loss_att=27.110, acc=0.835, loss=31.044, backward_time=0.236, grad_norm=39.942, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.611e-04, train_time=0.913
+[ip-10-0-216-33:0/16] 2024-03-04 20:08:25,000 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-04 20:13:08,734 (trainer:762) INFO: 39epoch:train:8501-9000batch: iter_time=1.453e-04, forward_time=0.209, loss_ctc=40.613, loss_att=27.424, acc=0.832, loss=31.381, backward_time=0.235, grad_norm=38.063, clip=100.000, loss_scale=3.559e+33, optim_step_time=0.075, optim0_lr0=1.610e-04, train_time=0.911
+[ip-10-0-216-33:0/16] 2024-03-04 20:20:49,302 (trainer:762) INFO: 39epoch:train:9001-9500batch: iter_time=1.480e-04, forward_time=0.209, loss_ctc=38.744, loss_att=27.109, acc=0.832, loss=30.600, backward_time=0.232, grad_norm=40.034, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.609e-04, train_time=0.921
+[ip-10-0-216-33:0/16] 2024-03-04 20:28:25,459 (trainer:762) INFO: 39epoch:train:9501-10000batch: iter_time=1.480e-04, forward_time=0.210, loss_ctc=37.835, loss_att=25.929, acc=0.827, loss=29.501, backward_time=0.235, grad_norm=40.395, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.609e-04, train_time=0.912
+[ip-10-0-216-33:0/16] 2024-03-04 20:28:31,187 (multiple_iter_factory:32) INFO: Building 2th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-04 20:29:08,453 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-04 20:29:16,265 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-04 20:29:16,266 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1,
+[ip-10-0-216-33:0/16] 2024-03-04 20:29:16,270 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-04 20:43:42,446 (trainer:762) INFO: 39epoch:train:10001-10500batch: iter_time=0.881, forward_time=0.210, loss_ctc=39.075, loss_att=27.084, acc=0.824, loss=30.681, backward_time=0.234, grad_norm=42.437, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.608e-04, train_time=1.834
+[ip-10-0-216-33:0/16] 2024-03-04 20:46:59,891 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-04 20:51:20,786 (trainer:762) INFO: 39epoch:train:10501-11000batch: iter_time=1.498e-04, forward_time=0.208, loss_ctc=41.007, loss_att=28.060, acc=0.828, loss=31.944, backward_time=0.234, grad_norm=40.908, clip=100.000, loss_scale=2.742e+33, optim_step_time=0.075, optim0_lr0=1.607e-04, train_time=0.916
+[ip-10-0-216-33:0/16] 2024-03-04 20:58:59,039 (trainer:762) INFO: 39epoch:train:11001-11500batch: iter_time=1.559e-04, forward_time=0.209, loss_ctc=40.932, loss_att=28.179, acc=0.824, loss=32.005, backward_time=0.230, grad_norm=42.364, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.076, optim0_lr0=1.607e-04, train_time=0.916
+[ip-10-0-216-33:0/16] 2024-03-04 21:06:35,889 (trainer:762) INFO: 39epoch:train:11501-12000batch: iter_time=1.484e-04, forward_time=0.209, loss_ctc=39.957, loss_att=27.137, acc=0.833, loss=30.983, backward_time=0.232, grad_norm=39.483, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.606e-04, train_time=0.913
+[ip-10-0-216-33:0/16] 2024-03-04 21:14:16,331 (trainer:762) INFO: 39epoch:train:12001-12500batch: iter_time=1.518e-04, forward_time=0.209, loss_ctc=39.337, loss_att=27.162, acc=0.832, loss=30.815, backward_time=0.235, grad_norm=40.390, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.605e-04, train_time=0.921
+[ip-10-0-216-33:0/16] 2024-03-04 21:18:05,613 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-04 21:21:57,076 (trainer:762) INFO: 39epoch:train:12501-13000batch: iter_time=1.474e-04, forward_time=0.208, loss_ctc=40.062, loss_att=27.318, acc=0.834, loss=31.141, backward_time=0.233, grad_norm=41.114, clip=100.000, loss_scale=2.763e+33, optim_step_time=0.075, optim0_lr0=1.605e-04, train_time=0.921
+[ip-10-0-216-33:0/16] 2024-03-04 21:26:39,281 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-04 21:29:38,306 (trainer:762) INFO: 39epoch:train:13001-13500batch: iter_time=1.472e-04, forward_time=0.209, loss_ctc=40.104, loss_att=27.087, acc=0.836, loss=30.992, backward_time=0.233, grad_norm=40.529, clip=100.000, loss_scale=2.091e+33, optim_step_time=0.075, optim0_lr0=1.604e-04, train_time=0.922
+[ip-10-0-216-33:0/16] 2024-03-04 21:37:20,443 (trainer:762) INFO: 39epoch:train:13501-14000batch: iter_time=1.504e-04, forward_time=0.206, loss_ctc=40.234, loss_att=27.242, acc=0.832, loss=31.139, backward_time=0.232, grad_norm=38.084, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.603e-04, train_time=0.924
+[ip-10-0-216-33:0/16] 2024-03-04 21:45:02,101 (trainer:762) INFO: 39epoch:train:14001-14500batch: iter_time=1.445e-04, forward_time=0.208, loss_ctc=38.758, loss_att=27.093, acc=0.833, loss=30.592, backward_time=0.230, grad_norm=39.951, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.603e-04, train_time=0.923
+[ip-10-0-216-33:0/16] 2024-03-04 21:52:36,036 (trainer:762) INFO: 39epoch:train:14501-15000batch: iter_time=1.497e-04, forward_time=0.209, loss_ctc=37.658, loss_att=25.767, acc=0.828, loss=29.335, backward_time=0.237, grad_norm=39.813, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.602e-04, train_time=0.908
+[ip-10-0-216-33:0/16] 2024-03-04 22:06:52,299 (trainer:361) INFO: 39epoch results: [train] iter_time=0.088, forward_time=0.209, loss_ctc=39.896, loss_att=27.308, acc=0.830, loss=31.085, backward_time=0.233, grad_norm=40.746, clip=100.000, loss_scale=1.869e+33, optim_step_time=0.075, optim0_lr0=1.612e-04, train_time=1.003, time=4 hours, 11 minutes and 8.02 seconds, total_count=585000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=35.644, cer_ctc=0.193, loss_att=28.599, acc=0.775, cer=0.211, wer=0.983, loss=30.713, time=13 minutes and 59.33 seconds, total_count=83187, gpu_max_cached_mem_GB=36.516
+[ip-10-0-216-33:0/16] 2024-03-04 22:07:02,032 (trainer:416) INFO: The best model has been updated: valid.total_count
+[ip-10-0-216-33:0/16] 2024-03-04 22:07:02,083 (trainer:470) INFO: The model files were removed: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/29epoch.pth
+[ip-10-0-216-33:0/16] 2024-03-04 22:07:02,083 (trainer:290) INFO: 40/45epoch started. Estimated time to finish: 1 day, 2 hours and 32 minutes
+[ip-10-0-216-33:0/16] 2024-03-04 22:07:02,092 (multiple_iter_factory:32) INFO: Building 0th iter-factory...
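
The `Building 0th/1th/2th iter-factory...` messages reflect how the training data is consumed: the set is pre-split into three shards (`split.0/1/2`), and each epoch builds a fresh iterator per shard, in an order that varies from epoch to epoch. The rebuild cost is also why the first progress entry after each factory shows a large `iter_time` (about 0.9 s) while subsequent entries sit near 1.5e-04 s. Schematically, under the assumption of an epoch-seeded shuffle (a sketch of the pattern, not ESPnet's actual iterator-factory classes):

```python
# Schematic of the per-epoch shard rotation suggested by the log
# (illustrative only; ESPnet implements this with its own classes).
import random

SPLITS = ["split.0", "split.1", "split.2"]

def run_epoch(epoch, build_loader, train_on):
    order = SPLITS[:]
    random.Random(epoch).shuffle(order)        # shard order varies per epoch
    for i, split in enumerate(order):
        print(f"Building {i}th iter-factory...")  # matches the log wording
        loader = build_loader(split)           # dataset + sampler for one shard
        train_on(loader)                       # one 5000-batch block per shard here
```
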
+[ip-10-0-216-33:0/16] 2024-03-04 22:07:37,734 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-04 22:07:46,216 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-04 22:07:46,216 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0,
+[ip-10-0-216-33:0/16] 2024-03-04 22:07:46,221 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-04 22:21:44,820 (trainer:762) INFO: 40epoch:train:1-500batch: iter_time=0.839, forward_time=0.209, loss_ctc=43.383, loss_att=30.030, acc=0.822, loss=34.036, backward_time=0.234, grad_norm=44.503, clip=100.000, loss_scale=1.802e+33, optim_step_time=0.075, optim0_lr0=1.601e-04, train_time=1.765
+[ip-10-0-216-33:0/16] 2024-03-04 22:29:23,310 (trainer:762) INFO: 40epoch:train:501-1000batch: iter_time=1.503e-04, forward_time=0.209, loss_ctc=38.687, loss_att=26.347, acc=0.833, loss=30.049, backward_time=0.230, grad_norm=41.167, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.600e-04, train_time=0.917
+[ip-10-0-216-33:0/16] 2024-03-04 22:37:04,186 (trainer:762) INFO: 40epoch:train:1001-1500batch: iter_time=1.476e-04, forward_time=0.210, loss_ctc=40.140, loss_att=27.878, acc=0.834, loss=31.556, backward_time=0.234, grad_norm=41.503, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.600e-04, train_time=0.921
+[ip-10-0-216-33:0/16] 2024-03-04 22:44:44,273 (trainer:762) INFO: 40epoch:train:1501-2000batch: iter_time=1.463e-04, forward_time=0.209, loss_ctc=37.733, loss_att=26.367, acc=0.825, loss=29.777, backward_time=0.238, grad_norm=44.829, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.599e-04, train_time=0.920
+[ip-10-0-216-33:0/16] 2024-03-04 22:52:18,803 (trainer:762) INFO: 40epoch:train:2001-2500batch: iter_time=1.444e-04, forward_time=0.208, loss_ctc=37.950, loss_att=25.831, acc=0.833, loss=29.467, backward_time=0.235, grad_norm=38.988, clip=100.000, loss_scale=3.603e+33, optim_step_time=0.075, optim0_lr0=1.598e-04, train_time=0.909
+[ip-10-0-216-33:0/16] 2024-03-04 22:59:53,859 (trainer:762) INFO: 40epoch:train:2501-3000batch: iter_time=1.498e-04, forward_time=0.210, loss_ctc=38.357, loss_att=26.767, acc=0.833, loss=30.244, backward_time=0.235, grad_norm=42.169, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.598e-04, train_time=0.910
+[ip-10-0-216-33:0/16] 2024-03-04 23:07:32,614 (trainer:762) INFO: 40epoch:train:3001-3500batch: iter_time=1.475e-04, forward_time=0.210, loss_ctc=39.232, loss_att=27.175, acc=0.835, loss=30.792, backward_time=0.234, grad_norm=41.754, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.597e-04, train_time=0.917
+[ip-10-0-216-33:0/16] 2024-03-04 23:13:38,483 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-04 23:15:14,239 (trainer:762) INFO: 40epoch:train:3501-4000batch: iter_time=1.470e-04, forward_time=0.208, loss_ctc=41.650, loss_att=28.259, acc=0.828, loss=32.276, backward_time=0.232, grad_norm=42.106, clip=100.000, loss_scale=4.656e+33, optim_step_time=0.075, optim0_lr0=1.596e-04, train_time=0.923
+[ip-10-0-216-33:0/16] 2024-03-04 23:22:55,321 (trainer:762) INFO: 40epoch:train:4001-4500batch: iter_time=1.458e-04, forward_time=0.209, loss_ctc=39.991, loss_att=27.659, acc=0.828, loss=31.359, backward_time=0.237, grad_norm=44.580, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.596e-04, train_time=0.922
+[ip-10-0-216-33:0/16] 2024-03-04 23:30:34,393 (trainer:762) INFO: 40epoch:train:4501-5000batch: iter_time=1.433e-04, forward_time=0.208, loss_ctc=38.014, loss_att=25.799, acc=0.834, loss=29.463, backward_time=0.231, grad_norm=39.597, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.595e-04, train_time=0.918
+[ip-10-0-216-33:0/16] 2024-03-04 23:30:40,158 (multiple_iter_factory:32) INFO: Building 1th iter-factory...
+[ip-10-0-216-33:0/16] 2024-03-04 23:31:16,447 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
+[ip-10-0-216-33:0/16] 2024-03-04 23:31:25,162 (abs_task:1663) INFO: [train] dataset:
+ESPnetDataset(
+ speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"}
+ text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"}
+ text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"}
+ text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"}
+ preprocess: )
+[ip-10-0-216-33:0/16] 2024-03-04 23:31:25,163 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1,
+[ip-10-0-216-33:0/16] 2024-03-04 23:31:25,169 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257
+[ip-10-0-216-33:0/16] 2024-03-04 23:45:55,087 (trainer:762) INFO: 40epoch:train:5001-5500batch: iter_time=0.882, forward_time=0.207, loss_ctc=42.761, loss_att=29.533, acc=0.824, loss=33.501, backward_time=0.241, grad_norm=49.138, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.594e-04, train_time=1.841
+[ip-10-0-216-33:0/16] 2024-03-04 23:53:27,874 (trainer:762) INFO: 40epoch:train:5501-6000batch: iter_time=1.452e-04, forward_time=0.210, loss_ctc=38.205, loss_att=26.052, acc=0.834, loss=29.698, backward_time=0.233, grad_norm=40.437, clip=100.000, loss_scale=3.131e+33, optim_step_time=0.075, optim0_lr0=1.594e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-03-05 00:01:00,399 (trainer:762) INFO: 40epoch:train:6001-6500batch: iter_time=1.456e-04, forward_time=0.210, loss_ctc=40.044, loss_att=27.824, acc=0.834, loss=31.490, backward_time=0.238, grad_norm=41.842, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.593e-04, train_time=0.905
+[ip-10-0-216-33:0/16] 2024-03-05 00:07:03,599 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model.
+[ip-10-0-216-33:0/16] 2024-03-05 00:08:34,631 (trainer:762) INFO: 40epoch:train:6501-7000batch: iter_time=1.456e-04, forward_time=0.208, loss_ctc=37.371, loss_att=26.102, acc=0.826, loss=29.483, backward_time=0.237, grad_norm=43.885, clip=100.000, loss_scale=4.672e+33, optim_step_time=0.075, optim0_lr0=1.592e-04, train_time=0.908 +[ip-10-0-216-33:0/16] 2024-03-05 00:16:09,697 (trainer:762) INFO: 40epoch:train:7001-7500batch: iter_time=1.481e-04, forward_time=0.209, loss_ctc=38.051, loss_att=25.903, acc=0.833, loss=29.547, backward_time=0.233, grad_norm=39.799, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.592e-04, train_time=0.910 +[ip-10-0-216-33:0/16] 2024-03-05 00:23:43,453 (trainer:762) INFO: 40epoch:train:7501-8000batch: iter_time=1.505e-04, forward_time=0.208, loss_ctc=38.217, loss_att=26.841, acc=0.833, loss=30.254, backward_time=0.232, grad_norm=41.681, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.591e-04, train_time=0.907 +[ip-10-0-216-33:0/16] 2024-03-05 00:31:20,442 (trainer:762) INFO: 40epoch:train:8001-8500batch: iter_time=1.436e-04, forward_time=0.210, loss_ctc=38.965, loss_att=27.024, acc=0.836, loss=30.606, backward_time=0.232, grad_norm=41.773, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.590e-04, train_time=0.914 +[ip-10-0-216-33:0/16] 2024-03-05 00:38:53,581 (trainer:762) INFO: 40epoch:train:8501-9000batch: iter_time=1.488e-04, forward_time=0.208, loss_ctc=41.237, loss_att=28.035, acc=0.829, loss=31.996, backward_time=0.235, grad_norm=41.997, clip=100.000, loss_scale=3.115e+33, optim_step_time=0.075, optim0_lr0=1.590e-04, train_time=0.906 +[ip-10-0-216-33:0/16] 2024-03-05 00:39:21,979 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-05 00:46:25,897 (trainer:762) INFO: 40epoch:train:9001-9500batch: iter_time=1.453e-04, forward_time=0.210, loss_ctc=39.667, loss_att=27.459, acc=0.829, loss=31.121, backward_time=0.231, grad_norm=42.456, clip=100.000, loss_scale=2.752e+33, optim_step_time=0.075, optim0_lr0=1.589e-04, train_time=0.904 +[ip-10-0-216-33:0/16] 2024-03-05 00:53:58,244 (trainer:762) INFO: 40epoch:train:9501-10000batch: iter_time=1.445e-04, forward_time=0.208, loss_ctc=37.814, loss_att=25.597, acc=0.835, loss=29.262, backward_time=0.235, grad_norm=38.809, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.588e-04, train_time=0.904 +[ip-10-0-216-33:0/16] 2024-03-05 00:54:03,357 (multiple_iter_factory:32) INFO: Building 2th iter-factory... 
+[ip-10-0-216-33:0/16] 2024-03-05 00:54:40,025 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-05 00:54:48,047 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-05 00:54:48,047 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2, +[ip-10-0-216-33:0/16] 2024-03-05 00:54:48,052 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-05 01:09:38,296 (trainer:762) INFO: 40epoch:train:10001-10500batch: iter_time=0.922, forward_time=0.210, loss_ctc=42.581, loss_att=29.397, acc=0.825, loss=33.352, backward_time=0.236, grad_norm=42.766, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.588e-04, train_time=1.880 +[ip-10-0-216-33:0/16] 2024-03-05 01:17:13,910 (trainer:762) INFO: 40epoch:train:10501-11000batch: iter_time=1.504e-04, forward_time=0.211, loss_ctc=38.100, loss_att=26.071, acc=0.834, loss=29.680, backward_time=0.231, grad_norm=40.923, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.587e-04, train_time=0.911 +[ip-10-0-216-33:0/16] 2024-03-05 01:19:33,199 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-05 01:19:45,871 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
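Each `[train] dataset:` block above pairs one shard of Kaldi-style tables: `wav.scp` (utterance id to ark location), `text` (target transcript), `text.prev` (previous-sentence prompt) and `text.ctc` (CTC target). A minimal reader for such space-separated `key value` tables, using the paths from the log (the file format is my assumption from the Kaldi convention; decoding the actual `kaldi_ark` audio would additionally need a Kaldi I/O library such as kaldiio):

```python
def read_table(path):
    """Parse a Kaldi-style 'utt_id value...' table into a dict."""
    table = {}
    with open(path, encoding="utf-8") as f:
        for line in f:
            utt_id, _, value = line.rstrip("\n").partition(" ")
            table[utt_id] = value
    return table

split = "exp/s2t_stats_raw_bpe50000/splits3"
wav  = read_table(f"{split}/wav.scp/split.0")    # utt_id -> ark/offset spec
text = read_table(f"{split}/text/split.0")       # utt_id -> target text
prev = read_table(f"{split}/text.prev/split.0")  # utt_id -> previous context
ctc  = read_table(f"{split}/text.ctc/split.0")   # utt_id -> CTC target

for utt_id in list(text)[:3]:                    # peek at a few examples
    print(utt_id, wav[utt_id], text[utt_id])
```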
+[ip-10-0-216-33:0/16] 2024-03-05 01:24:51,814 (trainer:762) INFO: 40epoch:train:11001-11500batch: iter_time=1.523e-04, forward_time=0.208, loss_ctc=39.865, loss_att=27.672, acc=0.835, loss=31.330, backward_time=0.233, grad_norm=41.858, clip=100.000, loss_scale=2.359e+33, optim_step_time=0.075, optim0_lr0=1.586e-04, train_time=0.916 +[ip-10-0-216-33:0/16] 2024-03-05 01:32:31,807 (trainer:762) INFO: 40epoch:train:11501-12000batch: iter_time=1.485e-04, forward_time=0.210, loss_ctc=37.302, loss_att=26.101, acc=0.826, loss=29.461, backward_time=0.232, grad_norm=42.908, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.586e-04, train_time=0.920 +[ip-10-0-216-33:0/16] 2024-03-05 01:40:12,173 (trainer:762) INFO: 40epoch:train:12001-12500batch: iter_time=1.547e-04, forward_time=0.208, loss_ctc=37.717, loss_att=25.733, acc=0.834, loss=29.328, backward_time=0.239, grad_norm=41.070, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.585e-04, train_time=0.920 +[ip-10-0-216-33:0/16] 2024-03-05 01:47:46,141 (trainer:762) INFO: 40epoch:train:12501-13000batch: iter_time=1.513e-04, forward_time=0.210, loss_ctc=38.303, loss_att=26.869, acc=0.833, loss=30.299, backward_time=0.238, grad_norm=42.400, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.584e-04, train_time=0.908 +[ip-10-0-216-33:0/16] 2024-03-05 01:55:24,124 (trainer:762) INFO: 40epoch:train:13001-13500batch: iter_time=1.519e-04, forward_time=0.209, loss_ctc=38.880, loss_att=27.002, acc=0.836, loss=30.565, backward_time=0.234, grad_norm=41.320, clip=100.000, loss_scale=2.163e+33, optim_step_time=0.075, optim0_lr0=1.584e-04, train_time=0.916 +[ip-10-0-216-33:0/16] 2024-03-05 02:02:59,970 (trainer:762) INFO: 40epoch:train:13501-14000batch: iter_time=1.494e-04, forward_time=0.210, loss_ctc=41.363, loss_att=28.005, acc=0.829, loss=32.012, backward_time=0.238, grad_norm=41.584, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.583e-04, train_time=0.911 +[ip-10-0-216-33:0/16] 2024-03-05 02:10:31,382 (trainer:762) INFO: 40epoch:train:14001-14500batch: iter_time=1.468e-04, forward_time=0.210, loss_ctc=39.620, loss_att=27.361, acc=0.829, loss=31.039, backward_time=0.233, grad_norm=42.890, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.582e-04, train_time=0.903 +[ip-10-0-216-33:0/16] 2024-03-05 02:18:05,844 (trainer:762) INFO: 40epoch:train:14501-15000batch: iter_time=1.488e-04, forward_time=0.210, loss_ctc=37.562, loss_att=25.482, acc=0.835, loss=29.106, backward_time=0.235, grad_norm=39.013, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.076, optim0_lr0=1.582e-04, train_time=0.909 +[ip-10-0-216-33:0/16] 2024-03-05 02:32:19,189 (trainer:361) INFO: 40epoch results: [train] iter_time=0.088, forward_time=0.209, loss_ctc=39.292, loss_att=27.072, acc=0.831, loss=30.738, backward_time=0.235, grad_norm=41.991, clip=100.000, loss_scale=2.889e+33, optim_step_time=0.075, optim0_lr0=1.591e-04, train_time=1.004, time=4 hours, 11 minutes and 19.9 seconds, total_count=600000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=35.747, cer_ctc=0.193, loss_att=28.133, acc=0.771, cer=0.228, wer=0.989, loss=30.417, time=13 minutes and 56.91 seconds, total_count=85320, gpu_max_cached_mem_GB=36.516 +[ip-10-0-216-33:0/16] 2024-03-05 02:32:29,033 (trainer:416) INFO: The best model has been updated: valid.total_count +[ip-10-0-216-33:0/16] 2024-03-05 02:32:29,078 (trainer:470) INFO: The model files were removed: 
exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/35epoch.pth +[ip-10-0-216-33:0/16] 2024-03-05 02:32:29,079 (trainer:290) INFO: 41/45epoch started. Estimated time to finish: 22 hours, 6 minutes and 42.16 seconds +[ip-10-0-216-33:0/16] 2024-03-05 02:32:29,086 (multiple_iter_factory:32) INFO: Building 0th iter-factory... +[ip-10-0-216-33:0/16] 2024-03-05 02:33:04,920 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-05 02:33:13,291 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-05 02:33:13,291 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0, +[ip-10-0-216-33:0/16] 2024-03-05 02:33:13,295 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-05 02:47:09,139 (trainer:762) INFO: 41epoch:train:1-500batch: iter_time=0.834, forward_time=0.209, loss_ctc=38.423, loss_att=26.354, acc=0.833, loss=29.974, backward_time=0.234, grad_norm=40.757, clip=100.000, loss_scale=4.325e+33, optim_step_time=0.075, optim0_lr0=1.581e-04, train_time=1.760 +[ip-10-0-216-33:0/16] 2024-03-05 02:47:12,720 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-05 02:53:56,846 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-05 02:54:50,789 (trainer:762) INFO: 41epoch:train:501-1000batch: iter_time=1.506e-04, forward_time=0.209, loss_ctc=38.293, loss_att=26.347, acc=0.837, loss=29.931, backward_time=0.234, grad_norm=41.973, clip=100.000, loss_scale=2.455e+33, optim_step_time=0.075, optim0_lr0=1.580e-04, train_time=0.923 +[ip-10-0-216-33:0/16] 2024-03-05 02:57:48,730 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
+[ip-10-0-216-33:0/16] 2024-03-05 03:02:30,635 (trainer:762) INFO: 41epoch:train:1001-1500batch: iter_time=1.504e-04, forward_time=0.209, loss_ctc=37.140, loss_att=26.289, acc=0.831, loss=29.544, backward_time=0.238, grad_norm=41.560, clip=100.000, loss_scale=9.001e+32, optim_step_time=0.075, optim0_lr0=1.580e-04, train_time=0.919 +[ip-10-0-216-33:0/16] 2024-03-05 03:10:13,120 (trainer:762) INFO: 41epoch:train:1501-2000batch: iter_time=1.482e-04, forward_time=0.209, loss_ctc=40.464, loss_att=27.479, acc=0.833, loss=31.375, backward_time=0.238, grad_norm=42.544, clip=100.000, loss_scale=6.490e+32, optim_step_time=0.075, optim0_lr0=1.579e-04, train_time=0.925 +[ip-10-0-216-33:0/16] 2024-03-05 03:17:54,059 (trainer:762) INFO: 41epoch:train:2001-2500batch: iter_time=1.493e-04, forward_time=0.206, loss_ctc=38.995, loss_att=26.783, acc=0.827, loss=30.447, backward_time=0.233, grad_norm=42.769, clip=100.000, loss_scale=6.490e+32, optim_step_time=0.075, optim0_lr0=1.578e-04, train_time=0.922 +[ip-10-0-216-33:0/16] 2024-03-05 03:25:39,205 (trainer:762) INFO: 41epoch:train:2501-3000batch: iter_time=1.498e-04, forward_time=0.208, loss_ctc=39.537, loss_att=27.126, acc=0.831, loss=30.849, backward_time=0.235, grad_norm=41.892, clip=100.000, loss_scale=6.490e+32, optim_step_time=0.075, optim0_lr0=1.578e-04, train_time=0.930 +[ip-10-0-216-33:0/16] 2024-03-05 03:33:18,623 (trainer:762) INFO: 41epoch:train:3001-3500batch: iter_time=1.461e-04, forward_time=0.208, loss_ctc=39.080, loss_att=26.914, acc=0.832, loss=30.563, backward_time=0.235, grad_norm=43.834, clip=100.000, loss_scale=1.046e+33, optim_step_time=0.075, optim0_lr0=1.577e-04, train_time=0.919 +[ip-10-0-216-33:0/16] 2024-03-05 03:34:32,573 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-05 03:40:53,870 (trainer:762) INFO: 41epoch:train:3501-4000batch: iter_time=1.470e-04, forward_time=0.209, loss_ctc=38.533, loss_att=26.562, acc=0.832, loss=30.153, backward_time=0.231, grad_norm=39.533, clip=100.000, loss_scale=7.518e+32, optim_step_time=0.075, optim0_lr0=1.576e-04, train_time=0.910 +[ip-10-0-216-33:0/16] 2024-03-05 03:48:31,599 (trainer:762) INFO: 41epoch:train:4001-4500batch: iter_time=1.468e-04, forward_time=0.209, loss_ctc=41.329, loss_att=28.429, acc=0.829, loss=32.299, backward_time=0.236, grad_norm=42.234, clip=100.000, loss_scale=6.490e+32, optim_step_time=0.075, optim0_lr0=1.576e-04, train_time=0.915 +[ip-10-0-216-33:0/16] 2024-03-05 03:56:05,152 (trainer:762) INFO: 41epoch:train:4501-5000batch: iter_time=1.439e-04, forward_time=0.209, loss_ctc=38.907, loss_att=26.650, acc=0.829, loss=30.327, backward_time=0.234, grad_norm=41.096, clip=100.000, loss_scale=6.490e+32, optim_step_time=0.075, optim0_lr0=1.575e-04, train_time=0.907 +[ip-10-0-216-33:0/16] 2024-03-05 03:56:10,960 (multiple_iter_factory:32) INFO: Building 1th iter-factory... 
+[ip-10-0-216-33:0/16] 2024-03-05 03:56:47,620 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-05 03:56:55,452 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-05 03:56:55,452 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1, +[ip-10-0-216-33:0/16] 2024-03-05 03:56:55,457 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-05 04:11:27,539 (trainer:762) INFO: 41epoch:train:5001-5500batch: iter_time=0.879, forward_time=0.208, loss_ctc=38.020, loss_att=25.988, acc=0.834, loss=29.598, backward_time=0.234, grad_norm=40.255, clip=100.000, loss_scale=6.490e+32, optim_step_time=0.075, optim0_lr0=1.574e-04, train_time=1.845 +[ip-10-0-216-33:0/16] 2024-03-05 04:19:11,826 (trainer:762) INFO: 41epoch:train:5501-6000batch: iter_time=1.448e-04, forward_time=0.208, loss_ctc=38.116, loss_att=26.283, acc=0.837, loss=29.833, backward_time=0.238, grad_norm=41.309, clip=100.000, loss_scale=1.194e+33, optim_step_time=0.075, optim0_lr0=1.574e-04, train_time=0.928 +[ip-10-0-216-33:0/16] 2024-03-05 04:26:49,213 (trainer:762) INFO: 41epoch:train:6001-6500batch: iter_time=1.485e-04, forward_time=0.208, loss_ctc=36.848, loss_att=26.066, acc=0.833, loss=29.301, backward_time=0.232, grad_norm=41.032, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.573e-04, train_time=0.915 +[ip-10-0-216-33:0/16] 2024-03-05 04:34:23,944 (trainer:762) INFO: 41epoch:train:6501-7000batch: iter_time=1.447e-04, forward_time=0.210, loss_ctc=40.276, loss_att=27.287, acc=0.834, loss=31.183, backward_time=0.233, grad_norm=41.773, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.573e-04, train_time=0.909 +[ip-10-0-216-33:0/16] 2024-03-05 04:41:58,871 (trainer:762) INFO: 41epoch:train:7001-7500batch: iter_time=1.519e-04, forward_time=0.209, loss_ctc=38.861, loss_att=26.659, acc=0.828, loss=30.320, backward_time=0.236, grad_norm=44.365, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.572e-04, train_time=0.910 +[ip-10-0-216-33:0/16] 2024-03-05 04:49:41,496 (trainer:762) INFO: 41epoch:train:7501-8000batch: iter_time=1.485e-04, forward_time=0.207, loss_ctc=39.366, loss_att=27.097, acc=0.831, loss=30.778, backward_time=0.234, grad_norm=42.838, clip=100.000, loss_scale=2.388e+33, optim_step_time=0.075, optim0_lr0=1.571e-04, train_time=0.925 +[ip-10-0-216-33:0/16] 2024-03-05 04:57:29,945 (trainer:762) INFO: 41epoch:train:8001-8500batch: iter_time=1.432e-04, forward_time=0.208, loss_ctc=38.798, loss_att=26.757, acc=0.832, loss=30.369, backward_time=0.235, grad_norm=41.589, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.571e-04, train_time=0.937 +[ip-10-0-216-33:0/16] 2024-03-05 05:05:12,299 (trainer:762) INFO: 41epoch:train:8501-9000batch: iter_time=1.493e-04, forward_time=0.206, loss_ctc=38.263, loss_att=26.464, acc=0.832, loss=30.003, backward_time=0.231, grad_norm=38.747, 
clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.570e-04, train_time=0.924 +[ip-10-0-216-33:0/16] 2024-03-05 05:12:48,856 (trainer:762) INFO: 41epoch:train:9001-9500batch: iter_time=1.428e-04, forward_time=0.210, loss_ctc=41.209, loss_att=28.298, acc=0.830, loss=32.171, backward_time=0.232, grad_norm=41.304, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.569e-04, train_time=0.913 +[ip-10-0-216-33:0/16] 2024-03-05 05:20:20,071 (trainer:762) INFO: 41epoch:train:9501-10000batch: iter_time=1.450e-04, forward_time=0.209, loss_ctc=38.883, loss_att=26.571, acc=0.830, loss=30.265, backward_time=0.231, grad_norm=39.885, clip=100.000, loss_scale=4.777e+33, optim_step_time=0.075, optim0_lr0=1.569e-04, train_time=0.902 +[ip-10-0-216-33:0/16] 2024-03-05 05:20:26,027 (multiple_iter_factory:32) INFO: Building 2th iter-factory... +[ip-10-0-216-33:0/16] 2024-03-05 05:21:02,565 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-05 05:21:10,417 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-05 05:21:10,417 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2, +[ip-10-0-216-33:0/16] 2024-03-05 05:21:10,422 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-05 05:35:58,818 (trainer:762) INFO: 41epoch:train:10001-10500batch: iter_time=0.884, forward_time=0.209, loss_ctc=37.944, loss_att=25.992, acc=0.835, loss=29.578, backward_time=0.230, grad_norm=40.022, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.568e-04, train_time=1.877 +[ip-10-0-216-33:0/16] 2024-03-05 05:43:35,811 (trainer:762) INFO: 41epoch:train:10501-11000batch: iter_time=1.473e-04, forward_time=0.208, loss_ctc=37.974, loss_att=26.109, acc=0.839, loss=29.668, backward_time=0.230, grad_norm=40.262, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.567e-04, train_time=0.914 +[ip-10-0-216-33:0/16] 2024-03-05 05:51:14,350 (trainer:762) INFO: 41epoch:train:11001-11500batch: iter_time=1.537e-04, forward_time=0.207, loss_ctc=36.847, loss_att=26.041, acc=0.832, loss=29.283, backward_time=0.240, grad_norm=41.606, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.567e-04, train_time=0.917 +[ip-10-0-216-33:0/16] 2024-03-05 05:55:29,732 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
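The repeated sampler summary (`N-batch=28118, batch_size=256, mean=256.0, min=256, max=257`) indicates batches are built by plain chunking in key-file order, without length sorting, with the remainder folded back in so no batch falls below `batch_size`. A grouping consistent with those numbers (my reconstruction from the summary, not the UnsortedBatchSampler source):

```python
def unsorted_batches(keys, batch_size=256):
    """Chunk utterance keys in file order; spread leftovers over early batches.

    Assumes len(keys) >= batch_size, as in the logged shards.
    """
    n_batch = len(keys) // batch_size
    batches = [keys[i * batch_size:(i + 1) * batch_size] for i in range(n_batch)]
    for j, key in enumerate(keys[n_batch * batch_size:]):
        batches[j].append(key)   # a few batches end up with 257 items
    return batches

# 28118 batches of 256 plus a folded remainder yields min=256, max=257, mean~256.0
```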
+[ip-10-0-216-33:0/16] 2024-03-05 05:58:52,393 (trainer:762) INFO: 41epoch:train:11501-12000batch: iter_time=1.480e-04, forward_time=0.209, loss_ctc=40.149, loss_att=27.192, acc=0.834, loss=31.079, backward_time=0.232, grad_norm=44.454, clip=100.000, loss_scale=7.232e+33, optim_step_time=0.075, optim0_lr0=1.566e-04, train_time=0.916 +[ip-10-0-216-33:0/16] 2024-03-05 06:06:27,135 (trainer:762) INFO: 41epoch:train:12001-12500batch: iter_time=1.511e-04, forward_time=0.210, loss_ctc=38.785, loss_att=26.574, acc=0.829, loss=30.237, backward_time=0.240, grad_norm=43.144, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.565e-04, train_time=0.909 +[ip-10-0-216-33:0/16] 2024-03-05 06:14:02,564 (trainer:762) INFO: 41epoch:train:12501-13000batch: iter_time=1.489e-04, forward_time=0.209, loss_ctc=39.217, loss_att=26.959, acc=0.832, loss=30.637, backward_time=0.235, grad_norm=42.044, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.565e-04, train_time=0.911 +[ip-10-0-216-33:0/16] 2024-03-05 06:21:36,009 (trainer:762) INFO: 41epoch:train:13001-13500batch: iter_time=1.483e-04, forward_time=0.208, loss_ctc=38.811, loss_att=26.738, acc=0.833, loss=30.360, backward_time=0.233, grad_norm=40.549, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.564e-04, train_time=0.907 +[ip-10-0-216-33:0/16] 2024-03-05 06:29:15,474 (trainer:762) INFO: 41epoch:train:13501-14000batch: iter_time=1.502e-04, forward_time=0.210, loss_ctc=38.492, loss_att=26.470, acc=0.832, loss=30.076, backward_time=0.238, grad_norm=38.974, clip=100.000, loss_scale=7.508e+33, optim_step_time=0.075, optim0_lr0=1.564e-04, train_time=0.919 +[ip-10-0-216-33:0/16] 2024-03-05 06:30:43,882 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-05 06:36:50,470 (trainer:762) INFO: 41epoch:train:14001-14500batch: iter_time=1.505e-04, forward_time=0.209, loss_ctc=40.770, loss_att=28.214, acc=0.831, loss=31.981, backward_time=0.233, grad_norm=41.266, clip=100.000, loss_scale=6.202e+33, optim_step_time=0.075, optim0_lr0=1.563e-04, train_time=0.910 +[ip-10-0-216-33:0/16] 2024-03-05 06:39:56,664 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
+[ip-10-0-216-33:0/16] 2024-03-05 06:44:30,699 (trainer:762) INFO: 41epoch:train:14501-15000batch: iter_time=1.454e-04, forward_time=0.207, loss_ctc=38.709, loss_att=26.554, acc=0.830, loss=30.200, backward_time=0.236, grad_norm=40.028, clip=100.000, loss_scale=3.642e+33, optim_step_time=0.075, optim0_lr0=1.562e-04, train_time=0.920 +[ip-10-0-216-33:0/16] 2024-03-05 06:58:40,784 (trainer:361) INFO: 41epoch results: [train] iter_time=0.087, forward_time=0.208, loss_ctc=38.901, loss_att=26.775, acc=0.832, loss=30.413, backward_time=0.234, grad_norm=41.454, clip=100.000, loss_scale=2.972e+33, optim_step_time=0.075, optim0_lr0=1.572e-04, train_time=1.008, time=4 hours, 12 minutes and 17.14 seconds, total_count=615000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=34.715, cer_ctc=0.188, loss_att=27.154, acc=0.780, cer=0.212, wer=0.989, loss=29.422, time=13 minutes and 54.23 seconds, total_count=87453, gpu_max_cached_mem_GB=36.516 +[ip-10-0-216-33:0/16] 2024-03-05 06:58:50,775 (trainer:416) INFO: The best model has been updated: valid.acc, valid.total_count +[ip-10-0-216-33:0/16] 2024-03-05 06:58:50,831 (trainer:470) INFO: The model files were removed: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/32epoch.pth, exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/36epoch.pth +[ip-10-0-216-33:0/16] 2024-03-05 06:58:50,831 (trainer:290) INFO: 42/45epoch started. Estimated time to finish: 17 hours, 41 minutes and 27.86 seconds +[ip-10-0-216-33:0/16] 2024-03-05 06:58:50,838 (multiple_iter_factory:32) INFO: Building 0th iter-factory... +[ip-10-0-216-33:0/16] 2024-03-05 06:59:26,599 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-05 06:59:34,353 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-05 06:59:34,354 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0, +[ip-10-0-216-33:0/16] 2024-03-05 06:59:34,358 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-05 07:13:34,497 (trainer:762) INFO: 42epoch:train:1-500batch: iter_time=0.847, forward_time=0.210, loss_ctc=40.042, loss_att=27.652, acc=0.835, loss=31.369, backward_time=0.231, grad_norm=43.584, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.562e-04, train_time=1.767 +[ip-10-0-216-33:0/16] 2024-03-05 07:19:26,979 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
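Lines like `The best model has been updated: valid.acc, valid.total_count` followed by `The model files were removed: …/32epoch.pth, …/36epoch.pth` show the trainer retaining only the checkpoints that remain best under its validation criteria and deleting the rest to bound disk use. A hypothetical sketch of that retention rule (`scores` maps epoch to a validation metric, higher is better; this is not ESPnet's actual implementation):

```python
from pathlib import Path

def prune_checkpoints(exp_dir, scores, keep_n=5):
    """Delete epoch checkpoints that are no longer among the keep_n best."""
    best = set(sorted(scores, key=scores.get, reverse=True)[:keep_n])
    removed = []
    for ckpt in Path(exp_dir).glob("*epoch.pth"):
        epoch = int(ckpt.stem.replace("epoch", ""))  # "35epoch" -> 35
        if epoch in scores and epoch not in best:
            ckpt.unlink()
            removed.append(str(ckpt))
    if removed:
        print("The model files were removed:", ", ".join(removed))
```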
+[ip-10-0-216-33:0/16] 2024-03-05 07:21:10,867 (trainer:762) INFO: 42epoch:train:501-1000batch: iter_time=1.485e-04, forward_time=0.210, loss_ctc=41.029, loss_att=27.692, acc=0.824, loss=31.693, backward_time=0.233, grad_norm=43.151, clip=100.000, loss_scale=2.294e+33, optim_step_time=0.075, optim0_lr0=1.561e-04, train_time=0.912 +[ip-10-0-216-33:0/16] 2024-03-05 07:28:50,572 (trainer:762) INFO: 42epoch:train:1001-1500batch: iter_time=1.494e-04, forward_time=0.209, loss_ctc=39.558, loss_att=27.365, acc=0.829, loss=31.023, backward_time=0.232, grad_norm=44.833, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.560e-04, train_time=0.919 +[ip-10-0-216-33:0/16] 2024-03-05 07:36:30,472 (trainer:762) INFO: 42epoch:train:1501-2000batch: iter_time=1.478e-04, forward_time=0.209, loss_ctc=39.976, loss_att=27.094, acc=0.827, loss=30.959, backward_time=0.236, grad_norm=45.195, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.560e-04, train_time=0.920 +[ip-10-0-216-33:0/16] 2024-03-05 07:44:03,553 (trainer:762) INFO: 42epoch:train:2001-2500batch: iter_time=1.515e-04, forward_time=0.210, loss_ctc=37.847, loss_att=26.205, acc=0.830, loss=29.697, backward_time=0.234, grad_norm=41.558, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.559e-04, train_time=0.906 +[ip-10-0-216-33:0/16] 2024-03-05 07:50:57,748 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-05 07:51:44,375 (trainer:762) INFO: 42epoch:train:2501-3000batch: iter_time=1.512e-04, forward_time=0.209, loss_ctc=41.090, loss_att=28.012, acc=0.830, loss=31.935, backward_time=0.233, grad_norm=44.410, clip=100.000, loss_scale=1.465e+33, optim_step_time=0.075, optim0_lr0=1.558e-04, train_time=0.921 +[ip-10-0-216-33:0/16] 2024-03-05 07:57:53,577 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-05 07:59:22,932 (trainer:762) INFO: 42epoch:train:3001-3500batch: iter_time=1.476e-04, forward_time=0.209, loss_ctc=39.146, loss_att=27.130, acc=0.827, loss=30.735, backward_time=0.229, grad_norm=43.068, clip=100.000, loss_scale=1.173e+33, optim_step_time=0.075, optim0_lr0=1.558e-04, train_time=0.917 +[ip-10-0-216-33:0/16] 2024-03-05 08:07:06,119 (trainer:762) INFO: 42epoch:train:3501-4000batch: iter_time=1.518e-04, forward_time=0.208, loss_ctc=38.728, loss_att=26.485, acc=0.835, loss=30.158, backward_time=0.240, grad_norm=40.836, clip=100.000, loss_scale=6.490e+32, optim_step_time=0.075, optim0_lr0=1.557e-04, train_time=0.926 +[ip-10-0-216-33:0/16] 2024-03-05 08:14:50,416 (trainer:762) INFO: 42epoch:train:4001-4500batch: iter_time=1.511e-04, forward_time=0.207, loss_ctc=40.766, loss_att=28.508, acc=0.822, loss=32.185, backward_time=0.235, grad_norm=45.488, clip=100.000, loss_scale=6.490e+32, optim_step_time=0.075, optim0_lr0=1.557e-04, train_time=0.928 +[ip-10-0-216-33:0/16] 2024-03-05 08:22:30,245 (trainer:762) INFO: 42epoch:train:4501-5000batch: iter_time=1.432e-04, forward_time=0.208, loss_ctc=40.700, loss_att=27.513, acc=0.831, loss=31.469, backward_time=0.233, grad_norm=41.597, clip=100.000, loss_scale=6.490e+32, optim_step_time=0.075, optim0_lr0=1.556e-04, train_time=0.919 +[ip-10-0-216-33:0/16] 2024-03-05 08:22:35,123 (multiple_iter_factory:32) INFO: Building 1th iter-factory... 
+[ip-10-0-216-33:0/16] 2024-03-05 08:23:11,449 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-05 08:23:19,376 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-05 08:23:19,377 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1, +[ip-10-0-216-33:0/16] 2024-03-05 08:23:19,381 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-05 08:37:43,841 (trainer:762) INFO: 42epoch:train:5001-5500batch: iter_time=0.892, forward_time=0.210, loss_ctc=39.819, loss_att=27.488, acc=0.835, loss=31.187, backward_time=0.234, grad_norm=44.327, clip=100.000, loss_scale=7.737e+32, optim_step_time=0.075, optim0_lr0=1.555e-04, train_time=1.827 +[ip-10-0-216-33:0/16] 2024-03-05 08:45:22,779 (trainer:762) INFO: 42epoch:train:5501-6000batch: iter_time=1.502e-04, forward_time=0.207, loss_ctc=40.661, loss_att=27.562, acc=0.825, loss=31.492, backward_time=0.233, grad_norm=43.590, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.555e-04, train_time=0.918 +[ip-10-0-216-33:0/16] 2024-03-05 08:52:53,738 (trainer:762) INFO: 42epoch:train:6001-6500batch: iter_time=1.481e-04, forward_time=0.210, loss_ctc=39.343, loss_att=27.233, acc=0.830, loss=30.866, backward_time=0.232, grad_norm=45.172, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.554e-04, train_time=0.902 +[ip-10-0-216-33:0/16] 2024-03-05 09:00:31,281 (trainer:762) INFO: 42epoch:train:6501-7000batch: iter_time=1.480e-04, forward_time=0.209, loss_ctc=39.734, loss_att=27.019, acc=0.828, loss=30.833, backward_time=0.233, grad_norm=47.081, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.553e-04, train_time=0.915 +[ip-10-0-216-33:0/16] 2024-03-05 09:08:01,314 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-05 09:08:07,637 (trainer:762) INFO: 42epoch:train:7001-7500batch: iter_time=1.503e-04, forward_time=0.208, loss_ctc=37.437, loss_att=26.146, acc=0.831, loss=29.533, backward_time=0.236, grad_norm=41.006, clip=100.000, loss_scale=1.527e+33, optim_step_time=0.075, optim0_lr0=1.553e-04, train_time=0.912 +[ip-10-0-216-33:0/16] 2024-03-05 09:15:43,088 (trainer:762) INFO: 42epoch:train:7501-8000batch: iter_time=1.476e-04, forward_time=0.210, loss_ctc=40.482, loss_att=27.808, acc=0.830, loss=31.610, backward_time=0.233, grad_norm=44.436, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.552e-04, train_time=0.911 +[ip-10-0-216-33:0/16] 2024-03-05 09:23:20,726 (trainer:762) INFO: 42epoch:train:8001-8500batch: iter_time=1.547e-04, forward_time=0.208, loss_ctc=38.847, loss_att=26.959, acc=0.827, loss=30.525, backward_time=0.230, grad_norm=43.540, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.552e-04, train_time=0.915 +[ip-10-0-216-33:0/16] 2024-03-05 09:23:50,572 (trainer:693) WARNING: The grad norm is nan. 
Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-05 09:31:01,886 (trainer:762) INFO: 42epoch:train:8501-9000batch: iter_time=1.542e-04, forward_time=0.209, loss_ctc=38.493, loss_att=26.342, acc=0.836, loss=29.988, backward_time=0.238, grad_norm=41.801, clip=100.000, loss_scale=6.894e+32, optim_step_time=0.075, optim0_lr0=1.551e-04, train_time=0.922 +[ip-10-0-216-33:0/16] 2024-03-05 09:38:41,309 (trainer:762) INFO: 42epoch:train:9001-9500batch: iter_time=1.541e-04, forward_time=0.207, loss_ctc=40.431, loss_att=28.420, acc=0.822, loss=32.023, backward_time=0.230, grad_norm=45.819, clip=100.000, loss_scale=6.490e+32, optim_step_time=0.075, optim0_lr0=1.550e-04, train_time=0.919 +[ip-10-0-216-33:0/16] 2024-03-05 09:46:19,460 (trainer:762) INFO: 42epoch:train:9501-10000batch: iter_time=1.496e-04, forward_time=0.209, loss_ctc=40.545, loss_att=27.431, acc=0.832, loss=31.365, backward_time=0.231, grad_norm=41.805, clip=100.000, loss_scale=6.490e+32, optim_step_time=0.075, optim0_lr0=1.550e-04, train_time=0.916 +[ip-10-0-216-33:0/16] 2024-03-05 09:46:24,265 (multiple_iter_factory:32) INFO: Building 2th iter-factory... +[ip-10-0-216-33:0/16] 2024-03-05 09:47:01,215 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-05 09:47:09,157 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-05 09:47:09,158 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2, +[ip-10-0-216-33:0/16] 2024-03-05 09:47:09,162 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-05 10:01:33,389 (trainer:762) INFO: 42epoch:train:10001-10500batch: iter_time=0.897, forward_time=0.211, loss_ctc=39.714, loss_att=27.429, acc=0.835, loss=31.115, backward_time=0.237, grad_norm=43.640, clip=100.000, loss_scale=6.490e+32, optim_step_time=0.075, optim0_lr0=1.549e-04, train_time=1.828 +[ip-10-0-216-33:0/16] 2024-03-05 10:09:10,266 (trainer:762) INFO: 42epoch:train:10501-11000batch: iter_time=1.472e-04, forward_time=0.208, loss_ctc=40.454, loss_att=27.481, acc=0.825, loss=31.372, backward_time=0.238, grad_norm=42.374, clip=100.000, loss_scale=1.257e+33, optim_step_time=0.075, optim0_lr0=1.548e-04, train_time=0.914 +[ip-10-0-216-33:0/16] 2024-03-05 10:16:41,963 (trainer:762) INFO: 42epoch:train:11001-11500batch: iter_time=1.583e-04, forward_time=0.210, loss_ctc=39.261, loss_att=27.221, acc=0.830, loss=30.833, backward_time=0.240, grad_norm=44.831, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.548e-04, train_time=0.903 +[ip-10-0-216-33:0/16] 2024-03-05 10:24:17,196 (trainer:762) INFO: 42epoch:train:11501-12000batch: iter_time=1.575e-04, forward_time=0.210, loss_ctc=39.664, loss_att=26.924, acc=0.829, loss=30.746, backward_time=0.237, grad_norm=43.116, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.547e-04, train_time=0.910 +[ip-10-0-216-33:0/16] 2024-03-05 10:31:51,702 (trainer:762) INFO: 
42epoch:train:12001-12500batch: iter_time=1.535e-04, forward_time=0.210, loss_ctc=37.422, loss_att=25.995, acc=0.831, loss=29.423, backward_time=0.237, grad_norm=41.303, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.547e-04, train_time=0.909 +[ip-10-0-216-33:0/16] 2024-03-05 10:39:23,546 (trainer:762) INFO: 42epoch:train:12501-13000batch: iter_time=1.513e-04, forward_time=0.210, loss_ctc=40.202, loss_att=27.632, acc=0.831, loss=31.403, backward_time=0.233, grad_norm=43.550, clip=100.000, loss_scale=2.513e+33, optim_step_time=0.075, optim0_lr0=1.546e-04, train_time=0.903 +[ip-10-0-216-33:0/16] 2024-03-05 10:46:55,925 (trainer:762) INFO: 42epoch:train:13001-13500batch: iter_time=1.517e-04, forward_time=0.210, loss_ctc=38.933, loss_att=27.149, acc=0.827, loss=30.684, backward_time=0.232, grad_norm=42.532, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.545e-04, train_time=0.905 +[ip-10-0-216-33:0/16] 2024-03-05 10:54:28,555 (trainer:762) INFO: 42epoch:train:13501-14000batch: iter_time=1.543e-04, forward_time=0.209, loss_ctc=38.197, loss_att=26.220, acc=0.836, loss=29.813, backward_time=0.234, grad_norm=40.410, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.545e-04, train_time=0.905 +[ip-10-0-216-33:0/16] 2024-03-05 11:02:06,765 (trainer:762) INFO: 42epoch:train:14001-14500batch: iter_time=1.491e-04, forward_time=0.210, loss_ctc=40.495, loss_att=28.540, acc=0.822, loss=32.127, backward_time=0.233, grad_norm=45.036, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.544e-04, train_time=0.916 +[ip-10-0-216-33:0/16] 2024-03-05 11:02:52,252 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-05 11:09:47,295 (trainer:762) INFO: 42epoch:train:14501-15000batch: iter_time=1.485e-04, forward_time=0.208, loss_ctc=40.397, loss_att=27.442, acc=0.832, loss=31.329, backward_time=0.234, grad_norm=40.207, clip=100.000, loss_scale=2.685e+33, optim_step_time=0.075, optim0_lr0=1.544e-04, train_time=0.921 +[ip-10-0-216-33:0/16] 2024-03-05 11:23:58,258 (trainer:361) INFO: 42epoch results: [train] iter_time=0.088, forward_time=0.209, loss_ctc=39.647, loss_att=27.270, acc=0.830, loss=30.983, backward_time=0.234, grad_norm=43.310, clip=100.000, loss_scale=1.431e+33, optim_step_time=0.075, optim0_lr0=1.553e-04, train_time=1.004, time=4 hours, 11 minutes and 14.58 seconds, total_count=630000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=34.489, cer_ctc=0.187, loss_att=27.271, acc=0.759, cer=0.299, wer=1.000, loss=29.437, time=13 minutes and 52.47 seconds, total_count=89586, gpu_max_cached_mem_GB=36.516 +[ip-10-0-216-33:0/16] 2024-03-05 11:24:08,093 (trainer:416) INFO: The best model has been updated: valid.total_count +[ip-10-0-216-33:0/16] 2024-03-05 11:24:08,145 (trainer:470) INFO: The model files were removed: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/37epoch.pth +[ip-10-0-216-33:0/16] 2024-03-05 11:24:08,146 (trainer:290) INFO: 43/45epoch started. Estimated time to finish: 13 hours, 16 minutes and 5.55 seconds +[ip-10-0-216-33:0/16] 2024-03-05 11:24:08,154 (multiple_iter_factory:32) INFO: Building 0th iter-factory... 
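In the `[valid]` summaries, `cer`, `wer` and `cer_ctc` are standard edit-distance error rates: total substitutions, insertions and deletions divided by total reference length, in characters or words. A self-contained sketch of that computation (assuming the third-party `editdistance` package is available):

```python
import editdistance  # pip install editdistance (assumed available)

def error_rate(refs, hyps, unit="char"):
    """CER (unit='char') or WER (unit='word') over a set of ref/hyp pairs."""
    dist = ref_len = 0
    for ref, hyp in zip(refs, hyps):
        r = list(ref) if unit == "char" else ref.split()
        h = list(hyp) if unit == "char" else hyp.split()
        dist += editdistance.eval(r, h)
        ref_len += len(r)
    return dist / max(ref_len, 1)

print(error_rate(["open whisper"], ["open wisper"]))          # CER ~ 0.083
print(error_rate(["open whisper"], ["open wisper"], "word"))  # WER = 0.5
```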
+[ip-10-0-216-33:0/16] 2024-03-05 11:24:44,171 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-05 11:24:52,689 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-05 11:24:52,690 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1, +[ip-10-0-216-33:0/16] 2024-03-05 11:24:52,694 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-05 11:38:46,475 (trainer:762) INFO: 43epoch:train:1-500batch: iter_time=0.846, forward_time=0.211, loss_ctc=39.584, loss_att=27.068, acc=0.833, loss=30.823, backward_time=0.231, grad_norm=46.800, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.543e-04, train_time=1.756 +[ip-10-0-216-33:0/16] 2024-03-05 11:46:19,156 (trainer:762) INFO: 43epoch:train:501-1000batch: iter_time=1.525e-04, forward_time=0.210, loss_ctc=38.067, loss_att=26.907, acc=0.829, loss=30.255, backward_time=0.233, grad_norm=42.417, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.542e-04, train_time=0.905 +[ip-10-0-216-33:0/16] 2024-03-05 11:53:54,331 (trainer:762) INFO: 43epoch:train:1001-1500batch: iter_time=1.524e-04, forward_time=0.210, loss_ctc=41.357, loss_att=28.209, acc=0.829, loss=32.153, backward_time=0.232, grad_norm=42.680, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.542e-04, train_time=0.910 +[ip-10-0-216-33:0/16] 2024-03-05 12:01:29,580 (trainer:762) INFO: 43epoch:train:1501-2000batch: iter_time=1.546e-04, forward_time=0.210, loss_ctc=40.501, loss_att=27.717, acc=0.829, loss=31.552, backward_time=0.231, grad_norm=41.452, clip=100.000, loss_scale=4.933e+33, optim_step_time=0.075, optim0_lr0=1.541e-04, train_time=0.910 +[ip-10-0-216-33:0/16] 2024-03-05 12:06:15,404 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-05 12:08:59,138 (trainer:762) INFO: 43epoch:train:2001-2500batch: iter_time=1.495e-04, forward_time=0.210, loss_ctc=42.236, loss_att=28.294, acc=0.835, loss=32.477, backward_time=0.232, grad_norm=41.466, clip=100.000, loss_scale=4.240e+33, optim_step_time=0.075, optim0_lr0=1.540e-04, train_time=0.899 +[ip-10-0-216-33:0/16] 2024-03-05 12:16:35,567 (trainer:762) INFO: 43epoch:train:2501-3000batch: iter_time=1.508e-04, forward_time=0.211, loss_ctc=38.513, loss_att=26.707, acc=0.833, loss=30.249, backward_time=0.237, grad_norm=42.579, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.540e-04, train_time=0.913 +[ip-10-0-216-33:0/16] 2024-03-05 12:17:27,345 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
+[ip-10-0-216-33:0/16] 2024-03-05 12:24:12,580 (trainer:762) INFO: 43epoch:train:3001-3500batch: iter_time=1.509e-04, forward_time=0.209, loss_ctc=39.644, loss_att=27.157, acc=0.828, loss=30.903, backward_time=0.239, grad_norm=46.906, clip=100.000, loss_scale=1.446e+33, optim_step_time=0.075, optim0_lr0=1.539e-04, train_time=0.914 +[ip-10-0-216-33:0/16] 2024-03-05 12:31:50,327 (trainer:762) INFO: 43epoch:train:3501-4000batch: iter_time=1.513e-04, forward_time=0.210, loss_ctc=39.265, loss_att=26.776, acc=0.837, loss=30.523, backward_time=0.235, grad_norm=41.954, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.539e-04, train_time=0.915 +[ip-10-0-216-33:0/16] 2024-03-05 12:39:25,956 (trainer:762) INFO: 43epoch:train:4001-4500batch: iter_time=1.520e-04, forward_time=0.211, loss_ctc=40.875, loss_att=27.971, acc=0.830, loss=31.842, backward_time=0.243, grad_norm=42.716, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.538e-04, train_time=0.911 +[ip-10-0-216-33:0/16] 2024-03-05 12:47:00,313 (trainer:762) INFO: 43epoch:train:4501-5000batch: iter_time=1.571e-04, forward_time=0.209, loss_ctc=36.927, loss_att=25.308, acc=0.832, loss=28.794, backward_time=0.237, grad_norm=40.362, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.537e-04, train_time=0.908 +[ip-10-0-216-33:0/16] 2024-03-05 12:47:05,678 (multiple_iter_factory:32) INFO: Building 1th iter-factory... +[ip-10-0-216-33:0/16] 2024-03-05 12:47:42,015 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-05 12:47:49,925 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-05 12:47:49,925 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0, +[ip-10-0-216-33:0/16] 2024-03-05 12:47:49,930 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-05 13:02:10,663 (trainer:762) INFO: 43epoch:train:5001-5500batch: iter_time=0.899, forward_time=0.209, loss_ctc=39.475, loss_att=26.923, acc=0.834, loss=30.689, backward_time=0.237, grad_norm=42.135, clip=100.000, loss_scale=2.446e+33, optim_step_time=0.075, optim0_lr0=1.537e-04, train_time=1.820 +[ip-10-0-216-33:0/16] 2024-03-05 13:09:51,269 (trainer:762) INFO: 43epoch:train:5501-6000batch: iter_time=1.550e-04, forward_time=0.209, loss_ctc=37.761, loss_att=26.706, acc=0.830, loss=30.023, backward_time=0.238, grad_norm=43.096, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.536e-04, train_time=0.921 +[ip-10-0-216-33:0/16] 2024-03-05 13:17:22,289 (trainer:762) INFO: 43epoch:train:6001-6500batch: iter_time=1.542e-04, forward_time=0.209, loss_ctc=40.987, loss_att=28.072, acc=0.829, loss=31.946, backward_time=0.235, grad_norm=42.182, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.536e-04, train_time=0.902 +[ip-10-0-216-33:0/16] 2024-03-05 13:21:59,178 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
+[ip-10-0-216-33:0/16] 2024-03-05 13:24:58,740 (trainer:762) INFO: 43epoch:train:6501-7000batch: iter_time=1.486e-04, forward_time=0.210, loss_ctc=40.135, loss_att=27.559, acc=0.830, loss=31.332, backward_time=0.235, grad_norm=42.030, clip=100.000, loss_scale=2.081e+33, optim_step_time=0.075, optim0_lr0=1.535e-04, train_time=0.913 +[ip-10-0-216-33:0/16] 2024-03-05 13:32:31,389 (trainer:762) INFO: 43epoch:train:7001-7500batch: iter_time=1.450e-04, forward_time=0.210, loss_ctc=41.786, loss_att=28.157, acc=0.836, loss=32.246, backward_time=0.233, grad_norm=41.018, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.534e-04, train_time=0.905 +[ip-10-0-216-33:0/16] 2024-03-05 13:40:09,792 (trainer:762) INFO: 43epoch:train:7501-8000batch: iter_time=1.472e-04, forward_time=0.210, loss_ctc=38.206, loss_att=26.544, acc=0.834, loss=30.043, backward_time=0.236, grad_norm=41.471, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.534e-04, train_time=0.917 +[ip-10-0-216-33:0/16] 2024-03-05 13:47:41,778 (trainer:762) INFO: 43epoch:train:8001-8500batch: iter_time=1.478e-04, forward_time=0.210, loss_ctc=39.388, loss_att=27.027, acc=0.829, loss=30.735, backward_time=0.232, grad_norm=42.274, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.533e-04, train_time=0.904 +[ip-10-0-216-33:0/16] 2024-03-05 13:55:10,013 (trainer:762) INFO: 43epoch:train:8501-9000batch: iter_time=1.506e-04, forward_time=0.209, loss_ctc=39.162, loss_att=26.779, acc=0.836, loss=30.494, backward_time=0.233, grad_norm=41.669, clip=100.000, loss_scale=1.812e+33, optim_step_time=0.075, optim0_lr0=1.533e-04, train_time=0.896 +[ip-10-0-216-33:0/16] 2024-03-05 14:02:42,990 (trainer:762) INFO: 43epoch:train:9001-9500batch: iter_time=1.481e-04, forward_time=0.211, loss_ctc=40.591, loss_att=27.836, acc=0.831, loss=31.663, backward_time=0.233, grad_norm=42.843, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.532e-04, train_time=0.906 +[ip-10-0-216-33:0/16] 2024-03-05 14:10:12,877 (trainer:762) INFO: 43epoch:train:9501-10000batch: iter_time=1.475e-04, forward_time=0.208, loss_ctc=36.821, loss_att=25.355, acc=0.832, loss=28.795, backward_time=0.235, grad_norm=40.189, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.531e-04, train_time=0.900 +[ip-10-0-216-33:0/16] 2024-03-05 14:10:18,396 (multiple_iter_factory:32) INFO: Building 2th iter-factory... 
+[ip-10-0-216-33:0/16] 2024-03-05 14:10:55,410 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-05 14:11:03,386 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-05 14:11:03,386 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2, +[ip-10-0-216-33:0/16] 2024-03-05 14:11:03,391 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-05 14:27:51,833 (trainer:762) INFO: 43epoch:train:10001-10500batch: iter_time=1.203, forward_time=0.209, loss_ctc=39.288, loss_att=26.965, acc=0.835, loss=30.662, backward_time=0.232, grad_norm=41.793, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.531e-04, train_time=2.118 +[ip-10-0-216-33:0/16] 2024-03-05 14:28:33,500 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-05 14:35:29,761 (trainer:762) INFO: 43epoch:train:10501-11000batch: iter_time=1.492e-04, forward_time=0.209, loss_ctc=37.840, loss_att=26.610, acc=0.830, loss=29.979, backward_time=0.233, grad_norm=42.125, clip=100.000, loss_scale=1.410e+33, optim_step_time=0.075, optim0_lr0=1.530e-04, train_time=0.916 +[ip-10-0-216-33:0/16] 2024-03-05 14:43:01,495 (trainer:762) INFO: 43epoch:train:11001-11500batch: iter_time=1.504e-04, forward_time=0.209, loss_ctc=40.908, loss_att=27.985, acc=0.830, loss=31.862, backward_time=0.232, grad_norm=41.907, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.530e-04, train_time=0.903 +[ip-10-0-216-33:0/16] 2024-03-05 14:50:31,750 (trainer:762) INFO: 43epoch:train:11501-12000batch: iter_time=1.456e-04, forward_time=0.211, loss_ctc=40.181, loss_att=27.539, acc=0.830, loss=31.332, backward_time=0.227, grad_norm=41.639, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.529e-04, train_time=0.900 +[ip-10-0-216-33:0/16] 2024-03-05 14:58:04,565 (trainer:762) INFO: 43epoch:train:12001-12500batch: iter_time=1.476e-04, forward_time=0.210, loss_ctc=41.850, loss_att=28.150, acc=0.835, loss=32.260, backward_time=0.235, grad_norm=41.259, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.528e-04, train_time=0.905 +[ip-10-0-216-33:0/16] 2024-03-05 15:05:33,864 (trainer:762) INFO: 43epoch:train:12501-13000batch: iter_time=1.453e-04, forward_time=0.210, loss_ctc=38.142, loss_att=26.441, acc=0.834, loss=29.952, backward_time=0.228, grad_norm=42.711, clip=100.000, loss_scale=2.482e+33, optim_step_time=0.075, optim0_lr0=1.528e-04, train_time=0.898 +[ip-10-0-216-33:0/16] 2024-03-05 15:13:06,986 (trainer:762) INFO: 43epoch:train:13001-13500batch: iter_time=1.459e-04, forward_time=0.209, loss_ctc=39.434, loss_att=27.093, acc=0.829, loss=30.796, backward_time=0.227, grad_norm=42.236, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.527e-04, train_time=0.906 +[ip-10-0-216-33:0/16] 2024-03-05 15:20:39,825 (trainer:762) INFO: 43epoch:train:13501-14000batch: 
iter_time=1.466e-04, forward_time=0.208, loss_ctc=38.996, loss_att=26.709, acc=0.837, loss=30.395, backward_time=0.234, grad_norm=56.582, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.527e-04, train_time=0.905 +[ip-10-0-216-33:0/16] 2024-03-05 15:24:29,054 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-05 15:28:16,558 (trainer:762) INFO: 43epoch:train:14001-14500batch: iter_time=1.470e-04, forward_time=0.210, loss_ctc=40.432, loss_att=27.701, acc=0.831, loss=31.521, backward_time=0.233, grad_norm=43.043, clip=100.000, loss_scale=1.946e+33, optim_step_time=0.075, optim0_lr0=1.526e-04, train_time=0.913 +[ip-10-0-216-33:0/16] 2024-03-05 15:35:50,338 (trainer:762) INFO: 43epoch:train:14501-15000batch: iter_time=1.466e-04, forward_time=0.208, loss_ctc=36.849, loss_att=25.325, acc=0.832, loss=28.782, backward_time=0.229, grad_norm=40.684, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.525e-04, train_time=0.907 +[ip-10-0-216-33:0/16] 2024-03-05 15:50:17,575 (trainer:361) INFO: 43epoch results: [train] iter_time=0.098, forward_time=0.210, loss_ctc=39.507, loss_att=27.120, acc=0.832, loss=30.836, backward_time=0.234, grad_norm=42.740, clip=100.000, loss_scale=2.144e+33, optim_step_time=0.075, optim0_lr0=1.534e-04, train_time=1.007, time=4 hours, 11 minutes and 57.51 seconds, total_count=645000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=34.780, cer_ctc=0.188, loss_att=29.530, acc=0.772, cer=0.224, wer=0.990, loss=31.105, time=14 minutes and 11.61 seconds, total_count=91719, gpu_max_cached_mem_GB=36.516 +[ip-10-0-216-33:0/16] 2024-03-05 15:50:27,230 (trainer:416) INFO: The best model has been updated: valid.total_count +[ip-10-0-216-33:0/16] 2024-03-05 15:50:27,285 (trainer:470) INFO: The model files were removed: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/30epoch.pth, exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/38epoch.pth +[ip-10-0-216-33:0/16] 2024-03-05 15:50:27,286 (trainer:290) INFO: 44/45epoch started. Estimated time to finish: 8 hours, 50 minutes and 46.43 seconds +[ip-10-0-216-33:0/16] 2024-03-05 15:50:27,293 (multiple_iter_factory:32) INFO: Building 0th iter-factory... 
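The `Estimated time to finish` lines are a straightforward extrapolation from measured epoch cost: here one epoch takes about 4 h 11 m of training plus roughly 14 m of validation, so the two remaining epochs land near the logged 8 h 50 m. As arithmetic (my reconstruction of the estimate, not the trainer's exact formula):

```python
def eta_string(epochs_done, total_epochs, seconds_per_epoch):
    """Remaining wall-clock time, formatted like the trainer's log line."""
    remaining = (total_epochs - epochs_done) * seconds_per_epoch
    h, rem = divmod(int(remaining), 3600)
    m, s = divmod(rem, 60)
    return f"{h} hours, {m} minutes and {s} seconds"

# ~4h11m train + ~14m valid per epoch, 2 of 45 epochs left:
print(eta_string(43, 45, 4 * 3600 + (11 + 14) * 60))  # -> 8 hours, 50 minutes and 0 seconds
```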
+[ip-10-0-216-33:0/16] 2024-03-05 15:51:03,322 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-05 15:51:11,206 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-05 15:51:11,207 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1, +[ip-10-0-216-33:0/16] 2024-03-05 15:51:11,211 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-05 16:05:19,209 (trainer:762) INFO: 44epoch:train:1-500batch: iter_time=0.847, forward_time=0.210, loss_ctc=41.452, loss_att=28.085, acc=0.829, loss=32.095, backward_time=0.231, grad_norm=43.882, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.525e-04, train_time=1.784 +[ip-10-0-216-33:0/16] 2024-03-05 16:13:01,742 (trainer:762) INFO: 44epoch:train:501-1000batch: iter_time=1.494e-04, forward_time=0.208, loss_ctc=36.366, loss_att=24.883, acc=0.835, loss=28.328, backward_time=0.233, grad_norm=39.112, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.524e-04, train_time=0.925 +[ip-10-0-216-33:0/16] 2024-03-05 16:20:43,711 (trainer:762) INFO: 44epoch:train:1001-1500batch: iter_time=1.522e-04, forward_time=0.208, loss_ctc=39.008, loss_att=26.809, acc=0.830, loss=30.469, backward_time=0.231, grad_norm=42.061, clip=100.000, loss_scale=1.947e+33, optim_step_time=0.075, optim0_lr0=1.524e-04, train_time=0.924 +[ip-10-0-216-33:0/16] 2024-03-05 16:28:19,447 (trainer:762) INFO: 44epoch:train:1501-2000batch: iter_time=1.474e-04, forward_time=0.210, loss_ctc=39.833, loss_att=27.209, acc=0.831, loss=30.996, backward_time=0.232, grad_norm=42.299, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.523e-04, train_time=0.911 +[ip-10-0-216-33:0/16] 2024-03-05 16:35:55,816 (trainer:762) INFO: 44epoch:train:2001-2500batch: iter_time=1.491e-04, forward_time=0.210, loss_ctc=40.341, loss_att=27.374, acc=0.832, loss=31.264, backward_time=0.235, grad_norm=44.211, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.523e-04, train_time=0.912 +[ip-10-0-216-33:0/16] 2024-03-05 16:43:34,531 (trainer:762) INFO: 44epoch:train:2501-3000batch: iter_time=1.503e-04, forward_time=0.208, loss_ctc=38.669, loss_att=26.310, acc=0.834, loss=30.018, backward_time=0.231, grad_norm=39.586, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.522e-04, train_time=0.917 +[ip-10-0-216-33:0/16] 2024-03-05 16:51:06,685 (trainer:762) INFO: 44epoch:train:3001-3500batch: iter_time=1.492e-04, forward_time=0.210, loss_ctc=38.861, loss_att=26.625, acc=0.830, loss=30.296, backward_time=0.237, grad_norm=41.660, clip=100.000, loss_scale=3.894e+33, optim_step_time=0.075, optim0_lr0=1.521e-04, train_time=0.904 +[ip-10-0-216-33:0/16] 2024-03-05 16:53:28,474 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-05 16:53:42,659 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
+[ip-10-0-216-33:0/16] 2024-03-05 16:58:34,277 (trainer:762) INFO: 44epoch:train:3501-4000batch: iter_time=1.504e-04, forward_time=0.209, loss_ctc=39.708, loss_att=26.920, acc=0.835, loss=30.757, backward_time=0.235, grad_norm=40.963, clip=100.000, loss_scale=2.565e+33, optim_step_time=0.075, optim0_lr0=1.521e-04, train_time=0.895 +[ip-10-0-216-33:0/16] 2024-03-05 17:06:08,306 (trainer:762) INFO: 44epoch:train:4001-4500batch: iter_time=1.507e-04, forward_time=0.211, loss_ctc=39.374, loss_att=27.018, acc=0.831, loss=30.725, backward_time=0.235, grad_norm=43.309, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.520e-04, train_time=0.908 +[ip-10-0-216-33:0/16] 2024-03-05 17:13:40,114 (trainer:762) INFO: 44epoch:train:4501-5000batch: iter_time=1.475e-04, forward_time=0.210, loss_ctc=38.380, loss_att=26.098, acc=0.833, loss=29.783, backward_time=0.236, grad_norm=38.719, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.520e-04, train_time=0.903 +[ip-10-0-216-33:0/16] 2024-03-05 17:13:45,136 (multiple_iter_factory:32) INFO: Building 1th iter-factory... +[ip-10-0-216-33:0/16] 2024-03-05 17:14:21,634 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-05 17:14:29,485 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-05 17:14:29,485 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2, +[ip-10-0-216-33:0/16] 2024-03-05 17:14:29,490 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-05 17:28:49,381 (trainer:762) INFO: 44epoch:train:5001-5500batch: iter_time=0.875, forward_time=0.211, loss_ctc=40.935, loss_att=27.804, acc=0.830, loss=31.743, backward_time=0.234, grad_norm=42.390, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.519e-04, train_time=1.818 +[ip-10-0-216-33:0/16] 2024-03-05 17:36:19,562 (trainer:762) INFO: 44epoch:train:5501-6000batch: iter_time=1.480e-04, forward_time=0.209, loss_ctc=36.236, loss_att=24.758, acc=0.836, loss=28.202, backward_time=0.234, grad_norm=40.024, clip=100.000, loss_scale=2.144e+33, optim_step_time=0.075, optim0_lr0=1.518e-04, train_time=0.900 +[ip-10-0-216-33:0/16] 2024-03-05 17:43:53,600 (trainer:762) INFO: 44epoch:train:6001-6500batch: iter_time=1.497e-04, forward_time=0.209, loss_ctc=38.627, loss_att=26.570, acc=0.831, loss=30.187, backward_time=0.235, grad_norm=42.030, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.518e-04, train_time=0.908 +[ip-10-0-216-33:0/16] 2024-03-05 17:51:32,685 (trainer:762) INFO: 44epoch:train:6501-7000batch: iter_time=1.466e-04, forward_time=0.209, loss_ctc=39.553, loss_att=26.981, acc=0.832, loss=30.752, backward_time=0.236, grad_norm=41.731, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.517e-04, train_time=0.918 +[ip-10-0-216-33:0/16] 2024-03-05 17:59:02,344 (trainer:762) INFO: 44epoch:train:7001-7500batch: iter_time=1.483e-04, 
forward_time=0.210, loss_ctc=40.091, loss_att=27.251, acc=0.833, loss=31.103, backward_time=0.231, grad_norm=42.831, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.517e-04, train_time=0.899 +[ip-10-0-216-33:0/16] 2024-03-05 18:02:34,077 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-05 18:05:04,424 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-05 18:06:41,551 (trainer:762) INFO: 44epoch:train:7501-8000batch: iter_time=1.452e-04, forward_time=0.209, loss_ctc=38.452, loss_att=26.209, acc=0.835, loss=29.882, backward_time=0.232, grad_norm=39.297, clip=100.000, loss_scale=2.607e+33, optim_step_time=0.075, optim0_lr0=1.516e-04, train_time=0.918 +[ip-10-0-216-33:0/16] 2024-03-05 18:14:25,421 (trainer:762) INFO: 44epoch:train:8001-8500batch: iter_time=1.442e-04, forward_time=0.208, loss_ctc=38.637, loss_att=26.549, acc=0.831, loss=30.175, backward_time=0.235, grad_norm=41.773, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.516e-04, train_time=0.927 +[ip-10-0-216-33:0/16] 2024-03-05 18:22:02,396 (trainer:762) INFO: 44epoch:train:8501-9000batch: iter_time=1.456e-04, forward_time=0.208, loss_ctc=39.508, loss_att=26.835, acc=0.836, loss=30.637, backward_time=0.231, grad_norm=42.155, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.515e-04, train_time=0.914 +[ip-10-0-216-33:0/16] 2024-03-05 18:29:43,808 (trainer:762) INFO: 44epoch:train:9001-9500batch: iter_time=1.444e-04, forward_time=0.207, loss_ctc=39.298, loss_att=27.033, acc=0.831, loss=30.712, backward_time=0.240, grad_norm=43.535, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.514e-04, train_time=0.923 +[ip-10-0-216-33:0/16] 2024-03-05 18:37:14,174 (trainer:762) INFO: 44epoch:train:9501-10000batch: iter_time=1.442e-04, forward_time=0.209, loss_ctc=38.315, loss_att=25.998, acc=0.833, loss=29.693, backward_time=0.238, grad_norm=39.109, clip=100.000, loss_scale=1.573e+33, optim_step_time=0.075, optim0_lr0=1.514e-04, train_time=0.900 +[ip-10-0-216-33:0/16] 2024-03-05 18:37:20,369 (multiple_iter_factory:32) INFO: Building 2th iter-factory... +[ip-10-0-216-33:0/16] 2024-03-05 18:37:58,028 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-05 18:38:06,311 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-05 18:38:06,311 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0, +[ip-10-0-216-33:0/16] 2024-03-05 18:38:06,316 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-05 18:48:31,876 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
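The `Building Nth iter-factory` messages show how each epoch is streamed: the training data is pre-split into three shards (`split.0`, `split.1`, `split.2`), and a fresh iterator is built per shard, each drawing fixed 256-utterance mini-batches from a shape key file without any length sorting (`UnsortedBatchSampler`). A deliberately simplified sketch of that sampling (the real sampler also folds the remainder into existing batches, which is presumably why the log reports `min=256, max=257`):

```python
def unsorted_batches(key_file, batch_size=256):
    # each line of the shape file is "<utterance-id> <length>"; original order is kept
    with open(key_file) as f:
        keys = [line.split()[0] for line in f if line.strip()]
    return [keys[i:i + batch_size] for i in range(0, len(keys), batch_size)]

# one iter-factory per shard, built in turn within a single epoch
for shard in ("split.0", "split.1", "split.2"):
    batches = unsorted_batches(
        f"exp/s2t_stats_raw_bpe50000/splits3/speech_shape/{shard}"
    )
```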
+[ip-10-0-216-33:0/16] 2024-03-05 18:52:43,348 (trainer:762) INFO: 44epoch:train:10001-10500batch: iter_time=0.894, forward_time=0.210, loss_ctc=40.914, loss_att=27.796, acc=0.830, loss=31.731, backward_time=0.235, grad_norm=42.359, clip=100.000, loss_scale=1.868e+33, optim_step_time=0.075, optim0_lr0=1.513e-04, train_time=1.858 +[ip-10-0-216-33:0/16] 2024-03-05 19:00:18,648 (trainer:762) INFO: 44epoch:train:10501-11000batch: iter_time=1.460e-04, forward_time=0.208, loss_ctc=36.026, loss_att=24.595, acc=0.836, loss=28.024, backward_time=0.230, grad_norm=38.746, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.513e-04, train_time=0.910 +[ip-10-0-216-33:0/16] 2024-03-05 19:07:46,987 (trainer:762) INFO: 44epoch:train:11001-11500batch: iter_time=1.477e-04, forward_time=0.210, loss_ctc=38.606, loss_att=26.638, acc=0.831, loss=30.228, backward_time=0.230, grad_norm=41.787, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.512e-04, train_time=0.896 +[ip-10-0-216-33:0/16] 2024-03-05 19:15:18,653 (trainer:762) INFO: 44epoch:train:11501-12000batch: iter_time=1.505e-04, forward_time=0.209, loss_ctc=39.598, loss_att=27.013, acc=0.832, loss=30.789, backward_time=0.235, grad_norm=42.123, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.511e-04, train_time=0.903 +[ip-10-0-216-33:0/16] 2024-03-05 19:22:52,290 (trainer:762) INFO: 44epoch:train:12001-12500batch: iter_time=1.490e-04, forward_time=0.210, loss_ctc=39.935, loss_att=27.194, acc=0.833, loss=31.016, backward_time=0.234, grad_norm=42.378, clip=100.000, loss_scale=2.025e+33, optim_step_time=0.075, optim0_lr0=1.511e-04, train_time=0.907 +[ip-10-0-216-33:0/16] 2024-03-05 19:30:32,694 (trainer:762) INFO: 44epoch:train:12501-13000batch: iter_time=1.517e-04, forward_time=0.208, loss_ctc=38.253, loss_att=26.063, acc=0.835, loss=29.720, backward_time=0.237, grad_norm=39.557, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.510e-04, train_time=0.921 +[ip-10-0-216-33:0/16] 2024-03-05 19:38:14,721 (trainer:762) INFO: 44epoch:train:13001-13500batch: iter_time=1.475e-04, forward_time=0.208, loss_ctc=38.511, loss_att=26.421, acc=0.831, loss=30.048, backward_time=0.232, grad_norm=40.778, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.510e-04, train_time=0.924 +[ip-10-0-216-33:0/16] 2024-03-05 19:41:50,680 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
+[ip-10-0-216-33:0/16] 2024-03-05 19:45:52,484 (trainer:762) INFO: 44epoch:train:13501-14000batch: iter_time=1.463e-04, forward_time=0.208, loss_ctc=39.468, loss_att=26.793, acc=0.836, loss=30.596, backward_time=0.230, grad_norm=40.670, clip=100.000, loss_scale=1.899e+33, optim_step_time=0.075, optim0_lr0=1.509e-04, train_time=0.915 +[ip-10-0-216-33:0/16] 2024-03-05 19:53:33,152 (trainer:762) INFO: 44epoch:train:14001-14500batch: iter_time=1.488e-04, forward_time=0.209, loss_ctc=39.127, loss_att=26.953, acc=0.832, loss=30.605, backward_time=0.235, grad_norm=43.558, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.509e-04, train_time=0.921 +[ip-10-0-216-33:0/16] 2024-03-05 20:01:08,924 (trainer:762) INFO: 44epoch:train:14501-15000batch: iter_time=1.480e-04, forward_time=0.208, loss_ctc=38.349, loss_att=26.012, acc=0.834, loss=29.713, backward_time=0.233, grad_norm=38.992, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.508e-04, train_time=0.911 +[ip-10-0-216-33:0/16] 2024-03-05 20:19:12,172 (trainer:361) INFO: 44epoch results: [train] iter_time=0.087, forward_time=0.209, loss_ctc=39.014, loss_att=26.627, acc=0.833, loss=30.343, backward_time=0.234, grad_norm=41.388, clip=100.000, loss_scale=1.939e+33, optim_step_time=0.075, optim0_lr0=1.516e-04, train_time=1.003, time=4 hours, 10 minutes and 56.69 seconds, total_count=660000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=35.323, cer_ctc=0.190, loss_att=26.708, acc=0.786, cer=0.208, wer=0.980, loss=29.292, time=17 minutes and 47.87 seconds, total_count=93852, gpu_max_cached_mem_GB=36.516 +[ip-10-0-216-33:0/16] 2024-03-05 20:19:22,245 (trainer:416) INFO: The best model has been updated: valid.acc, valid.total_count +[ip-10-0-216-33:0/16] 2024-03-05 20:19:22,297 (trainer:290) INFO: 45/45epoch started. Estimated time to finish: 4 hours, 25 minutes and 28.14 seconds +[ip-10-0-216-33:0/16] 2024-03-05 20:19:22,306 (multiple_iter_factory:32) INFO: Building 0th iter-factory... 
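The `The best model has been updated: valid.acc, valid.total_count` lines reflect per-criterion bookkeeping: `valid.acc` is maximized, while `valid.total_count` grows monotonically every epoch, so its "5 best" checkpoints are simply the 5 most recent; any epoch checkpoint outside every criterion's n-best list is the one that gets removed. A simplified sketch of that bookkeeping (`BestTracker` is a hypothetical name, not ESPnet's class):

```python
from collections import defaultdict

class BestTracker:
    """Illustrative only; assumes higher is better, as for both criteria here."""

    def __init__(self, nbest=5):
        self.nbest = nbest
        self.scores = defaultdict(dict)  # criterion -> {epoch: value}

    def update(self, epoch, values):
        improved = []
        for crit, val in values.items():
            best = max(self.scores[crit].values(), default=float("-inf"))
            self.scores[crit][epoch] = val
            if val > best:
                improved.append(crit)
        return improved  # -> "The best model has been updated: <criteria>"

    def epochs_to_keep(self, crit):
        # checkpoints outside every criterion's n-best list get deleted
        ranked = sorted(self.scores[crit], key=self.scores[crit].get, reverse=True)
        return ranked[: self.nbest]
```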
+[ip-10-0-216-33:0/16] 2024-03-05 20:19:59,079 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-05 20:20:07,192 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.0", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.0", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.0", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.0", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-05 20:20:07,192 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.0, +[ip-10-0-216-33:0/16] 2024-03-05 20:20:07,197 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-05 20:34:03,600 (trainer:762) INFO: 45epoch:train:1-500batch: iter_time=0.850, forward_time=0.209, loss_ctc=41.322, loss_att=27.957, acc=0.830, loss=31.967, backward_time=0.235, grad_norm=75.942, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.507e-04, train_time=1.762 +[ip-10-0-216-33:0/16] 2024-03-05 20:41:39,025 (trainer:762) INFO: 45epoch:train:501-1000batch: iter_time=1.465e-04, forward_time=0.210, loss_ctc=41.245, loss_att=28.289, acc=0.828, loss=32.176, backward_time=0.234, grad_norm=53.569, clip=100.000, loss_scale=1.994e+33, optim_step_time=0.075, optim0_lr0=1.507e-04, train_time=0.911 +[ip-10-0-216-33:0/16] 2024-03-05 20:49:14,285 (trainer:762) INFO: 45epoch:train:1001-1500batch: iter_time=1.426e-04, forward_time=0.210, loss_ctc=38.194, loss_att=26.018, acc=0.839, loss=29.671, backward_time=0.234, grad_norm=42.852, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.506e-04, train_time=0.910 +[ip-10-0-216-33:0/16] 2024-03-05 20:56:52,131 (trainer:762) INFO: 45epoch:train:1501-2000batch: iter_time=1.434e-04, forward_time=0.207, loss_ctc=41.896, loss_att=28.453, acc=0.829, loss=32.486, backward_time=0.232, grad_norm=44.665, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.506e-04, train_time=0.915 +[ip-10-0-216-33:0/16] 2024-03-05 21:04:22,293 (trainer:762) INFO: 45epoch:train:2001-2500batch: iter_time=1.443e-04, forward_time=0.210, loss_ctc=39.119, loss_att=27.232, acc=0.826, loss=30.798, backward_time=0.228, grad_norm=44.750, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.505e-04, train_time=0.900 +[ip-10-0-216-33:0/16] 2024-03-05 21:12:02,165 (trainer:762) INFO: 45epoch:train:2501-3000batch: iter_time=1.416e-04, forward_time=0.208, loss_ctc=40.594, loss_att=27.275, acc=0.826, loss=31.271, backward_time=0.230, grad_norm=41.235, clip=100.000, loss_scale=3.988e+33, optim_step_time=0.075, optim0_lr0=1.505e-04, train_time=0.919 +[ip-10-0-216-33:0/16] 2024-03-05 21:14:15,078 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-05 21:16:33,931 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
+[ip-10-0-216-33:0/16] 2024-03-05 21:19:37,594 (trainer:762) INFO: 45epoch:train:3001-3500batch: iter_time=1.471e-04, forward_time=0.207, loss_ctc=38.424, loss_att=26.225, acc=0.828, loss=29.885, backward_time=0.235, grad_norm=43.784, clip=100.000, loss_scale=2.839e+33, optim_step_time=0.075, optim0_lr0=1.504e-04, train_time=0.911 +[ip-10-0-216-33:0/16] 2024-03-05 21:27:15,835 (trainer:762) INFO: 45epoch:train:3501-4000batch: iter_time=1.450e-04, forward_time=0.209, loss_ctc=39.141, loss_att=27.363, acc=0.833, loss=30.897, backward_time=0.234, grad_norm=42.768, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.503e-04, train_time=0.916 +[ip-10-0-216-33:0/16] 2024-03-05 21:34:47,713 (trainer:762) INFO: 45epoch:train:4001-4500batch: iter_time=1.488e-04, forward_time=0.208, loss_ctc=41.121, loss_att=27.817, acc=0.827, loss=31.808, backward_time=0.232, grad_norm=43.922, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.075, optim0_lr0=1.503e-04, train_time=0.903 +[ip-10-0-216-33:0/16] 2024-03-05 21:42:21,269 (trainer:762) INFO: 45epoch:train:4501-5000batch: iter_time=1.464e-04, forward_time=0.209, loss_ctc=37.946, loss_att=26.352, acc=0.830, loss=29.830, backward_time=0.239, grad_norm=44.325, clip=100.000, loss_scale=1.298e+33, optim_step_time=0.076, optim0_lr0=1.502e-04, train_time=0.907 +[ip-10-0-216-33:0/16] 2024-03-05 21:42:26,171 (multiple_iter_factory:32) INFO: Building 1th iter-factory... +[ip-10-0-216-33:0/16] 2024-03-05 21:43:02,984 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-05 21:43:10,939 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.1", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.1", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.1", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.1", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-05 21:43:10,939 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.1, +[ip-10-0-216-33:0/16] 2024-03-05 21:43:10,944 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-05 21:57:31,246 (trainer:762) INFO: 45epoch:train:5001-5500batch: iter_time=0.899, forward_time=0.209, loss_ctc=40.755, loss_att=27.503, acc=0.832, loss=31.478, backward_time=0.234, grad_norm=43.600, clip=100.000, loss_scale=1.815e+33, optim_step_time=0.075, optim0_lr0=1.502e-04, train_time=1.820 +[ip-10-0-216-33:0/16] 2024-03-05 22:05:06,810 (trainer:762) INFO: 45epoch:train:5501-6000batch: iter_time=1.496e-04, forward_time=0.208, loss_ctc=40.202, loss_att=27.893, acc=0.829, loss=31.586, backward_time=0.230, grad_norm=44.578, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.501e-04, train_time=0.911 +[ip-10-0-216-33:0/16] 2024-03-05 22:12:35,714 (trainer:762) INFO: 45epoch:train:6001-6500batch: iter_time=1.491e-04, forward_time=0.209, loss_ctc=38.005, loss_att=26.022, acc=0.840, loss=29.616, backward_time=0.230, grad_norm=42.936, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.501e-04, train_time=0.898 +[ip-10-0-216-33:0/16] 2024-03-05 22:20:14,940 (trainer:762) INFO: 45epoch:train:6501-7000batch: iter_time=1.481e-04, 
forward_time=0.209, loss_ctc=41.658, loss_att=28.298, acc=0.830, loss=32.306, backward_time=0.233, grad_norm=43.470, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.500e-04, train_time=0.918 +[ip-10-0-216-33:0/16] 2024-03-05 22:25:35,800 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. +[ip-10-0-216-33:0/16] 2024-03-05 22:27:50,353 (trainer:762) INFO: 45epoch:train:7001-7500batch: iter_time=1.474e-04, forward_time=0.208, loss_ctc=38.722, loss_att=27.094, acc=0.826, loss=30.582, backward_time=0.236, grad_norm=45.978, clip=100.000, loss_scale=2.872e+33, optim_step_time=0.075, optim0_lr0=1.500e-04, train_time=0.911 +[ip-10-0-216-33:0/16] 2024-03-05 22:35:31,016 (trainer:762) INFO: 45epoch:train:7501-8000batch: iter_time=1.473e-04, forward_time=0.208, loss_ctc=40.264, loss_att=27.052, acc=0.827, loss=31.015, backward_time=0.233, grad_norm=41.721, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.499e-04, train_time=0.921 +[ip-10-0-216-33:0/16] 2024-03-05 22:43:06,416 (trainer:762) INFO: 45epoch:train:8001-8500batch: iter_time=1.434e-04, forward_time=0.206, loss_ctc=38.191, loss_att=26.153, acc=0.827, loss=29.764, backward_time=0.232, grad_norm=42.954, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.498e-04, train_time=0.911 +[ip-10-0-216-33:0/16] 2024-03-05 22:50:43,546 (trainer:762) INFO: 45epoch:train:8501-9000batch: iter_time=1.490e-04, forward_time=0.209, loss_ctc=38.758, loss_att=27.212, acc=0.834, loss=30.676, backward_time=0.231, grad_norm=44.139, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.498e-04, train_time=0.914 +[ip-10-0-216-33:0/16] 2024-03-05 22:58:24,744 (trainer:762) INFO: 45epoch:train:9001-9500batch: iter_time=1.512e-04, forward_time=0.208, loss_ctc=41.038, loss_att=27.776, acc=0.828, loss=31.755, backward_time=0.231, grad_norm=42.827, clip=100.000, loss_scale=3.349e+33, optim_step_time=0.075, optim0_lr0=1.497e-04, train_time=0.922 +[ip-10-0-216-33:0/16] 2024-03-05 23:05:54,351 (trainer:762) INFO: 45epoch:train:9501-10000batch: iter_time=1.500e-04, forward_time=0.208, loss_ctc=37.612, loss_att=26.195, acc=0.830, loss=29.620, backward_time=0.235, grad_norm=43.846, clip=100.000, loss_scale=5.192e+33, optim_step_time=0.075, optim0_lr0=1.497e-04, train_time=0.899 +[ip-10-0-216-33:0/16] 2024-03-05 23:05:59,294 (multiple_iter_factory:32) INFO: Building 2th iter-factory... +[ip-10-0-216-33:0/16] 2024-03-05 23:06:37,604 (s2t:445) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4') +[ip-10-0-216-33:0/16] 2024-03-05 23:06:45,721 (abs_task:1663) INFO: [train] dataset: +ESPnetDataset( + speech: {"path": "exp/s2t_stats_raw_bpe50000/splits3/wav.scp/split.2", "type": "kaldi_ark"} + text_prev: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.prev/split.2", "type": "text"} + text_ctc: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text.ctc/split.2", "type": "text"} + text: {"path": "exp/s2t_stats_raw_bpe50000/splits3/text/split.2", "type": "text"} + preprocess: ) +[ip-10-0-216-33:0/16] 2024-03-05 23:06:45,721 (abs_task:1664) INFO: [train] Batch sampler: UnsortedBatchSampler(N-batch=28118, batch_size=256, key_file=exp/s2t_stats_raw_bpe50000/splits3/speech_shape/split.2, +[ip-10-0-216-33:0/16] 2024-03-05 23:06:45,726 (abs_task:1665) INFO: [train] mini-batch sizes summary: N-batch=28118, mean=256.0, min=256, max=257 +[ip-10-0-216-33:0/16] 2024-03-05 23:17:45,881 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
+[ip-10-0-216-33:0/16] 2024-03-05 23:21:13,627 (trainer:762) INFO: 45epoch:train:10001-10500batch: iter_time=0.902, forward_time=0.211, loss_ctc=40.862, loss_att=27.538, acc=0.832, loss=31.535, backward_time=0.241, grad_norm=43.395, clip=100.000, loss_scale=4.006e+33, optim_step_time=0.076, optim0_lr0=1.496e-04, train_time=1.838 +[ip-10-0-216-33:0/16] 2024-03-05 23:28:43,922 (trainer:762) INFO: 45epoch:train:10501-11000batch: iter_time=1.515e-04, forward_time=0.211, loss_ctc=40.233, loss_att=28.020, acc=0.830, loss=31.684, backward_time=0.235, grad_norm=44.782, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.496e-04, train_time=0.900 +[ip-10-0-216-33:0/16] 2024-03-05 23:36:13,482 (trainer:762) INFO: 45epoch:train:11001-11500batch: iter_time=1.586e-04, forward_time=0.210, loss_ctc=38.147, loss_att=25.999, acc=0.840, loss=29.643, backward_time=0.232, grad_norm=42.014, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.495e-04, train_time=0.899 +[ip-10-0-216-33:0/16] 2024-03-05 23:43:54,525 (trainer:762) INFO: 45epoch:train:11501-12000batch: iter_time=1.540e-04, forward_time=0.209, loss_ctc=41.423, loss_att=28.148, acc=0.830, loss=32.130, backward_time=0.230, grad_norm=49.793, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.495e-04, train_time=0.922 +[ip-10-0-216-33:0/16] 2024-03-05 23:51:32,864 (trainer:762) INFO: 45epoch:train:12001-12500batch: iter_time=1.507e-04, forward_time=0.209, loss_ctc=38.620, loss_att=26.947, acc=0.827, loss=30.449, backward_time=0.234, grad_norm=45.146, clip=100.000, loss_scale=3.780e+33, optim_step_time=0.075, optim0_lr0=1.494e-04, train_time=0.916 +[ip-10-0-216-33:0/16] 2024-03-05 23:57:06,050 (trainer:693) WARNING: The grad norm is nan. Skipping updating the model. 
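A few hundred batches later the run ends, and, as the tail of the log below shows, ESPnet then averages the 5 best checkpoints per criterion into a single `ave_5best.pth` file. Parameter averaging is essentially the following (a simplified sketch; the epoch numbers are illustrative, and real checkpoints may contain non-float entries that need special-casing):

```python
import torch

def average_checkpoints(paths):
    avg = None
    for path in paths:
        state = torch.load(path, map_location="cpu")
        if avg is None:
            avg = {k: v.double() for k, v in state.items()}
        else:
            for k, v in state.items():
                avg[k] += v.double()
    return {k: (v / len(paths)).to(state[k].dtype) for k, v in avg.items()}

# e.g. averaging the five checkpoints ranked best by valid.acc (epochs illustrative)
exp_dir = "exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000"
paths = [f"{exp_dir}/{n}epoch.pth" for n in (41, 42, 43, 44, 45)]
torch.save(average_checkpoints(paths), f"{exp_dir}/valid.acc.ave_5best.pth")
```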
+[ip-10-0-216-33:0/16] 2024-03-05 23:59:01,448 (trainer:762) INFO: 45epoch:train:12501-13000batch: iter_time=1.567e-04, forward_time=0.210, loss_ctc=40.405, loss_att=27.146, acc=0.827, loss=31.124, backward_time=0.231, grad_norm=41.074, clip=100.000, loss_scale=4.521e+33, optim_step_time=0.075, optim0_lr0=1.493e-04, train_time=0.897 +[ip-10-0-216-33:0/16] 2024-03-06 00:06:28,409 (trainer:762) INFO: 45epoch:train:13001-13500batch: iter_time=1.567e-04, forward_time=0.210, loss_ctc=38.179, loss_att=26.144, acc=0.828, loss=29.755, backward_time=0.234, grad_norm=43.335, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.493e-04, train_time=0.894 +[ip-10-0-216-33:0/16] 2024-03-06 00:13:57,200 (trainer:762) INFO: 45epoch:train:13501-14000batch: iter_time=1.587e-04, forward_time=0.210, loss_ctc=38.398, loss_att=27.079, acc=0.834, loss=30.475, backward_time=0.232, grad_norm=41.724, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.492e-04, train_time=0.897 +[ip-10-0-216-33:0/16] 2024-03-06 00:21:29,316 (trainer:762) INFO: 45epoch:train:14001-14500batch: iter_time=1.515e-04, forward_time=0.210, loss_ctc=40.669, loss_att=27.592, acc=0.829, loss=31.515, backward_time=0.235, grad_norm=42.560, clip=100.000, loss_scale=2.596e+33, optim_step_time=0.075, optim0_lr0=1.492e-04, train_time=0.904 +[ip-10-0-216-33:0/16] 2024-03-06 00:28:57,349 (trainer:762) INFO: 45epoch:train:14501-15000batch: iter_time=1.641e-04, forward_time=0.210, loss_ctc=37.689, loss_att=26.302, acc=0.830, loss=29.718, backward_time=0.233, grad_norm=43.027, clip=100.000, loss_scale=3.266e+33, optim_step_time=0.075, optim0_lr0=1.491e-04, train_time=0.896 +[ip-10-0-216-33:0/16] 2024-03-06 00:43:10,684 (trainer:361) INFO: 45epoch results: [train] iter_time=0.088, forward_time=0.209, loss_ctc=39.628, loss_att=27.170, acc=0.830, loss=30.907, backward_time=0.233, grad_norm=45.024, clip=100.000, loss_scale=2.725e+33, optim_step_time=0.075, optim0_lr0=1.499e-04, train_time=0.998, time=4 hours, 9 minutes and 58.03 seconds, total_count=675000, gpu_max_cached_mem_GB=36.516, [valid] loss_ctc=33.741, cer_ctc=0.183, loss_att=25.673, acc=0.767, cer=0.299, wer=0.999, loss=28.093, time=13 minutes and 50.04 seconds, total_count=95985, gpu_max_cached_mem_GB=36.516 +ip-10-0-216-33:311885:312456 [5] NCCL INFO [Service thread] Connection closed by localRank 5 +ip-10-0-216-33:311882:312458 [2] NCCL INFO [Service thread] Connection closed by localRank 2 +ip-10-0-216-33:311886:312461 [6] NCCL INFO [Service thread] Connection closed by localRank 6 +ip-10-0-216-33:311885:311885 [5] NCCL INFO comm 0x45e56440 rank 5 nranks 16 cudaDev 5 busId 901d0 - Abort COMPLETE +ip-10-0-216-33:311887:312460 [7] NCCL INFO [Service thread] Connection closed by localRank 7 +ip-10-0-216-33:311883:312462 [3] NCCL INFO [Service thread] Connection closed by localRank 3 +ip-10-0-216-33:311884:312459 [4] NCCL INFO [Service thread] Connection closed by localRank 4 +ip-10-0-216-33:311881:312457 [1] NCCL INFO [Service thread] Connection closed by localRank 1 +ip-10-0-216-33:311886:311886 [6] NCCL INFO comm 0x42c0fbc0 rank 6 nranks 16 cudaDev 6 busId a01c0 - Abort COMPLETE +ip-10-0-216-33:311884:311884 [4] NCCL INFO comm 0x9d5b200 rank 4 nranks 16 cudaDev 4 busId 901c0 - Abort COMPLETE +ip-10-0-216-33:311881:311881 [1] NCCL INFO comm 0x9a92260 rank 1 nranks 16 cudaDev 1 busId 101d0 - Abort COMPLETE +ip-10-0-216-33:311883:311883 [3] NCCL INFO comm 0x6f267bc0 rank 3 nranks 16 cudaDev 3 busId 201d0 - Abort COMPLETE +ip-10-0-216-33:311882:311882 [2] NCCL 
INFO comm 0x511a1b90 rank 2 nranks 16 cudaDev 2 busId 201c0 - Abort COMPLETE
+ip-10-0-216-33:311887:311887 [7] NCCL INFO comm 0x6ee1f170 rank 7 nranks 16 cudaDev 7 busId a01d0 - Abort COMPLETE
+ip-10-0-224-187:3788822:3789277 [4] NCCL INFO [Service thread] Connection closed by localRank 4
+ip-10-0-224-187:3788824:3789274 [6] NCCL INFO [Service thread] Connection closed by localRank 6
+ip-10-0-224-187:3788820:3789275 [2] NCCL INFO [Service thread] Connection closed by localRank 2
+ip-10-0-224-187:3788823:3789280 [5] NCCL INFO [Service thread] Connection closed by localRank 5
+ip-10-0-224-187:3788826:3789279 [7] NCCL INFO [Service thread] Connection closed by localRank 7
+ip-10-0-224-187:3788821:3789276 [3] NCCL INFO [Service thread] Connection closed by localRank 3
+ip-10-0-224-187:3788819:3789281 [1] NCCL INFO [Service thread] Connection closed by localRank 1
+ip-10-0-224-187:3788818:3789278 [0] NCCL INFO [Service thread] Connection closed by localRank 0
+ip-10-0-224-187:3788822:3788822 [4] NCCL INFO comm 0x51831900 rank 12 nranks 16 cudaDev 4 busId 901c0 - Abort COMPLETE
+ip-10-0-224-187:3788824:3788824 [6] NCCL INFO comm 0x704da1d0 rank 14 nranks 16 cudaDev 6 busId a01c0 - Abort COMPLETE
+ip-10-0-224-187:3788820:3788820 [2] NCCL INFO comm 0x965e2c0 rank 10 nranks 16 cudaDev 2 busId 201c0 - Abort COMPLETE
+ip-10-0-224-187:3788823:3788823 [5] NCCL INFO comm 0xa4dcd80 rank 13 nranks 16 cudaDev 5 busId 901d0 - Abort COMPLETE
+ip-10-0-224-187:3788826:3788826 [7] NCCL INFO comm 0x42154f70 rank 15 nranks 16 cudaDev 7 busId a01d0 - Abort COMPLETE
+ip-10-0-224-187:3788821:3788821 [3] NCCL INFO comm 0x729fe240 rank 11 nranks 16 cudaDev 3 busId 201d0 - Abort COMPLETE
+ip-10-0-224-187:3788819:3788819 [1] NCCL INFO comm 0x1236d920 rank 9 nranks 16 cudaDev 1 busId 101d0 - Abort COMPLETE
+ip-10-0-224-187:3788818:3788818 [0] NCCL INFO comm 0x804dad0 rank 8 nranks 16 cudaDev 0 busId 101c0 - Abort COMPLETE
+[ip-10-0-216-33:0/16] 2024-03-06 00:43:20,608 (trainer:416) INFO: The best model has been updated: valid.total_count
+[ip-10-0-216-33:0/16] 2024-03-06 00:43:20,662 (trainer:470) INFO: The model files were removed: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/40epoch.pth
+[ip-10-0-216-33:0/16] 2024-03-06 00:43:20,662 (trainer:488) INFO: The training was finished at 45 epochs
+[ip-10-0-216-33:0/16] 2024-03-06 00:43:20,688 (average_nbest_models:69) INFO: Averaging 5best models: criterion="valid.acc": exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/valid.acc.ave_5best.pth
+[ip-10-0-216-33:0/16] 2024-03-06 00:44:08,799 (average_nbest_models:69) INFO: Averaging 5best models: criterion="valid.total_count": exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/valid.total_count.ave_5best.pth
+wandb: WARNING Ensure read and write access to run files dir: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/wandb/run-20240226_220402-zw9g4dhv/files, control this via the WANDB_DIR env var. See https://docs.wandb.ai/guides/track/environment-variables
+wandb: 1.212 MB of 1.212 MB uploaded
+wandb:
+wandb: Run history:
+wandb: epoch ▁▁▁▁▂▂▂▂▂▂▃▃▃▃▃▄▄▄▄▄▅▅▅▅▅▅▆▆▆▆▆▇▇▇▇▇▇███
+wandb: iteration ▁▁▁▂▂▂▂▂▂▃▃▃▃▃▄▄▄▄▄▄▅▅▅▅▅▅▆▆▆▆▆▇▇▇▇▇▇███
+wandb: metrics/acc ▁▂▆▆▇▇▇▇▇███████████████████████████████
+wandb: metrics/backward_time ▄▆▆████▇▇▇▇▇█▇█▄▇▇██▇▇█▇▇▇█▇▇▇▁▁▅▇▇██▆▆▇
+wandb: metrics/clip ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁
+wandb: metrics/forward_time ▁▃▃▃▃▃▂▃▃▃▃▄▃▃▄▂▄▃▃▄▄▃▄▃▄▃▂▃▄▄██▆▃▄▄▃▅▃▃
+wandb: metrics/grad_norm █▆▃▂▁▁▁▁▁▁▁▁▁▁▁▂▂▂▂▂▂▂▂▂▂▂▃▃▃▃▃▃▃▃▃▃▃▃▃▃
+wandb: metrics/iter_time ▂▁▁▂▂▁▁▁█▄▂▁▁▁▁█▄▂▁▁▂▂▁▁▁▂▁▂▂▂▁▁▂▂▁▁▁█▄▂
+wandb: metrics/loss █▆▃▂▂▂▂▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁
+wandb: metrics/loss_att █▆▃▂▂▂▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁
+wandb: metrics/loss_ctc █▄▃▂▂▂▂▂▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁
+wandb: metrics/loss_scale ▁▁▁▁▁▁▁▁▁▁▁▇▅▅▆▅▄▅▅█▅▅▅▇▇▅▅▄▃▄▄▄▃▃▃▄▄▄▃▄
+wandb: metrics/optim0_lr0 ▁▂▆█▇▇▆▆▅▅▅▅▅▄▄▄▄▄▄▄▄▄▄▄▃▃▃▃▃▃▃▃▃▃▃▃▃▃▃▃
+wandb: metrics/optim_step_time ▂███▇▆▆▇▇▇▇▆▆▆▇▆▆▆▆▆▆▆▆▆▆▆▆▆▆▆▃▁▄▆▆▆▆▆▆▆
+wandb: train/train_acc_epoch ▁▃▆▆▇▇▇▇▇▇██████████████████████████████
+wandb: train/train_backward_time_epoch ▄▆▇████▇█▇▇▇█▇██▇█▇▇▇▇▇▇▇▇▇▇▇▇▁▁▆▇▇██▇▇▇
+wandb: train/train_clip_epoch ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁
+wandb: train/train_forward_time_epoch ▁▃▃▃▃▃▂▂▂▃▃▃▂▃▃▃▂▄▃▃▃▃▃▃▃▂▂▂▃▃██▅▃▃▃▂▃▄▃
+wandb: train/train_gpu_max_cached_mem_GB_epoch ▁▁▁▁▁▁▁▁▁▁██████████████████████████████
+wandb: train/train_grad_norm_epoch █▆▃▂▁▁▁▁▁▁▁▁▁▁▂▂▂▂▂▂▂▂▂▂▃▂▃▃▃▃▃▃▃▃▃▃▃▃▃▄
+wandb: train/train_iter_time_epoch ▂▁▁▅▂▂▂▁▂▁▁▂▅▇▂▂▂▂▂▂▆▂▂▂▂▂▂▂▂▂█▂▂▂▂▂▂▂▆▂
+wandb: train/train_loss_att_epoch █▅▂▂▂▂▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁
+wandb: train/train_loss_ctc_epoch █▄▃▃▂▂▂▂▂▂▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁
+wandb: train/train_loss_epoch █▅▃▂▂▂▂▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁
+wandb: train/train_loss_scale_epoch ▁▁▁▁▁▁▁▁▁▁▃▆▆▆▇▆▅▆▆▇▇▆▅▇█▅▅▅▄▅▅▄▃▃▃▅▅▃▄▅
+wandb: train/train_optim0_lr0_epoch ▁▃▇█▇▇▆▆▆▅▅▅▅▄▄▄▄▄▄▄▄▄▄▄▃▃▃▃▃▃▃▃▃▃▃▃▃▃▃▃
+wandb: train/train_optim_step_time_epoch ▂███▇▇▇▇▇▇▇▇▇▇▇▇▆▆▆▇▇▇▆▇▇▇▇▇▇▇▁▁▅▇▇▇▇▇▇▇
+wandb: train/train_time ▁▁▁▂▂▁▁▁█▄▂▁▁▂▁█▄▂▁▁▂▂▂▁▁▂▁▂▂▂▁▁▂▂▁▁▁█▄▂
+wandb: train/train_train_time_epoch ▁▁▂▅▄▃▄▄▄▄▃▃▆▇▃▃▃▄▄▃█▄▄▃▃▄▄▄▄▄▇▃▄▅▄▄▅▄▅▃
+wandb: valid/valid_acc_epoch ▁▃▅▆▆▆▆▇▇▇▇▇▇▇▇██▇▇█▇█▇█████████████████
+wandb: valid/valid_cer_ctc_epoch █▇▅▄▃▃▃▂▂▂▂▂▂▂▂▂▂▂▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁
+wandb: valid/valid_cer_epoch █▆▅▄▄▂▃▃▃▂▃▃▃▂▂▁▁▂▂▁▂▁▂▂▁▁▁▁▁▁▁▂▂▂▁▁▁▂▁▂
+wandb: valid/valid_gpu_max_cached_mem_GB_epoch ▁▁▁▁▁▁▁▁▁▁██████████████████████████████
+wandb: valid/valid_loss_att_epoch █▅▃▂▂▂▂▂▂▂▂▂▂▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁
+wandb: valid/valid_loss_ctc_epoch █▆▅▄▃▃▃▂▂▂▂▂▂▂▂▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁
+wandb: valid/valid_loss_epoch █▅▄▃▂▂▂▂▂▂▂▂▂▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁
+wandb: valid/valid_wer_epoch █████████████▇▇▅▅█▇▅█▆█▇▄▄▄▄▄▃▄▇▆▆▁▄▄█▄█
+wandb:
+wandb: Run summary:
+wandb: epoch 45
+wandb: iteration 675000
+wandb: metrics/acc 0.83021
+wandb: metrics/backward_time 0.23303
+wandb: metrics/clip 100.0
+wandb: metrics/forward_time 0.209
+wandb: metrics/grad_norm 45.02424
+wandb: metrics/iter_time 0.08849
+wandb: metrics/loss 30.90718
+wandb: metrics/loss_att 27.16983
+wandb: metrics/loss_ctc 39.62768
+wandb: metrics/loss_scale 2.724960328658875e+33
+wandb: metrics/optim0_lr0 0.00015
+wandb: metrics/optim_step_time 0.07528
+wandb: train/train_acc_epoch 0.83021
+wandb: train/train_backward_time_epoch 0.23303
+wandb: train/train_clip_epoch 100.0
+wandb: train/train_forward_time_epoch 0.209
+wandb: train/train_gpu_max_cached_mem_GB_epoch 36.51562
+wandb: train/train_grad_norm_epoch 45.02424
+wandb: train/train_iter_time_epoch 0.08849
+wandb: train/train_loss_att_epoch 27.16983
+wandb: train/train_loss_ctc_epoch 39.62768
+wandb: train/train_loss_epoch 30.90718
+wandb: train/train_loss_scale_epoch 2.724960328658875e+33
+wandb: train/train_optim0_lr0_epoch 0.00015
+wandb: train/train_optim_step_time_epoch 0.07528
+wandb: train/train_time 0.99808
+wandb: train/train_train_time_epoch 0.99808
+wandb: valid/valid_acc_epoch 0.76696
+wandb: valid/valid_cer_ctc_epoch 0.18327
+wandb: valid/valid_cer_epoch 0.29893
+wandb: valid/valid_gpu_max_cached_mem_GB_epoch 36.51562
+wandb: valid/valid_loss_att_epoch 25.67251
+wandb: valid/valid_loss_ctc_epoch 33.74075
+wandb: valid/valid_loss_epoch 28.09298
+wandb: valid/valid_wer_epoch 0.99948
+wandb:
+wandb: 🚀 View run _weka_home-pengyf_espnet-owsm-train_egs2_owsm_v3.1_licensefree_nosa_s2t1 at: https://stability.wandb.io/pengyf/ESPnet_S2TTask/runs/zw9g4dhv
+wandb: Synced 4 W&B file(s), 0 media file(s), 0 artifact file(s) and 0 other file(s)
+wandb: Find logs at: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/wandb/run-20240226_220402-zw9g4dhv/logs
+ip-10-0-216-33:311880:312455 [0] NCCL INFO [Service thread] Connection closed by localRank 0
+ip-10-0-216-33:311880:311880 [0] NCCL INFO comm 0x90d2b80 rank 0 nranks 16 cudaDev 0 busId 101c0 - Abort COMPLETE
+# Accounting: begin_time=1708985012
+# Accounting: end_time=1709685882
+# Accounting: time=700870 threads=1
+# Finished at Wed Mar 6 00:44:42 UTC 2024 with status 0
diff --git a/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/valid.acc.ave_5best.pth b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/valid.acc.ave_5best.pth
new file mode 100644
index 0000000000000000000000000000000000000000..69b6c20bc70130ca3e7ec7c3a9aa55479575a6fe
--- /dev/null
+++ b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/valid.acc.ave_5best.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:03a1dbd2d9498777c9de4f97ab2faae563f6d13f89f4cc85481c916038eb1646
+size 1466924749
diff --git a/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/valid.total_count.ave_5best.pth b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/valid.total_count.ave_5best.pth
new file mode 100644
index 0000000000000000000000000000000000000000..111b33536da6abe8c52773f79061e45e8189d9f9
--- /dev/null
+++ b/exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/valid.total_count.ave_5best.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2412ee08e7f7c2dfd2a19f26289d93a92ae1f3beb7bc319e98cd74a6b05347e1
+size 1466929645
diff --git a/meta.yaml b/meta.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..bb588f6fae3290e9589dc83cbc5ff28e2b74f0d7
--- /dev/null
+++ b/meta.yaml
@@ -0,0 +1,8 @@
+espnet: '202310'
+files:
+  s2t_model_file: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/valid.acc.ave_5best.pth
+python: 3.10.13 | packaged by conda-forge | (main, Dec 23 2023, 15:36:39) [GCC 12.3.0]
+timestamp: 1725247130.37343
+torch: 1.13.1
+yaml_files:
+  s2t_train_config: exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000/config.yaml
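meta.yaml ties the release together: it records the ESPnet and PyTorch versions used and points `s2t_model_file` at the valid.acc-averaged checkpoint. Under the usual ESPnet packaging convention these files can be loaded for inference roughly as follows (a hedged sketch: the keyword names follow espnet2's s2t inference interface, the `<eng>`/`<asr>` tags follow OWSM's token convention, and the wav path is a placeholder):

```python
import soundfile as sf
from espnet2.bin.s2t_inference import Speech2Text

exp_dir = "exp/s2t_train_s2t_ebf_conv2d_size768_e9_d9_piecewise_lr5e-4_warmup60k_flashattn_raw_bpe50000"
s2t = Speech2Text(
    s2t_train_config=f"{exp_dir}/config.yaml",
    s2t_model_file=f"{exp_dir}/valid.acc.ave_5best.pth",
    lang_sym="<eng>",  # language token
    task_sym="<asr>",  # task token: transcription
)

speech, rate = sf.read("example.wav")  # 16 kHz mono audio assumed
results = s2t(speech)  # list of hypotheses; each entry contains the decoded text
```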