Create training.sh
Browse files- training.sh +21 -0
training.sh
ADDED
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
#!/usr/bin/env bash
# Launch NeMo Conformer-CTC ASR training with a WordPiece (wpe) tokenizer.
#
# Before running, substitute the placeholders:
#   <TRAINING MANIFEST JSON>   - path to the training manifest (NeMo JSON-lines)
#   <VALIDATION MANIFEST JSON> - path to the validation manifest
#   <TOKENIZER FOLDER>         - directory containing the trained tokenizer
#   [NEMO_GIT_FOLDER]          - local checkout of the NeMo repository
#
# Outputs: experiment logs/checkpoints under results/ (resumes if present).
set -euo pipefail

export TRAIN_MANIFEST_PATH="<TRAINING MANIFEST JSON>"
export DEV_MANIFEST_PATH="<VALIDATION MANIFEST JSON>"
export TOKENIZER_PATH="<TOKENIZER FOLDER>"
# Make Hydra print full stack traces instead of its abbreviated error summary.
export HYDRA_FULL_ERROR=1

# Quote every path/override: the placeholder values contain spaces, and the
# bracketed arguments ([NEMO_GIT_FOLDER], betas=[0.9,0.999]) would otherwise
# be subject to shell pathname expansion.
python "[NEMO_GIT_FOLDER]/examples/asr/asr_ctc/speech_to_text_ctc_bpe.py" \
  --config-path="[NEMO_GIT_FOLDER]/examples/asr/conf/conformer/" \
  --config-name=conformer_ctc_bpe \
  model.train_ds.manifest_filepath="${TRAIN_MANIFEST_PATH}" \
  model.validation_ds.manifest_filepath="${DEV_MANIFEST_PATH}" \
  model.tokenizer.dir="${TOKENIZER_PATH}" \
  model.tokenizer.type=wpe \
  trainer.devices=4 \
  trainer.accelerator="gpu" \
  trainer.strategy="ddp" \
  trainer.max_epochs=1000 \
  model.optim.name="adamw" \
  model.optim.lr=0.001 \
  "model.optim.betas=[0.9,0.999]" \
  model.optim.weight_decay=0.0001 \
  model.optim.sched.warmup_steps=2000 \
  exp_manager.exp_dir=results/ \
  exp_manager.create_wandb_logger=False \
  exp_manager.resume_if_exists=true