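# Weights & Biases sweep configuration: grid search over `length_penalty` for
# long-form transcription of the TEDLIUM validation split, minimising word
# error rate (WER) with the checkpoint configured below.
#
# Typical usage with the standard wandb CLI (paths and IDs are placeholders):
#   wandb sweep <path/to/this/file>
#   wandb agent <entity>/<project>/<sweep_id>
#
# The sweep agent expands ${program} to the `program` entry at the bottom of
# this file and ${args} to the parameter assignments chosen for each run;
# --streaming is forwarded to every run.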
command:
  - python3
  - ${program}
  - --streaming
  - ${args}
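# Grid search: one run per combination of swept values, tracking the long-form
# TEDLIUM validation WER logged under the metric name below (lower is better).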
method: grid
metric:
  goal: minimize
  name: tedlium-long-form/validation/wer
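# Fixed arguments use a single `value:`; only parameters listed with a
# `values:` list are swept.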
parameters:
  model_name_or_path:
    value: sanchit-gandhi/large-32-2-ts-freeze-28k-wer-10
  subfolder:
    value: checkpoint-15000
  dataset_name:
    value: distil-whisper/tedlium-long-form
  dataset_config_name:
    value: all
  dataset_split_name:
    value: validation
  cache_dir:
    value: /home/sanchitgandhi/.cache
  dataset_cache_dir:
    value: /home/sanchitgandhi/.cache
  output_dir:
    value: ./
  wandb_dir:
    value: /home/sanchitgandhi/.cache
  per_device_eval_batch_size:
    value: 32
  dtype:
    value: bfloat16
  report_to:
    value: wandb
  generation_num_beams:
    value: 5
  generation_max_length:
    value: 256
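  # The only swept parameter: length penalty applied during beam search
  # (in 🤗 Transformers, values above 1.0 favour longer transcriptions,
  # values below 1.0 favour shorter ones).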
  length_penalty:
    values:
      - 0.6
      - 0.8
      - 1.0
      - 1.2
      - 1.4
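# Script executed for every sweep run, and the W&B project the runs report to.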
program: run_long_form_transcription.py
project: distil-whisper-long-form