---
# litgpt pretraining configuration for google/gemma-2-2b.
# NOTE(review): the pasted source had lost all indentation and carried
# file-viewer chrome (size/commit/line-number rows); nesting restored per
# the litgpt `pretrain` config schema — confirm against litgpt docs.

# Model architecture to instantiate and output directory for checkpoints.
model_name: google/gemma-2-2b
out_dir: pretrained_model/models
precision: bf16-mixed
# Initialize from the published gemma-2-2b weights instead of random init;
# do not resume from a previous run in out_dir.
initial_checkpoint_dir: google/gemma-2-2b
resume: false

# Data module: litgpt's LitData loader reading from the local `data` dir.
data:
  class_path: litgpt.data.LitData
  init_args:
    data_path: data
    seed: 42
    num_workers: 8

train:
  save_interval: 1000
  log_interval: 1
  # global_batch_size is the effective batch; micro_batch_size is per
  # device per step (gradient accumulation covers the ratio).
  global_batch_size: 4
  micro_batch_size: 1
  lr_warmup_steps: 2000
  max_tokens: 156800708
  max_seq_length: 2048
  tie_embeddings: false
  max_norm: 1.0
  min_lr: 4.0e-05

eval:
  interval: 1000
  max_iters: 100
  initial_validation: false
  final_validation: true

optimizer: AdamW
# `auto` lets the launcher pick all visible accelerators on one node.
devices: auto
num_nodes: 1
tokenizer_dir: google/gemma-2-2b
logger_name: tensorboard
# Global RNG seed for the run (distinct from data.init_args.seed).
seed: 42