Nanobit committed
Commit
1aeece6
1 Parent(s): 5294653

chore(doc): clarify micro_batch_size (#1579) [skip ci]

Files changed (1)
  1. docs/config.qmd +1 -0
docs/config.qmd CHANGED
@@ -268,6 +268,7 @@ torch_compile_backend: # Optional[str]
 # If greater than 1, backpropagation will be skipped and the gradients will be accumulated for the given number of steps.
 gradient_accumulation_steps: 1
 # The number of samples to include in each batch. This is the number of samples sent to each GPU.
+# Batch size per gpu = micro_batch_size * gradient_accumulation_steps
 micro_batch_size: 2
 eval_batch_size:
 num_epochs: 4
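The added comment implies a simple rule of thumb for sizing these two keys together. As a worked sketch in the same config format (the values below are illustrative examples, not defaults or recommendations from the docs):

```yaml
# Illustrative axolotl config fragment (example values):
gradient_accumulation_steps: 4  # accumulate gradients over 4 micro-batches before each optimizer step
micro_batch_size: 2             # samples sent to each GPU per forward/backward pass
# Effective batch size per GPU = micro_batch_size * gradient_accumulation_steps = 2 * 4 = 8.
# Under data parallelism, the total effective batch size would be 8 * (number of GPUs).
```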