chore(doc): clarify micro_batch_size (#1579) [skip ci]

This commit is contained in:
NanoCode012
2024-05-01 00:33:53 +09:00
committed by GitHub
parent 5294653a2d
commit 1aeece6e24

View File

@@ -268,6 +268,7 @@ torch_compile_backend: # Optional[str]
# If greater than 1, gradients will be accumulated for the given number of steps before the optimizer step is applied (the backward pass still runs every step).
gradient_accumulation_steps: 1
# The number of samples to include in each batch. This is the number of samples sent to each GPU.
# Effective batch size per GPU = micro_batch_size * gradient_accumulation_steps
micro_batch_size: 2
eval_batch_size:
num_epochs: 4