<pre>
from accelerate import Accelerator

# Accumulate gradients over 2 micro-batches before each optimizer update.
accelerator = Accelerator(gradient_accumulation_steps=2)

# prepare() wraps each object for the current device and distributed setup.
dataloader, model, optimizer, scheduler = accelerator.prepare(
    dataloader, model, optimizer, scheduler
)

for batch in dataloader:
    # accumulate() does the bookkeeping: gradients are synchronized and the
    # prepared optimizer actually steps only once every
    # gradient_accumulation_steps iterations.
    with accelerator.accumulate(model):
        optimizer.zero_grad()
        inputs, targets = batch
        outputs = model(inputs)
        loss = loss_function(outputs, targets)  # loss_function assumed defined elsewhere
        accelerator.backward(loss)
        optimizer.step()
        scheduler.step()
</pre>
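
The snippet assumes <code>dataloader</code>, <code>model</code>, <code>optimizer</code>, <code>scheduler</code>, and <code>loss_function</code> already exist. Below is a minimal self-contained sketch of the same pattern; the tiny linear model, synthetic regression data, and MSE loss are illustrative stand-ins, not part of the original file.

<pre>
import torch
from torch.utils.data import DataLoader, TensorDataset
from accelerate import Accelerator

accelerator = Accelerator(gradient_accumulation_steps=2)

# Illustrative placeholders: 32 synthetic samples with 4 features each.
x = torch.randn(32, 4)
y = torch.randn(32, 1)
dataloader = DataLoader(TensorDataset(x, y), batch_size=8)

model = torch.nn.Linear(4, 1)
optimizer = torch.optim.SGD(model.parameters(), lr=1e-2)
scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=2)
loss_function = torch.nn.MSELoss()

dataloader, model, optimizer, scheduler = accelerator.prepare(
    dataloader, model, optimizer, scheduler
)

# Same training loop as above: with batch_size=8 and
# gradient_accumulation_steps=2, the effective batch size is 16.
for batch in dataloader:
    with accelerator.accumulate(model):
        optimizer.zero_grad()
        inputs, targets = batch
        outputs = model(inputs)
        loss = loss_function(outputs, targets)
        accelerator.backward(loss)
        optimizer.step()
        scheduler.step()
</pre>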