
Commit 64ee25e

willprice, SkafteNicki, and rohitgr7 authored
Correct documentation examples of optimizer_step (Lightning-AI#3326)
* Correct documentation examples of `optimizer_step`

  Without the default arguments set in `optimizer_step` the examples fail due to the arguments not being provided

* Apply suggestions from code review

Co-authored-by: Rohit Gupta <rohitgr1998@gmail.com>
Co-authored-by: Nicki Skafte <skaftenicki@gmail.com>
Co-authored-by: Rohit Gupta <rohitgr1998@gmail.com>
1 parent 227959b commit 64ee25e

File tree

1 file changed, +3 -3 lines changed


docs/source/optimizers.rst

+3-3
@@ -85,14 +85,14 @@ For example, here step optimizer A every 2 batches and optimizer B every 4 batch
 
 .. testcode::
 
-    def optimizer_step(self, current_epoch, batch_nb, optimizer, optimizer_i, second_order_closure, on_tpu, using_native_amp, using_lbfgs):
+    def optimizer_step(self, current_epoch, batch_nb, optimizer, optimizer_idx, second_order_closure=None, on_tpu=False, using_native_amp=False, using_lbfgs=False):
         optimizer.step()
 
     def optimizer_zero_grad(self, current_epoch, batch_idx, optimizer, opt_idx):
         optimizer.zero_grad()
 
     # Alternating schedule for optimizer steps (ie: GANs)
-    def optimizer_step(self, current_epoch, batch_nb, optimizer, optimizer_i, second_order_closure, on_tpu, using_native_amp, using_lbfgs):
+    def optimizer_step(self, current_epoch, batch_nb, optimizer, optimizer_idx, second_order_closure=None, on_tpu=False, using_native_amp=False, using_lbfgs=False):
         # update generator opt every 2 steps
         if optimizer_i == 0:
             if batch_nb % 2 == 0 :
@@ -113,7 +113,7 @@ Here we add a learning-rate warm up
 .. testcode::
 
     # learning rate warm-up
-    def optimizer_step(self, current_epoch, batch_nb, optimizer, optimizer_i, second_order_closure, on_tpu, using_native_amp, using_lbfgs):
+    def optimizer_step(self, current_epoch, batch_nb, optimizer, optimizer_idx, second_order_closure=None, on_tpu=False, using_native_amp=False, using_lbfgs=False):
         # warm up lr
         if self.trainer.global_step < 500:
             lr_scale = min(1., float(self.trainer.global_step + 1) / 500.)
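For context, the sketch below shows how the corrected hook could be overridden inside a LightningModule of that era, using the 0.9-style signature shown in this diff. The class name, layer sizes, base learning rate, and warm-up length are illustrative assumptions and are not part of the commit.

    import torch
    import pytorch_lightning as pl


    class LitModel(pl.LightningModule):
        def __init__(self):
            super().__init__()
            # Hypothetical toy model; any module and optimizer would do here.
            self.layer = torch.nn.Linear(32, 2)

        def training_step(self, batch, batch_idx):
            x, y = batch
            return torch.nn.functional.cross_entropy(self.layer(x), y)

        def configure_optimizers(self):
            return torch.optim.SGD(self.parameters(), lr=0.02)

        def optimizer_step(self, current_epoch, batch_nb, optimizer, optimizer_idx,
                           second_order_closure=None, on_tpu=False,
                           using_native_amp=False, using_lbfgs=False):
            # Linear learning-rate warm-up over the first 500 steps, mirroring
            # the docs example this commit fixes.
            if self.trainer.global_step < 500:
                lr_scale = min(1., float(self.trainer.global_step + 1) / 500.)
                for pg in optimizer.param_groups:
                    pg['lr'] = lr_scale * 0.02
            optimizer.step()
            optimizer.zero_grad()

With the trailing arguments given defaults, an override like this can be called without every argument being supplied, which is what made the documentation examples fail before this change.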
