From af68465753e1dd34607608008de3585aadead3fe Mon Sep 17 00:00:00 2001
From: Sammy-1904
Date: Tue, 2 Dec 2025 19:24:14 +0530
Subject: [PATCH] Improve LR scheduler documentation in transfer learning tutorial

---
 beginner_source/transfer_learning_tutorial.py | 11 ++++++++---
 1 file changed, 8 insertions(+), 3 deletions(-)

diff --git a/beginner_source/transfer_learning_tutorial.py b/beginner_source/transfer_learning_tutorial.py
index 8a344d3d88a..b27f32cf681 100644
--- a/beginner_source/transfer_learning_tutorial.py
+++ b/beginner_source/transfer_learning_tutorial.py
@@ -142,8 +142,12 @@ def imshow(inp, title=None):
 # - Scheduling the learning rate
 # - Saving the best model
 #
-# In the following, parameter ``scheduler`` is an LR scheduler object from
-# ``torch.optim.lr_scheduler``.
+# In this tutorial, ``scheduler`` is an LR scheduler object from
+# ``torch.optim.lr_scheduler`` (e.g. ``StepLR``). For schedulers such as
+# ``StepLR``, the recommended order is ``optimizer.step()`` followed by
+# ``scheduler.step()``, which is why ``scheduler.step()`` is called once at
+# the end of each epoch, after all optimizer steps for that epoch are done.
+
 
 
 def train_model(model, criterion, optimizer, scheduler, num_epochs=25):
@@ -185,7 +189,8 @@ def train_model(model, criterion, optimizer, scheduler, num_epochs=25):
                     _, preds = torch.max(outputs, 1)
                     loss = criterion(outputs, labels)
 
-                    # backward + optimize only if in training phase
+                    # backward pass + optimizer step (only in training phase)
+
                     if phase == 'train':
                         loss.backward()
                         optimizer.step()
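
For context, a minimal sketch of the call order the new comment describes: all
optimizer steps for an epoch, then a single scheduler.step() at the end of that
epoch. The toy model, random data, and hyperparameters below are illustrative
assumptions, not taken from the patch.

# Sketch of the recommended StepLR usage: optimizer.step() per batch,
# scheduler.step() once per epoch, after the batch loop finishes.
import torch
import torch.nn as nn
import torch.optim as optim
from torch.optim import lr_scheduler

model = nn.Linear(10, 2)
criterion = nn.CrossEntropyLoss()
optimizer = optim.SGD(model.parameters(), lr=0.001, momentum=0.9)
scheduler = lr_scheduler.StepLR(optimizer, step_size=7, gamma=0.1)

for epoch in range(10):
    for _ in range(5):  # stand-in for iterating over a dataloader
        inputs = torch.randn(4, 10)
        labels = torch.randint(0, 2, (4,))
        optimizer.zero_grad()
        loss = criterion(model(inputs), labels)
        loss.backward()
        optimizer.step()              # optimizer steps happen per batch
    scheduler.step()                  # scheduler steps once, after the epoch
    print(epoch, scheduler.get_last_lr())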