Files
pytorch/docs/source/scripts/build_lr_scheduler_images.py
zeshengzong 5eebcb991a Add scripts to generate plots of LRSchedulers (#149189)
Fixes #92007

## Changes

- Add a script that generates learning-rate plots for each `lr_scheduler`
- Embed the generated plots in the `lr_scheduler` docs
- Add an example section to the `lr_scheduler` docs where one is missing (sketched below)
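
For reference, a minimal sketch of the usage pattern such an example section documents, using `StepLR` as a representative scheduler (the exact wording in the docstrings may differ):

```python
import torch
from torch import optim
from torch.optim.lr_scheduler import StepLR

model = torch.nn.Linear(10, 1)
optimizer = optim.SGD(model.parameters(), lr=0.05)
# Multiply the LR by gamma=0.1 every 30 epochs: 0.05 -> 0.005 -> 0.0005 -> ...
scheduler = StepLR(optimizer, step_size=30, gamma=0.1)

for epoch in range(100):
    # train(...) and validate(...) elided; step the scheduler once per epoch,
    # after optimizer.step()
    scheduler.step()
```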

## Test Results

### LambdaLR

![image](https://github.com/user-attachments/assets/37fc0894-e2ec-48f2-a2d6-3514e51e1ea2)

### MultiplicativeLR

![image](https://github.com/user-attachments/assets/2122b3a0-a4ce-42c7-bb45-559c1fc73e0f)

### StepLR

![image](https://github.com/user-attachments/assets/47bc9d96-4b60-4586-a000-f213583bbe8f)

### MultiStepLR

![image](https://github.com/user-attachments/assets/c822b849-d5be-4b94-aa7a-0017a2c9ff15)

### ConstantLR

![image](https://github.com/user-attachments/assets/83107cdd-7b00-44a6-b09d-e8ee849b4a12)

### LinearLR

![image](https://github.com/user-attachments/assets/60190105-691a-4101-8966-5b0c396093a4)

### ExponentialLR

![image](https://github.com/user-attachments/assets/dfcbcbca-89e5-4a2f-b1bd-33e25d2405ec)

### PolynomialLR

![image](https://github.com/user-attachments/assets/7c3d4fce-c846-40a0-b62e-f3e81c7e08bd)

### CosineAnnealingLR

![image](https://github.com/user-attachments/assets/26712769-dde9-4faa-b61b-e23c51daef50)

### ChainedScheduler

![image](https://github.com/user-attachments/assets/20734a8b-e939-424f-b45a-773f86f020b1)

### SequentialLR

![image](https://github.com/user-attachments/assets/2cd3ed67-2a0a-4c42-9ad2-e0be090d3751)

### ReduceLROnPlateau

![image](https://github.com/user-attachments/assets/b77f641e-4810-450d-b2cd-8b3f134ea188)

### CyclicLR

![image](https://github.com/user-attachments/assets/29b8666f-41b3-45e4-9159-6929074e6108)

### OneCycleLR

![image](https://github.com/user-attachments/assets/d5b683ef-41e8-4ca8-9fe8-0f1e6b433866)

### CosineAnnealingWarmRestarts

![image](https://github.com/user-attachments/assets/1d45ea80-dea8-494d-a8ab-e9cfc94c55d6)

Pull Request resolved: https://github.com/pytorch/pytorch/pull/149189
Approved by: https://github.com/janeyx99
2025-04-14 09:53:38 +00:00

```python
from pathlib import Path

import matplotlib
from matplotlib import pyplot as plt

import torch
import torch.optim as optim
from torch.optim.lr_scheduler import (
    ChainedScheduler,
    ConstantLR,
    CosineAnnealingLR,
    CosineAnnealingWarmRestarts,
    CyclicLR,
    ExponentialLR,
    LambdaLR,
    LinearLR,
    MultiplicativeLR,
    MultiStepLR,
    OneCycleLR,
    PolynomialLR,
    ReduceLROnPlateau,
    SequentialLR,
    StepLR,
)

# Use a non-interactive backend so the script can run in headless doc builds.
matplotlib.use("Agg")

LR_SCHEDULER_IMAGE_PATH = Path(__file__).parent / "lr_scheduler_images"

if not LR_SCHEDULER_IMAGE_PATH.exists():
    LR_SCHEDULER_IMAGE_PATH.mkdir()

# A tiny model/optimizer pair; only the learning-rate trajectory matters.
model = torch.nn.Linear(10, 1)
optimizer = optim.SGD(model.parameters(), lr=0.05)

num_epochs = 100

# Sub-schedulers shared by the ChainedScheduler and SequentialLR entries below.
scheduler1 = ConstantLR(optimizer, factor=0.1, total_iters=num_epochs // 5)
scheduler2 = ExponentialLR(optimizer, gamma=0.9)

# Each entry is a factory so a fresh scheduler is constructed per plot.
schedulers = [
    (lambda opt: LambdaLR(opt, lr_lambda=lambda epoch: epoch // 30)),
    (lambda opt: MultiplicativeLR(opt, lr_lambda=lambda epoch: 0.95)),
    (lambda opt: StepLR(opt, step_size=30, gamma=0.1)),
    (lambda opt: MultiStepLR(opt, milestones=[30, 80], gamma=0.1)),
    (lambda opt: ConstantLR(opt, factor=0.5, total_iters=40)),
    (lambda opt: LinearLR(opt, start_factor=0.05, total_iters=40)),
    (lambda opt: ExponentialLR(opt, gamma=0.95)),
    (lambda opt: PolynomialLR(opt, total_iters=num_epochs / 2, power=0.9)),
    (lambda opt: CosineAnnealingLR(opt, T_max=num_epochs)),
    (lambda opt: CosineAnnealingWarmRestarts(opt, T_0=20)),
    (lambda opt: CyclicLR(opt, base_lr=0.01, max_lr=0.1, step_size_up=10)),
    (lambda opt: OneCycleLR(opt, max_lr=0.01, epochs=10, steps_per_epoch=10)),
    (lambda opt: ReduceLROnPlateau(opt, mode="min")),
    (lambda opt: ChainedScheduler([scheduler1, scheduler2])),
    (
        lambda opt: SequentialLR(
            opt, schedulers=[scheduler1, scheduler2], milestones=[num_epochs // 5]
        )
    ),
]


def plot_function(scheduler):
    plt.clf()
    plt.grid(color="k", alpha=0.2, linestyle="--")

    lrs = []
    # Reset the learning rate before constructing the next scheduler.
    optimizer.param_groups[0]["lr"] = 0.05
    scheduler = scheduler(optimizer)
    plot_path = LR_SCHEDULER_IMAGE_PATH / f"{scheduler.__class__.__name__}.png"
    # Skip regeneration if the image already exists.
    if plot_path.exists():
        return

    for _ in range(num_epochs):
        lrs.append(optimizer.param_groups[0]["lr"])
        if isinstance(scheduler, ReduceLROnPlateau):
            # ReduceLROnPlateau steps on a metric, so feed it a dummy loss.
            val_loss = torch.randn(1).item()
            scheduler.step(val_loss)
        else:
            scheduler.step()

    plt.plot(range(num_epochs), lrs)
    plt.title(f"Learning Rate: {scheduler.__class__.__name__}")
    plt.xlabel("Epoch")
    plt.ylabel("Learning Rate")
    plt.xlim([0, num_epochs])
    plt.savefig(plot_path)
    print(
        f"Saved learning rate scheduler image for {scheduler.__class__.__name__} at {plot_path}"
    )


for scheduler in schedulers:
    plot_function(scheduler)
```
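
To preview a single curve without running the whole script (for example, when adding a new scheduler to the list above), the same loop can be run in isolation. A minimal sketch; the output filename here is illustrative, not something the script produces:

```python
import torch
from torch import optim
from torch.optim.lr_scheduler import CosineAnnealingLR
import matplotlib

matplotlib.use("Agg")
from matplotlib import pyplot as plt

# A throwaway parameter is enough; only the LR trajectory is plotted.
opt = optim.SGD([torch.zeros(1, requires_grad=True)], lr=0.05)
sched = CosineAnnealingLR(opt, T_max=100)

lrs = []
for _ in range(100):
    lrs.append(opt.param_groups[0]["lr"])
    sched.step()

plt.plot(range(100), lrs)
plt.savefig("CosineAnnealingLR_preview.png")  # hypothetical output path
```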