# build_lr_scheduler_images.py
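"""Generate one learning-rate-curve PNG per torch.optim scheduler.

Each image is written to the lr_scheduler_images/ directory next to this
script, and a plot is skipped if its file already exists.
"""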
from pathlib import Path
import matplotlib
from matplotlib import pyplot as plt
import torch
import torch.optim as optim
from torch.optim.lr_scheduler import (
    ChainedScheduler,
    ConstantLR,
    CosineAnnealingLR,
    CosineAnnealingWarmRestarts,
    CyclicLR,
    ExponentialLR,
    LambdaLR,
    LinearLR,
    MultiplicativeLR,
    MultiStepLR,
    OneCycleLR,
    PolynomialLR,
    ReduceLROnPlateau,
    SequentialLR,
    StepLR,
)
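
# Use the non-interactive Agg backend so figures render straight to PNG files
# without a display (e.g. in a headless docs build).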
matplotlib.use("Agg")
LR_SCHEDULER_IMAGE_PATH = Path(__file__).parent / "lr_scheduler_images"
if not LR_SCHEDULER_IMAGE_PATH.exists():
    LR_SCHEDULER_IMAGE_PATH.mkdir()
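
# A throwaway model/optimizer pair: the schedulers only need an optimizer
# whose learning rate they can read and update.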
model = torch.nn.Linear(10, 1)
optimizer = optim.SGD(model.parameters(), lr=0.05)
num_epochs = 100
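
# scheduler1 and scheduler2 are constructed once against the shared optimizer
# and reused by both the ChainedScheduler and SequentialLR entries below.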
scheduler1 = ConstantLR(optimizer, factor=0.1, total_iters=num_epochs // 5)
scheduler2 = ExponentialLR(optimizer, gamma=0.9)
# Each entry is a factory that takes the shared optimizer and returns a fresh
# scheduler instance; plot_function calls it once per plot.
schedulers = [
    (lambda opt: LambdaLR(opt, lr_lambda=lambda epoch: epoch // 30)),
    (lambda opt: MultiplicativeLR(opt, lr_lambda=lambda epoch: 0.95)),
    (lambda opt: StepLR(opt, step_size=30, gamma=0.1)),
    (lambda opt: MultiStepLR(opt, milestones=[30, 80], gamma=0.1)),
    (lambda opt: ConstantLR(opt, factor=0.5, total_iters=40)),
    (lambda opt: LinearLR(opt, start_factor=0.05, total_iters=40)),
    (lambda opt: ExponentialLR(opt, gamma=0.95)),
    (lambda opt: PolynomialLR(opt, total_iters=num_epochs // 2, power=0.9)),
    (lambda opt: CosineAnnealingLR(opt, T_max=num_epochs)),
    (lambda opt: CosineAnnealingWarmRestarts(opt, T_0=20)),
    (lambda opt: CyclicLR(opt, base_lr=0.01, max_lr=0.1, step_size_up=10)),
    (lambda opt: OneCycleLR(opt, max_lr=0.01, epochs=10, steps_per_epoch=10)),
    (lambda opt: ReduceLROnPlateau(opt, mode="min")),
    (lambda opt: ChainedScheduler([scheduler1, scheduler2])),
    (
        lambda opt: SequentialLR(
            opt, schedulers=[scheduler1, scheduler2], milestones=[num_epochs // 5]
        )
    ),
]
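
# Purely illustrative sketch (not part of the original script): another entry
# could be appended in the same factory style, e.g. a hypothetical
# linear-warmup-then-cosine schedule built from classes already imported above:
#
#     schedulers.append(
#         lambda opt: SequentialLR(
#             opt,
#             schedulers=[
#                 LinearLR(opt, start_factor=0.1, total_iters=10),
#                 CosineAnnealingLR(opt, T_max=num_epochs - 10),
#             ],
#             milestones=[10],
#         )
#     )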
def plot_function(scheduler):
    plt.clf()
    plt.grid(color="k", alpha=0.2, linestyle="--")

    lrs = []
    # Reset the shared optimizer's learning rate before building the next
    # scheduler, since previously constructed schedulers have already mutated it.
    optimizer.param_groups[0]["lr"] = 0.05
    scheduler = scheduler(optimizer)
    plot_path = LR_SCHEDULER_IMAGE_PATH / f"{scheduler.__class__.__name__}.png"
    if plot_path.exists():
        return

    for _ in range(num_epochs):
        lrs.append(optimizer.param_groups[0]["lr"])
        if isinstance(scheduler, ReduceLROnPlateau):
            # ReduceLROnPlateau is the one scheduler here that steps on a
            # metric; feed it random noise so the plateau logic can fire.
            val_loss = torch.randn(1).item()
            scheduler.step(val_loss)
        else:
            scheduler.step()

    plt.plot(range(num_epochs), lrs)
    plt.title(f"Learning Rate: {scheduler.__class__.__name__}")
    plt.xlabel("Epoch")
    plt.ylabel("Learning Rate")
    plt.xlim([0, num_epochs])
    plt.savefig(plot_path)
    print(
        f"Saved learning rate scheduler image for {scheduler.__class__.__name__} at {plot_path}"
    )

# Render one image per scheduler factory.
for scheduler in schedulers:
    plot_function(scheduler)
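
# Usage sketch (assuming torch and matplotlib are installed); the PNGs land in
# lr_scheduler_images/ next to this script:
#
#     python build_lr_scheduler_images.py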