
Commit 7fefe60

sihyeong671 and vfdev-5 authored
Change LinearCyclicalScheduler from triangle wave to sawtooth wave (#3186)
* feat: change linearcyclicalscheduler triangle to sawtooth
* feat: change linearcyclicalscheduler add optional flag for triangle and sawtooth wave
* feat: fix LinearCyclicalScheduler
  - move value error location to __init__
  - rename use_legacy to use_sawtooth
* test: add LinearCyclicalScheduler using use_sawtooth option test
* Revert "test: add LinearCyclicalScheduler using use_sawtooth option test"
  This reverts commit 6d10fde.
* feat: Modify LinearCyclicalScheduler
  - add monotonic variable
  - check Error case
* test: add test case
  - warmup_duration, monotonic Value Error Check
  - add test case for linear cyclical scheduler with warmup_duration
* fix: remove #noqa E501, split sentence
* fix: remove comment, emptyline. docs: add docstring in LinearCyclicalScheduler
* docs: modify to proper docstring
* Update ignite/handlers/param_scheduler.py

---------

Co-authored-by: vfdev <vfdev.5@gmail.com>
1 parent 2bf7c8b commit 7fefe60
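In short: by default the scheduler keeps its existing triangle wave, and the new ``monotonic`` flag switches ``get_param`` to a sawtooth, i.e. a single linear ramp per cycle. Below is a minimal sketch of the two formulas as they appear in the diff further down, written as standalone functions of the cycle progress (``event_index / cycle_size``); the helper names are illustrative, not part of ignite's API.

# Illustrative helpers mirroring the two return statements in
# LinearCyclicalScheduler.get_param (see the diff below); not ignite API.
def triangle(progress: float, start: float = 1.0, end: float = 0.0) -> float:
    # default behaviour: ramp down to `end` at mid-cycle, then back up to `start`
    return end + (start - end) * abs(progress - 0.5) * 2

def sawtooth(progress: float, start: float = 1.0, end: float = 0.0) -> float:
    # monotonic=True: one linear ramp from `start` to `end` over the whole cycle
    return start + (end - start) * progress

# at progress=0.5: triangle(0.5) == 0.0 while sawtooth(0.5) == 0.5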

File tree (2 files changed: +123 -1 lines):
  ignite/handlers/param_scheduler.py
  tests/ignite/handlers/test_param_scheduler.py


ignite/handlers/param_scheduler.py

Lines changed: 20 additions & 1 deletion
@@ -390,6 +390,9 @@ class LinearCyclicalScheduler(CyclicalScheduler):
         save_history: whether to log the parameter values to
             `engine.state.param_history`, (default=False).
         param_group_index: optimizer's parameters group to use.
+        monotonic: whether to schedule only one half of the cycle: descending or ascending.
+            If False, this argument cannot be used together with ``warmup_duration``.
+            (default=False).

     Note:
         If the scheduler is bound to an 'ITERATION_*' event, 'cycle_size' should
@@ -465,12 +468,28 @@ def print_lr():

     .. versionchanged:: 0.4.13
         Added cyclic warm-up to the scheduler using ``warmup_duration``.
+
+    .. versionchanged:: 0.5.0
+        Added monotonic argument.
     """

+    def __init__(self, *args: Any, monotonic: bool = False, **kwargs: Any):
+        super(LinearCyclicalScheduler, self).__init__(*args, **kwargs)
+        self.monotonic = monotonic
+        if self.warmup_duration > 0 and not self.monotonic:
+            raise ValueError(
+                "Invalid combination when warmup_duration > 0 and monotonic=False, "
+                "please either set warmup_duration=0 or monotonic=True"
+            )
+
     def get_param(self) -> float:
         """Method to get current optimizer's parameter value"""
         cycle_progress = self.event_index / self.cycle_size
-        return self.end_value + (self.start_value - self.end_value) * abs(cycle_progress - 0.5) * 2
+
+        if self.monotonic:
+            return self.start_value + (self.end_value - self.start_value) * cycle_progress
+        else:
+            return self.end_value + (self.start_value - self.end_value) * abs(cycle_progress - 0.5) * 2


 class CosineAnnealingScheduler(CyclicalScheduler):
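A hedged usage sketch of the new option, modelled on the test added below; the positional arguments mirror the test's, and the snippet is an example rather than canonical documentation.

import torch
from ignite.engine import Engine, Events
from ignite.handlers.param_scheduler import LinearCyclicalScheduler

tensor = torch.zeros([1], requires_grad=True)
optimizer = torch.optim.SGD([tensor], lr=0.0)

# Positional args mirror the new test: param_name="lr", start_value=1.0,
# end_value=0.0, cycle_size=10. The lr ramps 1.0 -> 0.0 over 10 events,
# then the warm-up brings it back toward 1.0 over 5 events.
scheduler = LinearCyclicalScheduler(optimizer, "lr", 1.0, 0.0, 10, warmup_duration=5, monotonic=True)

trainer = Engine(lambda engine, batch: None)
trainer.add_event_handler(Events.ITERATION_STARTED, scheduler)
trainer.run([0] * 10, max_epochs=2)  # optimizer.param_groups[0]["lr"] follows the sawtooth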

tests/ignite/handlers/test_param_scheduler.py

Lines changed: 103 additions & 0 deletions
@@ -68,6 +68,13 @@ def test_linear_scheduler_asserts():
     with pytest.raises(ValueError, match=r"Argument cycle_size should be positive and larger than 1"):
         LinearCyclicalScheduler(optimizer, "lr", 1, 0, cycle_size=1)

+    with pytest.raises(
+        ValueError,
+        match=r"Invalid combination when warmup_duration > 0 and monotonic=False, "
+        r"please either set warmup_duration=0 or monotonic=True",
+    ):
+        LinearCyclicalScheduler(optimizer, "lr", 1, 0, cycle_size=2, warmup_duration=1)
+

 def test_linear_scheduler():
     tensor = torch.zeros([1], requires_grad=True)
@@ -144,6 +151,102 @@ def save_lr(engine):
     scheduler.load_state_dict(state_dict)


+def test_linear_scheduler_warmup_duration():
+    tensor = torch.zeros([1], requires_grad=True)
+    optimizer = torch.optim.SGD([tensor], lr=0.0)
+
+    scheduler = LinearCyclicalScheduler(optimizer, "lr", 1, 0, 10, warmup_duration=5, monotonic=True)
+    state_dict = scheduler.state_dict()
+
+    def save_lr(engine):
+        lrs.append(optimizer.param_groups[0]["lr"])
+
+    trainer = Engine(lambda engine, batch: None)
+    trainer.add_event_handler(Events.ITERATION_STARTED, scheduler)
+    trainer.add_event_handler(Events.ITERATION_COMPLETED, save_lr)
+    lr_values_in_cycle = [
+        1.0,
+        0.9,
+        0.8,
+        0.7,
+        0.6,
+        0.5,
+        0.4,
+        0.3,
+        0.2,
+        0.1,
+        0.0,
+        0.2,
+        0.4,
+        0.6,
+        0.8,
+        1.0,
+        0.9,
+        0.8,
+        0.7,
+        0.6,
+    ]
+    for _ in range(2):
+        lrs = []
+        trainer.run([0] * 10, max_epochs=2)
+
+        assert lrs == pytest.approx(lr_values_in_cycle)
+        scheduler.load_state_dict(state_dict)
+
+    optimizer = torch.optim.SGD([tensor], lr=0)
+    scheduler = LinearCyclicalScheduler(optimizer, "lr", 1, 0, 10, cycle_mult=2, warmup_duration=5, monotonic=True)
+    state_dict = scheduler.state_dict()
+
+    trainer = Engine(lambda engine, batch: None)
+    trainer.add_event_handler(Events.ITERATION_STARTED, scheduler)
+    trainer.add_event_handler(Events.ITERATION_COMPLETED, save_lr)
+
+    for _ in range(2):
+        lrs = []
+        trainer.run([0] * 10, max_epochs=3)
+
+        assert lrs == list(
+            map(
+                pytest.approx,
+                [
+                    # Cycle 1
+                    1.0,
+                    0.9,
+                    0.8,
+                    0.7,
+                    0.6,
+                    0.5,
+                    0.4,
+                    0.3,
+                    0.2,
+                    0.1,
+                    0.0,
+                    0.2,
+                    0.4,
+                    0.6,
+                    0.8,
+                    # Cycle 2
+                    1.0,
+                    0.95,
+                    0.9,
+                    0.85,
+                    0.8,
+                    0.75,
+                    0.7,
+                    0.65,
+                    0.6,
+                    0.55,
+                    0.5,
+                    0.45,
+                    0.4,
+                    0.35,
+                    0.3,
+                ],
+            )
+        )
+        scheduler.load_state_dict(state_dict)
+
+
 def test_linear_scheduler_cycle_size_two():
     tensor = torch.zeros([1], requires_grad=True)
     optimizer = torch.optim.SGD([tensor], lr=0)
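For reference, here is a standalone sketch (no ignite imports) that reproduces the first expected-value list above, under the assumption, read off from those values, that the warm-up linearly ramps the parameter from ``end_value`` back to ``start_value`` over ``warmup_duration`` events after each sawtooth descent; the helper name is hypothetical.

def monotonic_lr(i, start=1.0, end=0.0, cycle_size=10, warmup=5):
    # Hypothetical re-derivation of the schedule, not ignite's implementation.
    i = i % (cycle_size + warmup)        # one full period = descent + warm-up
    if i <= cycle_size:                  # sawtooth descent over the cycle
        return start + (end - start) * i / cycle_size
    return end + (start - end) * (i - cycle_size) / warmup  # warm-up ramp

values = [monotonic_lr(i) for i in range(20)]
# Matches lr_values_in_cycle in test_linear_scheduler_warmup_duration up to
# floating-point rounding: 1.0, 0.9, ..., 0.0, 0.2, 0.4, 0.6, 0.8, 1.0, 0.9, ...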
