Skip to content
This repository was archived by the owner on Jul 1, 2024. It is now read-only.
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 4 additions & 4 deletions classy_vision/configs/hmdb51/r3d34.json
Original file line number Diff line number Diff line change
Expand Up @@ -102,13 +102,13 @@
"schedulers": [
{
"name": "linear",
"start_lr": 0.005,
"end_lr": 0.04
"start_value": 0.005,
"end_value": 0.04
},
{
"name": "cosine",
"start_lr": 0.04,
"end_lr": 0.00004
"start_value": 0.04,
"end_value": 0.00004
}
],
"update_interval": "epoch",
Expand Down
4 changes: 2 additions & 2 deletions classy_vision/configs/kinetics400/postactivated_i3d50.json
Original file line number Diff line number Diff line change
Expand Up @@ -102,8 +102,8 @@
"param_schedulers": {
"lr": {
"name": "cosine",
"start_lr": 0.1,
"end_lr": 0.0001
"start_value": 0.1,
"end_value": 0.0001
}
},
"weight_decay": 0.0001,
Expand Down
4 changes: 2 additions & 2 deletions classy_vision/configs/kinetics400/preactivated_i3d50.json
Original file line number Diff line number Diff line change
Expand Up @@ -102,8 +102,8 @@
"param_schedulers": {
"lr": {
"name": "cosine",
"start_lr": 0.1,
"end_lr": 0.0001
"start_value": 0.1,
"end_value": 0.0001
}
},
"weight_decay": 0.0001,
Expand Down
8 changes: 4 additions & 4 deletions classy_vision/configs/ucf101/r3d34.json
Original file line number Diff line number Diff line change
Expand Up @@ -102,13 +102,13 @@
"schedulers": [
{
"name": "linear",
"start_lr": 0.005,
"end_lr": 0.04
"start_value": 0.005,
"end_value": 0.04
},
{
"name": "cosine",
"start_lr": 0.04,
"end_lr": 0.00004
"start_value": 0.04,
"end_value": 0.00004
}
],
"lengths": [0.13, 0.87],
Expand Down
1 change: 1 addition & 0 deletions classy_vision/optim/param_scheduler/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
from .classy_vision_param_scheduler import ( # noqa F401
ClassyParamScheduler,
UpdateInterval,
update_interval_from_config,
)


Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,22 @@ class UpdateInterval(Enum):
STEP = "step"


def update_interval_from_config(
    config: Dict[str, Any], default: UpdateInterval
) -> UpdateInterval:
    """Extract the update interval from a parameter scheduler configuration.

    Falls back to ``default`` when the config has no ``"update_interval"``
    key at all; an explicitly configured value must be either ``"step"``
    or ``"epoch"``.

    Args:
        config: The config for the parameter scheduler
        default: The value to use if the config doesn't specify an update interval

    Raises:
        ValueError: if a configured interval is neither "step" nor "epoch"
    """
    try:
        interval = config["update_interval"]
    except KeyError:
        # Key absent entirely: caller-supplied default wins.
        return default
    if interval in ("step", "epoch"):
        # Enum member names are the upper-cased config strings (STEP / EPOCH).
        return UpdateInterval[interval.upper()]
    raise ValueError("Choices for update interval are 'step' or 'epoch'")


class ClassyParamScheduler(object):
"""
Base class for Classy parameter schedulers.
Expand All @@ -33,7 +49,7 @@ class ClassyParamScheduler(object):
# To be used for comparisons with where
WHERE_EPSILON = 1e-6

def __init__(self, update_interval: UpdateInterval = UpdateInterval.EPOCH):
def __init__(self, update_interval: UpdateInterval):
"""
Constructor for ClassyParamScheduler

Expand Down
18 changes: 6 additions & 12 deletions classy_vision/optim/param_scheduler/composite_scheduler.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
UpdateInterval,
build_param_scheduler,
register_param_scheduler,
update_interval_from_config,
)


Expand Down Expand Up @@ -41,25 +42,25 @@ class CompositeParamScheduler(ClassyParamScheduler):
update_interval = "step"
schedulers = [
{"name": "constant", "value": 0.42},
{"name": "cosine_decay", "start_lr": 0.42, "end_lr": 0.0001}
{"name": "cosine_decay", "start_value": 0.42, "end_value": 0.0001}
]
interval_scaling = ['rescaled', 'rescaled'],
lengths = [0.3, 0.7]

The parameter value will be 0.42 for the first [0%, 30%) of steps,
and then will cosine decay from 0.42 to 0.0001 for [30%, 100%) of
training.
The schedule is updated after every train step by default.
"""

def __init__(
self,
schedulers: Sequence[ClassyParamScheduler],
lengths: Sequence[float],
update_interval: UpdateInterval,
interval_scaling: Sequence[IntervalScaling],
update_interval: UpdateInterval = UpdateInterval.STEP,
):
super().__init__()
self.update_interval = update_interval
super().__init__(update_interval=update_interval)
self._lengths = lengths
self._schedulers = schedulers
self._interval_scaling = interval_scaling
Expand Down Expand Up @@ -89,13 +90,6 @@ def from_config(cls, config: Dict[str, Any]) -> "CompositeParamScheduler":
), "The sum of all values in lengths must be 1"
if sum(config["lengths"]) != 1.0:
config["lengths"][-1] = 1.0 - sum(config["lengths"][:-1])
update_interval = UpdateInterval.STEP
if "update_interval" in config:
assert config["update_interval"] in {
"step",
"epoch",
}, "Choices for update interval are 'step' or 'epoch'"
update_interval = UpdateInterval[config["update_interval"].upper()]
interval_scaling = []
if "interval_scaling" in config:
assert len(config["schedulers"]) == len(
Expand All @@ -119,7 +113,7 @@ def from_config(cls, config: Dict[str, Any]) -> "CompositeParamScheduler":
build_param_scheduler(scheduler) for scheduler in config["schedulers"]
],
lengths=config["lengths"],
update_interval=update_interval,
update_interval=update_interval_from_config(config, UpdateInterval.STEP),
interval_scaling=interval_scaling,
)

Expand Down
4 changes: 2 additions & 2 deletions classy_vision/optim/param_scheduler/constant_scheduler.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@

from typing import Any, Dict

from . import ClassyParamScheduler, register_param_scheduler
from . import ClassyParamScheduler, UpdateInterval, register_param_scheduler


@register_param_scheduler("constant")
Expand All @@ -16,7 +16,7 @@ class ConstantParamScheduler(ClassyParamScheduler):
"""

def __init__(self, value: float):
super().__init__()
super().__init__(update_interval=UpdateInterval.EPOCH)
self._value = value

@classmethod
Expand Down
38 changes: 26 additions & 12 deletions classy_vision/optim/param_scheduler/cosine_scheduler.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,15 @@
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

import logging
import math
from typing import Any, Dict

from . import ClassyParamScheduler, register_param_scheduler
from . import (
ClassyParamScheduler,
UpdateInterval,
register_param_scheduler,
update_interval_from_config,
)


@register_param_scheduler("cosine")
Expand All @@ -18,19 +22,25 @@ class CosineParamScheduler(ClassyParamScheduler):
//arxiv.org/pdf/1608.03983.pdf>`_.
Can be used for either cosine decay or cosine warmup schedules based on
start and end values.
The schedule is updated after every train step by default.

Example:

.. code-block:: python

start_lr: 0.1
end_lr: 0.0001
start_value: 0.1
end_value: 0.0001
"""

def __init__(self, start_lr: float, end_lr: float):
super().__init__()
self._start_lr = start_lr
self._end_lr = end_lr
def __init__(
    self,
    start_value: float,
    end_value: float,
    update_interval: UpdateInterval = UpdateInterval.STEP,
) -> None:
    """Constructor for the cosine parameter scheduler.

    Args:
        start_value: parameter value at the beginning of training
        end_value: parameter value at the end of training
        update_interval: when the schedule advances; defaults to every
            train step (UpdateInterval.STEP)
    """
    super().__init__(update_interval=update_interval)
    # Endpoints of the cosine curve; interpolated in __call__ based on
    # the fraction of training completed.
    self._start_value = start_value
    self._end_value = end_value

@classmethod
def from_config(cls, config: Dict[str, Any]) -> "CosineParamScheduler":
Expand All @@ -44,12 +54,16 @@ def from_config(cls, config: Dict[str, Any]) -> "CosineParamScheduler":
A CosineParamScheduler instance.
"""
assert (
"start_lr" in config and "end_lr" in config
), "Cosine scheduler requires a start_lr and a end_lr"
"start_value" in config and "end_value" in config
), "Cosine scheduler requires a start_value and a end_value"

return cls(start_lr=config["start_lr"], end_lr=config["end_lr"])
return cls(
start_value=config["start_value"],
end_value=config["end_value"],
update_interval=update_interval_from_config(config, UpdateInterval.STEP),
)

def __call__(self, where: float):
return self._end_lr + 0.5 * (self._start_lr - self._end_lr) * (
return self._end_value + 0.5 * (self._start_value - self._end_value) * (
1 + math.cos(math.pi * where)
)
38 changes: 27 additions & 11 deletions classy_vision/optim/param_scheduler/linear_scheduler.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,28 +6,39 @@

from typing import Any, Dict

from . import ClassyParamScheduler, register_param_scheduler
from . import (
ClassyParamScheduler,
UpdateInterval,
register_param_scheduler,
update_interval_from_config,
)


@register_param_scheduler("linear")
class LinearParamScheduler(ClassyParamScheduler):
"""
Linearly interpolates parameter between ``start_lr`` and ``end_lr``.
Linearly interpolates parameter between ``start_value`` and ``end_value``.
Can be used for either warmup or decay based on start and end values.
The schedule is updated after every train step by default.

Example:

.. code-block:: python

start_lr: 0.0001
end_lr: 0.01
start_value: 0.0001
end_value: 0.01
Corresponds to a linear increasing schedule with values in [0.0001, 0.01)
"""

def __init__(self, start_lr: float, end_lr: float):
super().__init__()
self._start_lr = start_lr
self._end_lr = end_lr
def __init__(
    self,
    start_value: float,
    end_value: float,
    update_interval: UpdateInterval = UpdateInterval.STEP,
) -> None:
    """Constructor for the linear parameter scheduler.

    Args:
        start_value: parameter value at the beginning of training
        end_value: parameter value at the end of training
        update_interval: when the schedule advances; defaults to every
            train step (UpdateInterval.STEP)
    """
    super().__init__(update_interval=update_interval)
    # Endpoints for linear interpolation; the parameter moves from
    # start_value toward end_value as training progresses.
    self._start_value = start_value
    self._end_value = end_value

@classmethod
def from_config(cls, config: Dict[str, Any]) -> "LinearParamScheduler":
Expand All @@ -41,10 +52,15 @@ def from_config(cls, config: Dict[str, Any]) -> "LinearParamScheduler":
A LinearParamScheduler instance.
"""
assert (
"start_lr" in config and "end_lr" in config
"start_value" in config and "end_value" in config
), "Linear scheduler requires a start and a end"
return cls(start_lr=config["start_lr"], end_lr=config["end_lr"])

return cls(
start_value=config["start_value"],
end_value=config["end_value"],
update_interval=update_interval_from_config(config, UpdateInterval.STEP),
)

def __call__(self, where: float):
# interpolate between start and end values
return self._end_lr * where + self._start_lr * (1 - where)
return self._end_value * where + self._start_value * (1 - where)
15 changes: 11 additions & 4 deletions classy_vision/optim/param_scheduler/multi_step_scheduler.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,14 +10,20 @@

from classy_vision.generic.util import is_pos_int

from . import ClassyParamScheduler, UpdateInterval, register_param_scheduler
from . import (
ClassyParamScheduler,
UpdateInterval,
register_param_scheduler,
update_interval_from_config,
)


@register_param_scheduler("multistep")
class MultiStepParamScheduler(ClassyParamScheduler):
"""
Takes a predefined schedule for a param value, and a list of epochs
which stand for the upper boundary (excluded) of each range.
The schedule is updated after every train epoch by default.

Example:

Expand All @@ -37,10 +43,10 @@ def __init__(
self,
values,
num_epochs: int,
update_interval: UpdateInterval,
milestones: Optional[List[int]] = None,
update_interval: UpdateInterval = UpdateInterval.EPOCH,
):
super().__init__(update_interval)
super().__init__(update_interval=update_interval)
self._param_schedule = values
self._num_epochs = num_epochs
self._milestones = milestones
Expand Down Expand Up @@ -96,11 +102,12 @@ def from_config(cls, config: Dict[str, Any]) -> "MultiStepParamScheduler":
"Non-Equi Step scheduler requires a list of %d epochs"
% (len(config["values"]) - 1)
)

return cls(
values=config["values"],
num_epochs=config["num_epochs"],
update_interval=UpdateInterval(config.get("update_interval", "epoch")),
milestones=milestones,
update_interval=update_interval_from_config(config, UpdateInterval.EPOCH),
)

def __call__(self, where: float):
Expand Down
Loading