
multi_step_lr.py 2.6 kB

# -*- coding: utf-8 -*-
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2020 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
from bisect import bisect_right
from typing import Iterable as Iter

from .distributed_optimizer import DistributedOptimizer
from .lr_scheduler import LRScheduler


class MultiStepLR(LRScheduler):
    r"""Decays the learning rate of each parameter group by gamma once the
    number of epochs reaches one of the milestones.

    :param optimizer: wrapped optimizer.
    :param milestones: list of epoch indices. Must be increasing.
    :param gamma: multiplicative factor of learning rate decay. Default: 0.1.
    :param current_epoch: the index of the current epoch. Default: -1.
    """

    def __init__(
        self,
        optimizer: DistributedOptimizer,
        milestones: Iter[int],
        gamma: float = 0.1,
        current_epoch: int = -1,
    ):
        if not list(milestones) == sorted(milestones):
            raise ValueError(
                "Milestones should be a list of increasing integers. Got {}".format(
                    milestones
                )
            )

        self.milestones = milestones
        self.gamma = gamma
        super().__init__(optimizer, current_epoch)

    def state_dict(self):
        r"""Returns the state of the scheduler as a :class:`dict`.
        It contains an entry for every variable in self.__dict__ which
        is not the optimizer.
        """
        return {
            key: value
            for key, value in self.__dict__.items()
            if key in ["milestones", "gamma", "current_epoch"]
        }

    def load_state_dict(self, state_dict):
        r"""Loads the scheduler's state.

        :param state_dict: scheduler state.
        """
        tmp_dict = {}
        for key in ["milestones", "gamma", "current_epoch"]:
            if key not in state_dict.keys():
                raise KeyError(
                    "key '{}' is not specified in "
                    "state_dict when loading state dict".format(key)
                )
            tmp_dict[key] = state_dict[key]

        self.__dict__.update(tmp_dict)

    def get_lr(self):
        # each milestone already passed multiplies the base lr by another factor of gamma
        return [
            base_lr * self.gamma ** bisect_right(self.milestones, self.current_epoch)
            for base_lr in self.base_lrs
        ]
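For reference, the decay rule in get_lr() can be checked in isolation: the learning rate is base_lr * gamma ** k, where k counts how many milestones the current epoch has already reached (computed with bisect_right). The snippet below is a minimal, standalone sketch of that computation; the milestone and learning-rate values are illustrative only and it does not depend on MegEngine.

from bisect import bisect_right

milestones = [30, 60, 90]  # illustrative milestone epochs
gamma = 0.1
base_lr = 0.1

for epoch in (0, 29, 30, 59, 60, 90, 100):
    # k = number of milestones already reached at this epoch
    k = bisect_right(milestones, epoch)
    print(epoch, base_lr * gamma ** k)
# prints roughly: 0.1, 0.1, 0.01, 0.01, 0.001, 0.0001, 0.0001

Because bisect_right is used, the decay takes effect at the milestone epoch itself: with milestones [30, 60, 90] the multiplier becomes gamma at epoch 30, gamma**2 at epoch 60, and gamma**3 at epoch 90.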

The MegEngine installation package bundles the CUDA environment needed to run code on a GPU, so there is no separate CPU/GPU build. If you want to run GPU programs, make sure the machine has a GPU and that the driver is properly installed. If you would like to try deep-learning development on a cloud GPU platform, you are welcome to visit the MegStudio platform.