You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; can include dashes ('-'); and can be up to 35 characters long.

normalization.py 4.2 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130
  1. import numpy as np
  2. import megengine as mge
  3. import megengine.functional as F
  4. from megengine import Parameter
  5. from .init import ones_, zeros_
  6. from .module import Module
  7. class GroupNorm(Module):
  8. """Simple implementation of GroupNorm. Only support 4d tensor now.
  9. Reference: https://arxiv.org/pdf/1803.08494.pdf.
  10. """
  11. def __init__(self, num_groups, num_channels, eps=1e-5, affine=True, **kwargs):
  12. super().__init__(**kwargs)
  13. assert num_channels % num_groups == 0
  14. self.num_groups = num_groups
  15. self.num_channels = num_channels
  16. self.eps = eps
  17. self.affine = affine
  18. if self.affine:
  19. self.weight = Parameter(np.ones(num_channels, dtype=np.float32))
  20. self.bias = Parameter(np.zeros(num_channels, dtype=np.float32))
  21. else:
  22. self.weight = None
  23. self.bias = None
  24. self.reset_parameters()
  25. def reset_parameters(self):
  26. if self.affine:
  27. ones_(self.weight)
  28. zeros_(self.bias)
  29. def forward(self, x):
  30. x = F.nn.group_norm(
  31. x, self.num_groups, self.affine, self.weight, self.bias, self.eps
  32. )
  33. return x
  34. def _module_info_string(self) -> str:
  35. s = (
  36. "groups={num_groups}, channels={num_channels}, "
  37. "eps={eps}, affine={affine}"
  38. )
  39. return s.format(**self.__dict__)
  40. class InstanceNorm(Module):
  41. """Simple implementation of InstanceNorm. Only support 4d tensor now.
  42. Reference: https://arxiv.org/abs/1607.08022.
  43. Note that InstanceNorm equals using GroupNome with num_groups=num_channels.
  44. """
  45. def __init__(self, num_channels, eps=1e-05, affine=True, **kwargs):
  46. super().__init__(**kwargs)
  47. self.num_channels = num_channels
  48. self.eps = eps
  49. self.affine = affine
  50. if self.affine:
  51. self.weight = Parameter(np.ones(num_channels, dtype="float32"))
  52. self.bias = Parameter(np.zeros(num_channels, dtype="float32"))
  53. else:
  54. self.weight = None
  55. self.bias = None
  56. self.reset_parameters()
  57. def reset_parameters(self):
  58. if self.affine:
  59. ones_(self.weight)
  60. zeros_(self.bias)
  61. def forward(self, x):
  62. N, C, H, W = x.shape
  63. format = x.format
  64. assert C == self.num_channels
  65. x = x.reshape(N, C, -1)
  66. mean = x.mean(axis=2, keepdims=True)
  67. var = (x ** 2).mean(axis=2, keepdims=True) - mean * mean
  68. x = (x - mean) / F.sqrt(var + self.eps)
  69. x = x.reshape(N, C, H, W)
  70. if self.affine:
  71. x = self.weight.reshape(1, -1, 1, 1) * x + self.bias.reshape(1, -1, 1, 1)
  72. # FIXME(czh): remove this after making it a builtin op.
  73. if format == "nhwc":
  74. x = mge.amp.convert_tensor_format(x, inplace=False)
  75. return x
  76. def _module_info_string(self) -> str:
  77. s = "channels={num_channels}, eps={eps}, affine={affine}"
  78. return s.format(**self.__dict__)
  79. class LayerNorm(Module):
  80. """Simple implementation of LayerNorm. Support tensor of any shape as input.
  81. Reference: https://arxiv.org/pdf/1803.08494.pdf.
  82. """
  83. def __init__(self, normalized_shape, eps=1e-05, affine=True, **kwargs):
  84. super().__init__(**kwargs)
  85. if isinstance(normalized_shape, int):
  86. normalized_shape = (normalized_shape,)
  87. self.normalized_shape = tuple(normalized_shape)
  88. self.eps = eps
  89. self.affine = affine
  90. if self.affine:
  91. self.weight = Parameter(np.ones(self.normalized_shape, dtype="float32"))
  92. self.bias = Parameter(np.zeros(self.normalized_shape, dtype="float32"))
  93. else:
  94. self.weight = None
  95. self.bias = None
  96. self.reset_parameters()
  97. def reset_parameters(self):
  98. if self.affine:
  99. ones_(self.weight)
  100. zeros_(self.bias)
  101. def forward(self, x):
  102. x = F.nn.layer_norm(
  103. x, self.normalized_shape, self.affine, self.weight, self.bias, self.eps
  104. )
  105. return x
  106. def _module_info_string(self) -> str:
  107. s = "normalized_shape={normalized_shape}, eps={eps}, affine={affine}"
  108. return s.format(**self.__dict__)