You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; can include dashes ('-'); and can be up to 35 characters long.

normalization.py 4.3 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135
  1. import numpy as np
  2. import megengine.functional as F
  3. from megengine import Parameter
  4. from .init import ones_, zeros_
  5. from .module import Module
  6. class GroupNorm(Module):
  7. """Simple implementation of GroupNorm. Only support 4d tensor now.
  8. Reference: https://arxiv.org/pdf/1803.08494.pdf.
  9. """
  10. def __init__(self, num_groups, num_channels, eps=1e-5, affine=True, **kwargs):
  11. super().__init__(**kwargs)
  12. assert num_channels % num_groups == 0
  13. self.num_groups = num_groups
  14. self.num_channels = num_channels
  15. self.eps = eps
  16. self.affine = affine
  17. if self.affine:
  18. self.weight = Parameter(np.ones(num_channels, dtype=np.float32))
  19. self.bias = Parameter(np.zeros(num_channels, dtype=np.float32))
  20. else:
  21. self.weight = None
  22. self.bias = None
  23. self.reset_parameters()
  24. def reset_parameters(self):
  25. if self.affine:
  26. ones_(self.weight)
  27. zeros_(self.bias)
  28. def forward(self, x):
  29. N, C, H, W = x.shape
  30. assert C == self.num_channels
  31. x = x.reshape(N, self.num_groups, -1)
  32. mean = x.mean(axis=2, keepdims=True)
  33. var = (x * x).mean(axis=2, keepdims=True) - mean * mean
  34. x = (x - mean) / F.sqrt(var + self.eps)
  35. x = x.reshape(N, C, H, W)
  36. if self.affine:
  37. x = self.weight.reshape(1, -1, 1, 1) * x + self.bias.reshape(1, -1, 1, 1)
  38. return x
  39. def _module_info_string(self) -> str:
  40. s = (
  41. "groups={num_groups}, channels={num_channels}, "
  42. "eps={eps}, affine={affine}"
  43. )
  44. return s.format(**self.__dict__)
  45. class InstanceNorm(Module):
  46. """Simple implementation of InstanceNorm. Only support 4d tensor now.
  47. Reference: https://arxiv.org/abs/1607.08022.
  48. Note that InstanceNorm equals using GroupNome with num_groups=num_channels.
  49. """
  50. def __init__(self, num_channels, eps=1e-05, affine=True, **kwargs):
  51. super().__init__(**kwargs)
  52. self.num_channels = num_channels
  53. self.eps = eps
  54. self.affine = affine
  55. if self.affine:
  56. self.weight = Parameter(np.ones(num_channels, dtype="float32"))
  57. self.bias = Parameter(np.zeros(num_channels, dtype="float32"))
  58. else:
  59. self.weight = None
  60. self.bias = None
  61. self.reset_parameters()
  62. def reset_parameters(self):
  63. if self.affine:
  64. ones_(self.weight)
  65. zeros_(self.bias)
  66. def forward(self, x):
  67. N, C, H, W = x.shape
  68. assert C == self.num_channels
  69. x = x.reshape(N, C, -1)
  70. mean = x.mean(axis=2, keepdims=True)
  71. var = (x ** 2).mean(axis=2, keepdims=True) - mean * mean
  72. x = (x - mean) / F.sqrt(var + self.eps)
  73. x = x.reshape(N, C, H, W)
  74. if self.affine:
  75. x = self.weight.reshape(1, -1, 1, 1) * x + self.bias.reshape(1, -1, 1, 1)
  76. return x
  77. def _module_info_string(self) -> str:
  78. s = "channels={num_channels}, eps={eps}, affine={affine}"
  79. return s.format(**self.__dict__)
  80. class LayerNorm(Module):
  81. """Simple implementation of LayerNorm. Support tensor of any shape as input.
  82. Reference: https://arxiv.org/pdf/1803.08494.pdf.
  83. """
  84. def __init__(self, normalized_shape, eps=1e-05, affine=True, **kwargs):
  85. super().__init__(**kwargs)
  86. if isinstance(normalized_shape, int):
  87. normalized_shape = (normalized_shape,)
  88. self.normalized_shape = tuple(normalized_shape)
  89. self.eps = eps
  90. self.affine = affine
  91. if self.affine:
  92. self.weight = Parameter(np.ones(self.normalized_shape, dtype="float32"))
  93. self.bias = Parameter(np.zeros(self.normalized_shape, dtype="float32"))
  94. else:
  95. self.weight = None
  96. self.bias = None
  97. self.reset_parameters()
  98. def reset_parameters(self):
  99. if self.affine:
  100. ones_(self.weight)
  101. zeros_(self.bias)
  102. def forward(self, x):
  103. x = F.nn.layer_norm(
  104. x, self.normalized_shape, self.affine, self.weight, self.bias, self.eps
  105. )
  106. return x
  107. def _module_info_string(self) -> str:
  108. s = "normalized_shape={normalized_shape}, eps={eps}, affine={affine}"
  109. return s.format(**self.__dict__)