activation.py 5.5 kB

# -*- coding: utf-8 -*-
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2020 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
import numpy as np

from ..core import Parameter
from ..functional import leaky_relu, prelu, relu, sigmoid, softmax
from .module import Module


class Softmax(Module):
    r"""
    Applies a softmax function. Softmax is defined as:

    .. math::
        \text{Softmax}(x_{i}) = \frac{\exp(x_i)}{\sum_j \exp(x_j)}

    It is applied to an n-dimensional input Tensor and rescales it so that the
    elements of the n-dimensional output Tensor lie in the range `[0, 1]` and sum to 1.

    :param axis: an axis along which softmax will be applied. By default,
        softmax is applied along the highest ranked axis.

    Examples:

    .. testcode::

        import numpy as np
        import megengine as mge
        import megengine.module as M

        data = mge.tensor(np.array([-2, -1, 0, 1, 2]).astype(np.float32))
        softmax = M.Softmax()
        output = softmax(data)
        with np.printoptions(precision=6):
            print(output.numpy())

    Outputs:

    .. testoutput::

        [0.011656 0.031685 0.086129 0.234122 0.636409]

    """

    def __init__(self, axis=None):
        super().__init__()
        self.axis = axis

    def forward(self, inputs):
        return softmax(inputs, self.axis)


class Sigmoid(Module):
    r"""
    Applies the element-wise function:

    .. math::
        \text{Sigmoid}(x) = \frac{1}{1 + \exp(-x)}

    Examples:

    .. testcode::

        import numpy as np
        import megengine as mge
        import megengine.module as M

        data = mge.tensor(np.array([-2, -1, 0, 1, 2]).astype(np.float32))
        sigmoid = M.Sigmoid()
        output = sigmoid(data)
        with np.printoptions(precision=6):
            print(output.numpy())

    Outputs:

    .. testoutput::

        [0.119203 0.268941 0.5      0.731059 0.880797]

    """

    def forward(self, inputs):
        return sigmoid(inputs)


class ReLU(Module):
    r"""
    Applies the element-wise function:

    .. math::
        \text{ReLU}(x) = \max(x, 0)

    Examples:

    .. testcode::

        import numpy as np
        import megengine as mge
        import megengine.module as M

        data = mge.tensor(np.array([-2, -1, 0, 1, 2]).astype(np.float32))
        relu = M.ReLU()
        output = relu(data)
        with np.printoptions(precision=6):
            print(output.numpy())

    Outputs:

    .. testoutput::

        [0. 0. 0. 1. 2.]

    """

    def forward(self, x):
        return relu(x)


class PReLU(Module):
    r"""
    Applies the element-wise function:

    .. math::
        \text{PReLU}(x) = \max(0, x) + a * \min(0, x)

    or

    .. math::
        \text{PReLU}(x) =
        \begin{cases}
        x, & \text{ if } x \geq 0 \\
        ax, & \text{ otherwise }
        \end{cases}

    Here :math:`a` is a learnable parameter. When called without arguments, ``PReLU()``
    uses a single parameter :math:`a` across all input channels. If called with
    ``PReLU(num_of_channels)``, a separate :math:`a` is used for each input channel.

    :param num_parameters: number of :math:`a` to learn. Only two values are
        legitimate: 1, or the number of channels of the input. Default: 1
    :param init: the initial value of :math:`a`. Default: 0.25

    Examples:

    .. testcode::

        import numpy as np
        import megengine as mge
        import megengine.module as M

        data = mge.tensor(np.array([-1.2, -3.7, 2.7]).astype(np.float32))
        prelu = M.PReLU()
        output = prelu(data)
        print(output.numpy())

    Outputs:

    .. testoutput::

        [-0.3   -0.925  2.7  ]

    """

    def __init__(self, num_parameters: int = 1, init: float = 0.25):
        super().__init__()
        self.num_parameters = num_parameters
        if num_parameters > 1:
            # Assume input format is NCHW: one slope per channel,
            # broadcastable against an (N, C, H, W) tensor.
            self.weight = Parameter(
                value=np.full((1, num_parameters, 1, 1), init, dtype=np.float32)
            )
        else:
            self.weight = Parameter(value=[init])

    def forward(self, inputs):
        assert self.weight.shape == (1,) or self.weight.shape == (
            1,
            int(inputs.shape[1]),
            1,
            1,
        ), "invalid weight's shape"
        return prelu(inputs, self.weight)


class LeakyReLU(Module):
    r"""
    Applies the element-wise function (with the default ``negative_slope`` of 0.01):

    .. math::
        \text{LeakyReLU}(x) = \max(0, x) + 0.01 * \min(0, x)

    or

    .. math::
        \text{LeakyReLU}(x) =
        \begin{cases}
        x, & \text{ if } x \geq 0 \\
        0.01x, & \text{ otherwise }
        \end{cases}

    :param negative_slope: the slope applied to negative inputs. Default: 0.01

    Examples:

    .. testcode::

        import numpy as np
        import megengine as mge
        import megengine.module as M

        data = mge.tensor(np.array([-8, -12, 6, 10]).astype(np.float32))
        leakyrelu = M.LeakyReLU()
        output = leakyrelu(data)
        print(output.numpy())

    Outputs:

    .. testoutput::

        [-0.08 -0.12  6.   10.  ]

    """

    def __init__(self, negative_slope: float = 0.01):
        super().__init__()
        self.negative_slope = negative_slope

    def forward(self, inputs):
        return leaky_relu(inputs, self.negative_slope)
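The docstring example for `PReLU` only exercises the single-slope case. A minimal sketch of the per-channel path, assuming an NCHW input whose channel count matches `num_parameters` (the shapes and values here are illustrative, not from the file):

import numpy as np
import megengine as mge
import megengine.module as M

# Per-channel PReLU: one learnable slope per input channel.
# The input must be NCHW with C == num_parameters, or the assert
# in forward() rejects the weight shape.
data = mge.tensor(np.arange(-4, 4, dtype=np.float32).reshape(1, 2, 2, 2))
prelu = M.PReLU(num_parameters=2)  # weight has shape (1, 2, 1, 1), init 0.25
output = prelu(data)
print(output.numpy())  # negatives in each channel scaled by that channel's slope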

The MegEngine package bundles the CUDA environment needed to run code on a GPU, so there is no separate CPU or GPU build. To run GPU programs, make sure the machine has GPU hardware and that its driver is installed. If you would like to try deep-learning development on a cloud GPU compute platform, you are welcome to visit the MegStudio platform.
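To verify that the unified package can actually see a GPU before running GPU code, a minimal check (assuming `megengine.is_cuda_available()`, which recent MegEngine releases provide):

import megengine as mge

# Returns True when the bundled CUDA runtime detects a usable GPU device.
if mge.is_cuda_available():
    print("GPU detected; GPU programs can run.")
else:
    print("No GPU detected; code will run on CPU.")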