
feat(mge/functional): add softplus function

GitOrigin-RevId: 26c1ab7416
tags/v0.5.0
Megvii Engine Team · 5 years ago
commit 5eca4da323
3 changed files with 37 additions and 1 deletion
1. python_module/megengine/functional/__init__.py (+1 / -0)
2. python_module/megengine/functional/nn.py (+20 / -1)
3. python_module/test/unit/functional/test_functional.py (+16 / -0)

python_module/megengine/functional/__init__.py (+1 / -0)

@@ -72,6 +72,7 @@ from .nn import (
     roi_align,
     roi_pooling,
     softmax,
+    softplus,
     warp_perspective,
 )
 from .quantized import conv_bias_activation
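
With the new export in place, softplus is available directly from megengine.functional. A minimal usage sketch; the "from megengine import tensor" import path is an assumption based on the conventions of this v0.5-era codebase, not something this commit touches:

    import numpy as np
    import megengine.functional as F
    from megengine import tensor  # assumed import path; mirrors the test file's usage

    x = tensor(np.array([-2.0, 0.0, 3.0, 50.0], dtype=np.float32))
    y = F.softplus(x)  # 50.0 exceeds the default threshold=20 and passes through unchanged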


python_module/megengine/functional/nn.py (+20 / -1)

@@ -18,7 +18,8 @@ from ..jit import barrier, mark_impure
 from ..random import uniform
 from ..utils.types import _pair, _pair_nonzero
 from .debug_param import get_conv_execution_strategy
-from .tensor import concat
+from .elemwise import exp, log
+from .tensor import concat, where
 from .utils import _decide_comp_node_and_comp_graph


@@ -268,6 +269,24 @@ def leaky_relu(inp: Tensor, negative_slope: float = 0.01) -> Tensor:
 
 
+@wrap_io_tensor
+def softplus(inp: Tensor, beta: float = 1, threshold: float = 20) -> Tensor:
+    r"""
+    Performs the elementwise function:
+
+    .. math::
+        \mathsf{softplus}(x) = \log(1 + \exp(\beta x)) / \beta
+
+    For numerical stability the identity function is used
+    when :math:`\beta x > \textrm{threshold}`.
+    """
+    mask = beta * inp <= threshold
+    out = log(1 + exp(beta * inp)) / beta
+    out = where(mask, out, inp)
+    return out
+
+
 @wrap_io_tensor
 def flatten(inp: Tensor, start_axis: int = 0, end_axis: int = -1) -> Tensor:
     r"""
     Reshapes the tensor by flattening the sub-tensor from dimension ``start_axis`` to dimension ``end_axis``.
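
The where/mask pair is the whole point of the threshold parameter: exp(beta * x) overflows float32 well before beta * x reaches 100, so the exact formula is only evaluated where beta * x <= threshold and the identity is used elsewhere. A standalone NumPy sketch of the same stabilization, for illustration only; softplus_ref is a hypothetical helper, not the MegEngine implementation:

    import numpy as np

    def softplus_ref(x, beta=1.0, threshold=20.0):
        # Naive log(1 + exp(beta * x)) / beta overflows float32 once
        # beta * x is large; fall back to the identity there, as the diff does.
        x = np.asarray(x, dtype=np.float32)
        with np.errstate(over="ignore"):  # overflowed lanes are discarded by the mask
            exact = np.log1p(np.exp(beta * x)) / beta
        return np.where(beta * x <= threshold, exact, x)

    print(softplus_ref([-1.0, 0.0, 30.0]))  # ~[0.3133, 0.6931, 30.0]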


python_module/test/unit/functional/test_functional.py (+16 / -0)

@@ -439,3 +439,19 @@ def test_conv_bias():
 
     run(10, 36, 8, 46, 26, 2, 2, 2, 1, 1, 2, False, "RELU")
     run(10, 36, 8, 46, 26, 2, 2, 2, 1, 1, 2, True, "RELU")
+
+
+def test_softplus():
+    x = np.arange(1000).astype(np.float32)
+    out = F.softplus(tensor(x))
+    mask = x <= 20
+    with np.errstate(over="ignore"):
+        expected = np.where(mask, np.log(1 + np.exp(x)), x)
+    assertTensorClose(out, expected)
+    beta = 2
+    out = F.softplus(tensor(x), beta=beta, threshold=30)
+    mask = beta * x <= 30
+    # ignore overflow
+    with np.errstate(over="ignore"):
+        expected = np.where(mask, np.log(1 + np.exp(x * beta)) / beta, x)
+    assertTensorClose(out, expected)
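
The tests can agree with the NumPy reference within assertTensorClose's tolerance because the branch switch is numerically invisible at float32 precision: log(1 + exp(t)) = t + log(1 + exp(-t)) ≈ t + exp(-t), so at the default cutover t = beta * x = 20 the two branches differ by about exp(-20) ≈ 2.1e-9, far below the float32 spacing near 20 (about 1.9e-6).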
