1import numpy
2
3from chainer.backends import cuda
4from chainer import function_node
5from chainer import utils
6from chainer.utils import type_check
7
8
class HardSigmoid(function_node.FunctionNode):

    """Hard-sigmoid function node.

    Computes ``y = clip(0.2 * x + 0.5, 0, 1)`` element-wise and retains the
    input so the backward pass can mask the gradient to the linear region.
    """

    def check_type_forward(self, in_types):
        # Exactly one floating-point input is accepted.
        type_check._argname(in_types, ('x',))
        x_type, = in_types
        type_check.expect(x_type.dtype.kind == 'f')

    def forward_cpu(self, inputs):
        # Keep the input around for backward().
        self.retain_inputs((0,))
        x, = inputs
        out = numpy.clip(x * 0.2 + 0.5, 0.0, 1.0)
        # force_array also restores x.dtype in case the arithmetic upcast it.
        return utils.force_array(out, x.dtype),

    def forward_gpu(self, inputs):
        # Keep the input around for backward().
        self.retain_inputs((0,))
        x, = inputs
        kernel = cuda.elementwise(
            'T x', 'T y',
            'y = min(1.0, max(0.0, x * 0.2 + 0.5))',
            'hard_sigmoid_fwd'
        )
        return kernel(x),

    def backward(self, indexes, grad_outputs):
        x, = self.get_retained_inputs()
        # The gradient node only needs the raw array, not the Variable.
        return HardSigmoidGrad(x.data).apply(grad_outputs)
37
38
class HardSigmoidGrad(function_node.FunctionNode):

    """Gradient of the hard-sigmoid function.

    Multiplies the incoming gradient by 0.2 where the forward input lay in
    the open interval (-2.5, 2.5) and zeroes it elsewhere.
    """

    def __init__(self, x):
        # Raw input array of the forward pass (ndarray, not a Variable).
        self.x = x

    def check_type_forward(self, in_types):
        type_check._argname(in_types, ('gy',))
        gy_type = in_types[0]
        # The upstream gradient must be float and match the input's dtype.
        type_check.expect(
            gy_type.dtype.kind == 'f',
            gy_type.dtype == self.x.dtype,
        )

    def forward_cpu(self, inputs):
        gy, = inputs
        # Boolean mask of the linear region; multiplying promotes it to 0/1.
        in_band = (-2.5 < self.x) & (self.x < 2.5)
        return utils.force_array(in_band * gy * 0.2, self.x.dtype),

    def forward_gpu(self, inputs):
        gy, = inputs
        kernel = cuda.elementwise(
            'T x, T g', 'T gx',
            'gx = fabs(x) < 2.5 ? 0.2 * g : 0',
            'hard_sigmoid_bwd'
        )
        return kernel(self.x, gy),

    def backward(self, indexes, grad_outputs):
        # gx is linear in gy, so the same masked scaling applies again.
        grad_node = HardSigmoidGrad(self.x)
        return grad_node.apply(grad_outputs)
69
70
def hard_sigmoid(x):
    """Element-wise hard-sigmoid function.

    This function is defined as

    .. math::

        f(x) = \\left \\{ \\begin{array}{ll}
        0 & {\\rm if}~ x < -2.5 \\\\
        0.2 x + 0.5 & {\\rm if}~ -2.5 < x < 2.5 \\\\
        1 & {\\rm if}~ 2.5 < x.
        \\end{array} \\right.

    Args:
        x (:class:`~chainer.Variable` or :ref:`ndarray`):
            Input variable. A :math:`(s_1, s_2, ..., s_N)`-shaped float array.

    Returns:
        ~chainer.Variable: Output variable. A
        :math:`(s_1, s_2, ..., s_N)`-shaped float array.

    .. admonition:: Example

        It maps the input values into the range of :math:`[0, 1]`.

        >>> x = np.array([-2.6, -1, 0, 1, 2.6])
        >>> x
        array([-2.6, -1. ,  0. ,  1. ,  2.6])
        >>> F.hard_sigmoid(x).array
        array([0. , 0.3, 0.5, 0.7, 1. ])

    """
    y, = HardSigmoid().apply((x,))
    return y
104