1import math
2
3import numpy
4
5from chainer import backend
6from chainer.backends import cuda
7from chainer import function_node
8from chainer import utils
9from chainer.utils import type_check
10import chainerx
11
12
class Exp(function_node.FunctionNode):

    """Function node computing the elementwise exponential ``y = exp(x)``."""

    @property
    def label(self):
        return 'exp'

    def check_type_forward(self, in_types):
        # Use _argname like the sibling Log/Log2/Log10 nodes; it also
        # verifies that exactly one input is given, replacing the explicit
        # ``in_types.size() == 1`` check.
        type_check._argname(in_types, ('x',))
        type_check.expect(in_types[0].dtype.kind == 'f')

    def forward_chainerx(self, x):
        return chainerx.exp(x[0]),

    def forward_cpu(self, x):
        # Retain the output: the derivative of exp is exp itself, so
        # backward can reuse y instead of recomputing it.
        self.retain_outputs((0,))
        return utils.force_array(numpy.exp(x[0])),

    def forward_gpu(self, x):
        self.retain_outputs((0,))
        return cuda.cupy.exp(x[0]),

    def backward(self, indexes, gy):
        # d(exp(x))/dx = exp(x) = y, hence grad_x = y * gy.
        y = self.get_retained_outputs()[0]
        return y * gy[0],
37
38
def exp(x):
    """Elementwise exponential function.

    .. math::
       y_i = \\exp x_i.

    Args:
        x (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable.

    Returns:
        ~chainer.Variable: Output variable.
    """
    y, = Exp().apply((x,))
    return y
49
50
class Log(function_node.FunctionNode):

    """Function node computing the elementwise natural logarithm."""

    @property
    def label(self):
        return 'log'

    def check_type_forward(self, in_types):
        type_check._argname(in_types, ('x',))
        type_check.expect(in_types[0].dtype.kind == 'f')

    def forward_chainerx(self, x):
        return chainerx.log(x[0]),

    def forward_cpu(self, x):
        # Keep the input around: backward needs x to compute gy / x.
        self.retain_inputs((0,))
        y = numpy.log(x[0])
        return utils.force_array(y),

    def forward_gpu(self, x):
        self.retain_inputs((0,))
        y = cuda.cupy.log(x[0])
        return y,

    def backward(self, indexes, gy):
        # d(log(x))/dx = 1 / x, hence grad_x = gy / x.
        x, = self.get_retained_inputs()
        return utils.force_array(gy[0] / x),
75
76
def log(x):
    """Elementwise natural logarithm function.

    .. math::
       y_i = \\ln x_i.

    Args:
        x (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable.

    Returns:
        ~chainer.Variable: Output variable.
    """
    y, = Log().apply((x,))
    return y
87
88
class Log2(function_node.FunctionNode):

    """Function node computing the elementwise base-2 logarithm."""

    @property
    def label(self):
        return 'log2'

    def check_type_forward(self, in_types):
        type_check._argname(in_types, ('x',))
        type_check.expect(in_types[0].dtype.kind == 'f')

    def forward(self, inputs):
        # A single forward covers CPU and GPU: the array module is picked
        # from the concrete input array.
        self.retain_inputs((0,))
        x, = inputs
        xp = backend.get_array_module(x)
        return utils.force_array(xp.log2(x)),

    def backward(self, indexes, gy):
        # d(log2(x))/dx = 1 / (x * ln 2).
        x, = self.get_retained_inputs()
        return gy[0] / x * (1 / math.log(2)),
108
109
def log2(x):
    """Elementwise logarithm function to the base 2.

    .. math::
       y_i = \\log_2 x_i.

    Args:
        x (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable.

    Returns:
        ~chainer.Variable: Output variable.
    """
    y, = Log2().apply((x,))
    return y
123
124
class Log10(function_node.FunctionNode):

    """Function node computing the elementwise base-10 logarithm."""

    @property
    def label(self):
        return 'log10'

    def check_type_forward(self, in_types):
        type_check._argname(in_types, ('x',))
        type_check.expect(in_types[0].dtype.kind == 'f')

    def forward(self, inputs):
        # A single forward covers CPU and GPU: the array module is picked
        # from the concrete input array.
        self.retain_inputs((0,))
        x, = inputs
        xp = backend.get_array_module(x)
        return utils.force_array(xp.log10(x)),

    def backward(self, indexes, gy):
        # d(log10(x))/dx = 1 / (x * ln 10).
        x, = self.get_retained_inputs()
        return gy[0] / x * (1 / math.log(10)),
144
145
def log10(x):
    """Elementwise logarithm function to the base 10.

    .. math::
       y_i = \\log_{10} x_i.

    Args:
        x (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable.

    Returns:
        ~chainer.Variable: Output variable.
    """
    y, = Log10().apply((x,))
    return y
159