import numpy

from chainer.backends import cuda
from chainer import function_node
from chainer import utils
from chainer.utils import type_check


class Log1p(function_node.FunctionNode):

    """Elementwise ``log(1 + x)`` as a differentiable function node."""

    @property
    def label(self):
        # Short name shown in computation-graph dumps/visualizations.
        return 'log1p'

    def check_type_forward(self, in_types):
        # Exactly one input, which must have a floating-point dtype.
        type_check._argname(in_types, ('x',))
        x_type = in_types[0]
        type_check.expect(x_type.dtype.kind == 'f')

    def forward_cpu(self, x):
        # Keep the input: backward needs it to form 1 / (x + 1).
        self.retain_inputs((0,))
        y = numpy.log1p(x[0])
        return utils.force_array(y),

    def forward_gpu(self, x):
        self.retain_inputs((0,))
        return cuda.cupy.log1p(x[0]),

    def backward(self, indexes, gy):
        # d/dx log1p(x) = 1 / (1 + x)
        x0, = self.get_retained_inputs()
        return gy[0] / (x0 + 1.0),


def log1p(x):
    """Elementwise natural logarithm plus one function.

    Computes :math:`y_i = \\log(1 + x_i)` for each element of the input.

    Args:
        x (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable.

    Returns:
        ~chainer.Variable: Output variable.
    """
    return Log1p().apply((x,))[0]