import chainer
from chainer.backends import cuda
from chainer import function_node
from chainer import utils
from chainer.utils import type_check

_lgamma_cpu = None


class LGamma(function_node.FunctionNode):

    @property
    def label(self):
        return 'lgamma'

    def check_type_forward(self, in_types):
        type_check._argname(in_types, ('x',))
        type_check.expect(in_types[0].dtype.kind == 'f')

    def forward_cpu(self, x):
        global _lgamma_cpu
        if _lgamma_cpu is None:
            try:
                from scipy import special
                _lgamma_cpu = special.gammaln
            except ImportError:
                raise ImportError('SciPy is not available. Forward computation'
                                  ' of lgamma cannot be done.')
        self.retain_inputs((0,))
        return utils.force_array(_lgamma_cpu(x[0]), dtype=x[0].dtype),

    def forward_gpu(self, x):
        self.retain_inputs((0,))
        return utils.force_array(
            cuda.cupyx.scipy.special.gammaln(x[0]), dtype=x[0].dtype),

    def backward(self, indexes, gy):
        z = self.get_retained_inputs()[0]
        # d/dz lgamma(z) = digamma(z), so the gradient is digamma(z) * gy.
        return chainer.functions.digamma(z) * gy[0],


def lgamma(x):
    """Logarithm of the gamma function.

    .. note::
       Forward computation on CPU cannot be done if
       `SciPy <https://www.scipy.org/>`_ is not available.

    Args:
        x (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable.

    Returns:
        ~chainer.Variable: Output variable.
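
    A minimal usage sketch (assumes NumPy is available and, for the CPU
    path, SciPy; ``F`` here stands for :mod:`chainer.functions`):

    .. code-block:: python

        import numpy as np
        import chainer.functions as F

        x = np.array([1., 2., 3.], dtype=np.float32)
        y = F.lgamma(x)
        # lgamma(1) = lgamma(2) = 0 and lgamma(3) = log(2)
        print(y.array)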
    """
    return LGamma().apply((x,))[0]