import chainer
from chainer import backend
from chainer.backends import cuda
from chainer import function_node
from chainer.utils import type_check
import chainerx


class Copy(function_node.FunctionNode):

    """Copies the input variable onto the specified device."""

    def __init__(self, in_device, out_device):
        self._in_device = in_device
        self._out_device = out_device

    def check_type_forward(self, in_types):
        type_check._argname(in_types, ('x',))

    def forward(self, inputs):
        x, = inputs
        return self._out_device.send(x),

    def forward_chainerx(self, inputs):
        x, = inputs
        return x.to_device(self._out_device.device),

    def backward(self, indexes, grad_outputs):
        f = Copy(self._out_device, self._in_device)
        return f.apply(grad_outputs)
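

# ``Copy`` makes the device transfer differentiable: ``backward`` applies a
# reversed ``Copy``, so gradients are sent back onto the input device. A
# rough sketch of the round trip (illustrative only; ``cpu_device`` and
# ``gpu_device`` stand for arbitrary ``chainer.backend.Device`` objects,
# and the public entry point is ``copy()`` below):
#
#     y, = Copy(cpu_device, gpu_device).apply((x,))  # forward: CPU -> GPU
#     y.grad = gpu_device.xp.ones_like(y.array)
#     y.backward()                                   # gradient: GPU -> CPU
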
# TODO(niboshi): Link from `dst` to the appropriate device specifier docs.
def copy(x, dst):
    """Copies the input variable onto the specified device.

    If the input ``x`` already resides on the device specified by ``dst``,
    no copy actually takes place and the returned variable holds a view of
    the input. Otherwise, the input is copied to ``dst``.
    When ``dst == -1``, the array is copied to host memory.
    This function supports copies from host to host, from host to device,
    from device to device and from device to host.

    Args:
        x (:class:`~chainer.Variable` or :ref:`ndarray`):
            Variable to be copied.
        dst: Target device specifier.

    Returns:
        ~chainer.Variable: Output variable.

    .. admonition:: Example

        >>> import chainer.backends.cuda as cuda
        >>> x_arr = np.random.uniform(-1, 1, (5, 10))
        >>> x = chainer.Variable(x_arr)
        >>> x.device
        <CpuDevice (numpy)>
        >>> y = F.copy(x, '@cupy:0') # from CPU (NumPy) to GPU 0 (CuPy)
        >>> y.device
        <GpuDevice (cupy):0>
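
        A copy back to host memory works the same way:

        >>> z = F.copy(y, '@numpy') # from GPU 0 (CuPy) back to CPU (NumPy)
        >>> z.device
        <CpuDevice (numpy)>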

    .. note::
        Copies between non-ChainerX devices and ChainerX devices are not
        supported.

    """
    # For backward compatibility
    if dst is cuda.DummyDevice:
        dst = chainer.get_device('@numpy')

    x_is_var = isinstance(x, chainer.Variable)
    in_device = backend.get_device_from_array(x.array if x_is_var else x)
    out_device = chainer.get_device(dst)

    if in_device.xp is chainerx:
        x_arr = x.chx_array if x_is_var else x
        if out_device.xp is not chainerx:
            # ChainerX to non-ChainerX
            if x_arr.is_backprop_required():
                raise RuntimeError(
                    'F.copy does not support copy from a ChainerX array with '
                    'backprop required to a non-ChainerX device.\n'
                    'From: {}\n'
                    'To: {}'.format(in_device, out_device))
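            # Without backprop required, the transfer is a plain data copy.
            # The result is detached (``requires_grad=False``) because the
            # ChainerX graph cannot extend onto a non-ChainerX device.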
            return chainer.Variable(
                out_device.send(x_arr), requires_grad=False)

        # ChainerX to ChainerX
        return chainer.Variable(
            out_device.send(x_arr), requires_grad=x_arr.is_backprop_required())

    if out_device.xp is chainerx:
        # Non-ChainerX to ChainerX
        if x_is_var and x.requires_grad:
            raise RuntimeError(
                'F.copy does not support copy from a non-ChainerX array with '
                'backprop required to a ChainerX device.\n'
                'From: {}\n'
                'To: {}'.format(in_device, out_device))
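        # As above, the result is detached: graphs on the two sides of the
        # ChainerX boundary cannot be connected, so gradients never flow
        # through this copy.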
        x_arr = x.array if x_is_var else x
        return chainer.Variable(out_device.send(x_arr), requires_grad=False)

    # Non-ChainerX to non-ChainerX
    y, = Copy(in_device, out_device).apply((x,))
    return y
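

# A minimal end-to-end sketch, under the assumption of a NumPy-only
# environment; it is illustrative and not part of the library API:
if __name__ == '__main__':
    import numpy as np

    x = chainer.Variable(np.ones((2, 3), dtype=np.float32))
    # Same-device copy: no data movement takes place and ``y`` holds a view
    # of ``x``.
    y = copy(x, '@numpy')
    # Gradients flow through ``Copy.backward``, which copies them back onto
    # the input device.
    y.grad = np.ones((2, 3), dtype=np.float32)
    y.backward()
    assert x.grad is not None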