# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
import numpy as np
import pytest

import tvm
from tvm import relay
from tvm.relay.testing import check_grad, ctx_list, run_infer_type
from tvm.relay.transform import gradient


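# NumPy reference implementations used by the gradient checks below.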
def sigmoid(x):
    one = np.ones_like(x)
    return one / (one + np.exp(-x))


def relu(x):
    x_copy = np.copy(x)
    np.maximum(x_copy, 0, out=x_copy)
    return x_copy


def test_unary_op():
    def check_single_op(opfunc, ref):
        shape = (10, 4)
        dtype = "float32"
        tp = relay.TensorType(shape, dtype)
        x = relay.var("x", tp)
        y = opfunc(x)

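        # When an analytic derivative `ref` is supplied, compare the gradient
        # produced by Relay's `gradient` pass against it.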
        if ref is not None:
            data = np.random.rand(*shape).astype(dtype)
            ref_grad = ref(data)
            fwd_func = relay.Function([x], y)
            fwd_func = run_infer_type(fwd_func)
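            # `gradient` rewrites the forward function into one returning the
            # original result together with the gradients of all inputs.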
            bwd_func = run_infer_type(gradient(fwd_func))

            for target, ctx in ctx_list():
                intrp = relay.create_executor(ctx=ctx, target=target)
                op_res, (op_grad,) = intrp.evaluate(bwd_func)(data)
                np.testing.assert_allclose(op_grad.asnumpy(), ref_grad, rtol=0.01)

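    # Pairs of (Relay op, analytic derivative as a NumPy function).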
    for opfunc, ref in [(tvm.relay.log, lambda x: 1 / x),
                        (tvm.relay.exp, np.exp),
                        (tvm.relay.sigmoid, lambda x: sigmoid(x) * (1 - sigmoid(x))),
                        (tvm.relay.tanh, lambda x: 1 - np.tanh(x) * np.tanh(x)),
                        (tvm.relay.sqrt, lambda x: 0.5 * np.power(x, -0.5)),
                        (tvm.relay.abs, lambda x: np.where(x < 0, -np.ones_like(x), np.ones_like(x))),
                        (relay.nn.relu, lambda x: np.where(x < 0, np.zeros_like(x), np.ones_like(x))),
                        (tvm.relay.cos, lambda x: -1.0 * np.sin(x)),
                        (tvm.relay.sin, lambda x: np.cos(x)),
                        (tvm.relay.atan, lambda x: 1 / (1 + np.power(x, 2.0)))]:
        check_single_op(opfunc, ref)


def test_binary_op():
    def check_binary_op(opfunc, ref):
        s = (5, 10, 5)
        t = relay.TensorType(s)
        x = relay.var("x", t)
        y = relay.var("y", t)
        z = opfunc(x, y)

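        # `ref` returns the analytic gradients of z with respect to x and y.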
        x_data = np.random.rand(*s).astype(t.dtype)
        y_data = np.random.rand(*s).astype(t.dtype)
        ref_grad0, ref_grad1 = ref(x_data, y_data)
        fwd_func = relay.Function([x, y], z)
        fwd_func = run_infer_type(fwd_func)
        bwd_func = run_infer_type(gradient(fwd_func))

        for target, ctx in ctx_list():
            intrp = relay.create_executor(ctx=ctx, target=target)
            op_res, (op_grad0, op_grad1) = intrp.evaluate(bwd_func)(x_data, y_data)
            np.testing.assert_allclose(op_grad0.asnumpy(), ref_grad0, rtol=0.01)
            np.testing.assert_allclose(op_grad1.asnumpy(), ref_grad1, rtol=0.01)

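    # Pairs of (Relay op, function returning [dz/dx, dz/dy]).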
    for opfunc, ref in [(relay.add, lambda x, y: [np.ones_like(x), np.ones_like(y)]),
                        (relay.subtract, lambda x, y: [np.ones_like(x), -np.ones_like(y)]),
                        (relay.multiply, lambda x, y: [y, x]),
                        (relay.divide, lambda x, y: [1 / y, -x / (y**2)])]:
        check_binary_op(opfunc, ref)


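# check_grad compares the gradient from Relay's `gradient` pass against
# numerical (finite-difference) gradients, so no hand-written derivative
# is needed for the tests below.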
def test_softmax_grad():
    data = relay.var("data", relay.TensorType((1, 16), "float64"))
    fwd_func = relay.Function([data], relay.nn.softmax(data))
    check_grad(fwd_func, scale=1)


def test_log_softmax_grad():
    data = relay.var("data", relay.TensorType((2, 16), "float64"))
    fwd_func = relay.Function([data], relay.nn.log_softmax(data))
    check_grad(fwd_func, scale=1)


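# bias_add broadcasts a 1-D bias along `axis` of the data tensor; the gradient
# with respect to the bias sums over all remaining axes.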
def verify_bias_add(d_shape, b_shape, axis=1):
    data = relay.var("data", relay.TensorType(d_shape, "float32"))
    bias = relay.var("bias", relay.TensorType(b_shape, "float32"))
    fwd_func = relay.Function([data, bias], relay.nn.bias_add(data, bias, axis=axis))
    check_grad(fwd_func)


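# Covers 2-D inputs plus NCHW-style (bias on axis 1) and NHWC-style
# (bias on axis 3) layouts.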
def test_bias_add_grad():
    verify_bias_add((1, 16), (16,))
    verify_bias_add((1, 8, 2, 2), (8,))
    verify_bias_add((1, 2, 2, 8), (8,), 3)
    verify_bias_add((4, 8), (8,))


if __name__ == "__main__":
    pytest.main([__file__])