Searched refs:grad_outputs (Results 1 – 25 of 132) sorted by relevance

/dports/science/py-chainer/chainer-7.8.0/chainer/
_backprop.py
15 def backward(outputs, grad_outputs=None, **kwargs): argument
57 if grad_outputs is not None:
61 .format(type(grad_outputs)))
79 if grad_outputs is None:
80 grad_outputs = []
89 grad_outputs = [grad_outputs[i] for i in indices]
97 grad_outputs = chainer.functions.identity(*grad_outputs)
99 grad_outputs = grad_outputs,
128 if grad_outputs is None:
129 grad_outputs = []
[all …]
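The hits above are Chainer's backprop entry point, where a user-supplied grad_outputs seeds the backward pass and a missing one falls back to an empty list. A minimal sketch of passing that seed through the public chainer.grad API (values are illustrative assumptions, not taken from _backprop.py):

    import numpy as np
    import chainer

    x = chainer.Variable(np.array([1.0, 2.0, 3.0], dtype=np.float32))
    y = x * x  # dy/dx = 2x

    # grad_outputs seeds backprop with gy instead of implicit ones,
    # matching the None -> [] fallback shown above.
    gy = chainer.Variable(np.array([0.5, 0.5, 0.5], dtype=np.float32))
    gx, = chainer.grad([y], [x], grad_outputs=[gy])
    print(gx.array)  # 2 * x * gy -> [1. 2. 3.]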
function_node.py
789 assert isinstance(grad_outputs, tuple)
815 for a in grad_outputs])
839 for gy in grad_outputs]))
1082 if grad_outputs is not None:
1083 if not isinstance(grad_outputs, (tuple, list)):
1086 .format(type(grad_outputs)))
1087 if len(outputs) != len(grad_outputs):
1109 if grad_outputs:
1192 if grad_outputs is None:
1193 grad_outputs = (None,) * len(outputs)
[all …]
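function_node.py asserts that grad_outputs is a tuple and pads a missing one with (None,) * len(outputs). A minimal sketch (assumed example, not from the file) of the new-style FunctionNode API those checks guard:

    import numpy as np
    import chainer
    from chainer import function_node

    class Square(function_node.FunctionNode):
        def forward(self, inputs):
            x, = inputs
            self.retain_inputs((0,))
            return x * x,

        def backward(self, target_input_indexes, grad_outputs):
            # grad_outputs is a tuple of Variables, one per output;
            # entries may be None when a gradient is not required.
            x, = self.get_retained_inputs()
            gy, = grad_outputs
            return 2.0 * x * gy,

    x = chainer.Variable(np.array([3.0], dtype=np.float32))
    y, = Square().apply((x,))
    y.grad = np.ones_like(x.array)
    y.backward()
    print(x.grad)  # [6.]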
function.py
181 def backward(self, target_input_indexes, grad_outputs): argument
192 for grad in grad_outputs])
454 def backward(self, inputs, grad_outputs): argument
478 if any(isinstance(x, cuda.ndarray) for x in inputs + grad_outputs):
479 return self.backward_gpu(inputs, grad_outputs)
481 return self.backward_cpu(inputs, grad_outputs)
483 def backward_cpu(self, inputs, grad_outputs): argument
504 def backward_gpu(self, inputs, grad_outputs): argument
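Lines 454-504 show the old-style chainer.Function dispatch: backward() routes to backward_gpu if any input or gradient is a cuda.ndarray, otherwise to backward_cpu, and both receive raw arrays rather than Variables. A hedged CPU-only sketch:

    import numpy as np
    import chainer

    class SquareOldStyle(chainer.Function):
        def forward_cpu(self, inputs):
            x, = inputs
            return x * x,

        def backward_cpu(self, inputs, grad_outputs):
            x, = inputs
            gy, = grad_outputs  # raw ndarrays here, not Variables
            return 2.0 * x * gy,

    x = chainer.Variable(np.array([3.0], dtype=np.float32))
    y = SquareOldStyle()(x)
    y.grad = np.ones(1, dtype=np.float32)
    y.backward()
    print(x.grad)  # [6.]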
/dports/science/py-chainer/chainer-7.8.0/chainer/functions/math/
trigonometric.py
26 def backward(self, indexes, grad_outputs): argument
48 def backward(self, indexes, grad_outputs): argument
54 ret.append(cos(x) * grad_outputs[0])
85 def backward(self, indexes, grad_outputs): argument
108 def backward(self, indexes, grad_outputs): argument
145 def backward(self, indexes, grad_outputs): argument
177 def backward(self, indexes, grad_outputs): argument
205 def backward(self, indexes, grad_outputs): argument
245 def backward(self, indexes, grad_outputs): argument
274 def backward(self, indexes, grad_outputs): argument
[all …]
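Line 54 is the Sin gradient: d/dx sin(x) = cos(x), so backward returns cos(x) * grad_outputs[0]. A quick numerical confirmation (assumed illustrative values):

    import numpy as np
    import chainer
    import chainer.functions as F

    x = chainer.Variable(np.linspace(0.0, 1.0, 5, dtype=np.float32))
    y = F.sin(x)
    y.grad = np.ones_like(x.array)
    y.backward()
    np.testing.assert_allclose(x.grad, np.cos(x.array), rtol=1e-5)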
minimum.py
28 def backward(self, indexes, grad_outputs): argument
30 return MinimumGrad(x1.data, x2.data).apply((grad_outputs[0],))
59 def backward(self, indexes, grad_outputs): argument
62 ggy = chainer.functions.where(cond, grad_outputs[0], grad_outputs[1])
maximum.py
35 def backward(self, indexes, grad_outputs): argument
37 return MaximumGrad(x1.data, x2.data).apply((grad_outputs[0],))
68 def backward(self, indexes, grad_outputs): argument
70 utils.force_array(self.cond), grad_outputs[0], grad_outputs[1]),
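Minimum and Maximum share one pattern: cache the elementwise comparison, then route grad_outputs[0] to whichever input won it (the grad-of-grad pass selects between grad_outputs[0] and grad_outputs[1] with where). Illustrative check:

    import numpy as np
    import chainer
    import chainer.functions as F

    x1 = chainer.Variable(np.array([1.0, 4.0], dtype=np.float32))
    x2 = chainer.Variable(np.array([3.0, 2.0], dtype=np.float32))
    y = F.maximum(x1, x2)
    y.grad = np.ones(2, dtype=np.float32)
    y.backward()
    print(x1.grad, x2.grad)  # [0. 1.] [1. 0.]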
sqrt.py
29 def backward(self, indexes, grad_outputs): argument
31 gy = grad_outputs[0]
51 def backward(self, indexes, grad_outputs): argument
53 gy, = grad_outputs
clip.py
38 def backward(self, indexes, grad_outputs): argument
40 return ClipGrad(x.data, self.x_min, self.x_max).apply(grad_outputs)
71 def backward(self, indexes, grad_outputs): argument
72 return grad_outputs[0] * self.cond,
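ClipGrad (line 72) multiplies grad_outputs[0] by a cached 0/1 mask (self.cond), zeroing the gradient wherever the input was clipped to x_min or x_max. Assumed example:

    import numpy as np
    import chainer
    import chainer.functions as F

    x = chainer.Variable(np.array([-1.0, 0.5, 2.0], dtype=np.float32))
    y = F.clip(x, 0.0, 1.0)
    y.grad = np.ones(3, dtype=np.float32)
    y.backward()
    print(x.grad)  # [0. 1. 0.] -- only the unclipped element passes gradient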
linear_interpolate.py
36 def backward(self, indexes, grad_outputs): argument
38 gz, = grad_outputs
65 def backward(self, indexes, grad_outputs): argument
67 ggp, ggx, ggy = grad_outputs
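linear_interpolate(p, x, y) computes p * x + (1 - p) * y; the first-order backward unpacks a single gz, while the double-backward unpacks one gradient per input (ggp, ggx, ggy). A minimal sketch of the first-order gradients (assumed values):

    import numpy as np
    import chainer
    import chainer.functions as F

    p = chainer.Variable(np.array([0.25], dtype=np.float32))
    x = chainer.Variable(np.array([2.0], dtype=np.float32))
    y = chainer.Variable(np.array([4.0], dtype=np.float32))
    z = F.linear_interpolate(p, x, y)
    z.grad = np.ones(1, dtype=np.float32)
    z.backward()
    # dz/dp = x - y = [-2.], dz/dx = p = [0.25], dz/dy = 1 - p = [0.75]
    print(p.grad, x.grad, y.grad)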
/dports/science/py-chainer/chainer-7.8.0/chainerx_cc/chainerx/
check_backward.cc
67 if (grad_outputs.has_value()) { in BackwardGradients()
69 if (nout != grad_outputs->size()) { in BackwardGradients()
166 const std::vector<Array>& grad_outputs, in CheckBackwardComputation() argument
262 const std::vector<Array>& grad_outputs, in CheckBackward() argument
273 CHAINERX_ASSERT(!grad_outputs.empty()); in CheckBackward()
275 grad_outputs.begin(), grad_outputs.end(), [&backprop_id](const Array& a) { return a.IsBackpropRequ… in CheckBackward()
337 const std::vector<Array>& grad_outputs, in CheckDoubleBackwardComputationImpl() argument
345 const std::size_t nout = grad_outputs.size(); in CheckDoubleBackwardComputationImpl()
428 … std::copy(grad_outputs.begin(), grad_outputs.end(), std::back_inserter(inputs_and_grad_outputs)); in CheckDoubleBackwardComputationImpl()
438 const std::vector<Array>& grad_outputs, in CheckDoubleBackwardComputation() argument
[all …]
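The Python counterpart of ChainerX's CheckBackward() is chainer.gradient_check.check_backward: it seeds backprop with grad_outputs (the y_grad argument) and compares the analytic gradients against a numerical estimate. Assumed minimal usage:

    import numpy as np
    import chainer.functions as F
    from chainer import gradient_check

    x = np.random.uniform(0.5, 2.0, (4,))   # keep sqrt well away from 0
    gy = np.random.randn(4)                 # the grad_outputs seed
    gradient_check.check_backward(F.sqrt, x, gy, eps=1e-4)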
numerical_gradient_test.cc
36 const Arrays& grad_outputs, in CheckElementwiseNumericalGradient() argument
51 Arrays grads = CalculateNumericalGradient(checked_func, center_inputs, grad_outputs, eps); in CheckElementwiseNumericalGradient()
96 Arrays grad_outputs = { in TEST_P() local
104 Arrays expected_grads = {grad_outputs[0], grad_outputs[0]}; in TEST_P()
107 CheckElementwiseNumericalGradient<float>(forward, inputs, grad_outputs, eps, expected_grads); in TEST_P()
127 Arrays grad_outputs = { in TEST_P() local
135 Arrays expected_grads = {inputs[1] * grad_outputs[0], inputs[0] * grad_outputs[0]}; in TEST_P()
138 CheckElementwiseNumericalGradient<float>(forward, inputs, grad_outputs, eps, expected_grads); in TEST_P()
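The multiply test above expects d(x0*x1)/dx0 = x1 * gy and d(x0*x1)/dx1 = x0 * gy. chainer.gradient_check.numerical_grad makes the same central-difference estimate in Python; note that f takes no arguments and the arrays in inputs are perturbed in place (illustrative values):

    import numpy as np
    from chainer import gradient_check

    x0 = np.array([2.0, 3.0])
    x1 = np.array([4.0, 5.0])
    gy = np.array([1.0, 1.0])

    def f():
        return x0 * x1,

    gx0, gx1 = gradient_check.numerical_grad(f, (x0, x1), (gy,), eps=1e-3)
    np.testing.assert_allclose(gx0, x1 * gy, rtol=1e-4)
    np.testing.assert_allclose(gx1, x0 * gy, rtol=1e-4)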
check_backward_test.cc
86 Arrays grad_outputs{testing::BuildArray(shape).WithData(grad_output_data)}; in CheckCheckBackward() local
90 … CheckBackward(fprop, {input}, grad_outputs, eps, 2, atol, rtol, backprop_scope.backprop_id()); in CheckCheckBackward()
92 …EXPECT_THROW(CheckBackward(fprop, {input}, grad_outputs, eps, 2, atol, rtol, backprop_scope.backpr… in CheckCheckBackward()
120 Arrays grad_outputs{testing::BuildArray(shape).WithData(grad_output_data)}; in CheckCheckDoubleBackward() local
128 for (auto& grad_output : grad_outputs) { in CheckCheckDoubleBackward()
133 …CheckDoubleBackwardComputation(fprop, inputs, grad_outputs, grad_grad_inputs, eps, 2, atol, rtol, … in CheckCheckDoubleBackward()
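CheckCheckDoubleBackward() additionally needs grad_grad_inputs to seed the second-order pass; the Python analogue is chainer.gradient_check.check_double_backward. Assumed example:

    import numpy as np
    import chainer.functions as F
    from chainer import gradient_check

    x = np.random.randn(3)
    gy = np.random.randn(3)    # grad_outputs for the first backward
    ggx = np.random.randn(3)   # grad_grad_inputs for the second
    gradient_check.check_double_backward(F.sin, x, gy, ggx)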
/dports/science/py-chainer/chainer-7.8.0/chainermn/functions/
collective_communication.py
28 def backward(self, inputs, grad_outputs): argument
30 grad_dtype = grad_outputs[0].dtype
36 gxs = self.comm.alltoall(grad_outputs)
71 def backward(self, inputs, grad_outputs): argument
72 assert self.comm.size == len(grad_outputs)
80 gys = tuple([gy for gy in grad_outputs])
131 def backward(self, inputs, grad_outputs): argument
132 gx, = grad_outputs
182 def backward(self, inputs, grad_outputs): argument
236 def backward(self, inputs, grad_outputs): argument
[all …]
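The pattern in these ChainerMN hits: the backward of a collective is another collective on grad_outputs (e.g. all-to-all's gradient is itself an all-to-all, since output j on rank i originated as input i on rank j). A conceptual, MPI-free sketch with plain lists (not ChainerMN code):

    def alltoall(parts_per_rank):
        # parts_per_rank[i][j] is what rank i sends to rank j.
        n = len(parts_per_rank)
        return [[parts_per_rank[j][i] for j in range(n)] for i in range(n)]

    inputs = [["a0", "a1"], ["b0", "b1"]]            # per-rank send buffers
    outputs = alltoall(inputs)                       # forward exchange
    grad_outputs = [["ga0", "gb0"], ["ga1", "gb1"]]  # grads arriving per rank
    grad_inputs = alltoall(grad_outputs)             # backward = same exchange
    print(grad_inputs)  # [['ga0', 'ga1'], ['gb0', 'gb1']] -- aligned with inputs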
/dports/science/py-chainer/chainer-7.8.0/chainer/testing/
function_link.py
104 grad_outputs = tuple([
107 return grad_outputs
144 return grad_outputs
246 grad_outputs = backend_config.get_array(grad_outputs)
248 grad_outputs = self._to_noncontiguous_as_needed(grad_outputs)
299 grad_outputs = backend_config.get_array(grad_outputs)
302 grad_outputs = self._to_noncontiguous_as_needed(grad_outputs)
766 return grad_outputs
827 grad_outputs = backend_config.get_array(grad_outputs)
831 grad_outputs = self._to_noncontiguous_as_needed(grad_outputs)
[all …]
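These hits come from Chainer's generic test harness: chainer.testing.FunctionTestCase generates grad_outputs, moves them to the backend under test, and optionally makes them non-contiguous before running the gradient checks. A hedged minimal sketch (the CPU-only backend configuration is an assumption):

    import numpy as np
    import chainer.functions as F
    from chainer import testing

    @testing.inject_backend_tests(None, [{}])  # plain NumPy backend only
    class TestSin(testing.FunctionTestCase):
        def generate_inputs(self):
            return np.random.randn(3).astype(np.float32),

        def forward(self, inputs, device):
            x, = inputs
            return F.sin(x),

        def forward_expected(self, inputs):
            x, = inputs
            return np.sin(x),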
/dports/science/py-chainer/chainer-7.8.0/chainerx_cc/chainerx/python/
check_backward.cc
49 const std::vector<ArrayBodyPtr>& grad_outputs, in InitChainerxCheckBackward()
58 {grad_outputs.begin(), grad_outputs.end()}, in InitChainerxCheckBackward()
76 const std::vector<ArrayBodyPtr>& grad_outputs, in InitChainerxCheckBackward() argument
86 {grad_outputs.begin(), grad_outputs.end()}, in InitChainerxCheckBackward()
/dports/science/py-chainer/chainer-7.8.0/chainer/functions/activation/
clipped_relu.py
63 def backward(self, indexes, grad_outputs): argument
68 grad_outputs)
70 return ClippedReLUGrad2(x.data, self.cap).apply(grad_outputs)
99 def backward(self, indexes, grad_outputs): argument
100 return ClippedReLUGrad2(self.x, self.cap).apply(grad_outputs)
126 def backward(self, indexes, grad_outputs): argument
127 return ClippedReLUGrad3(self.x, self.y, self.cap).apply(grad_outputs)
relu.py
62 def backward(self, indexes, grad_outputs): argument
63 gy, = grad_outputs
111 def backward(self, indexes, grad_outputs): argument
112 return ReLUGrad2(self.b).apply(grad_outputs)
137 def backward(self, indexes, grad_outputs): argument
138 return ReLUGrad2(self.y).apply(grad_outputs)
hard_sigmoid.py
34 def backward(self, indexes, grad_outputs): argument
36 return HardSigmoidGrad(x.data).apply(grad_outputs)
67 def backward(self, indexes, grad_outputs): argument
68 return HardSigmoidGrad(self.x).apply(grad_outputs)
elu.py
43 def backward(self, indexes, grad_outputs): argument
49 gy, = grad_outputs
75 def backward(self, indexes, grad_outputs): argument
76 ggx, = grad_outputs
tanh.py
45 def backward(self, indexes, grad_outputs): argument
51 gy = grad_outputs[0]
84 def backward(self, indexes, grad_outputs): argument
86 ggx = grad_outputs[0]
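Tanh retains its output y and computes gx = gy * (1 - y * y); the grad function's backward then reuses ggx = grad_outputs[0]. Numerical confirmation (assumed values):

    import numpy as np
    import chainer
    import chainer.functions as F

    x = chainer.Variable(np.array([0.5], dtype=np.float32))
    y = F.tanh(x)
    y.grad = np.ones(1, dtype=np.float32)
    y.backward()
    np.testing.assert_allclose(x.grad, 1.0 - np.tanh(0.5) ** 2, rtol=1e-5)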
/dports/science/py-chainer/chainer-7.8.0/chainer/functions/array/
spatial_transformer_grid.py
76 def backward_cpu(self, inputs, grad_outputs): argument
77 return self._backward(inputs, grad_outputs)
79 def backward_gpu(self, inputs, grad_outputs): argument
81 return self._backward(inputs, grad_outputs)
83 ggrid, = grad_outputs
93 def _backward(self, inputs, grad_outputs): argument
95 ggrid, = grad_outputs
separate.py
31 def backward(self, indexes, grad_outputs): argument
32 grad_outputs = [
34 if g is None else g for g in grad_outputs]
35 return stack.stack(grad_outputs, self.axis),
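separate() splits an axis into a tuple of Variables; its backward (above) replaces any None entries in grad_outputs with zero arrays and stacks them back along the same axis. Example:

    import numpy as np
    import chainer
    import chainer.functions as F

    x = chainer.Variable(np.arange(6, dtype=np.float32).reshape(2, 3))
    y0, y1 = F.separate(x, axis=0)
    loss = F.sum(y0)   # y1 is unused, so its grad_outputs entry is None
    loss.backward()
    print(x.grad)      # [[1. 1. 1.], [0. 0. 0.]] -- None was zero-filled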
broadcast.py
26 def backward(self, indexes, grad_outputs): argument
27 return tuple([None if grad_outputs[i] is None else
29 grad_outputs[i], self.inputs[i].shape)
102 def backward(self, indexes, grad_outputs): argument
103 gx, = grad_outputs
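Broadcasting's backward (above) reduces each incoming gradient back to its input shape with sum_to, summing over the broadcast axes. Example with broadcast_to:

    import numpy as np
    import chainer
    import chainer.functions as F

    x = chainer.Variable(np.array([1.0, 2.0], dtype=np.float32))
    y = F.broadcast_to(x, (3, 2))
    y.grad = np.ones((3, 2), dtype=np.float32)
    y.backward()
    print(x.grad)  # [3. 3.] -- summed over the broadcast axis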
diagonal.py
27 def backward(self, indexes, grad_outputs): argument
30 ).apply(grad_outputs)
52 def backward(self, indexes, grad_outputs): argument
54 grad_outputs)
/dports/science/py-chainer/chainer-7.8.0/chainer/functions/normalization/
l2_normalization.py
24 def backward(self, indices, grad_outputs): argument
25 g, = grad_outputs
57 def backward(self, indexes, grad_outputs): argument
59 gy, = grad_outputs
