
Searched defs:AdaGrad (Results 1 – 12 of 12) sorted by relevance

/dports/misc/mxnet/incubator-mxnet-1.9.0/julia/src/optimizers/
adagrad.jl:67  create_state(::AdaGrad, ::Int, W::NDArray) = zeros(size(W), context(W))
adagrad.jl:69  function update!(ada::AdaGrad, ::Int, W::NDArray, ∇::NDArray, x::NDArray)
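The two Julia methods above split the optimizer into the usual AdaGrad pieces: create_state allocates a per-weight accumulator of squared gradients, and update! scales each step by the inverse square root of that accumulator. A minimal NumPy sketch of that update rule (learning rate, epsilon, and the toy data are illustrative and not taken from the listed sources; some libraries place epsilon inside the square root instead):

```python
import numpy as np

def create_state(w):
    # One squared-gradient accumulator entry per weight.
    return np.zeros_like(w)

def adagrad_update(w, grad, state, lr=0.01, eps=1e-8):
    # Accumulate the element-wise squared gradient ...
    state += grad ** 2
    # ... and shrink the step for coordinates that have seen large gradients.
    w -= lr * grad / (np.sqrt(state) + eps)
    return w, state

# Toy usage: one AdaGrad step on a single weight vector.
w = np.array([1.0, -2.0, 0.5])
state = create_state(w)
grad = np.array([0.1, -0.3, 0.05])
w, state = adagrad_update(w, grad, state)
```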
/dports/misc/py-mxnet/incubator-mxnet-1.9.0/julia/src/optimizers/
adagrad.jl:67  create_state(::AdaGrad, ::Int, W::NDArray) = zeros(size(W), context(W))
adagrad.jl:69  function update!(ada::AdaGrad, ::Int, W::NDArray, ∇::NDArray, x::NDArray)
/dports/mail/nextcloud-mail/mail/vendor/rubix/ml/docs/neural-network/optimizers/
adagrad.md:3  # AdaGrad  [chapter]
/dports/misc/mxnet/incubator-mxnet-1.9.0/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/
AdaGrad.scala:33  class AdaGrad(val learningRate: Float = 0.05f, rescaleGradient: Float = 1.0f,  [class]
/dports/misc/py-mxnet/incubator-mxnet-1.9.0/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/
AdaGrad.scala:33  class AdaGrad(val learningRate: Float = 0.05f, rescaleGradient: Float = 1.0f,  [class]
/dports/math/ensmallen/ensmallen-2.17.0/include/ensmallen_bits/ada_grad/
ada_grad_impl.hpp:20  inline AdaGrad::AdaGrad(const double stepSize,  [function in ens::AdaGrad]
ada_grad.hpp:46  class AdaGrad  [class]
/dports/mail/nextcloud-mail/mail/vendor/rubix/ml/src/NeuralNet/Optimizers/
AdaGrad.php:28  class AdaGrad implements Optimizer, Adaptive  [class]
/dports/mail/nextcloud-mail/mail/vendor/rubix/ml/tests/NeuralNet/Optimizers/
AdaGradTest.php:8  use Rubix\ML\NeuralNet\Optimizers\AdaGrad;  [alias]
/dports/science/py-chainer/chainer-7.8.0/chainer/optimizers/
ada_grad.py:83  class AdaGrad(optimizer.GradientMethod):  [class]
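The Chainer hit is chainer.optimizers.AdaGrad, a GradientMethod that is attached to a model with setup. A rough usage sketch (the toy linear model, random data, and hyperparameter value are made up for illustration):

```python
import numpy as np
import chainer.functions as F
import chainer.links as L
from chainer import optimizers

model = L.Linear(3, 1)                    # toy single-layer model
optimizer = optimizers.AdaGrad(lr=0.001)  # chainer.optimizers.AdaGrad
optimizer.setup(model)                    # attach the optimizer to the model's parameters

x = np.random.randn(8, 3).astype(np.float32)
t = np.random.randn(8, 1).astype(np.float32)

loss = F.mean_squared_error(model(x), t)
model.cleargrads()                        # zero any accumulated gradients
loss.backward()                           # backprop
optimizer.update()                        # apply one AdaGrad step
```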
/dports/misc/mxnet/incubator-mxnet-1.9.0/python/mxnet/optimizer/
optimizer.py:1631  class AdaGrad(Optimizer):  [class]
/dports/misc/py-mxnet/incubator-mxnet-1.9.0/python/mxnet/optimizer/
optimizer.py:1631  class AdaGrad(Optimizer):  [class]
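The two optimizer.py hits are the same mxnet.optimizer.AdaGrad class packaged under two ports. In the Gluon API it is usually selected by name when constructing a Trainer; a hedged sketch under that assumption (the network, random data, and learning rate are illustrative):

```python
import mxnet as mx
from mxnet import autograd, gluon, nd

net = gluon.nn.Dense(1)
net.initialize()

# The string 'adagrad' resolves to mxnet.optimizer.AdaGrad.
trainer = gluon.Trainer(net.collect_params(), 'adagrad', {'learning_rate': 0.01})

x = nd.random.normal(shape=(8, 3))
t = nd.random.normal(shape=(8, 1))
loss_fn = gluon.loss.L2Loss()

with autograd.record():
    loss = loss_fn(net(x), t)
loss.backward()
trainer.step(batch_size=8)  # one AdaGrad update, gradients rescaled by the batch size
```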