/dports/misc/py-gluonnlp/gluon-nlp-0.10.0/src/gluonnlp/model/

  language_model.py
     66: tie_weights, dropout, weight_drop, drop_h,
     69: tie_weights, dropout, weight_drop,
    133: num_layers, dropout, tie_weights, **kwargs):
    134: if tie_weights:
    140: num_layers, dropout, tie_weights, **kwargs)

  translation.py
     72: src_embed=None, tgt_embed=None, share_embed=False, tie_weights=False,
    117: if tie_weights:

  transformer.py
    931: decoder, one_step_ahead_decoder, share_embed, embed_size, tie_weights,
    942: kwargs['tie_weights'] = tie_weights
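In translation.py and transformer.py above, tie_weights controls whether the decoder's projection onto the target vocabulary reuses the target embedding matrix (share_embed is the analogous switch for sharing source and target embeddings). The following is only an illustrative sketch of that projection-tying idea in Gluon, with a hypothetical block name; it is not the NMTModel code these hits come from:

    from mxnet.gluon import nn, HybridBlock

    class TiedProjectionHead(HybridBlock):
        """Hypothetical target-side head: embedding plus a vocabulary
        projection that can share one (tgt_vocab_size, embed_size) matrix."""
        def __init__(self, tgt_vocab_size, embed_size, tie_weights=True, **kwargs):
            super(TiedProjectionHead, self).__init__(**kwargs)
            with self.name_scope():
                self.tgt_embed = nn.Embedding(tgt_vocab_size, embed_size)
                if tie_weights:
                    # Reuse the embedding parameters for the output projection,
                    # as the tie_weights=True call sites in this listing request.
                    self.tgt_proj = nn.Dense(tgt_vocab_size, flatten=False,
                                             params=self.tgt_embed.params)
                else:
                    self.tgt_proj = nn.Dense(tgt_vocab_size, flatten=False,
                                             in_units=embed_size)

        def hybrid_forward(self, F, decoder_states):
            # decoder_states: (batch, tgt_len, embed_size) -> per-step logits
            return self.tgt_proj(decoder_states)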
/dports/misc/py-gluonnlp/gluon-nlp-0.10.0/tests/unittest/

  test_model_weight_share.py
     42: tie_weights=weight_tied)
     44: tie_weights=weight_tied,
/dports/misc/mxnet/incubator-mxnet-1.9.0/example/gluon/word_language_model/

  model.py
     26: num_layers, dropout=0.5, tie_weights=False, **kwargs):
     48: if tie_weights:
/dports/misc/py-mxnet/incubator-mxnet-1.9.0/example/gluon/word_language_model/

  model.py
     26: num_layers, dropout=0.5, tie_weights=False, **kwargs):
     48: if tie_weights:
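The model.py hits in both MXNet ports point at the classic weight-tying construction for an RNN language model: when tie_weights is true, the output Dense layer is built on the embedding's parameter dict, so the same (vocab_size, num_embed) matrix serves as both input embedding and output projection, which only works when num_hidden equals num_embed. A minimal sketch of that pattern (argument names follow the snippets above; the body is an approximation, not the file's exact code):

    from mxnet import gluon
    from mxnet.gluon import nn, rnn

    class RNNModel(gluon.Block):
        """LSTM language model with optional embedding/output weight tying."""
        def __init__(self, vocab_size, num_embed, num_hidden,
                     num_layers, dropout=0.5, tie_weights=False, **kwargs):
            super(RNNModel, self).__init__(**kwargs)
            with self.name_scope():
                self.drop = nn.Dropout(dropout)
                self.encoder = nn.Embedding(vocab_size, num_embed)
                self.rnn = rnn.LSTM(num_hidden, num_layers, dropout=dropout,
                                    input_size=num_embed)
                if tie_weights:
                    # Share the (vocab_size, num_embed) embedding matrix with
                    # the output projection; requires num_hidden == num_embed.
                    assert num_embed == num_hidden, \
                        'tie_weights requires num_embed == num_hidden'
                    self.decoder = nn.Dense(vocab_size, in_units=num_hidden,
                                            params=self.encoder.params)
                else:
                    self.decoder = nn.Dense(vocab_size, in_units=num_hidden)

        def begin_state(self, *args, **kwargs):
            return self.rnn.begin_state(*args, **kwargs)

        def forward(self, inputs, hidden):
            emb = self.drop(self.encoder(inputs))       # (seq_len, batch, num_embed)
            output, hidden = self.rnn(emb, hidden)      # (seq_len, batch, num_hidden)
            decoded = self.decoder(
                self.drop(output).reshape((-1, output.shape[-1])))
            return decoded, hidden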
/dports/misc/py-gluonnlp/gluon-nlp-0.10.0/src/gluonnlp/model/train/

  language_model.py
     61: tie_weights=True, dropout=0.4, weight_drop=0.5, drop_h=0.2,
     74: self._tie_weights = tie_weights
    237: num_layers, dropout=0.5, tie_weights=False, **kwargs):
    238: if tie_weights:
    249: self._tie_weights = tie_weights
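The train-mode AWD-LSTM above defaults to tie_weights=True alongside the weight_drop and drop_h regularizers. A short usage sketch, assuming gluonnlp's get_model entry point and the awd_lstm_lm_1150 model name (pretrained=True downloads weights from the model zoo):

    import mxnet as mx
    import gluonnlp as nlp

    # Assumed API: for language models, nlp.model.get_model returns
    # (model, vocab).
    model, vocab = nlp.model.get_model('awd_lstm_lm_1150',
                                       dataset_name='wikitext-2',
                                       pretrained=True,
                                       ctx=mx.cpu())

    # With tie_weights=True (the default shown above) the embedding and the
    # output layer resolve to one shared parameter in the collected dict.
    print(model)
    print(sorted(model.collect_params().keys()))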
/dports/misc/py-gluonnlp/gluon-nlp-0.10.0/scripts/machine_translation/

  inference_transformer.py
    163: embed_size=args.num_units, tie_weights=args.dataset != 'TOY',

  train_transformer.py
    184: tie_weights=args.dataset not in ('TOY', 'IWSLT2015'), embed_initializer=None,
/dports/misc/mxnet/incubator-mxnet-1.9.0/docs/python_docs/python/tutorials/packages/gluon/text/

  transformer.rst
    469: share_embed=True, embed_size=hparams.num_units, tie_weights=True,
/dports/misc/py-mxnet/incubator-mxnet-1.9.0/docs/python_docs/python/tutorials/packages/gluon/text/

  transformer.rst
    469: share_embed=True, embed_size=hparams.num_units, tie_weights=True,