1from __future__ import absolute_import, print_function, division
2import six
3
4import numpy as np
5import unittest
6from functools import update_wrapper
7
8import theano
9from theano import tensor
10from theano.tests import unittest_tools as utt
11from theano.misc.may_share_memory import may_share_memory
12import theano.sparse
13
14utt.seed_rng()
15
16
17def makeSharedTester(shared_constructor_,
18                     dtype_,
19                     get_value_borrow_true_alias_,
20                     shared_borrow_true_alias_,
21                     set_value_borrow_true_alias_,
22                     set_value_inplace_,
23                     set_cast_value_inplace_,
24                     shared_constructor_accept_ndarray_,
25                     internal_type_,
26                     test_internal_type_,
27                     theano_fct_,
28                     ref_fct_,
29                     cast_value_=np.asarray,
30                     expect_fail_fast_shape_inplace=True,
31                     ):
32    """
33    This is a generic fct to allow reusing the same test function
34    for many shared variable of many types.
35
36    :param shared_constructor_: The shared variable constructor to use
37    :param dtype_: The dtype of the data to test
38    :param get_value_borrow_true_alias_: Should a get_value(borrow=True) return the internal object
39    :param shared_borrow_true_alias_: Should shared(val,borrow=True) reuse the val memory space
40    :param set_value_borrow_true_alias_: Should set_value(val,borrow=True) reuse the val memory space
41    :param set_value_inplace_: Should this shared variable overwrite the current
42                               memory when the new value is an ndarray
43    :param set_cast_value_inplace_: Should this shared variable overwrite the
44                               current memory when the new value is of the same
45                               type as the internal type.
46    :param shared_constructor_accept_ndarray_: Do the shared_constructor accept an ndarray as input?
47    :param internal_type_: The internal type used.
48    :param test_internal_type_: A function that tell if its input is of the same
49                                type as this shared variable internal type.
50    :param theano_fct_: A theano op that will be used to do some computation on the shared variable
51    :param ref_fct_: A reference function that should return the same value as the theano_fct_
52    :param cast_value_: A callable that cast an ndarray into the internal shared variable representation
53    :param name: This string is used to set the returned class' __name__
54                 attribute. This is needed for nosetests to properly tag the
55                 test with its correct name, rather than use the generic
56                 SharedTester name. This parameter is mandatory (keeping the
57                 default None value will raise an error), and must be set to
58                 the name of the variable that will hold the returned class.
59    :note:
60        We must use /= as sparse type don't support other inplace operation.
61    """
62    class m(type):
63        pass
64
65    class SharedTester(unittest.TestCase):
66        # This is to allow setting __doc__ with python 2
67        if not six.PY3:
68            __metaclass__ = m
69        shared_constructor = staticmethod(shared_constructor_)
70        dtype = dtype_
71        get_value_borrow_true_alias = get_value_borrow_true_alias_
72        shared_borrow_true_alias = shared_borrow_true_alias_
73        internal_type = internal_type_
74        test_internal_type = staticmethod(test_internal_type_)
75        theano_fct = staticmethod(theano_fct_)
76        ref_fct = staticmethod(ref_fct_)
77        set_value_borrow_true_alias = set_value_borrow_true_alias_
78        set_value_inplace = set_value_inplace_
79        set_cast_value_inplace = set_cast_value_inplace_
80        shared_constructor_accept_ndarray = shared_constructor_accept_ndarray_
81        cast_value = staticmethod(cast_value_)
82
83        def test_shared_dont_alias(self):
84            dtype = self.dtype
85            if dtype is None:
86                dtype = theano.config.floatX
87
88            rng = np.random.RandomState(utt.fetch_seed())
89            x = np.asarray(rng.uniform(0, 1, [2, 4]), dtype=dtype)
90            x = self.cast_value(x)
91
92            x_ref = self.ref_fct(x)
93            x_shared = self.shared_constructor(x, borrow=False)
94            total = self.theano_fct(x_shared)
95
96            total_func = theano.function([], total)
97
98            total_val = total_func()
99
100            assert np.allclose(self.ref_fct(x), total_val)
101
102            x /= .5
103            total_val_2 = total_func()
104
105            # value used to construct should not alias with internal
106            assert np.allclose(total_val, total_val_2)
107
108            x = x_shared.get_value(borrow=False)
109
110            x /= .5
111
112            total_val_3 = total_func()
113
114            # value returned by access should not alias with internal
115            assert np.allclose(total_val, total_val_3)
116
117            # in this case we can alias
118            x = x_shared.get_value(borrow=True)
119            x /= .5
120
121            # this is not required by the contract but it is a feature we've
122            # implemented for some type of SharedVariable.
123            if self.get_value_borrow_true_alias:
124                assert np.allclose(self.ref_fct(x), total_func())
125            else:
126                assert np.allclose(x_ref, total_func())
127
128        def test_shape(self):
129            dtype = self.dtype
130            if dtype is None:
131                dtype = theano.config.floatX
132
133            rng = np.random.RandomState(utt.fetch_seed())
134            x = np.asarray(rng.uniform(0, 1, [2, 4]), dtype=dtype)
135            x = self.cast_value(x)
136
137            x_ref = self.ref_fct(x)
138            x_shared = self.shared_constructor(x, borrow=False)
139            total = self.theano_fct(x_shared)
140
141            f = theano.function([], x_shared.shape)
142            topo = f.maker.fgraph.toposort()
143
144            assert np.all(f() == (2, 4))
145            if theano.config.mode != 'FAST_COMPILE':
146                assert len(topo) == 3
147                assert isinstance(topo[0].op, tensor.opt.Shape_i)
148                assert isinstance(topo[1].op, tensor.opt.Shape_i)
149                assert isinstance(topo[2].op, tensor.opt.MakeVector)
150
151        def test_shape_i(self):
152            dtype = self.dtype
153            if dtype is None:
154                dtype = theano.config.floatX
155
156            rng = np.random.RandomState(utt.fetch_seed())
157            x = np.asarray(rng.uniform(0, 1, [2, 4]), dtype=dtype)
158            x = self.cast_value(x)
159
160            x_ref = self.ref_fct(x)
161            x_shared = self.shared_constructor(x, borrow=False)
162            total = self.theano_fct(x_shared)
163
164            f = theano.function([], x_shared.shape[1])
165            topo = f.maker.fgraph.toposort()
166
167            assert np.all(f() == (4))
168            if theano.config.mode != 'FAST_COMPILE':
169                assert len(topo) == 1
170                assert isinstance(topo[0].op, tensor.opt.Shape_i)
171
172        def test_return_internal_type(self):
173            dtype = self.dtype
174            if dtype is None:
175                dtype = theano.config.floatX
176
177            rng = np.random.RandomState(utt.fetch_seed())
178            x = np.asarray(rng.uniform(0, 1, [2, 4]), dtype=dtype)
179            x = self.cast_value(x)
180
181            x_ref = self.ref_fct(x)
182            x_shared = self.shared_constructor(x, borrow=False)
183            total = self.theano_fct(x_shared)
184
185            total_func = theano.function([], total)
186
187            # in this case we can alias with the internal value
188            x = x_shared.get_value(borrow=True, return_internal_type=True)
189            assert self.test_internal_type(x)
190
191            x /= .5
192
193            # this is not required by the contract but it is a feature we can
194            # implement for some type of SharedVariable.
195            assert np.allclose(self.ref_fct(x), total_func())
196
197            x = x_shared.get_value(borrow=False, return_internal_type=True)
198            assert self.test_internal_type(x)
199            assert x is not x_shared.container.value
200            x /= .5
201
202            # this is required by the contract
203            assert not np.allclose(self.ref_fct(x), total_func())
204
205        def test_get_value(self):
206            # Test that get_value returns a ndarray
207            dtype = self.dtype
208            if dtype is None:
209                dtype = theano.config.floatX
210
211            rng = np.random.RandomState(utt.fetch_seed())
212            x_orig = np.asarray(rng.uniform(0, 1, [2, 4]), dtype=dtype)
213            x_cast = self.cast_value(x_orig)
214            if self.shared_constructor_accept_ndarray:
215                x_shared = self.shared_constructor(x_orig, borrow=False)
216                assert isinstance(x_shared.get_value(), x_orig.__class__)
217
218            x_shared = self.shared_constructor(x_cast, borrow=False)
219            assert isinstance(x_shared.get_value(), x_cast.__class__)
220
221        def test_set_value(self):
222            dtype = self.dtype
223            if dtype is None:
224                dtype = theano.config.floatX
225
226            rng = np.random.RandomState(utt.fetch_seed())
227            x = np.asarray(rng.uniform(0, 1, [2, 4]), dtype=dtype)
228            x = self.cast_value(x)
229
230            x_orig = x
231            x_orig_copy = x.copy()
232            x_ref = self.ref_fct(x)
233            x_shared = self.shared_constructor(x, borrow=False)
234            total = self.theano_fct(x_shared)
235
236            total_func = theano.function([], total)
237            total_func()
238
239            # test if that theano shared variable optimize set_value(borrow=True)
240            get_x = x_shared.get_value(borrow=True)
241            assert get_x is not x_orig  # borrow=False to shared_constructor
242            get_x /= .5
243            x_shared.set_value(get_x, borrow=True)
244            x = x_shared.get_value(borrow=True)
245            if self.set_value_borrow_true_alias:
246                assert x is get_x
247            else:
248                assert x is not get_x
249            assert np.allclose(self.ref_fct(np.asarray(x_orig)/.5), self.ref_fct(x))
250
251            # test optimized get set value on the gpu(don't pass data to the cpu)
252            get_x = x_shared.get_value(borrow=True, return_internal_type=True)
253            assert get_x is not x_orig  # borrow=False to shared_constructor
254            assert self.test_internal_type(get_x)
255
256            get_x /= .5
257            assert self.test_internal_type(get_x)
258            x_shared.set_value(get_x, borrow=True)
259            x = x_shared.get_value(borrow=True, return_internal_type=True)
260            assert self.test_internal_type(x)
261            assert x is get_x
262
263            # TODO test Out.
264        def test_shared_do_alias(self):
265            dtype = self.dtype
266            if dtype is None:
267                dtype = theano.config.floatX
268
269            rng = np.random.RandomState(utt.fetch_seed())
270            x = np.asarray(rng.uniform(1, 2, [4, 2]), dtype=dtype)
271            x = self.cast_value(x)
272            x_ref = self.ref_fct(x)
273
274            x_shared = self.shared_constructor(x, borrow=True)
275
276            total = self.theano_fct(x_shared)
277
278            total_func = theano.function([], total)
279
280            total_val = total_func()
281
282            assert np.allclose(self.ref_fct(x), total_val)
283
284            x /= .5
285
286            # not required by the contract but it is a feature we've implemented
287            if self.shared_borrow_true_alias:
288                assert np.allclose(self.ref_fct(x), total_func())
289            else:
290                assert np.allclose(x_ref, total_func())
291
292        def test_inplace_set_value(self):
293            # We test that if the SharedVariable implement it we do inplace set_value
294            # We also test this for partial inplace modification when accessing the internal of theano.
295
296            dtype = self.dtype
297            if dtype is None:
298                dtype = theano.config.floatX
299
300            shp = (100//4, 1024)  # 100KB
301
302            x = np.zeros(shp, dtype=dtype)
303            x = self.cast_value(x)
304            x_shared = self.shared_constructor(x, borrow=True)
305
306            old_data = x_shared.container.storage[0]
307            nd = np.ones(shp, dtype=dtype)
308
309            if x.__class__.__name__ != 'csr_matrix':
310                # sparse matrix don't support inplace affectation
311                x_shared.container.value[:] = nd
312                assert (np.asarray(x_shared.get_value(borrow=True)) == nd).all()
313                # This should always share value!
314                assert may_share_memory(old_data, x_shared.container.storage[0])
315                assert may_share_memory(old_data, x_shared.get_value(borrow=True, return_internal_type=True))
316
317                nd[0] += 1
318                x_shared.container.value[0] = nd[0]
319                assert (np.asarray(x_shared.get_value(borrow=True)[0]) == nd[0]).all()
320                assert (np.asarray(x_shared.get_value(borrow=True)[1:]) == nd[1:]).all()
321                # This should always share value!
322                assert may_share_memory(old_data, x_shared.container.storage[0])
323                assert may_share_memory(old_data, x_shared.get_value(borrow=True, return_internal_type=True))
324
325            if x.__class__.__name__ != 'csr_matrix':
326                # sparse matrix don't support inplace affectation
327                nd += 1
328                # THIS DOENS'T DO WHAT WE EXPECT the content of a is
329                # not updated for GpuArray, but it is for ndarray
330                x_shared.get_value(borrow=True)[:] = nd
331                assert may_share_memory(old_data, x_shared.container.storage[0])
332                x_shared.get_value(borrow=True)
333
334            # Test by set_value with borrow=False
335            nd += 1
336            old_data = x_shared.container.storage[0]
337            x_shared.set_value(nd, borrow=False)
338            assert np.allclose(self.ref_fct(x_shared.get_value(borrow=True)),
339                    self.ref_fct(self.cast_value(nd)))
340            assert may_share_memory(old_data, x_shared.container.storage[0]) == self.set_value_inplace
341
342            # Test by set_value with borrow=False when new data cast.
343            # specificaly useful for gpu data
344            nd += 1
345            old_data = x_shared.container.storage[0]
346            x_shared.set_value(self.cast_value(nd), borrow=False)
347            assert np.allclose(self.ref_fct(x_shared.get_value(borrow=True)),
348                    self.ref_fct(self.cast_value(nd)))
349            assert may_share_memory(old_data, x_shared.container.storage[0]) == self.set_cast_value_inplace
350
351            # Test by set_value with borrow=True
352            nd += 1
353            old_data = x_shared.container.storage[0]
354            x_shared.set_value(nd.copy(), borrow=True)
355            assert np.allclose(self.ref_fct(x_shared.get_value(borrow=True)),
356                    self.ref_fct(self.cast_value(nd)))
357            assert may_share_memory(old_data, x_shared.container.storage[0]) == self.set_value_inplace
358
359            # Test by set_value with borrow=True when new data cast.
360            nd += 1
361            old_data = x_shared.container.storage[0]
362            x_shared.set_value(self.cast_value(nd.copy()), borrow=True)
363            assert np.allclose(self.ref_fct(x_shared.get_value(borrow=True)), self.ref_fct(self.cast_value(nd)))
364            assert may_share_memory(old_data, x_shared.container.storage[0]) == self.set_cast_value_inplace
365
366        def test_specify_shape(self):
367            dtype = self.dtype
368            if dtype is None:
369                dtype = theano.config.floatX
370
371            rng = np.random.RandomState(utt.fetch_seed())
372            x1_1 = np.asarray(rng.uniform(1, 2, [4, 2]), dtype=dtype)
373            x1_1 = self.cast_value(x1_1)
374            x1_2 = np.asarray(rng.uniform(1, 2, [4, 2]), dtype=dtype)
375            x1_2 = self.cast_value(x1_2)
376            x2 = np.asarray(rng.uniform(1, 2, [4, 3]), dtype=dtype)
377            x2 = self.cast_value(x2)
378
379            # Test that we can replace with values of the same shape
380            x1_shared = self.shared_constructor(x1_1)
381            x1_specify_shape = tensor.specify_shape(x1_shared, x1_1.shape)
382            x1_shared.set_value(x1_2)
383            assert np.allclose(self.ref_fct(x1_shared.get_value(borrow=True)),
384                    self.ref_fct( x1_2))
385            shape_op_fct = theano.function([], x1_shared.shape)
386            topo = shape_op_fct.maker.fgraph.toposort()
387            if theano.config.mode != 'FAST_COMPILE':
388                assert len(topo) == 3
389                assert isinstance(topo[0].op, tensor.opt.Shape_i)
390                assert isinstance(topo[1].op, tensor.opt.Shape_i)
391                assert isinstance(topo[2].op, tensor.opt.MakeVector)
392
393            # Test that we forward the input
394            specify_shape_fct = theano.function([], x1_specify_shape)
395            assert np.all(self.ref_fct(specify_shape_fct()) ==
396                             self.ref_fct(x1_2))
397            topo_specify = specify_shape_fct.maker.fgraph.toposort()
398            assert len(topo_specify) == 2
399
400            # Test that we put the shape info into the graph
401            shape_constant_fct = theano.function([], x1_specify_shape.shape)
402            assert np.all(shape_constant_fct() == shape_op_fct())
403            topo_cst = shape_constant_fct.maker.fgraph.toposort()
404            if theano.config.mode != 'FAST_COMPILE':
405                assert len(topo_cst) == 1
406                topo_cst[0].op == theano.compile.function_module.deep_copy_op
407
408            # Test that we can take the grad.
409            if (theano.sparse.enable_sparse and
410                isinstance(x1_specify_shape.type, theano.sparse.SparseType)):
411                # SparseVariable don't support sum for now.
412                assert not hasattr(x1_specify_shape, 'sum')
413            else:
414                shape_grad = tensor.grad(x1_specify_shape.sum(), x1_shared)
415                shape_constant_fct_grad = theano.function([], shape_grad)
416                # theano.printing.debugprint(shape_constant_fct_grad)
417                shape_constant_fct_grad()
418
419            # Test that we can replace with values of the different shape
420            # but that will raise an error in some case, but not all
421            specify_shape_fct()
422            x1_shared.set_value(x2)
423            self.assertRaises(AssertionError, specify_shape_fct)
424
425            # No assertion will be raised as the Op is removed from the graph
426            # when their is optimization
427            if theano.config.mode not in ['FAST_COMPILE', 'DebugMode', 'DEBUG_MODE']:
428                shape_constant_fct()
429            else:
430                self.assertRaises(AssertionError, shape_constant_fct)
431
432        def test_specify_shape_partial(self):
433            dtype = self.dtype
434            if dtype is None:
435                dtype = theano.config.floatX
436
437            rng = np.random.RandomState(utt.fetch_seed())
438            x1_1 = np.asarray(rng.uniform(1, 2, [4, 2]), dtype=dtype)
439            x1_1 = self.cast_value(x1_1)
440            x1_2 = np.asarray(rng.uniform(1, 2, [4, 2]), dtype=dtype)
441            x1_2 = self.cast_value(x1_2)
442            x2 = np.asarray(rng.uniform(1, 2, [5, 2]), dtype=dtype)
443            x2 = self.cast_value(x2)
444
445            # Test that we can replace with values of the same shape
446            x1_shared = self.shared_constructor(x1_1)
447            x1_specify_shape = tensor.specify_shape(x1_shared,
448                                                    (tensor.as_tensor_variable(x1_1.shape[0]),
449                                                     x1_shared.shape[1]))
450            x1_shared.set_value(x1_2)
451            assert np.allclose(
452                    self.ref_fct(x1_shared.get_value(borrow=True)),
453                    self.ref_fct( x1_2))
454            shape_op_fct = theano.function([], x1_shared.shape)
455            topo = shape_op_fct.maker.fgraph.toposort()
456            shape_op_fct()
457            if theano.config.mode != 'FAST_COMPILE':
458                assert len(topo) == 3
459                assert isinstance(topo[0].op, tensor.opt.Shape_i)
460                assert isinstance(topo[1].op, tensor.opt.Shape_i)
461                assert isinstance(topo[2].op, tensor.opt.MakeVector)
462
463            # Test that we forward the input
464            specify_shape_fct = theano.function([], x1_specify_shape)
465            specify_shape_fct()
466            # theano.printing.debugprint(specify_shape_fct)
467            assert np.all(self.ref_fct(specify_shape_fct())
468                             == self.ref_fct(x1_2))
469            topo_specify = specify_shape_fct.maker.fgraph.toposort()
470            if theano.config.mode != 'FAST_COMPILE':
471                assert len(topo_specify) == 4
472
473            # Test that we put the shape info into the graph
474            shape_constant_fct = theano.function([], x1_specify_shape.shape)
475            # theano.printing.debugprint(shape_constant_fct)
476            assert np.all(shape_constant_fct() == shape_op_fct())
477            topo_cst = shape_constant_fct.maker.fgraph.toposort()
478            if theano.config.mode != 'FAST_COMPILE':
479                assert len(topo_cst) == 2
480
481            # Test that we can replace with values of the different shape
482            # but that will raise an error in some case, but not all
483            x1_shared.set_value(x2)
484            self.assertRaises(AssertionError, specify_shape_fct)
485
486            # No assertion will be raised as the Op is removed from the graph
487            if theano.config.mode not in ['FAST_COMPILE', 'DebugMode', 'DEBUG_MODE']:
488                shape_constant_fct()
489            else:
490                self.assertRaises(AssertionError, shape_constant_fct)
491
492        def test_specify_shape_inplace(self):
493            # test that specify_shape don't break inserting inplace op
494
495            dtype = self.dtype
496            if dtype is None:
497                dtype = theano.config.floatX
498
499            rng = np.random.RandomState(utt.fetch_seed())
500            a = np.asarray(rng.uniform(1, 2, [40, 40]), dtype=dtype)
501            a = self.cast_value(a)
502            a_shared = self.shared_constructor(a)
503            b = np.asarray(rng.uniform(1, 2, [40, 40]), dtype=dtype)
504            b = self.cast_value(b)
505            b_shared = self.shared_constructor(b)
506            s = np.zeros((40, 40), dtype=dtype)
507            s = self.cast_value(s)
508            s_shared = self.shared_constructor(s)
509            f = theano.function([],
510                                updates=[(s_shared, theano.dot(a_shared, b_shared)
511                                         + s_shared)])
512            topo = f.maker.fgraph.toposort()
513            f()
514            # [Gemm{inplace}(<TensorType(float64, matrix)>, 0.01, <TensorType(float64, matrix)>, <TensorType(float64, matrix)>, 2e-06)]
515            if theano.config.mode != 'FAST_COMPILE':
516                assert sum([node.op.__class__.__name__ in ["Gemm", "GpuGemm", "StructuredDot"] for node in topo]) == 1
517                assert all(node.op == tensor.blas.gemm_inplace for node in topo if isinstance(node.op, tensor.blas.Gemm))
518                assert all(node.op.inplace for node in topo if node.op.__class__.__name__ == "GpuGemm")
519            # Their is no inplace gemm for sparse
520            # assert all(node.op.inplace for node in topo if node.op.__class__.__name__ == "StructuredDot")
521            s_shared_specify = tensor.specify_shape(s_shared, s_shared.get_value(borrow=True).shape)
522
523            # now test with the specify shape op in the output
524            f = theano.function([], s_shared.shape,
525                                updates=[(s_shared, theano.dot(a_shared, b_shared)
526                                         + s_shared_specify)])
527            topo = f.maker.fgraph.toposort()
528            shp = f()
529            assert np.all(shp == (40, 40))
530            if theano.config.mode != 'FAST_COMPILE':
531                assert sum([node.op.__class__.__name__ in ["Gemm", "GpuGemm", "StructuredDot"] for node in topo]) == 1
532                assert all(node.op == tensor.blas.gemm_inplace for node in topo if isinstance(node.op, tensor.blas.Gemm))
533                assert all(node.op.inplace for node in topo if node.op.__class__.__name__ == "GpuGemm")
534            # now test with the specify shape op in the inputs and outputs
535            a_shared = tensor.specify_shape(a_shared,
536                    a_shared.get_value(borrow=True).shape)
537            b_shared = tensor.specify_shape(b_shared,
538                    b_shared.get_value(borrow=True).shape)
539
540            f = theano.function([], s_shared.shape,
541                                updates=[(s_shared, theano.dot(a_shared, b_shared)
542                                         + s_shared_specify)])
543            topo = f.maker.fgraph.toposort()
544            shp = f()
545            assert np.all(shp == (40, 40))
546            if theano.config.mode != 'FAST_COMPILE':
547                assert sum([node.op.__class__.__name__ in ["Gemm", "GpuGemm", "StructuredDot"] for node in topo]) == 1
548                assert all(node.op == tensor.blas.gemm_inplace for node in topo if isinstance(node.op, tensor.blas.Gemm))
549                assert all(node.op.inplace for node in topo if node.op.__class__.__name__ == "GpuGemm")
550
551        if theano.config.cycle_detection == 'fast' and expect_fail_fast_shape_inplace and theano.config.mode != 'FAST_COMPILE':
552            test_specify_shape_inplace = unittest.expectedFailure(test_specify_shape_inplace)
553
554        def test_values_eq(self):
555            # Test the type.values_eq[_approx] function
556            dtype = self.dtype
557            if dtype is None:
558                dtype = theano.config.floatX
559
560            # We need big shape as in the past there have been a bug in the
561            # sparse values_eq_approx.
562            shp = (1024, 1024)
563
564            # Test the case with all zeros element
565            rng = np.random.RandomState(utt.fetch_seed())
566            for x in [np.asarray(rng.rand(*shp), dtype=dtype),
567                      np.zeros(shp, dtype=dtype)]:
568                zeros = (x == 0).all()
569                x = self.cast_value(x)
570                x_shared = self.shared_constructor(x, borrow=True)
571
572                y = x.copy()
573                y[0, 0], y[1, 0] = y[1, 0], y[0, 0]
574                y = self.cast_value(y)
575
576                assert x_shared.type.values_eq(x, x)
577                assert x_shared.type.values_eq_approx(x, x)
578                if not zeros:
579                    assert not np.allclose(self.ref_fct(x), self.ref_fct(y))
580                    assert not x_shared.type.values_eq(x, y)
581                    assert not x_shared.type.values_eq_approx(x, y)
582
583    def f(cls):
584        return update_wrapper(SharedTester, cls, updated=())
585    return f
586
587
588@makeSharedTester(
589    shared_constructor_=tensor._shared,
590    dtype_=theano.config.floatX,
591    get_value_borrow_true_alias_=True,
592    shared_borrow_true_alias_=True,
593    set_value_borrow_true_alias_=True,
594    set_value_inplace_=False,
595    set_cast_value_inplace_=False,
596    shared_constructor_accept_ndarray_=True,
597    internal_type_=np.ndarray,
598    test_internal_type_=lambda a: isinstance(a, np.ndarray),
599    theano_fct_=lambda a: a*2,
600    ref_fct_=lambda a: np.asarray((a*2)),
601    cast_value_=np.asarray)
602class test_shared_options(object):
603    pass
604
605
606def test_scalar_shared_options():
607    # Simple test to make sure we do not loose that fonctionality.
608    theano.shared(value=0., name='lk', borrow=True)
609    theano.shared(value=np.float32(0.), name='lk', borrow=True)
610