from __future__ import absolute_import, print_function, division
import copy
import sys
import numpy as np
import theano
from theano import tensor
from theano.tensor.nnet import crossentropy_softmax_argmax_1hot_with_bias


def test_bug_2009_06_02_trac_387():
    y = tensor.lvector('y')
    f = theano.function([y],
            tensor.int_div(
                tensor.DimShuffle(y[0].broadcastable, ['x'])(y[0]), 2))
    # int_div(3, 2) == 1; check the value instead of just printing it.
    assert np.all(f(np.ones(1, dtype='int64') * 3) == 1)
    # XXX: it is unclear what the original bug (trac ticket 387) was, or
    #      whether DEBUG_MODE was expected to do the checking; the assert
    #      above at least verifies the computed value.


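# A minimal sketch (added for illustration, not part of the original
# regression test): the explicit DimShuffle above is the op-level spelling
# of the more common Variable.dimshuffle method, which here turns the 0-d
# result of y[0] into a broadcastable 1-d vector before the integer
# division.
def _demo_dimshuffle_scalar_to_vector():
    y = tensor.lvector('y')
    # // maps to int_div for integer dtypes, so this builds an equivalent
    # graph.
    f = theano.function([y], y[0].dimshuffle('x') // 2)
    assert np.all(f(np.array([3], dtype='int64')) == 1)

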
def test_bug_2009_07_17_borrowed_output():
    # Regression test for a bug where output was borrowed by mistake.
    a = theano.tensor.dmatrix()
    b = theano.tensor.dmatrix()
    # The output should *NOT* be borrowed.
    g = theano.function([a, b],
            theano.Out(theano.tensor.dot(a, b), borrow=False))

    x = np.zeros((1, 2))
    y = np.ones((2, 5))

    z = g(x, y)
    print(z)         # Should be zero.
    x.fill(1)
    print(g(x, y))   # Should be non-zero.
    print(z)         # Should still be zero.
    assert np.linalg.norm(z) == 0

    # The code above was expected to fail when it was written (more
    # precisely, once it was merged with the rest of the code, at revision
    # cac9c9e9f08e). However, for some reason, it no longer fails at this
    # revision. Thus, a new test (below) was added that exhibits the same
    # issue. It may be better to move it into the test_nnet.py test file if
    # it turns out the bug was caused by
    # 'crossentropy_softmax_argmax_1hot_with_bias' and was not a more
    # general issue.
    test_output_activation_no_bias = theano.tensor.dmatrix()
    test_b2 = theano.tensor.dvector()
    test_target = theano.tensor.ivector()
    nll_softmax_argmax = (
            crossentropy_softmax_argmax_1hot_with_bias(
                test_output_activation_no_bias,
                test_b2,
                test_target))
    output = nll_softmax_argmax[1]
    g = theano.function([test_output_activation_no_bias, test_b2, test_target],
            theano.Out(output, borrow=False))

    a = np.zeros((1, 5))
    b = np.ones(5)
    c = np.zeros(1, dtype=np.int32)

    z = g(a, b, c)
    z_backup = copy.copy(z)
    id_z = id(z)
    print('Output z after first call: %s' % (z,))
    a[0, 0] = 1
    id_other = id(g(a, b, c))
    print('Output z after second call: %s' % (z,))
    # Ensure that calling the function again returns a new array.
    assert id_z != id_other
    # Just to be 100% sure, ensure that z was not altered.
    assert (z == z_backup).all()


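# A minimal sketch (added for illustration, not part of the original
# regression test) of why borrow=False matters: it forces the function to
# return a fresh copy, so arrays returned by earlier calls cannot be
# overwritten by later ones. With borrow=True, Theano *may* hand back its
# internal buffer instead, but that is not guaranteed, so only the
# borrow=False behaviour is asserted here.
def _demo_borrow_false_returns_fresh_arrays():
    x = tensor.dvector()
    f = theano.function([x], theano.Out(x * 2, borrow=False))
    first = f(np.ones(3))
    first_backup = first.copy()
    second = f(np.zeros(3))
    # Each call must return a distinct array, and results of earlier calls
    # must be left untouched.
    assert first is not second
    assert np.all(first == first_backup)

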
def test_deepcopied_type_filter():
    a = copy.deepcopy(tensor.matrix())

    # The following should run cleanly.
    # As of commit 731e2d2fa68487733320d341d08b454a50c90d12
    # it was failing.
    a.type.filter(
            np.ones((2, 2), dtype=a.dtype),
            strict=True)
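

# A minimal sketch (added for illustration, not part of the original test),
# assuming standard TensorType.filter semantics: filter validates a value
# for storage in a variable of that type, and with strict=True it is
# expected to reject values whose dtype does not match exactly (raising a
# TypeError in current Theano).
def _demo_type_filter_strict_rejects_wrong_dtype():
    m = tensor.matrix()
    wrong = np.ones((2, 2), dtype='int8')
    try:
        m.type.filter(wrong, strict=True)
        raised = False
    except TypeError:
        raised = True
    assert raised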