from __future__ import absolute_import, print_function, division
from nose.plugins.skip import SkipTest
import numpy as np
try:
    import scipy.sparse as sp
    import scipy.sparse
except ImportError:
    pass  # The variable enable_sparse will be used to disable the test file.

import theano
from theano import sparse, config, tensor
from theano.sparse import enable_sparse
from theano.tests import unittest_tools as utt
if not enable_sparse:
    raise SkipTest('Optional package sparse disabled')

from theano.sparse.tests.test_basic import random_lil


def test_local_csm_properties_csm():
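    """Check that local_csm_properties_csm removes the CSM/CSMProperties
    round-trip from the compiled graph, for both CSC and CSR matrices."""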
    data = tensor.vector()
    indices, indptr, shape = (tensor.ivector(), tensor.ivector(),
                              tensor.ivector())
    mode = theano.compile.mode.get_default_mode()
    mode = mode.including("specialize", "local_csm_properties_csm")
    for CS, cast in [(sparse.CSC, sp.csc_matrix),
                     (sparse.CSR, sp.csr_matrix)]:
        f = theano.function([data, indices, indptr, shape],
                            sparse.csm_properties(
                                CS(data, indices, indptr, shape)),
                            mode=mode)
        assert not any(
            isinstance(node.op, (sparse.CSM, sparse.CSMProperties))
            for node in f.maker.fgraph.toposort())
        v = cast(random_lil((10, 40), config.floatX, 3))
        f(v.data, v.indices, v.indptr, v.shape)


def test_local_csm_grad_c():
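    """Check that local_csm_grad_c removes CSMGrad nodes from the gradient
    graph.  Currently skipped: the optimization is disabled because it does
    not support unsorted indices."""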
    raise SkipTest("Opt disabled as it doesn't support unsorted indices")
    if not theano.config.cxx:
        raise SkipTest("G++ not available, so we need to skip this test.")
    data = tensor.vector()
    indices, indptr, shape = (tensor.ivector(), tensor.ivector(),
                              tensor.ivector())
    mode = theano.compile.mode.get_default_mode()

    if theano.config.mode == 'FAST_COMPILE':
        mode = theano.compile.Mode(linker='c|py', optimizer='fast_compile')

    mode = mode.including("specialize", "local_csm_grad_c")
    for CS, cast in [(sparse.CSC, sp.csc_matrix), (sparse.CSR, sp.csr_matrix)]:
        cost = tensor.sum(sparse.DenseFromSparse()(
            CS(data, indices, indptr, shape)))
        f = theano.function(
            [data, indices, indptr, shape],
            tensor.grad(cost, data),
            mode=mode)
        assert not any(isinstance(node.op, sparse.CSMGrad)
                       for node in f.maker.fgraph.toposort())
        v = cast(random_lil((10, 40), config.floatX, 3))
        f(v.data, v.indices, v.indptr, v.shape)


def test_local_mul_s_d():
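    """Check that local_mul_s_d removes the generic MulSD op from the
    compiled graph for every sparse format."""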
    if not theano.config.cxx:
        raise SkipTest("G++ not available, so we need to skip this test.")
    mode = theano.compile.mode.get_default_mode()
    mode = mode.including("specialize", "local_mul_s_d")

    for sp_format in sparse.sparse_formats:
        inputs = [getattr(theano.sparse, sp_format + '_matrix')(),
                  tensor.matrix()]

        f = theano.function(inputs,
                            sparse.mul_s_d(*inputs),
                            mode=mode)

        assert not any(isinstance(node.op, sparse.MulSD)
                       for node in f.maker.fgraph.toposort())


def test_local_mul_s_v():
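    """Check that local_mul_s_v removes the generic MulSV op from the
    compiled graph (only the CSR format is supported)."""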
    if not theano.config.cxx:
        raise SkipTest("G++ not available, so we need to skip this test.")
    mode = theano.compile.mode.get_default_mode()
    mode = mode.including("specialize", "local_mul_s_v")

    for sp_format in ['csr']:  # Not implemented for other formats
        inputs = [getattr(theano.sparse, sp_format + '_matrix')(),
                  tensor.vector()]

        f = theano.function(inputs,
                            sparse.mul_s_v(*inputs),
                            mode=mode)

        assert not any(isinstance(node.op, sparse.MulSV)
                       for node in f.maker.fgraph.toposort())


def test_local_structured_add_s_v():
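    """Check that local_structured_add_s_v removes the generic
    StructuredAddSV op from the compiled graph (only the CSR format is
    supported)."""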
    if not theano.config.cxx:
        raise SkipTest("G++ not available, so we need to skip this test.")
    mode = theano.compile.mode.get_default_mode()
    mode = mode.including("specialize", "local_structured_add_s_v")

    for sp_format in ['csr']:  # Not implemented for other formats
        inputs = [getattr(theano.sparse, sp_format + '_matrix')(),
                  tensor.vector()]

        f = theano.function(inputs,
                            sparse.structured_add_s_v(*inputs),
                            mode=mode)

        assert not any(isinstance(node.op, sparse.StructuredAddSV)
                       for node in f.maker.fgraph.toposort())


def test_local_sampling_dot_csr():
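    """Check that local_sampling_dot_csr replaces SamplingDot with its CSR
    C implementation when BLAS is available, and leaves the graph
    unchanged otherwise."""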
    if not theano.config.cxx:
        raise SkipTest("G++ not available, so we need to skip this test.")
    mode = theano.compile.mode.get_default_mode()
    mode = mode.including("specialize", "local_sampling_dot_csr")

    for sp_format in ['csr']:  # Not implemented for other formats
        inputs = [tensor.matrix(),
                  tensor.matrix(),
                  getattr(theano.sparse, sp_format + '_matrix')()]

        f = theano.function(inputs,
                            sparse.sampling_dot(*inputs),
                            mode=mode)

        if theano.config.blas.ldflags:
            assert not any(isinstance(node.op, sparse.SamplingDot)
                           for node in f.maker.fgraph.toposort())
        else:
            # SamplingDotCSR's C implementation needs BLAS, so it should
            # not be inserted.
            assert not any(isinstance(node.op, sparse.opt.SamplingDotCSR)
                           for node in f.maker.fgraph.toposort())


def test_local_dense_from_sparse_sparse_from_dense():
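    """Check that dense_from_sparse(csX_from_dense(m)) is collapsed to a
    single node (a deep copy)."""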
    mode = theano.compile.mode.get_default_mode()
    mode = mode.including("local_dense_from_sparse_sparse_from_dense")

    m = theano.tensor.matrix()
    for op in [theano.sparse.csr_from_dense, theano.sparse.csc_from_dense]:
        s = op(m)
        o = theano.sparse.dense_from_sparse(s)
        f = theano.function([m], o, mode=mode)
        # We should just have a deep copy.
        assert len(f.maker.fgraph.apply_nodes) == 1
        f([[1, 2], [3, 4]])


def test_sd_csc():
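    """Compare sd_csc against scipy's CSC-by-dense matrix product on a
    small random example."""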
    A = sp.rand(4, 5, density=0.60, format='csc', dtype=np.float32)
    b = np.random.rand(5, 2).astype(np.float32)
    target = A * b

    a_val = theano.tensor.as_tensor_variable(A.data)
    a_ind = theano.tensor.as_tensor_variable(A.indices)
    a_ptr = theano.tensor.as_tensor_variable(A.indptr)
    nrows = theano.tensor.as_tensor_variable(np.int32(A.shape[0]))
    b = theano.tensor.as_tensor_variable(b)

    res = theano.sparse.opt.sd_csc(a_val, a_ind, a_ptr, nrows, b).eval()

    utt.assert_allclose(res, target)