#cython: language_level=3
# This file is part of QuTiP: Quantum Toolbox in Python.
#
# Copyright (c) 2011 and later, Paul D. Nation and Robert J. Johansson.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
#    this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in the
#    documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the QuTiP: Quantum Toolbox in Python nor the names
#    of its contributors may be used to endorse or promote products derived
#    from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###############################################################################
import numpy as np
cimport numpy as cnp
cimport cython
from libcpp.algorithm cimport sort
from libcpp.vector cimport vector
cnp.import_array()

include "parameters.pxi"

cdef extern from "numpy/arrayobject.h" nogil:
    void PyArray_ENABLEFLAGS(cnp.ndarray arr, int flags)
    void PyDataMem_FREE(void * ptr)
    # These allocators return the (new) buffer pointer; they must be declared
    # with a 'void *' return type for the <int *> casts below to compile.
    void * PyDataMem_RENEW(void * ptr, size_t size)
    void * PyDataMem_NEW_ZEROED(size_t size, size_t elsize)
    void * PyDataMem_NEW(size_t size)

# Struct used for arg sorting: pairs a value with its original index.
cdef struct _int_pair:
    int data
    int idx

ctypedef _int_pair int_pair
ctypedef int (*cfptr)(int_pair, int_pair)


@cython.boundscheck(False)
@cython.wraparound(False)
cdef int int_sort(int_pair x, int_pair y):
    # Comparator for std::sort: orders pairs by their data field (ascending).
    return x.data < y.data


@cython.boundscheck(False)
@cython.wraparound(False)
cdef int * int_argsort(int * x, int nrows):
    """
    Return a newly allocated array of the indices that sort ``x`` ascending.

    The caller owns the returned buffer and must release it with
    PyDataMem_FREE.
    """
    cdef vector[int_pair] pairs
    cdef cfptr cfptr_ = &int_sort
    cdef size_t kk
    pairs.resize(nrows)
    for kk in range(nrows):
        pairs[kk].data = x[kk]
        pairs[kk].idx = kk

    sort(pairs.begin(), pairs.end(), cfptr_)
    cdef int * out = <int *>PyDataMem_NEW(nrows * sizeof(int))
    for kk in range(nrows):
        out[kk] = pairs[kk].idx
    return out


@cython.boundscheck(False)
@cython.wraparound(False)
cpdef int[::1] _node_degrees(int[::1] ind, int[::1] ptr,
                             unsigned int num_rows):
    """
    Degree of each node (row) of a graph given in CSR form (``ind``/``ptr``).

    A diagonal entry in row ii (a self loop) counts one extra toward the
    degree of node ii.
    """
    cdef size_t ii, jj
    cdef int[::1] degree = np.zeros(num_rows, dtype=np.int32)

    for ii in range(num_rows):
        degree[ii] = ptr[ii + 1] - ptr[ii]
        for jj in range(ptr[ii], ptr[ii + 1]):
            if ind[jj] == ii:
                # add one if the diagonal is in row ii
                degree[ii] += 1
                break

    return degree


@cython.boundscheck(False)
@cython.wraparound(False)
def _breadth_first_search(
        cnp.ndarray[ITYPE_t, ndim=1, mode="c"] ind,
        cnp.ndarray[ITYPE_t, ndim=1, mode="c"] ptr,
        int num_rows, int seed):
    """
    Does a breadth first search (BFS) of a graph in sparse CSR format matrix
    form starting at a given seed node.

    Returns
    -------
    order : visit order of the nodes; unreached nodes keep -1.
    level : BFS level (distance from seed) of each node; -1 if unreached.
    """
    cdef unsigned int i, j, ii, jj, N = 1
    cdef unsigned int level_start = 0
    cdef unsigned int level_end = N
    cdef unsigned int current_level = 1
    cdef cnp.ndarray[ITYPE_t] order = -1 * np.ones(num_rows, dtype=ITYPE)
    cdef cnp.ndarray[ITYPE_t] level = -1 * np.ones(num_rows, dtype=ITYPE)

    level[seed] = 0
    order[0] = seed

    while level_start < level_end:
        # for nodes of the last level
        for ii in range(level_start, level_end):
            i = order[ii]
            # add unvisited neighbors to queue
            for jj in range(ptr[i], ptr[i + 1]):
                j = ind[jj]
                if level[j] == -1:
                    order[N] = j
                    level[j] = current_level
                    N += 1

        level_start = level_end
        level_end = N
        current_level += 1

    return order, level


@cython.boundscheck(False)
@cython.wraparound(False)
def _reverse_cuthill_mckee(int[::1] ind, int[::1] ptr, int num_rows):
    """
    Reverse Cuthill-McKee ordering of a sparse csr or csc matrix.

    Performs BFS from the lowest-degree unvisited node, visiting neighbors
    in order of ascending degree, then returns the reversed visit order.
    """
    cdef unsigned int N = 0, N_old, seed, level_start, level_end
    cdef unsigned int zz, i, j, ii, jj, kk, ll, level_len, temp, temp2
    cdef cnp.ndarray[int, ndim=1] order = np.zeros(num_rows, dtype=np.int32)
    cdef int[::1] degree = _node_degrees(ind, ptr, num_rows)
    # inds: nodes sorted by ascending degree; rev_inds: inverse permutation,
    # used to mark a node as visited via inds[rev_inds[node]] = -1.
    cdef int * inds = int_argsort(&degree[0], num_rows)
    cdef int * rev_inds = int_argsort(inds, num_rows)
    cdef int * temp_degrees = NULL

    # loop over zz takes into account possible disconnected graph.
    for zz in range(num_rows):
        if inds[zz] != -1:  # Do BFS with seed=inds[zz]
            seed = inds[zz]
            order[N] = seed
            N += 1
            inds[rev_inds[seed]] = -1
            level_start = N - 1
            level_end = N

            while level_start < level_end:
                for ii in range(level_start, level_end):
                    i = order[ii]
                    N_old = N

                    # add unvisited neighbors
                    for jj in range(ptr[i], ptr[i + 1]):
                        # j is node number connected to i
                        j = ind[jj]
                        if inds[rev_inds[j]] != -1:
                            inds[rev_inds[j]] = -1
                            order[N] = j
                            N += 1

                    # Add values to temp_degrees array for insertion sort
                    temp_degrees = <int *>PyDataMem_RENEW(
                        temp_degrees, (N - N_old) * sizeof(int))
                    level_len = 0
                    for kk in range(N_old, N):
                        temp_degrees[level_len] = degree[order[kk]]
                        level_len += 1

                    # Do insertion sort for nodes from lowest to highest degree
                    for kk in range(1, level_len):
                        temp = temp_degrees[kk]
                        temp2 = order[N_old + kk]
                        ll = kk
                        while (ll > 0) and (temp < temp_degrees[ll - 1]):
                            temp_degrees[ll] = temp_degrees[ll - 1]
                            order[N_old + ll] = order[N_old + ll - 1]
                            ll -= 1
                        temp_degrees[ll] = temp
                        order[N_old + ll] = temp2

                # set next level start and end ranges
                level_start = level_end
                level_end = N

        if N == num_rows:
            break
    PyDataMem_FREE(inds)
    PyDataMem_FREE(rev_inds)
    PyDataMem_FREE(temp_degrees)
    # return reversed order for RCM ordering
    return order[::-1]


@cython.boundscheck(False)
@cython.wraparound(False)
def _pseudo_peripheral_node(
        cnp.ndarray[ITYPE_t, ndim=1, mode="c"] ind,
        cnp.ndarray[ITYPE_t, ndim=1, mode="c"] ptr,
        int num_rows):
    """
    Find a pseudo peripheral node of a graph represented by a sparse
    csr_matrix.

    Repeats a level-set (BFS) traversal, moving the start node to a
    minimum-degree node of the deepest level until the eccentricity
    estimate stops growing.
    """
    cdef unsigned int ii, jj, delta, flag, node, start
    cdef int maxlevel, minlevel, minlastnodesdegree
    cdef cnp.ndarray[cnp.intp_t] lastnodes
    cdef cnp.ndarray[cnp.intp_t] lastnodesdegree
    cdef cnp.ndarray[cnp.intp_t] degree = np.zeros(num_rows, dtype=ITYPE)

    degree = _node_degrees(ind, ptr, num_rows).astype(ITYPE)
    start = 0
    delta = 0
    flag = 1

    while flag:
        # do a level-set traversal from x
        order, level = _breadth_first_search(ind, ptr, num_rows, start)

        # select node in last level with min degree
        maxlevel = max(level)
        lastnodes = np.where(level == maxlevel)[0]
        lastnodesdegree = degree[lastnodes]
        minlastnodesdegree = min(lastnodesdegree)
        node = np.where(lastnodesdegree == minlastnodesdegree)[0][0]
        node = lastnodes[node]

        # if d(x,y) > delta, set, and do another BFS from this minimal node
        if level[node] > delta:
            start = node
            delta = level[node]
        else:
            flag = 0

    return start, order, level


@cython.boundscheck(False)
@cython.wraparound(False)
def _maximum_bipartite_matching(
        cnp.ndarray[ITYPE_t, ndim=1, mode="c"] inds,
        cnp.ndarray[ITYPE_t, ndim=1, mode="c"] ptrs,
        int n):
    """
    Maximum bipartite matching of a square matrix in CSC form via BFS
    augmenting paths.  Returns ``match`` mapping each column to its matched
    row (-1 if unmatched).
    """
    cdef cnp.ndarray[ITYPE_t] visited = np.zeros(n, dtype=ITYPE)
    cdef cnp.ndarray[ITYPE_t] queue = np.zeros(n, dtype=ITYPE)
    cdef cnp.ndarray[ITYPE_t] previous = np.zeros(n, dtype=ITYPE)
    cdef cnp.ndarray[ITYPE_t] match = -1 * np.ones(n, dtype=ITYPE)
    cdef cnp.ndarray[ITYPE_t] row_match = -1 * np.ones(n, dtype=ITYPE)
    cdef int queue_ptr, queue_col, ptr, i, j, queue_size
    cdef int row, col, temp, eptr, next_num = 1

    for i in range(n):
        if match[i] == -1 and (ptrs[i] != ptrs[i + 1]):
            queue[0] = i
            queue_ptr = 0
            queue_size = 1
            while (queue_size > queue_ptr):
                queue_col = queue[queue_ptr]
                queue_ptr += 1
                eptr = ptrs[queue_col + 1]
                for ptr in range(ptrs[queue_col], eptr):
                    row = inds[ptr]
                    temp = visited[row]
                    if (temp != next_num and temp != -1):
                        previous[row] = queue_col
                        visited[row] = next_num
                        col = row_match[row]
                        if (col == -1):
                            # Augmenting path found: flip the matches along it
                            # back to the starting column, then stop this BFS.
                            while (row != -1):
                                col = previous[row]
                                temp = match[col]
                                match[col] = row
                                row_match[row] = col
                                row = temp
                            next_num += 1
                            queue_size = 0
                            break
                        else:
                            queue[queue_size] = col
                            queue_size += 1

            if match[i] == -1:
                # No augmenting path: un-visit the explored rows so later
                # searches may use them again.
                for j in range(1, queue_size):
                    visited[match[queue[j]]] = -1

    return match


@cython.boundscheck(False)
@cython.wraparound(False)
def _max_row_weights(
        double[::1] data,
        int[::1] inds,
        int[::1] ptrs,
        int ncols):
    """
    Finds the largest abs value in each matrix column
    and the max. total number of elements in the cols (given by weights[-1]).

    Here we assume that the user already took the ABS value of the data.
    This keeps us from having to call abs over and over.

    """
    cdef cnp.ndarray[DTYPE_t] weights = np.zeros(ncols + 1, dtype=DTYPE)
    cdef int ln, mx, ii, jj
    cdef DTYPE_t weight, current

    mx = 0
    for jj in range(ncols):
        ln = (ptrs[jj + 1] - ptrs[jj])
        if ln > mx:
            mx = ln

        if ln > 0:
            # Guard against empty columns: reading data[ptrs[jj]] for an
            # empty column would index past the column (out of bounds for a
            # trailing empty column).  Empty columns keep weight 0.
            weight = data[ptrs[jj]]
            for ii in range(ptrs[jj] + 1, ptrs[jj + 1]):
                current = data[ii]
                if current > weight:
                    weight = current

            weights[jj] = weight

    weights[ncols] = mx
    return weights


@cython.boundscheck(False)
@cython.wraparound(False)
def _weighted_bipartite_matching(
        double[::1] data,
        int[::1] inds,
        int[::1] ptrs,
        int n):
    """
    Weighted variant of the maximum bipartite matching: columns are
    processed in order of decreasing max abs value, and rows within a
    column are tried by descending weight.

    Here we assume that the user already took the ABS value of the data.
    This keeps us from having to call abs over and over.
    """
    cdef cnp.ndarray[ITYPE_t] visited = np.zeros(n, dtype=ITYPE)
    cdef cnp.ndarray[ITYPE_t] queue = np.zeros(n, dtype=ITYPE)
    cdef cnp.ndarray[ITYPE_t] previous = np.zeros(n, dtype=ITYPE)
    cdef cnp.ndarray[ITYPE_t] match = -1 * np.ones(n, dtype=ITYPE)
    cdef cnp.ndarray[ITYPE_t] row_match = -1 * np.ones(n, dtype=ITYPE)
    cdef cnp.ndarray[DTYPE_t] weights = _max_row_weights(data, inds, ptrs, n)
    cdef cnp.ndarray[ITYPE_t] order = np.argsort(-weights[0:n]).astype(ITYPE)
    # scratch buffers sized to the longest column (stored in weights[n])
    cdef cnp.ndarray[ITYPE_t] row_order = np.zeros(int(weights[n]), dtype=ITYPE)
    cdef cnp.ndarray[DTYPE_t] temp_weights = np.zeros(int(weights[n]), dtype=DTYPE)
    cdef int queue_ptr, queue_col, queue_size, next_num
    cdef int i, j, zz, ll, kk, row, col, temp, eptr, temp2

    next_num = 1
    for i in range(n):
        zz = order[i]  # cols with largest abs values first
        if (match[zz] == -1 and (ptrs[zz] != ptrs[zz + 1])):
            queue[0] = zz
            queue_ptr = 0
            queue_size = 1

            while (queue_size > queue_ptr):
                queue_col = queue[queue_ptr]
                queue_ptr += 1
                eptr = ptrs[queue_col + 1]

                # get row inds in current column
                temp = ptrs[queue_col]
                for kk in range(eptr - ptrs[queue_col]):
                    row_order[kk] = inds[temp]
                    temp_weights[kk] = data[temp]
                    temp += 1

                # linear sort by row weight
                for kk in range(1, (eptr - ptrs[queue_col])):
                    val = temp_weights[kk]
                    row_val = row_order[kk]
                    ll = kk - 1
                    while (ll >= 0) and (temp_weights[ll] > val):
                        temp_weights[ll + 1] = temp_weights[ll]
                        row_order[ll + 1] = row_order[ll]
                        ll -= 1

                    temp_weights[ll + 1] = val
                    row_order[ll + 1] = row_val

                # go through rows by descending weight
                temp2 = (eptr - ptrs[queue_col]) - 1
                for kk in range(eptr - ptrs[queue_col]):
                    row = row_order[temp2 - kk]
                    temp = visited[row]
                    if temp != next_num and temp != -1:
                        previous[row] = queue_col
                        visited[row] = next_num
                        col = row_match[row]
                        if col == -1:
                            # Augmenting path found: flip the matches along
                            # it back to the starting column.
                            while row != -1:
                                col = previous[row]
                                temp = match[col]
                                match[col] = row
                                row_match[row] = col
                                row = temp

                            next_num += 1
                            queue_size = 0
                            break
                        else:
                            queue[queue_size] = col
                            queue_size += 1

            if match[zz] == -1:
                # No augmenting path: un-visit explored rows for later passes.
                for j in range(1, queue_size):
                    visited[match[queue[j]]] = -1

    return match