from abc import ABCMeta, abstractmethod, abstractproperty
from typing import Dict as ptDict, Type as ptType
import itertools
import weakref

import numpy as np

from numba.core.utils import cached_property, add_metaclass

# Types are added to a global registry (_typecache) in order to assign
# them unique integer codes for fast matching in _dispatcher.c.
# However, we also want types to be disposable, therefore we ensure
# each type is interned as a weak reference, so that it lives only as
# long as necessary to keep a stable type code.
# NOTE: some types can still be made immortal elsewhere (for example
# in _dispatcher.c's internal caches).
_typecodes = itertools.count()


def _autoincr():
    """Return the next unique integer type code."""
    n = next(_typecodes)
    # 4 billion types should be enough, right?
    assert n < 2 ** 32, "Limited to 4 billion types"
    return n


# Maps a weakref of an interned Type instance to itself.  Entries are
# discarded automatically when the referent is collected (see
# _on_type_disposal), so a type lives only as long as it is referenced.
_typecache: ptDict[weakref.ref, weakref.ref] = {}


def _on_type_disposal(wr, _pop=_typecache.pop):
    # Weakref callback: drop the cache entry for a collected type.
    # *_pop* is bound at definition time so the callback still works
    # during interpreter shutdown, when module globals may be cleared.
    _pop(wr, None)


class _TypeMetaclass(ABCMeta):
    """
    A metaclass that will intern instances after they are created.
    This is done by first creating a new instance (including calling
    __init__, which sets up the required attributes for equality
    and hashing), then looking it up in the _typecache registry.
    """

    def __init__(cls, name, bases, orig_vars):
        # __init__ is hooked to mark whether a Type class being defined is a
        # Numba internal type (one which is defined somewhere under the `numba`
        # module) or an external type (one which is defined elsewhere, for
        # example a user defined type).
        super(_TypeMetaclass, cls).__init__(name, bases, orig_vars)
        root = (cls.__module__.split('.'))[0]
        cls._is_internal = root == "numba"

    def _intern(cls, inst):
        # Try to intern the created instance: if an equal instance is
        # already cached, return it; otherwise assign a fresh type code
        # to *inst* and cache it.  Lookup works because weakrefs hash and
        # compare like their referents while those are alive.
        wr = weakref.ref(inst, _on_type_disposal)
        orig = _typecache.get(wr)
        orig = orig and orig()
        if orig is not None:
            return orig
        else:
            inst._code = _autoincr()
            _typecache[wr] = wr
            return inst

    def __call__(cls, *args, **kwargs):
        """
        Instantiate *cls* (a Type subclass, presumably) and intern it.
        If an interned instance already exists, it is returned, otherwise
        the new instance is returned.
        """
        inst = type.__call__(cls, *args, **kwargs)
        return cls._intern(inst)


def _type_reconstructor(reconstructor, reconstructor_args, state):
    """
    Rebuild function for unpickling types.

    Re-runs interning so that unpickled types are identical to (and share
    the type code of) any live equal instance.
    """
    obj = reconstructor(*reconstructor_args)
    if state:
        obj.__dict__.update(state)
    return type(obj)._intern(obj)


@add_metaclass(_TypeMetaclass)
class Type(object):
    """
    The base class for all Numba types.
    It is essential that proper equality comparison is implemented. The
    default implementation uses the "key" property (overridable in subclasses)
    for both comparison and hashing, to ensure sane behaviour.
    """

    # Whether values of this type are mutable.
    mutable = False
    # Whether the type is reflected at the python<->nopython boundary
    reflected = False

    def __init__(self, name):
        self.name = name

    @property
    def key(self):
        """
        A property used for __eq__, __ne__ and __hash__. Can be overridden
        in subclasses.
        """
        return self.name

    @property
    def mangling_args(self):
        """
        Returns `(basename, args)` where `basename` is the name of the type
        and `args` is a sequence of parameters of the type.

        Subclass should override to specialize the behavior.
        By default, this returns `(self.name, ())`.
        """
        return self.name, ()

    def __repr__(self):
        return self.name

    def __hash__(self):
        return hash(self.key)

    def __eq__(self, other):
        # Exact class match is required: subclasses with an equal key are
        # still distinct types.
        return self.__class__ is other.__class__ and self.key == other.key

    def __ne__(self, other):
        return not (self == other)

    def __reduce__(self):
        # Route unpickling through _type_reconstructor so the rebuilt
        # object is re-interned (see _TypeMetaclass._intern).
        reconstructor, args, state = super(Type, self).__reduce__()
        return (_type_reconstructor, (reconstructor, args, state))

    def unify(self, typingctx, other):
        """
        Try to unify this type with the *other*. A third type must
        be returned, or None if unification is not possible.
        Only override this if the coercion logic cannot be expressed
        as simple casting rules.
        """
        return None

    def can_convert_to(self, typingctx, other):
        """
        Check whether this type can be converted to the *other*.
        If successful, must return a string describing the conversion, e.g.
        "exact", "promote", "unsafe", "safe"; otherwise None is returned.
        """
        return None

    def can_convert_from(self, typingctx, other):
        """
        Similar to *can_convert_to*, but in reverse. Only needed if
        the type provides conversion from other types.
        """
        return None

    def is_precise(self):
        """
        Whether this type is precise, i.e. can be part of a successful
        type inference. Default implementation returns True.
        """
        return True

    def augment(self, other):
        """
        Augment this type with the *other*. Return the augmented type,
        or None if not supported.
        """
        return None

    # User-facing helpers. These are not part of the core Type API but
    # are provided so that users can write e.g. `numba.boolean(1.5)`
    # (returns True) or `types.int32(types.int32[:])` (returns something
    # usable as a function signature).

    def __call__(self, *args):
        # A single non-Type argument is treated as a value to cast;
        # otherwise build a signature with *self* as the return type.
        from numba.core.typing import signature
        if len(args) == 1 and not isinstance(args[0], Type):
            return self.cast_python_value(args[0])
        return signature(self, # return_type
                         *args)

    def __getitem__(self, args):
        """
        Return an array of this type.
        """
        from numba.core.types import Array
        ndim, layout = self._determine_array_spec(args)
        return Array(dtype=self, ndim=ndim, layout=layout)

    def _determine_array_spec(self, args):
        # XXX non-contiguous by default, even for 1d arrays,
        # doesn't sound very intuitive
        def validate_slice(s):
            # Only bare slices (`:` or `::1`) are valid dimension specs.
            return isinstance(s, slice) and s.start is None and s.stop is None

        # NOTE: the explicit emptiness check makes an empty tuple/list fall
        # through to the KeyError below instead of crashing on args[0]
        # (all() is vacuously True for an empty sequence).
        if isinstance(args, (tuple, list)) and args \
                and all(map(validate_slice, args)):
            ndim = len(args)
            if args[0].step == 1:
                layout = 'F'
            elif args[-1].step == 1:
                layout = 'C'
            else:
                layout = 'A'
        elif validate_slice(args):
            ndim = 1
            if args.step == 1:
                layout = 'C'
            else:
                layout = 'A'
        else:
            # Raise a KeyError to not be handled by collection constructors (e.g. list).
            raise KeyError(f"Can only index numba types with slices with no start or stop, got {args}.")

        return ndim, layout

    def cast_python_value(self, args):
        raise NotImplementedError


    @property
    def is_internal(self):
        """ Returns True if this class is an internally defined Numba type by
        virtue of the module in which it is instantiated, False else."""
        return self._is_internal

    def dump(self, tab=''):
        print(f'{tab}DUMP {type(self).__name__}[code={self._code}, name={self.name}]')

# XXX we should distinguish between Dummy (no meaningful
# representation, e.g. None or a builtin function) and Opaque (has a
# meaningful representation, e.g. ExternalFunctionPointer)

class Dummy(Type):
    """
    Base class for types that do not really have a representation and are
    compatible with a void*.
    """


class Hashable(Type):
    """
    Base class for hashable types.
    """


class Number(Hashable):
    """
    Base class for number types.
    """

    def unify(self, typingctx, other):
        """
        Unify the two number types using Numpy's rules.
        """
        from numba.np import numpy_support
        if isinstance(other, Number):
            # XXX: this can produce unsafe conversions,
            # e.g. would unify {int64, uint64} to float64
            a = numpy_support.as_dtype(self)
            b = numpy_support.as_dtype(other)
            sel = np.promote_types(a, b)
            return numpy_support.from_dtype(sel)


class Callable(Type):
    """
    Base class for callables.
    """

    @abstractmethod
    def get_call_type(self, context, args, kws):
        """
        Using the typing *context*, resolve the callable's signature for
        the given arguments. A signature object is returned, or None.
        """

    @abstractmethod
    def get_call_signatures(self):
        """
        Returns a tuple of (list of signatures, parameterized)
        """


class DTypeSpec(Type):
    """
    Base class for types usable as "dtype" arguments to various Numpy APIs
    (e.g. np.empty()).
    """

    # `@property` + `@abstractmethod` is the documented replacement for the
    # deprecated abc.abstractproperty (deprecated since Python 3.3).
    @property
    @abstractmethod
    def dtype(self):
        """
        The actual dtype denoted by this dtype spec (a Type instance).
        """


class IterableType(Type):
    """
    Base class for iterable types.
    """

    @property
    @abstractmethod
    def iterator_type(self):
        """
        The iterator type obtained when calling iter() (explicitly or implicitly).
        """


class Sized(Type):
    """
    Base class for objects that support len()
    """


class ConstSized(Sized):
    """
    For types that have a constant size
    """
    @abstractmethod
    def __len__(self):
        pass


class IteratorType(IterableType):
    """
    Base class for all iterator types.
    Derived classes should implement the *yield_type* attribute.
    """

    def __init__(self, name, **kwargs):
        super(IteratorType, self).__init__(name, **kwargs)

    @property
    @abstractmethod
    def yield_type(self):
        """
        The type of values yielded by the iterator.
        """

    # This is a property to avoid recursivity (for pickling)

    @property
    def iterator_type(self):
        # An iterator is its own iterator type.
        return self


class Container(Sized, IterableType):
    """
    Base class for container types.
    """


class Sequence(Container):
    """
    Base class for 1d sequence types. Instances should have the *dtype*
    attribute.
    """


class MutableSequence(Sequence):
    """
    Base class for 1d mutable sequence types. Instances should have the
    *dtype* attribute.
    """


class ArrayCompatible(Type):
    """
    Type class for Numpy array-compatible objects (typically, objects
    exposing an __array__ method).
    Derived classes should implement the *as_array* attribute.
    """
    # If overridden by a subclass, it should also implement typing
    # for '__array_wrap__' with arguments (input, formal result).
    array_priority = 0.0

    @property
    @abstractmethod
    def as_array(self):
        """
        The equivalent array type, for operations supporting array-compatible
        objects (such as ufuncs).
        """

    # For compatibility with types.Array

    @cached_property
    def ndim(self):
        return self.as_array.ndim

    @cached_property
    def layout(self):
        return self.as_array.layout

    @cached_property
    def dtype(self):
        return self.as_array.dtype


class Literal(Type):
    """Base class for Literal types.
    Literal types contain the original Python value in the type.

    A literal type should always be constructed from the `literal(val)`
    function.
    """

    # *ctor_map* is a dictionary mapping Python types to Literal subclasses
    # for constructing a numba type for a given Python type.
    # It is used in `literal(val)` function.
    # To add new Literal subclass, register a new mapping to this dict.
    ctor_map: ptDict[type, ptType['Literal']] = {}

    # *_literal_type_cache* is used to cache the numba type of the given value.
    _literal_type_cache = None

    def __init__(self, value):
        if type(self) is Literal:
            raise TypeError(
                "Cannot be constructed directly. "
                "Use `numba.types.literal(value)` instead",
            )
        self._literal_init(value)
        fmt = "Literal[{}]({})"
        super(Literal, self).__init__(fmt.format(type(value).__name__, value))

    def _literal_init(self, value):
        self._literal_value = value
        # We want to support constants of non-hashable values, therefore
        # fall back on the value's id() if necessary.
        try:
            hash(value)
        except TypeError:
            self._key = id(value)
        else:
            self._key = value

    @property
    def literal_value(self):
        return self._literal_value

    @property
    def literal_type(self):
        # Lazily resolved and cached on the instance (shadowing the
        # class-level None default).
        if self._literal_type_cache is None:
            from numba.core import typing
            ctx = typing.Context()
            try:
                res = ctx.resolve_value_type(self.literal_value)
            except ValueError:
                # Not all literal types have a literal_value that can be
                # resolved to a type, for example, LiteralStrKeyDict has a
                # literal_value that is a python dict for which there's no
                # `typeof` support.
                msg = "{} has no attribute 'literal_type'".format(self)
                raise AttributeError(msg)
            self._literal_type_cache = res

        return self._literal_type_cache


class TypeRef(Dummy):
    """Reference to a type.

    Used when a type is passed as a value.
    """
    def __init__(self, instance_type):
        self.instance_type = instance_type
        super(TypeRef, self).__init__('typeref[{}]'.format(self.instance_type))


class InitialValue(object):
    """
    Used as a mixin for a type that will potentially have an initial value
    that will be carried in the .initial_value attribute.
    """
    def __init__(self, initial_value):
        self._initial_value = initial_value

    @property
    def initial_value(self):
        return self._initial_value


class Poison(Type):
    """
    This is the "bottom" type in the type system. It won't unify and its
    unliteral version is Poison of itself. It's advisable for debugging purposes
    to call the constructor with the type that's being poisoned (for whatever
    reason) but this isn't strictly required.
    """
    def __init__(self, ty):
        self.ty = ty
        super(Poison, self).__init__(name="Poison<%s>" % ty)

    def __unliteral__(self):
        return Poison(self)

    def unify(self, typingctx, other):
        return None