1
2 /* Tuple object implementation */
3
4 #include "Python.h"
5 #include "pycore_abstract.h" // _PyIndex_Check()
6 #include "pycore_gc.h" // _PyObject_GC_IS_TRACKED()
7 #include "pycore_initconfig.h" // _PyStatus_OK()
8 #include "pycore_object.h" // _PyObject_GC_TRACK()
9 #include "pycore_pyerrors.h" // _Py_FatalRefcountError()
10
11 /*[clinic input]
12 class tuple "PyTupleObject *" "&PyTuple_Type"
13 [clinic start generated code]*/
14 /*[clinic end generated code: output=da39a3ee5e6b4b0d input=f051ba3cfdf9a189]*/
15
16 #include "clinic/tupleobject.c.h"
17
18
#if PyTuple_MAXSAVESIZE > 0
/* Return the current interpreter's tuple state (small-tuple free lists
   and their counters). Only compiled when the free list is enabled. */
static struct _Py_tuple_state *
get_tuple_state(void)
{
    PyInterpreterState *interp = _PyInterpreterState_GET();
    return &interp->tuple;
}
#endif
27
28
/* Print summary info about the state of the optimized allocator */
void
_PyTuple_DebugMallocStats(FILE *out)
{
#if PyTuple_MAXSAVESIZE > 0
    struct _Py_tuple_state *state = get_tuple_state();
    /* Index 0 is the empty-tuple singleton, not a free list; start at 1. */
    for (int i = 1; i < PyTuple_MAXSAVESIZE; i++) {
        char buf[128];
        PyOS_snprintf(buf, sizeof(buf),
                      "free %d-sized PyTupleObject", i);
        _PyDebugAllocatorStats(out, buf, state->numfree[i],
                               _PyObject_VAR_SIZE(&PyTuple_Type, i));
    }
#endif
}
44
/* Allocate an uninitialized tuple object. Before making it public, following
   steps must be done:

   - Initialize its items.
   - Call _PyObject_GC_TRACK() on it.

   Because the empty tuple is always reused and it's already tracked by GC,
   this function must not be called with size == 0 (unless from PyTuple_New()
   which wraps this function).

   Returns NULL with an exception set on error (negative size, overflow,
   or out of memory). */
static PyTupleObject *
tuple_alloc(Py_ssize_t size)
{
    PyTupleObject *op;
#if PyTuple_MAXSAVESIZE > 0
    // If Python is built with the empty tuple singleton,
    // tuple_alloc(0) must not be called.
    assert(size != 0);
#endif
    if (size < 0) {
        PyErr_BadInternalCall();
        return NULL;
    }

// Check for max save size > 1. Empty tuple singleton is special case.
#if PyTuple_MAXSAVESIZE > 1
    struct _Py_tuple_state *state = get_tuple_state();
#ifdef Py_DEBUG
    // tuple_alloc() must not be called after _PyTuple_Fini()
    assert(state->numfree[0] != -1);
#endif
    if (size < PyTuple_MAXSAVESIZE && (op = state->free_list[size]) != NULL) {
        assert(size != 0);
        /* Pop the head of the per-size free list: ob_item[0] of a cached
           tuple links to the next free tuple of the same size. */
        state->free_list[size] = (PyTupleObject *) op->ob_item[0];
        state->numfree[size]--;
        /* Inlined _PyObject_InitVar() without _PyType_HasFeature() test */
#ifdef Py_TRACE_REFS
        Py_SET_SIZE(op, size);
        Py_SET_TYPE(op, &PyTuple_Type);
#endif
        _Py_NewReference((PyObject *)op);
    }
    else
#endif
    {
        /* Check for overflow */
        if ((size_t)size > ((size_t)PY_SSIZE_T_MAX - (sizeof(PyTupleObject) -
                    sizeof(PyObject *))) / sizeof(PyObject *)) {
            return (PyTupleObject *)PyErr_NoMemory();
        }
        op = PyObject_GC_NewVar(PyTupleObject, &PyTuple_Type, size);
        if (op == NULL)
            return NULL;
    }
    return op;
}
101
/* Create the empty-tuple singleton and store it in free_list[0].
   Called once per interpreter by _PyTuple_Init(). Returns 0 on success,
   -1 on memory error (no exception state is required by the caller). */
static int
tuple_create_empty_tuple_singleton(struct _Py_tuple_state *state)
{
#if PyTuple_MAXSAVESIZE > 0
    assert(state->free_list[0] == NULL);

    PyTupleObject *op = PyObject_GC_NewVar(PyTupleObject, &PyTuple_Type, 0);
    if (op == NULL) {
        return -1;
    }
    // The empty tuple singleton is not tracked by the GC.
    // It does not contain any Python object.

    state->free_list[0] = op;
    state->numfree[0]++;

    assert(state->numfree[0] == 1);
#endif
    return 0;
}
122
123
/* Return a new strong reference to the empty-tuple singleton.
   Falls back to PyTuple_New(0) when the singleton is compiled out. */
static PyObject *
tuple_get_empty(void)
{
#if PyTuple_MAXSAVESIZE > 0
    struct _Py_tuple_state *state = get_tuple_state();
    PyTupleObject *op = state->free_list[0];
    // tuple_get_empty() must not be called before _PyTuple_Init()
    // or after _PyTuple_Fini()
    assert(op != NULL);
#ifdef Py_DEBUG
    assert(state->numfree[0] != -1);
#endif

    Py_INCREF(op);
    return (PyObject *) op;
#else
    return PyTuple_New(0);
#endif
}
143
144
/* Public constructor: return a new tuple of the given size with all item
   slots set to NULL (caller must fill them via PyTuple_SET_ITEM or
   PyTuple_SetItem before exposing the tuple). size == 0 returns the
   shared empty tuple. Returns NULL with an exception set on error. */
PyObject *
PyTuple_New(Py_ssize_t size)
{
    PyTupleObject *op;
#if PyTuple_MAXSAVESIZE > 0
    if (size == 0) {
        return tuple_get_empty();
    }
#endif
    op = tuple_alloc(size);
    if (op == NULL) {
        return NULL;
    }
    /* NULL-initialize so a partially filled tuple can be safely dealloc'd. */
    for (Py_ssize_t i = 0; i < size; i++) {
        op->ob_item[i] = NULL;
    }
    _PyObject_GC_TRACK(op);
    return (PyObject *) op;
}
164
165 Py_ssize_t
PyTuple_Size(PyObject * op)166 PyTuple_Size(PyObject *op)
167 {
168 if (!PyTuple_Check(op)) {
169 PyErr_BadInternalCall();
170 return -1;
171 }
172 else
173 return Py_SIZE(op);
174 }
175
176 PyObject *
PyTuple_GetItem(PyObject * op,Py_ssize_t i)177 PyTuple_GetItem(PyObject *op, Py_ssize_t i)
178 {
179 if (!PyTuple_Check(op)) {
180 PyErr_BadInternalCall();
181 return NULL;
182 }
183 if (i < 0 || i >= Py_SIZE(op)) {
184 PyErr_SetString(PyExc_IndexError, "tuple index out of range");
185 return NULL;
186 }
187 return ((PyTupleObject *)op) -> ob_item[i];
188 }
189
/* Store newitem at index i, STEALING the reference to newitem (it is
   decref'd even on failure). Only legal on a tuple with exactly one
   reference — tuples are otherwise immutable. Any previous item at the
   slot is released. Returns 0 on success, -1 with an exception set. */
int
PyTuple_SetItem(PyObject *op, Py_ssize_t i, PyObject *newitem)
{
    PyObject **p;
    if (!PyTuple_Check(op) || Py_REFCNT(op) != 1) {
        Py_XDECREF(newitem);
        PyErr_BadInternalCall();
        return -1;
    }
    if (i < 0 || i >= Py_SIZE(op)) {
        Py_XDECREF(newitem);
        PyErr_SetString(PyExc_IndexError,
                        "tuple assignment index out of range");
        return -1;
    }
    p = ((PyTupleObject *)op) -> ob_item + i;
    /* Py_XSETREF releases the old item (which may be NULL). */
    Py_XSETREF(*p, newitem);
    return 0;
}
209
/* GC optimization: untrack an exact tuple whose items can never be part
   of a reference cycle, so the collector skips it on future passes. */
void
_PyTuple_MaybeUntrack(PyObject *op)
{
    PyTupleObject *t;
    Py_ssize_t i, n;

    if (!PyTuple_CheckExact(op) || !_PyObject_GC_IS_TRACKED(op))
        return;
    t = (PyTupleObject *) op;
    n = Py_SIZE(t);
    for (i = 0; i < n; i++) {
        PyObject *elt = PyTuple_GET_ITEM(t, i);
        /* Tuple with NULL elements aren't
           fully constructed, don't untrack
           them yet. */
        if (!elt ||
            _PyObject_GC_MAY_BE_TRACKED(elt))
            return;
    }
    _PyObject_GC_UNTRACK(op);
}
231
/* Build an n-tuple from n PyObject* varargs. Each argument gets a new
   strong reference (arguments must be non-NULL). Returns NULL with an
   exception set on allocation failure. */
PyObject *
PyTuple_Pack(Py_ssize_t n, ...)
{
    Py_ssize_t i;
    PyObject *o;
    PyObject **items;
    va_list vargs;

    if (n == 0) {
        return tuple_get_empty();
    }

    va_start(vargs, n);
    PyTupleObject *result = tuple_alloc(n);
    if (result == NULL) {
        va_end(vargs);
        return NULL;
    }
    items = result->ob_item;
    for (i = 0; i < n; i++) {
        o = va_arg(vargs, PyObject *);
        Py_INCREF(o);
        items[i] = o;
    }
    va_end(vargs);
    _PyObject_GC_TRACK(result);
    return (PyObject *)result;
}
260
261
262 /* Methods */
263
/* tp_dealloc: release all items, then either stash the tuple on the
   per-size free list (small, exact-type tuples) or free it via tp_free.
   Uses the trashcan protocol so deeply nested tuples don't overflow the
   C stack during destruction. */
static void
tupledealloc(PyTupleObject *op)
{
    Py_ssize_t len = Py_SIZE(op);
    PyObject_GC_UnTrack(op);
    Py_TRASHCAN_BEGIN(op, tupledealloc)
    if (len > 0) {
        Py_ssize_t i = len;
        while (--i >= 0) {
            /* XDECREF: items may be NULL in a partially built tuple. */
            Py_XDECREF(op->ob_item[i]);
        }
#if PyTuple_MAXSAVESIZE > 0
        struct _Py_tuple_state *state = get_tuple_state();
#ifdef Py_DEBUG
        // tupledealloc() must not be called after _PyTuple_Fini()
        assert(state->numfree[0] != -1);
#endif
        if (len < PyTuple_MAXSAVESIZE
            && state->numfree[len] < PyTuple_MAXFREELIST
            && Py_IS_TYPE(op, &PyTuple_Type))
        {
            /* Push onto the free list; ob_item[0] links to the next entry. */
            op->ob_item[0] = (PyObject *) state->free_list[len];
            state->numfree[len]++;
            state->free_list[len] = op;
            goto done; /* return */
        }
#endif
    }
#if defined(Py_DEBUG) && PyTuple_MAXSAVESIZE > 0
    else {
        assert(len == 0);
        struct _Py_tuple_state *state = get_tuple_state();
        // The empty tuple singleton must only be deallocated by
        // _PyTuple_Fini(): not before, not after
        if (op == state->free_list[0] && state->numfree[0] != 0) {
            _Py_FatalRefcountError("deallocating the empty tuple singleton");
        }
    }
#endif
    Py_TYPE(op)->tp_free((PyObject *)op);

#if PyTuple_MAXSAVESIZE > 0
done:
#endif
    Py_TRASHCAN_END
}
310
/* tp_repr: build "(a, b, ...)" — with a trailing comma for 1-tuples —
   using the _PyUnicodeWriter accumulator. Cycle-safe via Py_ReprEnter. */
static PyObject *
tuplerepr(PyTupleObject *v)
{
    Py_ssize_t i, n;
    _PyUnicodeWriter writer;

    n = Py_SIZE(v);
    if (n == 0)
        return PyUnicode_FromString("()");

    /* While not mutable, it is still possible to end up with a cycle in a
       tuple through an object that stores itself within a tuple (and thus
       infinitely asks for the repr of itself). This should only be
       possible within a type. */
    i = Py_ReprEnter((PyObject *)v);
    if (i != 0) {
        return i > 0 ? PyUnicode_FromString("(...)") : NULL;
    }

    _PyUnicodeWriter_Init(&writer);
    writer.overallocate = 1;
    if (Py_SIZE(v) > 1) {
        /* "(" + "1" + ", 2" * (len - 1) + ")" */
        writer.min_length = 1 + 1 + (2 + 1) * (Py_SIZE(v) - 1) + 1;
    }
    else {
        /* "(1,)" */
        writer.min_length = 4;
    }

    if (_PyUnicodeWriter_WriteChar(&writer, '(') < 0)
        goto error;

    /* Do repr() on each element. */
    for (i = 0; i < n; ++i) {
        PyObject *s;

        if (i > 0) {
            if (_PyUnicodeWriter_WriteASCIIString(&writer, ", ", 2) < 0)
                goto error;
        }

        s = PyObject_Repr(v->ob_item[i]);
        if (s == NULL)
            goto error;

        if (_PyUnicodeWriter_WriteStr(&writer, s) < 0) {
            Py_DECREF(s);
            goto error;
        }
        Py_DECREF(s);
    }

    /* All content written; no further overallocation needed. */
    writer.overallocate = 0;
    if (n > 1) {
        if (_PyUnicodeWriter_WriteChar(&writer, ')') < 0)
            goto error;
    }
    else {
        /* Single element: trailing comma distinguishes (x,) from (x). */
        if (_PyUnicodeWriter_WriteASCIIString(&writer, ",)", 2) < 0)
            goto error;
    }

    Py_ReprLeave((PyObject *)v);
    return _PyUnicodeWriter_Finish(&writer);

error:
    _PyUnicodeWriter_Dealloc(&writer);
    Py_ReprLeave((PyObject *)v);
    return NULL;
}
382
383
384 /* Hash for tuples. This is a slightly simplified version of the xxHash
385 non-cryptographic hash:
386 - we do not use any parallellism, there is only 1 accumulator.
387 - we drop the final mixing since this is just a permutation of the
388 output space: it does not help against collisions.
389 - at the end, we mangle the length with a single constant.
390 For the xxHash specification, see
391 https://github.com/Cyan4973/xxHash/blob/master/doc/xxhash_spec.md
392
393 Below are the official constants from the xxHash specification. Optimizing
394 compilers should emit a single "rotate" instruction for the
395 _PyHASH_XXROTATE() expansion. If that doesn't happen for some important
396 platform, the macro could be changed to expand to a platform-specific rotate
397 spelling instead.
398 */
399 #if SIZEOF_PY_UHASH_T > 4
400 #define _PyHASH_XXPRIME_1 ((Py_uhash_t)11400714785074694791ULL)
401 #define _PyHASH_XXPRIME_2 ((Py_uhash_t)14029467366897019727ULL)
402 #define _PyHASH_XXPRIME_5 ((Py_uhash_t)2870177450012600261ULL)
403 #define _PyHASH_XXROTATE(x) ((x << 31) | (x >> 33)) /* Rotate left 31 bits */
404 #else
405 #define _PyHASH_XXPRIME_1 ((Py_uhash_t)2654435761UL)
406 #define _PyHASH_XXPRIME_2 ((Py_uhash_t)2246822519UL)
407 #define _PyHASH_XXPRIME_5 ((Py_uhash_t)374761393UL)
408 #define _PyHASH_XXROTATE(x) ((x << 13) | (x >> 19)) /* Rotate left 13 bits */
409 #endif
410
/* Tests have shown that it's not worth to cache the hash value, see
   https://bugs.python.org/issue9685 */
static Py_hash_t
tuplehash(PyTupleObject *v)
{
    Py_ssize_t i, len = Py_SIZE(v);
    PyObject **item = v->ob_item;

    /* Simplified xxHash accumulator loop; see the comment block above. */
    Py_uhash_t acc = _PyHASH_XXPRIME_5;
    for (i = 0; i < len; i++) {
        Py_uhash_t lane = PyObject_Hash(item[i]);
        /* An item hash of -1 signals an error from PyObject_Hash(). */
        if (lane == (Py_uhash_t)-1) {
            return -1;
        }
        acc += lane * _PyHASH_XXPRIME_2;
        acc = _PyHASH_XXROTATE(acc);
        acc *= _PyHASH_XXPRIME_1;
    }

    /* Add input length, mangled to keep the historical value of hash(()). */
    acc += len ^ (_PyHASH_XXPRIME_5 ^ 3527539UL);

    /* -1 is reserved as the error sentinel; remap it. */
    if (acc == (Py_uhash_t)-1) {
        return 1546275796;
    }
    return acc;
}
438
/* sq_length: number of items. */
static Py_ssize_t
tuplelength(PyTupleObject *a)
{
    return Py_SIZE(a);
}
444
445 static int
tuplecontains(PyTupleObject * a,PyObject * el)446 tuplecontains(PyTupleObject *a, PyObject *el)
447 {
448 Py_ssize_t i;
449 int cmp;
450
451 for (i = 0, cmp = 0 ; cmp == 0 && i < Py_SIZE(a); ++i)
452 cmp = PyObject_RichCompareBool(PyTuple_GET_ITEM(a, i), el, Py_EQ);
453 return cmp;
454 }
455
456 static PyObject *
tupleitem(PyTupleObject * a,Py_ssize_t i)457 tupleitem(PyTupleObject *a, Py_ssize_t i)
458 {
459 if (i < 0 || i >= Py_SIZE(a)) {
460 PyErr_SetString(PyExc_IndexError, "tuple index out of range");
461 return NULL;
462 }
463 Py_INCREF(a->ob_item[i]);
464 return a->ob_item[i];
465 }
466
/* Build an n-tuple from a C array of object pointers, taking a new
   strong reference to each item (items must be non-NULL). Returns NULL
   with an exception set on allocation failure. */
PyObject *
_PyTuple_FromArray(PyObject *const *src, Py_ssize_t n)
{
    if (n == 0) {
        return tuple_get_empty();
    }

    PyTupleObject *tuple = tuple_alloc(n);
    if (tuple == NULL) {
        return NULL;
    }
    PyObject **dst = tuple->ob_item;
    for (Py_ssize_t i = 0; i < n; i++) {
        PyObject *item = src[i];
        Py_INCREF(item);
        dst[i] = item;
    }
    _PyObject_GC_TRACK(tuple);
    return (PyObject *)tuple;
}
487
/* Like _PyTuple_FromArray(), but STEALS the references in src: on
   success the tuple owns them; on failure every src item is decref'd
   before returning NULL. Either way the caller gives up its references. */
PyObject *
_PyTuple_FromArraySteal(PyObject *const *src, Py_ssize_t n)
{
    if (n == 0) {
        return tuple_get_empty();
    }
    PyTupleObject *tuple = tuple_alloc(n);
    if (tuple == NULL) {
        /* Still consume the caller's references on failure. */
        for (Py_ssize_t i = 0; i < n; i++) {
            Py_DECREF(src[i]);
        }
        return NULL;
    }
    PyObject **dst = tuple->ob_item;
    for (Py_ssize_t i = 0; i < n; i++) {
        PyObject *item = src[i];
        dst[i] = item;
    }
    _PyObject_GC_TRACK(tuple);
    return (PyObject *)tuple;
}
509
510 static PyObject *
tupleslice(PyTupleObject * a,Py_ssize_t ilow,Py_ssize_t ihigh)511 tupleslice(PyTupleObject *a, Py_ssize_t ilow,
512 Py_ssize_t ihigh)
513 {
514 if (ilow < 0)
515 ilow = 0;
516 if (ihigh > Py_SIZE(a))
517 ihigh = Py_SIZE(a);
518 if (ihigh < ilow)
519 ihigh = ilow;
520 if (ilow == 0 && ihigh == Py_SIZE(a) && PyTuple_CheckExact(a)) {
521 Py_INCREF(a);
522 return (PyObject *)a;
523 }
524 return _PyTuple_FromArray(a->ob_item + ilow, ihigh - ilow);
525 }
526
527 PyObject *
PyTuple_GetSlice(PyObject * op,Py_ssize_t i,Py_ssize_t j)528 PyTuple_GetSlice(PyObject *op, Py_ssize_t i, Py_ssize_t j)
529 {
530 if (op == NULL || !PyTuple_Check(op)) {
531 PyErr_BadInternalCall();
532 return NULL;
533 }
534 return tupleslice((PyTupleObject *)op, i, j);
535 }
536
/* sq_concat: return a + bb. When either operand is empty and the other
   is an exact tuple, the other operand is returned shared. Raises
   TypeError if bb is not a tuple. */
static PyObject *
tupleconcat(PyTupleObject *a, PyObject *bb)
{
    Py_ssize_t size;
    Py_ssize_t i;
    PyObject **src, **dest;
    PyTupleObject *np;
    if (Py_SIZE(a) == 0 && PyTuple_CheckExact(bb)) {
        Py_INCREF(bb);
        return bb;
    }
    if (!PyTuple_Check(bb)) {
        PyErr_Format(PyExc_TypeError,
             "can only concatenate tuple (not \"%.200s\") to tuple",
                 Py_TYPE(bb)->tp_name);
        return NULL;
    }
    PyTupleObject *b = (PyTupleObject *)bb;

    if (Py_SIZE(b) == 0 && PyTuple_CheckExact(a)) {
        Py_INCREF(a);
        return (PyObject *)a;
    }
    /* Both sizes fit in Py_ssize_t, so the sum cannot overflow. */
    assert((size_t)Py_SIZE(a) + (size_t)Py_SIZE(b) < PY_SSIZE_T_MAX);
    size = Py_SIZE(a) + Py_SIZE(b);
    if (size == 0) {
        return tuple_get_empty();
    }

    np = tuple_alloc(size);
    if (np == NULL) {
        return NULL;
    }
    /* Copy a's items, then b's items, taking a new reference to each. */
    src = a->ob_item;
    dest = np->ob_item;
    for (i = 0; i < Py_SIZE(a); i++) {
        PyObject *v = src[i];
        Py_INCREF(v);
        dest[i] = v;
    }
    src = b->ob_item;
    dest = np->ob_item + Py_SIZE(a);
    for (i = 0; i < Py_SIZE(b); i++) {
        PyObject *v = src[i];
        Py_INCREF(v);
        dest[i] = v;
    }
    _PyObject_GC_TRACK(np);
    return (PyObject *)np;
}
587
/* sq_repeat: return a * n. n <= 0 yields the empty tuple; n == 1 on an
   exact tuple returns the operand shared. Raises MemoryError if the
   result size would overflow. */
static PyObject *
tuplerepeat(PyTupleObject *a, Py_ssize_t n)
{
    Py_ssize_t i, j;
    Py_ssize_t size;
    PyTupleObject *np;
    PyObject **p, **items;
    if (Py_SIZE(a) == 0 || n == 1) {
        if (PyTuple_CheckExact(a)) {
            /* Since tuples are immutable, we can return a shared
               copy in this case */
            Py_INCREF(a);
            return (PyObject *)a;
        }
    }
    if (Py_SIZE(a) == 0 || n <= 0) {
        return tuple_get_empty();
    }
    /* Guard the multiplication below against overflow. */
    if (n > PY_SSIZE_T_MAX / Py_SIZE(a))
        return PyErr_NoMemory();
    size = Py_SIZE(a) * n;
    np = tuple_alloc(size);
    if (np == NULL)
        return NULL;
    p = np->ob_item;
    items = a->ob_item;
    for (i = 0; i < n; i++) {
        for (j = 0; j < Py_SIZE(a); j++) {
            *p = items[j];
            Py_INCREF(*p);
            p++;
        }
    }
    _PyObject_GC_TRACK(np);
    return (PyObject *) np;
}
624
625 /*[clinic input]
626 tuple.index
627
628 value: object
629 start: slice_index(accept={int}) = 0
630 stop: slice_index(accept={int}, c_default="PY_SSIZE_T_MAX") = sys.maxsize
631 /
632
633 Return first index of value.
634
635 Raises ValueError if the value is not present.
636 [clinic start generated code]*/
637
/* tuple.index(value, [start, [stop]]): return the first index of value
   in self[start:stop]. Negative start/stop count from the end, Python
   slice style. Raises ValueError when value is not found, or propagates
   a comparison error. */
static PyObject *
tuple_index_impl(PyTupleObject *self, PyObject *value, Py_ssize_t start,
                 Py_ssize_t stop)
/*[clinic end generated code: output=07b6f9f3cb5c33eb input=fb39e9874a21fe3f]*/
{
    Py_ssize_t i;

    if (start < 0) {
        start += Py_SIZE(self);
        if (start < 0)
            start = 0;
    }
    if (stop < 0) {
        stop += Py_SIZE(self);
    }
    else if (stop > Py_SIZE(self)) {
        stop = Py_SIZE(self);
    }
    for (i = start; i < stop; i++) {
        int cmp = PyObject_RichCompareBool(self->ob_item[i], value, Py_EQ);
        if (cmp > 0)
            return PyLong_FromSsize_t(i);
        else if (cmp < 0)
            return NULL;    /* comparison raised */
    }
    PyErr_SetString(PyExc_ValueError, "tuple.index(x): x not in tuple");
    return NULL;
}
666
667 /*[clinic input]
668 tuple.count
669
670 value: object
671 /
672
673 Return number of occurrences of value.
674 [clinic start generated code]*/
675
/* tuple.count(value): number of items equal to value, or NULL if a
   comparison raised an exception. */
static PyObject *
tuple_count(PyTupleObject *self, PyObject *value)
/*[clinic end generated code: output=aa927affc5a97605 input=531721aff65bd772]*/
{
    Py_ssize_t count = 0;
    Py_ssize_t i;

    for (i = 0; i < Py_SIZE(self); i++) {
        int cmp = PyObject_RichCompareBool(self->ob_item[i], value, Py_EQ);
        if (cmp > 0)
            count++;
        else if (cmp < 0)
            return NULL;    /* comparison raised */
    }
    return PyLong_FromSsize_t(count);
}
692
693 static int
tupletraverse(PyTupleObject * o,visitproc visit,void * arg)694 tupletraverse(PyTupleObject *o, visitproc visit, void *arg)
695 {
696 Py_ssize_t i;
697
698 for (i = Py_SIZE(o); --i >= 0; )
699 Py_VISIT(o->ob_item[i]);
700 return 0;
701 }
702
/* tp_richcompare: lexicographic comparison of two tuples. Returns
   NotImplemented when either operand is not a tuple. */
static PyObject *
tuplerichcompare(PyObject *v, PyObject *w, int op)
{
    PyTupleObject *vt, *wt;
    Py_ssize_t i;
    Py_ssize_t vlen, wlen;

    if (!PyTuple_Check(v) || !PyTuple_Check(w))
        Py_RETURN_NOTIMPLEMENTED;

    vt = (PyTupleObject *)v;
    wt = (PyTupleObject *)w;

    vlen = Py_SIZE(vt);
    wlen = Py_SIZE(wt);

    /* Note:  the corresponding code for lists has an "early out" test
     * here when op is EQ or NE and the lengths differ.  That pays there,
     * but Tim was unable to find any real code where EQ/NE tuple
     * compares don't have the same length, so testing for it here would
     * have cost without benefit.
     */

    /* Search for the first index where items are different.
     * Note that because tuples are immutable, it's safe to reuse
     * vlen and wlen across the comparison calls.
     */
    for (i = 0; i < vlen && i < wlen; i++) {
        int k = PyObject_RichCompareBool(vt->ob_item[i],
                                         wt->ob_item[i], Py_EQ);
        if (k < 0)
            return NULL;
        if (!k)
            break;
    }

    if (i >= vlen || i >= wlen) {
        /* No more items to compare -- compare sizes */
        Py_RETURN_RICHCOMPARE(vlen, wlen, op);
    }

    /* We have an item that differs -- shortcuts for EQ/NE */
    if (op == Py_EQ) {
        Py_RETURN_FALSE;
    }
    if (op == Py_NE) {
        Py_RETURN_TRUE;
    }

    /* Compare the final item again using the proper operator */
    return PyObject_RichCompare(vt->ob_item[i], wt->ob_item[i], op);
}
755
756 static PyObject *
757 tuple_subtype_new(PyTypeObject *type, PyObject *iterable);
758
759 /*[clinic input]
760 @classmethod
761 tuple.__new__ as tuple_new
762 iterable: object(c_default="NULL") = ()
763 /
764
765 Built-in immutable sequence.
766
767 If no argument is given, the constructor returns an empty tuple.
768 If iterable is specified the tuple is initialized from iterable's items.
769
770 If the argument is a tuple, the return value is the same object.
771 [clinic start generated code]*/
772
/* tuple(iterable=()): constructor body generated by Argument Clinic.
   Subclass construction is delegated to tuple_subtype_new(). */
static PyObject *
tuple_new_impl(PyTypeObject *type, PyObject *iterable)
/*[clinic end generated code: output=4546d9f0d469bce7 input=86963bcde633b5a2]*/
{
    if (type != &PyTuple_Type)
        return tuple_subtype_new(type, iterable);

    if (iterable == NULL) {
        return tuple_get_empty();
    }
    else {
        return PySequence_Tuple(iterable);
    }
}
787
/* tp_vectorcall for the tuple type: fast-path constructor that accepts
   at most one positional argument and no keyword arguments. */
static PyObject *
tuple_vectorcall(PyObject *type, PyObject * const*args,
                 size_t nargsf, PyObject *kwnames)
{
    if (!_PyArg_NoKwnames("tuple", kwnames)) {
        return NULL;
    }

    Py_ssize_t nargs = PyVectorcall_NARGS(nargsf);
    if (!_PyArg_CheckPositional("tuple", nargs, 0, 1)) {
        return NULL;
    }

    if (nargs) {
        return tuple_new_impl((PyTypeObject *)type, args[0]);
    }
    else {
        return tuple_get_empty();
    }
}
808
/* Construct an instance of a tuple subclass: first build a plain tuple
   from the iterable, then copy its items into an object allocated with
   the subclass's tp_alloc. */
static PyObject *
tuple_subtype_new(PyTypeObject *type, PyObject *iterable)
{
    PyObject *tmp, *newobj, *item;
    Py_ssize_t i, n;

    assert(PyType_IsSubtype(type, &PyTuple_Type));
    // tuple subclasses must implement the GC protocol
    assert(_PyType_IS_GC(type));

    tmp = tuple_new_impl(&PyTuple_Type, iterable);
    if (tmp == NULL)
        return NULL;
    assert(PyTuple_Check(tmp));
    /* n is assigned as a side effect of the tp_alloc call. */
    newobj = type->tp_alloc(type, n = PyTuple_GET_SIZE(tmp));
    if (newobj == NULL) {
        Py_DECREF(tmp);
        return NULL;
    }
    for (i = 0; i < n; i++) {
        item = PyTuple_GET_ITEM(tmp, i);
        Py_INCREF(item);
        PyTuple_SET_ITEM(newobj, i, item);
    }
    Py_DECREF(tmp);

    // Don't track if a subclass tp_alloc is PyType_GenericAlloc()
    if (!_PyObject_GC_IS_TRACKED(newobj)) {
        _PyObject_GC_TRACK(newobj);
    }
    return newobj;
}
841
/* Sequence protocol slots for tuple. */
static PySequenceMethods tuple_as_sequence = {
    (lenfunc)tuplelength,                       /* sq_length */
    (binaryfunc)tupleconcat,                    /* sq_concat */
    (ssizeargfunc)tuplerepeat,                  /* sq_repeat */
    (ssizeargfunc)tupleitem,                    /* sq_item */
    0,                                          /* sq_slice */
    0,                                          /* sq_ass_item */
    0,                                          /* sq_ass_slice */
    (objobjproc)tuplecontains,                  /* sq_contains */
};
852
/* mp_subscript: handle both integer indexing (self[i], with negative
   index support) and slicing (self[i:j:k]). Raises TypeError for any
   other key type. */
static PyObject*
tuplesubscript(PyTupleObject* self, PyObject* item)
{
    if (_PyIndex_Check(item)) {
        Py_ssize_t i = PyNumber_AsSsize_t(item, PyExc_IndexError);
        if (i == -1 && PyErr_Occurred())
            return NULL;
        if (i < 0)
            i += PyTuple_GET_SIZE(self);
        return tupleitem(self, i);
    }
    else if (PySlice_Check(item)) {
        Py_ssize_t start, stop, step, slicelength, i;
        size_t cur;
        PyObject* it;
        PyObject **src, **dest;

        if (PySlice_Unpack(item, &start, &stop, &step) < 0) {
            return NULL;
        }
        slicelength = PySlice_AdjustIndices(PyTuple_GET_SIZE(self), &start,
                                            &stop, step);

        if (slicelength <= 0) {
            return tuple_get_empty();
        }
        else if (start == 0 && step == 1 &&
                 slicelength == PyTuple_GET_SIZE(self) &&
                 PyTuple_CheckExact(self)) {
            /* Full slice of an exact tuple: share the immutable object. */
            Py_INCREF(self);
            return (PyObject *)self;
        }
        else {
            PyTupleObject* result = tuple_alloc(slicelength);
            if (!result) return NULL;

            src = self->ob_item;
            dest = result->ob_item;
            for (cur = start, i = 0; i < slicelength;
                 cur += step, i++) {
                it = src[cur];
                Py_INCREF(it);
                dest[i] = it;
            }

            _PyObject_GC_TRACK(result);
            return (PyObject *)result;
        }
    }
    else {
        PyErr_Format(PyExc_TypeError,
                     "tuple indices must be integers or slices, not %.200s",
                     Py_TYPE(item)->tp_name);
        return NULL;
    }
}
909
910 /*[clinic input]
911 tuple.__getnewargs__
912 [clinic start generated code]*/
913
/* __getnewargs__ for pickling: return a 1-tuple containing a full copy
   of self ("N" steals the reference returned by tupleslice). */
static PyObject *
tuple___getnewargs___impl(PyTupleObject *self)
/*[clinic end generated code: output=25e06e3ee56027e2 input=1aeb4b286a21639a]*/
{
    return Py_BuildValue("(N)", tupleslice(self, 0, Py_SIZE(self)));
}
920
/* Method table; the METHODDEF macros come from clinic/tupleobject.c.h. */
static PyMethodDef tuple_methods[] = {
    TUPLE___GETNEWARGS___METHODDEF
    TUPLE_INDEX_METHODDEF
    TUPLE_COUNT_METHODDEF
    {"__class_getitem__", Py_GenericAlias, METH_O|METH_CLASS, PyDoc_STR("See PEP 585")},
    {NULL,              NULL}           /* sentinel */
};
928
/* Mapping protocol slots: len() and subscripting (int or slice). */
static PyMappingMethods tuple_as_mapping = {
    (lenfunc)tuplelength,               /* mp_length */
    (binaryfunc)tuplesubscript,         /* mp_subscript */
    0                                   /* mp_ass_subscript */
};
934
935 static PyObject *tuple_iter(PyObject *seq);
936
/* The tuple type object. Note tp_basicsize excludes one PyObject*
   because ob_item[] storage is accounted per-item via tp_itemsize. */
PyTypeObject PyTuple_Type = {
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
    "tuple",
    sizeof(PyTupleObject) - sizeof(PyObject *),
    sizeof(PyObject *),
    (destructor)tupledealloc,                   /* tp_dealloc */
    0,                                          /* tp_vectorcall_offset */
    0,                                          /* tp_getattr */
    0,                                          /* tp_setattr */
    0,                                          /* tp_as_async */
    (reprfunc)tuplerepr,                        /* tp_repr */
    0,                                          /* tp_as_number */
    &tuple_as_sequence,                         /* tp_as_sequence */
    &tuple_as_mapping,                          /* tp_as_mapping */
    (hashfunc)tuplehash,                        /* tp_hash */
    0,                                          /* tp_call */
    0,                                          /* tp_str */
    PyObject_GenericGetAttr,                    /* tp_getattro */
    0,                                          /* tp_setattro */
    0,                                          /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
        Py_TPFLAGS_BASETYPE | Py_TPFLAGS_TUPLE_SUBCLASS |
        _Py_TPFLAGS_MATCH_SELF | Py_TPFLAGS_SEQUENCE,  /* tp_flags */
    tuple_new__doc__,                           /* tp_doc */
    (traverseproc)tupletraverse,                /* tp_traverse */
    0,                                          /* tp_clear */
    tuplerichcompare,                           /* tp_richcompare */
    0,                                          /* tp_weaklistoffset */
    tuple_iter,                                 /* tp_iter */
    0,                                          /* tp_iternext */
    tuple_methods,                              /* tp_methods */
    0,                                          /* tp_members */
    0,                                          /* tp_getset */
    0,                                          /* tp_base */
    0,                                          /* tp_dict */
    0,                                          /* tp_descr_get */
    0,                                          /* tp_descr_set */
    0,                                          /* tp_dictoffset */
    0,                                          /* tp_init */
    0,                                          /* tp_alloc */
    tuple_new,                                  /* tp_new */
    PyObject_GC_Del,                            /* tp_free */
    .tp_vectorcall = tuple_vectorcall,
};
981
/* The following function breaks the notion that tuples are immutable:
   it changes the size of a tuple.  We get away with this only if there
   is only one module referencing the object.  You can also think of it
   as creating a new tuple object and destroying the old one, only more
   efficiently.  In any case, don't use this if the tuple may already be
   known to some other part of the code.

   On failure *pv is set to NULL (the tuple is destroyed) and -1 is
   returned with an exception set. */
int
_PyTuple_Resize(PyObject **pv, Py_ssize_t newsize)
{
    PyTupleObject *v;
    PyTupleObject *sv;
    Py_ssize_t i;
    Py_ssize_t oldsize;

    v = (PyTupleObject *) *pv;
    /* Resizing is only legal for an exact, uniquely referenced tuple
       (the empty tuple is exempt from the refcount test; see below). */
    if (v == NULL || !Py_IS_TYPE(v, &PyTuple_Type) ||
        (Py_SIZE(v) != 0 && Py_REFCNT(v) != 1)) {
        *pv = 0;
        Py_XDECREF(v);
        PyErr_BadInternalCall();
        return -1;
    }
    oldsize = Py_SIZE(v);
    if (oldsize == newsize)
        return 0;

    if (oldsize == 0) {
        /* Empty tuples are often shared, so we should never
           resize them in-place even if we do own the only
           (current) reference */
        Py_DECREF(v);
        *pv = PyTuple_New(newsize);
        return *pv == NULL ? -1 : 0;
    }

    /* XXX UNREF/NEWREF interface should be more symmetrical */
#ifdef Py_REF_DEBUG
    _Py_RefTotal--;
#endif
    if (_PyObject_GC_IS_TRACKED(v)) {
        _PyObject_GC_UNTRACK(v);
    }
#ifdef Py_TRACE_REFS
    _Py_ForgetReference((PyObject *) v);
#endif
    /* DECREF items deleted by shrinkage */
    for (i = newsize; i < oldsize; i++) {
        Py_CLEAR(v->ob_item[i]);
    }
    sv = PyObject_GC_Resize(PyTupleObject, v, newsize);
    if (sv == NULL) {
        *pv = NULL;
        PyObject_GC_Del(v);
        return -1;
    }
    _Py_NewReference((PyObject *) sv);
    /* Zero out items added by growing */
    if (newsize > oldsize)
        memset(&sv->ob_item[oldsize], 0,
               sizeof(*sv->ob_item) * (newsize - oldsize));
    *pv = (PyObject *) sv;
    _PyObject_GC_TRACK(sv);
    return 0;
}
1047
/* Release every tuple cached on the per-size free lists (indices 1..N).
   Called on gc.collect() and at interpreter shutdown. */
void
_PyTuple_ClearFreeList(PyInterpreterState *interp)
{
#if PyTuple_MAXSAVESIZE > 0
    struct _Py_tuple_state *state = &interp->tuple;
    for (Py_ssize_t i = 1; i < PyTuple_MAXSAVESIZE; i++) {
        PyTupleObject *p = state->free_list[i];
        state->free_list[i] = NULL;
        state->numfree[i] = 0;
        /* Walk the linked list threaded through ob_item[0]. */
        while (p) {
            PyTupleObject *q = p;
            p = (PyTupleObject *)(p->ob_item[0]);
            PyObject_GC_Del(q);
        }
    }
    // the empty tuple singleton is only cleared by _PyTuple_Fini()
#endif
}
1066
1067
/* Per-interpreter startup: create the empty-tuple singleton. */
PyStatus
_PyTuple_Init(PyInterpreterState *interp)
{
    struct _Py_tuple_state *state = &interp->tuple;
    if (tuple_create_empty_tuple_singleton(state) < 0) {
        return _PyStatus_NO_MEMORY();
    }
    return _PyStatus_OK();
}
1077
1078
/* Per-interpreter shutdown: release the empty-tuple singleton and drain
   the free lists. numfree[0] is set to -1 in debug builds so later
   misuse of the tuple API trips the asserts elsewhere in this file. */
void
_PyTuple_Fini(PyInterpreterState *interp)
{
#if PyTuple_MAXSAVESIZE > 0
    struct _Py_tuple_state *state = &interp->tuple;
    // The empty tuple singleton must not be tracked by the GC
    assert(!_PyObject_GC_IS_TRACKED(state->free_list[0]));

#ifdef Py_DEBUG
    /* Temporarily 0 so tupledealloc() doesn't abort on the singleton. */
    state->numfree[0] = 0;
#endif
    Py_CLEAR(state->free_list[0]);
#ifdef Py_DEBUG
    state->numfree[0] = -1;
#endif

    _PyTuple_ClearFreeList(interp);
#endif
}
1098
1099 /*********************** Tuple Iterator **************************/
1100
/* State for a tuple iterator: current position plus a strong reference
   to the underlying tuple. */
typedef struct {
    PyObject_HEAD
    Py_ssize_t it_index;                /* next index to yield */
    PyTupleObject *it_seq; /* Set to NULL when iterator is exhausted */
} tupleiterobject;
1106
/* tp_dealloc for the iterator: untrack before releasing it_seq so the
   GC never sees a half-destroyed object. */
static void
tupleiter_dealloc(tupleiterobject *it)
{
    _PyObject_GC_UNTRACK(it);
    Py_XDECREF(it->it_seq);
    PyObject_GC_Del(it);
}
1114
/* tp_traverse: the iterator's only reference is the tuple itself. */
static int
tupleiter_traverse(tupleiterobject *it, visitproc visit, void *arg)
{
    Py_VISIT(it->it_seq);
    return 0;
}
1121
/* tp_iternext: yield the next item (new reference), or NULL without an
   exception when exhausted. On exhaustion the reference to the tuple is
   dropped so it can be collected while the iterator lives on. */
static PyObject *
tupleiter_next(tupleiterobject *it)
{
    PyTupleObject *seq;
    PyObject *item;

    assert(it != NULL);
    seq = it->it_seq;
    if (seq == NULL)
        return NULL;
    assert(PyTuple_Check(seq));

    if (it->it_index < PyTuple_GET_SIZE(seq)) {
        item = PyTuple_GET_ITEM(seq, it->it_index);
        ++it->it_index;
        Py_INCREF(item);
        return item;
    }

    it->it_seq = NULL;
    Py_DECREF(seq);
    return NULL;
}
1145
1146 static PyObject *
tupleiter_len(tupleiterobject * it,PyObject * Py_UNUSED (ignored))1147 tupleiter_len(tupleiterobject *it, PyObject *Py_UNUSED(ignored))
1148 {
1149 Py_ssize_t len = 0;
1150 if (it->it_seq)
1151 len = PyTuple_GET_SIZE(it->it_seq) - it->it_index;
1152 return PyLong_FromSsize_t(len);
1153 }
1154
1155 PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");
1156
/* __reduce__ for pickling: (iter, (seq,), index) while active, or
   (iter, ((),)) once exhausted. */
static PyObject *
tupleiter_reduce(tupleiterobject *it, PyObject *Py_UNUSED(ignored))
{
    _Py_IDENTIFIER(iter);
    if (it->it_seq)
        return Py_BuildValue("N(O)n", _PyEval_GetBuiltinId(&PyId_iter),
                             it->it_seq, it->it_index);
    else
        return Py_BuildValue("N(())", _PyEval_GetBuiltinId(&PyId_iter));
}
1167
/* __setstate__ for unpickling: restore the iteration index, clamped
   into [0, len(seq)]. A no-op on an exhausted iterator. */
static PyObject *
tupleiter_setstate(tupleiterobject *it, PyObject *state)
{
    Py_ssize_t index = PyLong_AsSsize_t(state);
    if (index == -1 && PyErr_Occurred())
        return NULL;
    if (it->it_seq != NULL) {
        if (index < 0)
            index = 0;
        else if (index > PyTuple_GET_SIZE(it->it_seq))
            index = PyTuple_GET_SIZE(it->it_seq); /* exhausted iterator */
        it->it_index = index;
    }
    Py_RETURN_NONE;
}
1183
1184 PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
1185 PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");
1186
/* Method table for tuple iterators (length hint + pickle support). */
static PyMethodDef tupleiter_methods[] = {
    {"__length_hint__", (PyCFunction)tupleiter_len, METH_NOARGS, length_hint_doc},
    {"__reduce__", (PyCFunction)tupleiter_reduce, METH_NOARGS, reduce_doc},
    {"__setstate__",    (PyCFunction)tupleiter_setstate, METH_O, setstate_doc},
    {NULL,              NULL}           /* sentinel */
};
1193
/* The tuple_iterator type. Not a base type; instances are created only
   by tuple_iter() below. */
PyTypeObject PyTupleIter_Type = {
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
    "tuple_iterator",                           /* tp_name */
    sizeof(tupleiterobject),                    /* tp_basicsize */
    0,                                          /* tp_itemsize */
    /* methods */
    (destructor)tupleiter_dealloc,              /* tp_dealloc */
    0,                                          /* tp_vectorcall_offset */
    0,                                          /* tp_getattr */
    0,                                          /* tp_setattr */
    0,                                          /* tp_as_async */
    0,                                          /* tp_repr */
    0,                                          /* tp_as_number */
    0,                                          /* tp_as_sequence */
    0,                                          /* tp_as_mapping */
    0,                                          /* tp_hash */
    0,                                          /* tp_call */
    0,                                          /* tp_str */
    PyObject_GenericGetAttr,                    /* tp_getattro */
    0,                                          /* tp_setattro */
    0,                                          /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,/* tp_flags */
    0,                                          /* tp_doc */
    (traverseproc)tupleiter_traverse,           /* tp_traverse */
    0,                                          /* tp_clear */
    0,                                          /* tp_richcompare */
    0,                                          /* tp_weaklistoffset */
    PyObject_SelfIter,                          /* tp_iter */
    (iternextfunc)tupleiter_next,               /* tp_iternext */
    tupleiter_methods,                          /* tp_methods */
    0,                                          /* tp_members */
};
1226
/* tp_iter for tuple: create a new tuple_iterator positioned at index 0,
   holding a strong reference to the tuple. */
static PyObject *
tuple_iter(PyObject *seq)
{
    tupleiterobject *it;

    if (!PyTuple_Check(seq)) {
        PyErr_BadInternalCall();
        return NULL;
    }
    it = PyObject_GC_New(tupleiterobject, &PyTupleIter_Type);
    if (it == NULL)
        return NULL;
    it->it_index = 0;
    Py_INCREF(seq);
    it->it_seq = (PyTupleObject *)seq;
    _PyObject_GC_TRACK(it);
    return (PyObject *)it;
}
1245