1
2 /* Tuple object implementation */
3
4 #include "Python.h"
5 #include "pycore_object.h"
6 #include "pycore_pystate.h"
7 #include "pycore_accu.h"
8
9 /*[clinic input]
10 class tuple "PyTupleObject *" "&PyTuple_Type"
11 [clinic start generated code]*/
12 /*[clinic end generated code: output=da39a3ee5e6b4b0d input=f051ba3cfdf9a189]*/
13
14 #include "clinic/tupleobject.c.h"
15
16 /* Speed optimization to avoid frequent malloc/free of small tuples */
17 #ifndef PyTuple_MAXSAVESIZE
18 #define PyTuple_MAXSAVESIZE 20 /* Largest tuple to save on free list */
19 #endif
20 #ifndef PyTuple_MAXFREELIST
21 #define PyTuple_MAXFREELIST 2000 /* Maximum number of tuples of each size to save */
22 #endif
23
24 #if PyTuple_MAXSAVESIZE > 0
25 /* Entries 1 up to PyTuple_MAXSAVESIZE are free lists, entry 0 is the empty
26 tuple () of which at most one instance will be allocated.
27 */
28 static PyTupleObject *free_list[PyTuple_MAXSAVESIZE];
29 static int numfree[PyTuple_MAXSAVESIZE];
30 #endif
31 #ifdef COUNT_ALLOCS
32 Py_ssize_t _Py_fast_tuple_allocs;
33 Py_ssize_t _Py_tuple_zero_allocs;
34 #endif
35
36 /* Debug statistic to count GC tracking of tuples.
37 Please note that tuples are only untracked when considered by the GC, and
38 many of them will be dead before. Therefore, a tracking rate close to 100%
39 does not necessarily prove that the heuristic is inefficient.
40 */
41 #ifdef SHOW_TRACK_COUNT
42 static Py_ssize_t count_untracked = 0;
43 static Py_ssize_t count_tracked = 0;
44
static void
show_track(void)
{
    /* Print tuple GC-tracking statistics to stderr at interpreter
       shutdown.  Only active when the interpreter config enables
       show_alloc_count; compiled only under SHOW_TRACK_COUNT. */
    PyInterpreterState *interp = _PyInterpreterState_Get();
    if (!interp->config.show_alloc_count) {
        return;
    }

    fprintf(stderr, "Tuples created: %" PY_FORMAT_SIZE_T "d\n",
            count_tracked + count_untracked);
    fprintf(stderr, "Tuples tracked by the GC: %" PY_FORMAT_SIZE_T
            "d\n", count_tracked);
    fprintf(stderr, "%.2f%% tuple tracking rate\n\n",
            (100.0*count_tracked/(count_untracked+count_tracked)));
}
60 #endif
61
62 /* Print summary info about the state of the optimized allocator */
63 void
_PyTuple_DebugMallocStats(FILE * out)64 _PyTuple_DebugMallocStats(FILE *out)
65 {
66 #if PyTuple_MAXSAVESIZE > 0
67 int i;
68 char buf[128];
69 for (i = 1; i < PyTuple_MAXSAVESIZE; i++) {
70 PyOS_snprintf(buf, sizeof(buf),
71 "free %d-sized PyTupleObject", i);
72 _PyDebugAllocatorStats(out,
73 buf,
74 numfree[i], _PyObject_VAR_SIZE(&PyTuple_Type, i));
75 }
76 #endif
77 }
78
PyObject *
PyTuple_New(Py_ssize_t size)
{
    /* Allocate a tuple with `size` slots, all initialized to NULL.
       Small sizes (< PyTuple_MAXSAVESIZE) are recycled from per-size
       free lists; the empty tuple is a shared singleton kept in
       free_list[0].  Returns NULL with an exception set on error. */
    PyTupleObject *op;
    Py_ssize_t i;
    if (size < 0) {
        PyErr_BadInternalCall();
        return NULL;
    }
#if PyTuple_MAXSAVESIZE > 0
    if (size == 0 && free_list[0]) {
        /* Hand out the shared empty-tuple singleton. */
        op = free_list[0];
        Py_INCREF(op);
#ifdef COUNT_ALLOCS
        _Py_tuple_zero_allocs++;
#endif
        return (PyObject *) op;
    }
    if (size < PyTuple_MAXSAVESIZE && (op = free_list[size]) != NULL) {
        /* Pop a recycled object; the free list is a singly linked
           list threaded through ob_item[0] (see tupledealloc). */
        free_list[size] = (PyTupleObject *) op->ob_item[0];
        numfree[size]--;
#ifdef COUNT_ALLOCS
        _Py_fast_tuple_allocs++;
#endif
        /* Inline PyObject_InitVar */
#ifdef Py_TRACE_REFS
        Py_SIZE(op) = size;
        Py_TYPE(op) = &PyTuple_Type;
#endif
        _Py_NewReference((PyObject *)op);
    }
    else
#endif
    {
        /* Check for overflow */
        if ((size_t)size > ((size_t)PY_SSIZE_T_MAX - sizeof(PyTupleObject) -
                    sizeof(PyObject *)) / sizeof(PyObject *)) {
            return PyErr_NoMemory();
        }
        op = PyObject_GC_NewVar(PyTupleObject, &PyTuple_Type, size);
        if (op == NULL)
            return NULL;
    }
    /* All slots start out NULL; callers fill them in. */
    for (i=0; i < size; i++)
        op->ob_item[i] = NULL;
#if PyTuple_MAXSAVESIZE > 0
    if (size == 0) {
        /* First empty tuple ever created becomes the singleton. */
        free_list[0] = op;
        ++numfree[0];
        Py_INCREF(op);          /* extra INCREF so that this is never freed */
    }
#endif
#ifdef SHOW_TRACK_COUNT
    count_tracked++;
#endif
    _PyObject_GC_TRACK(op);
    return (PyObject *) op;
}
137
138 Py_ssize_t
PyTuple_Size(PyObject * op)139 PyTuple_Size(PyObject *op)
140 {
141 if (!PyTuple_Check(op)) {
142 PyErr_BadInternalCall();
143 return -1;
144 }
145 else
146 return Py_SIZE(op);
147 }
148
149 PyObject *
PyTuple_GetItem(PyObject * op,Py_ssize_t i)150 PyTuple_GetItem(PyObject *op, Py_ssize_t i)
151 {
152 if (!PyTuple_Check(op)) {
153 PyErr_BadInternalCall();
154 return NULL;
155 }
156 if (i < 0 || i >= Py_SIZE(op)) {
157 PyErr_SetString(PyExc_IndexError, "tuple index out of range");
158 return NULL;
159 }
160 return ((PyTupleObject *)op) -> ob_item[i];
161 }
162
163 int
PyTuple_SetItem(PyObject * op,Py_ssize_t i,PyObject * newitem)164 PyTuple_SetItem(PyObject *op, Py_ssize_t i, PyObject *newitem)
165 {
166 PyObject **p;
167 if (!PyTuple_Check(op) || op->ob_refcnt != 1) {
168 Py_XDECREF(newitem);
169 PyErr_BadInternalCall();
170 return -1;
171 }
172 if (i < 0 || i >= Py_SIZE(op)) {
173 Py_XDECREF(newitem);
174 PyErr_SetString(PyExc_IndexError,
175 "tuple assignment index out of range");
176 return -1;
177 }
178 p = ((PyTupleObject *)op) -> ob_item + i;
179 Py_XSETREF(*p, newitem);
180 return 0;
181 }
182
183 void
_PyTuple_MaybeUntrack(PyObject * op)184 _PyTuple_MaybeUntrack(PyObject *op)
185 {
186 PyTupleObject *t;
187 Py_ssize_t i, n;
188
189 if (!PyTuple_CheckExact(op) || !_PyObject_GC_IS_TRACKED(op))
190 return;
191 t = (PyTupleObject *) op;
192 n = Py_SIZE(t);
193 for (i = 0; i < n; i++) {
194 PyObject *elt = PyTuple_GET_ITEM(t, i);
195 /* Tuple with NULL elements aren't
196 fully constructed, don't untrack
197 them yet. */
198 if (!elt ||
199 _PyObject_GC_MAY_BE_TRACKED(elt))
200 return;
201 }
202 #ifdef SHOW_TRACK_COUNT
203 count_tracked--;
204 count_untracked++;
205 #endif
206 _PyObject_GC_UNTRACK(op);
207 }
208
209 PyObject *
PyTuple_Pack(Py_ssize_t n,...)210 PyTuple_Pack(Py_ssize_t n, ...)
211 {
212 Py_ssize_t i;
213 PyObject *o;
214 PyObject *result;
215 PyObject **items;
216 va_list vargs;
217
218 va_start(vargs, n);
219 result = PyTuple_New(n);
220 if (result == NULL) {
221 va_end(vargs);
222 return NULL;
223 }
224 items = ((PyTupleObject *)result)->ob_item;
225 for (i = 0; i < n; i++) {
226 o = va_arg(vargs, PyObject *);
227 Py_INCREF(o);
228 items[i] = o;
229 }
230 va_end(vargs);
231 return result;
232 }
233
234
235 /* Methods */
236
static void
tupledealloc(PyTupleObject *op)
{
    /* tp_dealloc: drop all item references, then either stash the
       object on the per-size free list (small exact tuples only) or
       free it.  Wrapped in the trashcan macros to bound C-stack depth
       when deallocating deeply nested tuples. */
    Py_ssize_t i;
    Py_ssize_t len =  Py_SIZE(op);
    PyObject_GC_UnTrack(op);
    Py_TRASHCAN_BEGIN(op, tupledealloc)
    if (len > 0) {
        i = len;
        while (--i >= 0)
            Py_XDECREF(op->ob_item[i]);
#if PyTuple_MAXSAVESIZE > 0
        /* Recycle: link into free_list[len] through ob_item[0]
           (matched by the pop in PyTuple_New). */
        if (len < PyTuple_MAXSAVESIZE &&
            numfree[len] < PyTuple_MAXFREELIST &&
            Py_TYPE(op) == &PyTuple_Type)
        {
            op->ob_item[0] = (PyObject *) free_list[len];
            numfree[len]++;
            free_list[len] = op;
            goto done; /* return */
        }
#endif
    }
    Py_TYPE(op)->tp_free((PyObject *)op);
done:
    Py_TRASHCAN_END
}
264
static PyObject *
tuplerepr(PyTupleObject *v)
{
    /* tp_repr: build "(a, b, c)" -- or "(a,)" for a 1-tuple -- using
       a _PyUnicodeWriter, with Py_ReprEnter/Py_ReprLeave guarding
       against self-referential tuples. */
    Py_ssize_t i, n;
    _PyUnicodeWriter writer;

    n = Py_SIZE(v);
    if (n == 0)
        return PyUnicode_FromString("()");

    /* While not mutable, it is still possible to end up with a cycle in a
       tuple through an object that stores itself within a tuple (and thus
       infinitely asks for the repr of itself). This should only be
       possible within a type. */
    i = Py_ReprEnter((PyObject *)v);
    if (i != 0) {
        /* i > 0: recursion detected, emit ellipsis; i < 0: error. */
        return i > 0 ? PyUnicode_FromString("(...)") : NULL;
    }

    _PyUnicodeWriter_Init(&writer);
    writer.overallocate = 1;
    if (Py_SIZE(v) > 1) {
        /* "(" + "1" + ", 2" * (len - 1) + ")" */
        writer.min_length = 1 + 1 + (2 + 1) * (Py_SIZE(v) - 1) + 1;
    }
    else {
        /* "(1,)" */
        writer.min_length = 4;
    }

    if (_PyUnicodeWriter_WriteChar(&writer, '(') < 0)
        goto error;

    /* Do repr() on each element. */
    for (i = 0; i < n; ++i) {
        PyObject *s;

        if (i > 0) {
            if (_PyUnicodeWriter_WriteASCIIString(&writer, ", ", 2) < 0)
                goto error;
        }

        s = PyObject_Repr(v->ob_item[i]);
        if (s == NULL)
            goto error;

        if (_PyUnicodeWriter_WriteStr(&writer, s) < 0) {
            Py_DECREF(s);
            goto error;
        }
        Py_DECREF(s);
    }

    /* Only the closing characters remain; stop overallocating. */
    writer.overallocate = 0;
    if (n > 1) {
        if (_PyUnicodeWriter_WriteChar(&writer, ')') < 0)
            goto error;
    }
    else {
        /* 1-tuples need the trailing comma to round-trip. */
        if (_PyUnicodeWriter_WriteASCIIString(&writer, ",)", 2) < 0)
            goto error;
    }

    Py_ReprLeave((PyObject *)v);
    return _PyUnicodeWriter_Finish(&writer);

error:
    _PyUnicodeWriter_Dealloc(&writer);
    Py_ReprLeave((PyObject *)v);
    return NULL;
}
336
337
338 /* Hash for tuples. This is a slightly simplified version of the xxHash
339 non-cryptographic hash:
340 - we do not use any parallellism, there is only 1 accumulator.
341 - we drop the final mixing since this is just a permutation of the
342 output space: it does not help against collisions.
343 - at the end, we mangle the length with a single constant.
344 For the xxHash specification, see
345 https://github.com/Cyan4973/xxHash/blob/master/doc/xxhash_spec.md
346
347 Below are the official constants from the xxHash specification. Optimizing
348 compilers should emit a single "rotate" instruction for the
349 _PyHASH_XXROTATE() expansion. If that doesn't happen for some important
350 platform, the macro could be changed to expand to a platform-specific rotate
351 spelling instead.
352 */
353 #if SIZEOF_PY_UHASH_T > 4
354 #define _PyHASH_XXPRIME_1 ((Py_uhash_t)11400714785074694791ULL)
355 #define _PyHASH_XXPRIME_2 ((Py_uhash_t)14029467366897019727ULL)
356 #define _PyHASH_XXPRIME_5 ((Py_uhash_t)2870177450012600261ULL)
357 #define _PyHASH_XXROTATE(x) ((x << 31) | (x >> 33)) /* Rotate left 31 bits */
358 #else
359 #define _PyHASH_XXPRIME_1 ((Py_uhash_t)2654435761UL)
360 #define _PyHASH_XXPRIME_2 ((Py_uhash_t)2246822519UL)
361 #define _PyHASH_XXPRIME_5 ((Py_uhash_t)374761393UL)
362 #define _PyHASH_XXROTATE(x) ((x << 13) | (x >> 19)) /* Rotate left 13 bits */
363 #endif
364
365 /* Tests have shown that it's not worth to cache the hash value, see
366 https://bugs.python.org/issue9685 */
static Py_hash_t
tuplehash(PyTupleObject *v)
{
    /* tp_hash: xxHash-style combination of the item hashes; see the
       comment block above for the algorithm and its deviations from
       the xxHash spec.  Values must stay stable across builds of the
       same word size, so do not alter any constant here. */
    Py_ssize_t i, len = Py_SIZE(v);
    PyObject **item = v->ob_item;

    Py_uhash_t acc = _PyHASH_XXPRIME_5;
    for (i = 0; i < len; i++) {
        Py_uhash_t lane = PyObject_Hash(item[i]);
        /* (Py_uhash_t)-1 is the error value from PyObject_Hash;
           propagate the failure. */
        if (lane == (Py_uhash_t)-1) {
            return -1;
        }
        acc += lane * _PyHASH_XXPRIME_2;
        acc = _PyHASH_XXROTATE(acc);
        acc *= _PyHASH_XXPRIME_1;
    }

    /* Add input length, mangled to keep the historical value of hash(()). */
    acc += len ^ (_PyHASH_XXPRIME_5 ^ 3527539UL);

    /* -1 is reserved for signalling errors, so remap it. */
    if (acc == (Py_uhash_t)-1) {
        return 1546275796;
    }
    return acc;
}
392
393 static Py_ssize_t
tuplelength(PyTupleObject * a)394 tuplelength(PyTupleObject *a)
395 {
396 return Py_SIZE(a);
397 }
398
399 static int
tuplecontains(PyTupleObject * a,PyObject * el)400 tuplecontains(PyTupleObject *a, PyObject *el)
401 {
402 Py_ssize_t i;
403 int cmp;
404
405 for (i = 0, cmp = 0 ; cmp == 0 && i < Py_SIZE(a); ++i)
406 cmp = PyObject_RichCompareBool(el, PyTuple_GET_ITEM(a, i),
407 Py_EQ);
408 return cmp;
409 }
410
411 static PyObject *
tupleitem(PyTupleObject * a,Py_ssize_t i)412 tupleitem(PyTupleObject *a, Py_ssize_t i)
413 {
414 if (i < 0 || i >= Py_SIZE(a)) {
415 PyErr_SetString(PyExc_IndexError, "tuple index out of range");
416 return NULL;
417 }
418 Py_INCREF(a->ob_item[i]);
419 return a->ob_item[i];
420 }
421
422 PyObject *
_PyTuple_FromArray(PyObject * const * src,Py_ssize_t n)423 _PyTuple_FromArray(PyObject *const *src, Py_ssize_t n)
424 {
425 PyTupleObject *tuple = (PyTupleObject *)PyTuple_New(n);
426 if (tuple == NULL) {
427 return NULL;
428 }
429 PyObject **dst = tuple->ob_item;
430 for (Py_ssize_t i = 0; i < n; i++) {
431 PyObject *item = src[i];
432 Py_INCREF(item);
433 dst[i] = item;
434 }
435 return (PyObject *)tuple;
436 }
437
438 static PyObject *
tupleslice(PyTupleObject * a,Py_ssize_t ilow,Py_ssize_t ihigh)439 tupleslice(PyTupleObject *a, Py_ssize_t ilow,
440 Py_ssize_t ihigh)
441 {
442 if (ilow < 0)
443 ilow = 0;
444 if (ihigh > Py_SIZE(a))
445 ihigh = Py_SIZE(a);
446 if (ihigh < ilow)
447 ihigh = ilow;
448 if (ilow == 0 && ihigh == Py_SIZE(a) && PyTuple_CheckExact(a)) {
449 Py_INCREF(a);
450 return (PyObject *)a;
451 }
452 return _PyTuple_FromArray(a->ob_item + ilow, ihigh - ilow);
453 }
454
455 PyObject *
PyTuple_GetSlice(PyObject * op,Py_ssize_t i,Py_ssize_t j)456 PyTuple_GetSlice(PyObject *op, Py_ssize_t i, Py_ssize_t j)
457 {
458 if (op == NULL || !PyTuple_Check(op)) {
459 PyErr_BadInternalCall();
460 return NULL;
461 }
462 return tupleslice((PyTupleObject *)op, i, j);
463 }
464
465 static PyObject *
tupleconcat(PyTupleObject * a,PyObject * bb)466 tupleconcat(PyTupleObject *a, PyObject *bb)
467 {
468 Py_ssize_t size;
469 Py_ssize_t i;
470 PyObject **src, **dest;
471 PyTupleObject *np;
472 if (Py_SIZE(a) == 0 && PyTuple_CheckExact(bb)) {
473 Py_INCREF(bb);
474 return bb;
475 }
476 if (!PyTuple_Check(bb)) {
477 PyErr_Format(PyExc_TypeError,
478 "can only concatenate tuple (not \"%.200s\") to tuple",
479 Py_TYPE(bb)->tp_name);
480 return NULL;
481 }
482 #define b ((PyTupleObject *)bb)
483 if (Py_SIZE(b) == 0 && PyTuple_CheckExact(a)) {
484 Py_INCREF(a);
485 return (PyObject *)a;
486 }
487 if (Py_SIZE(a) > PY_SSIZE_T_MAX - Py_SIZE(b))
488 return PyErr_NoMemory();
489 size = Py_SIZE(a) + Py_SIZE(b);
490 np = (PyTupleObject *) PyTuple_New(size);
491 if (np == NULL) {
492 return NULL;
493 }
494 src = a->ob_item;
495 dest = np->ob_item;
496 for (i = 0; i < Py_SIZE(a); i++) {
497 PyObject *v = src[i];
498 Py_INCREF(v);
499 dest[i] = v;
500 }
501 src = b->ob_item;
502 dest = np->ob_item + Py_SIZE(a);
503 for (i = 0; i < Py_SIZE(b); i++) {
504 PyObject *v = src[i];
505 Py_INCREF(v);
506 dest[i] = v;
507 }
508 return (PyObject *)np;
509 #undef b
510 }
511
static PyObject *
tuplerepeat(PyTupleObject *a, Py_ssize_t n)
{
    /* sq_repeat: return a * n.  Negative n behaves like 0.  An exact
       tuple repeated once (or an empty exact tuple) is returned
       as-is. */
    Py_ssize_t i, j;
    Py_ssize_t size;
    PyTupleObject *np;
    PyObject **p, **items;
    if (n < 0)
        n = 0;
    if (Py_SIZE(a) == 0 || n == 1) {
        if (PyTuple_CheckExact(a)) {
            /* Since tuples are immutable, we can return a shared
               copy in this case */
            Py_INCREF(a);
            return (PyObject *)a;
        }
        if (Py_SIZE(a) == 0)
            return PyTuple_New(0);
    }
    /* Py_SIZE(a) > 0 past this point, so the division is safe;
       this guards the multiplication below against overflow. */
    if (n > PY_SSIZE_T_MAX / Py_SIZE(a))
        return PyErr_NoMemory();
    size = Py_SIZE(a) * n;
    np = (PyTupleObject *) PyTuple_New(size);
    if (np == NULL)
        return NULL;
    p = np->ob_item;
    items = a->ob_item;
    for (i = 0; i < n; i++) {
        for (j = 0; j < Py_SIZE(a); j++) {
            *p = items[j];
            Py_INCREF(*p);
            p++;
        }
    }
    return (PyObject *) np;
}
548
549 /*[clinic input]
550 tuple.index
551
552 value: object
553 start: slice_index(accept={int}) = 0
554 stop: slice_index(accept={int}, c_default="PY_SSIZE_T_MAX") = sys.maxsize
555 /
556
557 Return first index of value.
558
559 Raises ValueError if the value is not present.
560 [clinic start generated code]*/
561
562 static PyObject *
tuple_index_impl(PyTupleObject * self,PyObject * value,Py_ssize_t start,Py_ssize_t stop)563 tuple_index_impl(PyTupleObject *self, PyObject *value, Py_ssize_t start,
564 Py_ssize_t stop)
565 /*[clinic end generated code: output=07b6f9f3cb5c33eb input=fb39e9874a21fe3f]*/
566 {
567 Py_ssize_t i;
568
569 if (start < 0) {
570 start += Py_SIZE(self);
571 if (start < 0)
572 start = 0;
573 }
574 if (stop < 0) {
575 stop += Py_SIZE(self);
576 }
577 else if (stop > Py_SIZE(self)) {
578 stop = Py_SIZE(self);
579 }
580 for (i = start; i < stop; i++) {
581 int cmp = PyObject_RichCompareBool(self->ob_item[i], value, Py_EQ);
582 if (cmp > 0)
583 return PyLong_FromSsize_t(i);
584 else if (cmp < 0)
585 return NULL;
586 }
587 PyErr_SetString(PyExc_ValueError, "tuple.index(x): x not in tuple");
588 return NULL;
589 }
590
591 /*[clinic input]
592 tuple.count
593
594 value: object
595 /
596
597 Return number of occurrences of value.
598 [clinic start generated code]*/
599
600 static PyObject *
tuple_count(PyTupleObject * self,PyObject * value)601 tuple_count(PyTupleObject *self, PyObject *value)
602 /*[clinic end generated code: output=aa927affc5a97605 input=531721aff65bd772]*/
603 {
604 Py_ssize_t count = 0;
605 Py_ssize_t i;
606
607 for (i = 0; i < Py_SIZE(self); i++) {
608 int cmp = PyObject_RichCompareBool(self->ob_item[i], value, Py_EQ);
609 if (cmp > 0)
610 count++;
611 else if (cmp < 0)
612 return NULL;
613 }
614 return PyLong_FromSsize_t(count);
615 }
616
617 static int
tupletraverse(PyTupleObject * o,visitproc visit,void * arg)618 tupletraverse(PyTupleObject *o, visitproc visit, void *arg)
619 {
620 Py_ssize_t i;
621
622 for (i = Py_SIZE(o); --i >= 0; )
623 Py_VISIT(o->ob_item[i]);
624 return 0;
625 }
626
static PyObject *
tuplerichcompare(PyObject *v, PyObject *w, int op)
{
    /* tp_richcompare: lexicographic comparison.  Find the first index
       where the items differ; if one tuple is a prefix of the other,
       compare by length instead. */
    PyTupleObject *vt, *wt;
    Py_ssize_t i;
    Py_ssize_t vlen, wlen;

    if (!PyTuple_Check(v) || !PyTuple_Check(w))
        Py_RETURN_NOTIMPLEMENTED;

    vt = (PyTupleObject *)v;
    wt = (PyTupleObject *)w;

    vlen = Py_SIZE(vt);
    wlen = Py_SIZE(wt);

    /* Note: the corresponding code for lists has an "early out" test
     * here when op is EQ or NE and the lengths differ. That pays there,
     * but Tim was unable to find any real code where EQ/NE tuple
     * compares don't have the same length, so testing for it here would
     * have cost without benefit.
     */

    /* Search for the first index where items are different.
     * Note that because tuples are immutable, it's safe to reuse
     * vlen and wlen across the comparison calls.
     */
    for (i = 0; i < vlen && i < wlen; i++) {
        int k = PyObject_RichCompareBool(vt->ob_item[i],
                                         wt->ob_item[i], Py_EQ);
        if (k < 0)
            return NULL;
        if (!k)
            break;
    }

    if (i >= vlen || i >= wlen) {
        /* No more items to compare -- compare sizes */
        Py_RETURN_RICHCOMPARE(vlen, wlen, op);
    }

    /* We have an item that differs -- shortcuts for EQ/NE */
    if (op == Py_EQ) {
        Py_RETURN_FALSE;
    }
    if (op == Py_NE) {
        Py_RETURN_TRUE;
    }

    /* Compare the final item again using the proper operator */
    return PyObject_RichCompare(vt->ob_item[i], wt->ob_item[i], op);
}
679
680 static PyObject *
681 tuple_subtype_new(PyTypeObject *type, PyObject *iterable);
682
683 /*[clinic input]
684 @classmethod
685 tuple.__new__ as tuple_new
686 iterable: object(c_default="NULL") = ()
687 /
688
689 Built-in immutable sequence.
690
691 If no argument is given, the constructor returns an empty tuple.
692 If iterable is specified the tuple is initialized from iterable's items.
693
694 If the argument is a tuple, the return value is the same object.
695 [clinic start generated code]*/
696
697 static PyObject *
tuple_new_impl(PyTypeObject * type,PyObject * iterable)698 tuple_new_impl(PyTypeObject *type, PyObject *iterable)
699 /*[clinic end generated code: output=4546d9f0d469bce7 input=86963bcde633b5a2]*/
700 {
701 if (type != &PyTuple_Type)
702 return tuple_subtype_new(type, iterable);
703
704 if (iterable == NULL)
705 return PyTuple_New(0);
706 else
707 return PySequence_Tuple(iterable);
708 }
709
710 static PyObject *
tuple_subtype_new(PyTypeObject * type,PyObject * iterable)711 tuple_subtype_new(PyTypeObject *type, PyObject *iterable)
712 {
713 PyObject *tmp, *newobj, *item;
714 Py_ssize_t i, n;
715
716 assert(PyType_IsSubtype(type, &PyTuple_Type));
717 tmp = tuple_new_impl(&PyTuple_Type, iterable);
718 if (tmp == NULL)
719 return NULL;
720 assert(PyTuple_Check(tmp));
721 newobj = type->tp_alloc(type, n = PyTuple_GET_SIZE(tmp));
722 if (newobj == NULL) {
723 Py_DECREF(tmp);
724 return NULL;
725 }
726 for (i = 0; i < n; i++) {
727 item = PyTuple_GET_ITEM(tmp, i);
728 Py_INCREF(item);
729 PyTuple_SET_ITEM(newobj, i, item);
730 }
731 Py_DECREF(tmp);
732 return newobj;
733 }
734
/* Sequence protocol for tuple: len(), +, *, t[i] and `in`. */
static PySequenceMethods tuple_as_sequence = {
    (lenfunc)tuplelength,                       /* sq_length */
    (binaryfunc)tupleconcat,                    /* sq_concat */
    (ssizeargfunc)tuplerepeat,                  /* sq_repeat */
    (ssizeargfunc)tupleitem,                    /* sq_item */
    0,                                          /* sq_slice */
    0,                                          /* sq_ass_item */
    0,                                          /* sq_ass_slice */
    (objobjproc)tuplecontains,                  /* sq_contains */
};
745
static PyObject*
tuplesubscript(PyTupleObject* self, PyObject* item)
{
    /* mp_subscript: handle t[i] for index-like objects and t[a:b:c]
       for slices; anything else raises TypeError. */
    if (PyIndex_Check(item)) {
        Py_ssize_t i = PyNumber_AsSsize_t(item, PyExc_IndexError);
        if (i == -1 && PyErr_Occurred())
            return NULL;
        /* Negative indices count from the end. */
        if (i < 0)
            i += PyTuple_GET_SIZE(self);
        return tupleitem(self, i);
    }
    else if (PySlice_Check(item)) {
        Py_ssize_t start, stop, step, slicelength, i;
        size_t cur;
        PyObject* result;
        PyObject* it;
        PyObject **src, **dest;

        if (PySlice_Unpack(item, &start, &stop, &step) < 0) {
            return NULL;
        }
        slicelength = PySlice_AdjustIndices(PyTuple_GET_SIZE(self), &start,
                                            &stop, step);

        if (slicelength <= 0) {
            return PyTuple_New(0);
        }
        else if (start == 0 && step == 1 &&
                 slicelength == PyTuple_GET_SIZE(self) &&
                 PyTuple_CheckExact(self)) {
            /* Full forward slice of an exact tuple: reuse it. */
            Py_INCREF(self);
            return (PyObject *)self;
        }
        else {
            /* General case: copy the selected items into a new
               tuple, taking a new reference to each. */
            result = PyTuple_New(slicelength);
            if (!result) return NULL;

            src = self->ob_item;
            dest = ((PyTupleObject *)result)->ob_item;
            for (cur = start, i = 0; i < slicelength;
                 cur += step, i++) {
                it = src[cur];
                Py_INCREF(it);
                dest[i] = it;
            }

            return result;
        }
    }
    else {
        PyErr_Format(PyExc_TypeError,
                     "tuple indices must be integers or slices, not %.200s",
                     Py_TYPE(item)->tp_name);
        return NULL;
    }
}
802
803 /*[clinic input]
804 tuple.__getnewargs__
805 [clinic start generated code]*/
806
static PyObject *
tuple___getnewargs___impl(PyTupleObject *self)
/*[clinic end generated code: output=25e06e3ee56027e2 input=1aeb4b286a21639a]*/
{
    /* Pickling support: return a 1-tuple wrapping a plain-tuple copy
       of self (the "N" format code hands the tupleslice() result's
       reference over to Py_BuildValue). */
    return Py_BuildValue("(N)", tupleslice(self, 0, Py_SIZE(self)));
}
813
/* Method table; the *_METHODDEF entries are generated by Argument
   Clinic into clinic/tupleobject.c.h (included above). */
static PyMethodDef tuple_methods[] = {
    TUPLE___GETNEWARGS___METHODDEF
    TUPLE_INDEX_METHODDEF
    TUPLE_COUNT_METHODDEF
    {NULL, NULL} /* sentinel */
};
820
/* Mapping protocol: lets t[obj] accept both indices and slices via
   tuplesubscript().  Tuples are immutable, so no mp_ass_subscript. */
static PyMappingMethods tuple_as_mapping = {
    (lenfunc)tuplelength,           /* mp_length */
    (binaryfunc)tuplesubscript,     /* mp_subscript */
    0                               /* mp_ass_subscript */
};
826
827 static PyObject *tuple_iter(PyObject *seq);
828
/* The tuple type object.  NOTE(review): tp_basicsize subtracts one
   PyObject* -- presumably because PyTupleObject declares a one-element
   ob_item array while items are accounted for via tp_itemsize; confirm
   against the PyTupleObject declaration. */
PyTypeObject PyTuple_Type = {
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
    "tuple",                                    /* tp_name */
    sizeof(PyTupleObject) - sizeof(PyObject *), /* tp_basicsize */
    sizeof(PyObject *),                         /* tp_itemsize */
    (destructor)tupledealloc,                   /* tp_dealloc */
    0,                                          /* tp_vectorcall_offset */
    0,                                          /* tp_getattr */
    0,                                          /* tp_setattr */
    0,                                          /* tp_as_async */
    (reprfunc)tuplerepr,                        /* tp_repr */
    0,                                          /* tp_as_number */
    &tuple_as_sequence,                         /* tp_as_sequence */
    &tuple_as_mapping,                          /* tp_as_mapping */
    (hashfunc)tuplehash,                        /* tp_hash */
    0,                                          /* tp_call */
    0,                                          /* tp_str */
    PyObject_GenericGetAttr,                    /* tp_getattro */
    0,                                          /* tp_setattro */
    0,                                          /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
        Py_TPFLAGS_BASETYPE | Py_TPFLAGS_TUPLE_SUBCLASS, /* tp_flags */
    tuple_new__doc__,                           /* tp_doc */
    (traverseproc)tupletraverse,                /* tp_traverse */
    0,                                          /* tp_clear */
    tuplerichcompare,                           /* tp_richcompare */
    0,                                          /* tp_weaklistoffset */
    tuple_iter,                                 /* tp_iter */
    0,                                          /* tp_iternext */
    tuple_methods,                              /* tp_methods */
    0,                                          /* tp_members */
    0,                                          /* tp_getset */
    0,                                          /* tp_base */
    0,                                          /* tp_dict */
    0,                                          /* tp_descr_get */
    0,                                          /* tp_descr_set */
    0,                                          /* tp_dictoffset */
    0,                                          /* tp_init */
    0,                                          /* tp_alloc */
    tuple_new,                                  /* tp_new */
    PyObject_GC_Del,                            /* tp_free */
};
871
872 /* The following function breaks the notion that tuples are immutable:
873 it changes the size of a tuple. We get away with this only if there
874 is only one module referencing the object. You can also think of it
875 as creating a new tuple object and destroying the old one, only more
876 efficiently. In any case, don't use this if the tuple may already be
877 known to some other part of the code. */
878
int
_PyTuple_Resize(PyObject **pv, Py_ssize_t newsize)
{
    /* Resize *pv in place to newsize items.  Only legal when *pv is
       an exact tuple that is either empty or uniquely referenced (see
       the comment above).  On failure, *pv is set to NULL, the old
       object is released, and -1 is returned. */
    PyTupleObject *v;
    PyTupleObject *sv;
    Py_ssize_t i;
    Py_ssize_t oldsize;

    v = (PyTupleObject *) *pv;
    if (v == NULL || Py_TYPE(v) != &PyTuple_Type ||
        (Py_SIZE(v) != 0 && Py_REFCNT(v) != 1)) {
        *pv = 0;
        Py_XDECREF(v);
        PyErr_BadInternalCall();
        return -1;
    }
    oldsize = Py_SIZE(v);
    if (oldsize == newsize)
        return 0;

    if (oldsize == 0) {
        /* Empty tuples are often shared, so we should never
           resize them in-place even if we do own the only
           (current) reference */
        Py_DECREF(v);
        *pv = PyTuple_New(newsize);
        return *pv == NULL ? -1 : 0;
    }

    /* XXX UNREF/NEWREF interface should be more symmetrical */
    /* Temporarily take the object out of the GC's and refcount
       bookkeeping's view while its memory may move. */
    _Py_DEC_REFTOTAL;
    if (_PyObject_GC_IS_TRACKED(v))
        _PyObject_GC_UNTRACK(v);
    _Py_ForgetReference((PyObject *) v);
    /* DECREF items deleted by shrinkage */
    for (i = newsize; i < oldsize; i++) {
        Py_CLEAR(v->ob_item[i]);
    }
    sv = PyObject_GC_Resize(PyTupleObject, v, newsize);
    if (sv == NULL) {
        /* Resize failed: the old object was already "forgotten"
           above, so free its memory directly. */
        *pv = NULL;
        PyObject_GC_Del(v);
        return -1;
    }
    _Py_NewReference((PyObject *) sv);
    /* Zero out items added by growing */
    if (newsize > oldsize)
        memset(&sv->ob_item[oldsize], 0,
               sizeof(*sv->ob_item) * (newsize - oldsize));
    *pv = (PyObject *) sv;
    _PyObject_GC_TRACK(sv);
    return 0;
}
932
int
PyTuple_ClearFreeList(void)
{
    /* Free every tuple cached on the per-size free lists and return
       the number released.  free_list[0] (the empty-tuple singleton)
       is deliberately left alone; see PyTuple_Fini. */
    int freelist_size = 0;
#if PyTuple_MAXSAVESIZE > 0
    int i;
    for (i = 1; i < PyTuple_MAXSAVESIZE; i++) {
        PyTupleObject *p, *q;
        p = free_list[i];
        freelist_size += numfree[i];
        free_list[i] = NULL;
        numfree[i] = 0;
        /* Walk the singly linked list threaded through ob_item[0]. */
        while (p) {
            q = p;
            p = (PyTupleObject *)(p->ob_item[0]);
            PyObject_GC_Del(q);
        }
    }
#endif
    return freelist_size;
}
954
void
PyTuple_Fini(void)
{
    /* Interpreter-shutdown hook: release the empty-tuple singleton,
       clear the free lists, and optionally dump tracking stats. */
#if PyTuple_MAXSAVESIZE > 0
    /* empty tuples are used all over the place and applications may
     * rely on the fact that an empty tuple is a singleton. */
    Py_CLEAR(free_list[0]);

    (void)PyTuple_ClearFreeList();
#endif
#ifdef SHOW_TRACK_COUNT
    show_track();
#endif
}
969
970 /*********************** Tuple Iterator **************************/
971
/* State for a tuple iterator (see tuple_iter / tupleiter_next). */
typedef struct {
    PyObject_HEAD
    Py_ssize_t it_index;   /* next index to yield */
    PyTupleObject *it_seq; /* Set to NULL when iterator is exhausted */
} tupleiterobject;
977
static void
tupleiter_dealloc(tupleiterobject *it)
{
    /* tp_dealloc: untrack from the GC first, then drop the reference
       to the underlying tuple (already NULL if exhausted). */
    _PyObject_GC_UNTRACK(it);
    Py_XDECREF(it->it_seq);
    PyObject_GC_Del(it);
}
985
static int
tupleiter_traverse(tupleiterobject *it, visitproc visit, void *arg)
{
    /* tp_traverse: the underlying tuple is the only reference an
       iterator holds. */
    Py_VISIT(it->it_seq);
    return 0;
}
992
993 static PyObject *
tupleiter_next(tupleiterobject * it)994 tupleiter_next(tupleiterobject *it)
995 {
996 PyTupleObject *seq;
997 PyObject *item;
998
999 assert(it != NULL);
1000 seq = it->it_seq;
1001 if (seq == NULL)
1002 return NULL;
1003 assert(PyTuple_Check(seq));
1004
1005 if (it->it_index < PyTuple_GET_SIZE(seq)) {
1006 item = PyTuple_GET_ITEM(seq, it->it_index);
1007 ++it->it_index;
1008 Py_INCREF(item);
1009 return item;
1010 }
1011
1012 it->it_seq = NULL;
1013 Py_DECREF(seq);
1014 return NULL;
1015 }
1016
1017 static PyObject *
tupleiter_len(tupleiterobject * it,PyObject * Py_UNUSED (ignored))1018 tupleiter_len(tupleiterobject *it, PyObject *Py_UNUSED(ignored))
1019 {
1020 Py_ssize_t len = 0;
1021 if (it->it_seq)
1022 len = PyTuple_GET_SIZE(it->it_seq) - it->it_index;
1023 return PyLong_FromSsize_t(len);
1024 }
1025
1026 PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");
1027
static PyObject *
tupleiter_reduce(tupleiterobject *it, PyObject *Py_UNUSED(ignored))
{
    /* __reduce__: pickle as (iter, (seq,), index); an exhausted
       iterator pickles as iter(()) with no state. */
    _Py_IDENTIFIER(iter);
    if (it->it_seq)
        return Py_BuildValue("N(O)n", _PyEval_GetBuiltinId(&PyId_iter),
                             it->it_seq, it->it_index);
    else
        return Py_BuildValue("N(())", _PyEval_GetBuiltinId(&PyId_iter));
}
1038
1039 static PyObject *
tupleiter_setstate(tupleiterobject * it,PyObject * state)1040 tupleiter_setstate(tupleiterobject *it, PyObject *state)
1041 {
1042 Py_ssize_t index = PyLong_AsSsize_t(state);
1043 if (index == -1 && PyErr_Occurred())
1044 return NULL;
1045 if (it->it_seq != NULL) {
1046 if (index < 0)
1047 index = 0;
1048 else if (index > PyTuple_GET_SIZE(it->it_seq))
1049 index = PyTuple_GET_SIZE(it->it_seq); /* exhausted iterator */
1050 it->it_index = index;
1051 }
1052 Py_RETURN_NONE;
1053 }
1054
1055 PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
1056 PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");
1057
/* Iterator methods: length hint plus pickling support. */
static PyMethodDef tupleiter_methods[] = {
    {"__length_hint__", (PyCFunction)tupleiter_len, METH_NOARGS, length_hint_doc},
    {"__reduce__", (PyCFunction)tupleiter_reduce, METH_NOARGS, reduce_doc},
    {"__setstate__", (PyCFunction)tupleiter_setstate, METH_O, setstate_doc},
    {NULL,              NULL}           /* sentinel */
};
1064
/* Type object for the tuple iterator returned by tuple_iter(). */
PyTypeObject PyTupleIter_Type = {
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
    "tuple_iterator",                           /* tp_name */
    sizeof(tupleiterobject),                    /* tp_basicsize */
    0,                                          /* tp_itemsize */
    /* methods */
    (destructor)tupleiter_dealloc,              /* tp_dealloc */
    0,                                          /* tp_vectorcall_offset */
    0,                                          /* tp_getattr */
    0,                                          /* tp_setattr */
    0,                                          /* tp_as_async */
    0,                                          /* tp_repr */
    0,                                          /* tp_as_number */
    0,                                          /* tp_as_sequence */
    0,                                          /* tp_as_mapping */
    0,                                          /* tp_hash */
    0,                                          /* tp_call */
    0,                                          /* tp_str */
    PyObject_GenericGetAttr,                    /* tp_getattro */
    0,                                          /* tp_setattro */
    0,                                          /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,/* tp_flags */
    0,                                          /* tp_doc */
    (traverseproc)tupleiter_traverse,           /* tp_traverse */
    0,                                          /* tp_clear */
    0,                                          /* tp_richcompare */
    0,                                          /* tp_weaklistoffset */
    PyObject_SelfIter,                          /* tp_iter */
    (iternextfunc)tupleiter_next,               /* tp_iternext */
    tupleiter_methods,                          /* tp_methods */
    0,                                          /* tp_members */
};
1097
1098 static PyObject *
tuple_iter(PyObject * seq)1099 tuple_iter(PyObject *seq)
1100 {
1101 tupleiterobject *it;
1102
1103 if (!PyTuple_Check(seq)) {
1104 PyErr_BadInternalCall();
1105 return NULL;
1106 }
1107 it = PyObject_GC_New(tupleiterobject, &PyTupleIter_Type);
1108 if (it == NULL)
1109 return NULL;
1110 it->it_index = 0;
1111 Py_INCREF(seq);
1112 it->it_seq = (PyTupleObject *)seq;
1113 _PyObject_GC_TRACK(it);
1114 return (PyObject *)it;
1115 }
1116