1
2 /* Tuple object implementation */
3
4 #include "Python.h"
5 #include "pycore_abstract.h" // _PyIndex_Check()
6 #include "pycore_gc.h" // _PyObject_GC_IS_TRACKED()
7 #include "pycore_initconfig.h" // _PyStatus_OK()
8 #include "pycore_object.h" // _PyObject_GC_TRACK(), _Py_FatalRefcountError()
9
10 /*[clinic input]
11 class tuple "PyTupleObject *" "&PyTuple_Type"
12 [clinic start generated code]*/
13 /*[clinic end generated code: output=da39a3ee5e6b4b0d input=f051ba3cfdf9a189]*/
14
15 #include "clinic/tupleobject.c.h"
16
17
18 static inline PyTupleObject * maybe_freelist_pop(Py_ssize_t);
19 static inline int maybe_freelist_push(PyTupleObject *);
20
21
22 /* Allocate an uninitialized tuple object. Before making it public, following
23 steps must be done:
24
25 - Initialize its items.
26 - Call _PyObject_GC_TRACK() on it.
27
28 Because the empty tuple is always reused and it's already tracked by GC,
29 this function must not be called with size == 0 (unless from PyTuple_New()
30 which wraps this function).
31 */
static PyTupleObject *
tuple_alloc(Py_ssize_t size)
{
    /* Allocate an uninitialized tuple of `size` items, preferring the
       per-size freelist over a fresh GC allocation.  The returned object
       is NOT tracked by the GC and its items are NOT initialized; see the
       contract in the comment above. */
    if (size < 0) {
        PyErr_BadInternalCall();
        return NULL;
    }
#ifdef Py_DEBUG
    assert(size != 0);    // The empty tuple is statically allocated.
#endif

    PyTupleObject *op = maybe_freelist_pop(size);
    if (op == NULL) {
        /* Check for overflow: make sure header + size items fits in
           PY_SSIZE_T_MAX bytes (the header already contains one item slot). */
        if ((size_t)size > ((size_t)PY_SSIZE_T_MAX - (sizeof(PyTupleObject) -
                    sizeof(PyObject *))) / sizeof(PyObject *)) {
            return (PyTupleObject *)PyErr_NoMemory();
        }
        op = PyObject_GC_NewVar(PyTupleObject, &PyTuple_Type, size);
        if (op == NULL)
            return NULL;
    }
    return op;
}
56
57 // The empty tuple singleton is not tracked by the GC.
58 // It does not contain any Python object.
59 // Note that tuple subclasses have their own empty instances.
60
61 static inline PyObject *
62 tuple_get_empty(void)
63 {
64 return Py_NewRef(&_Py_SINGLETON(tuple_empty));
65 }
66
67 PyObject *
68 PyTuple_New(Py_ssize_t size)
69 {
70 PyTupleObject *op;
71 if (size == 0) {
72 return tuple_get_empty();
73 }
74 op = tuple_alloc(size);
75 if (op == NULL) {
76 return NULL;
77 }
78 for (Py_ssize_t i = 0; i < size; i++) {
79 op->ob_item[i] = NULL;
80 }
81 _PyObject_GC_TRACK(op);
82 return (PyObject *) op;
83 }
84
85 Py_ssize_t
86 PyTuple_Size(PyObject *op)
87 {
88 if (!PyTuple_Check(op)) {
89 PyErr_BadInternalCall();
90 return -1;
91 }
92 else
93 return Py_SIZE(op);
94 }
95
96 PyObject *
97 PyTuple_GetItem(PyObject *op, Py_ssize_t i)
98 {
99 if (!PyTuple_Check(op)) {
100 PyErr_BadInternalCall();
101 return NULL;
102 }
103 if (i < 0 || i >= Py_SIZE(op)) {
104 PyErr_SetString(PyExc_IndexError, "tuple index out of range");
105 return NULL;
106 }
107 return ((PyTupleObject *)op) -> ob_item[i];
108 }
109
110 int
111 PyTuple_SetItem(PyObject *op, Py_ssize_t i, PyObject *newitem)
112 {
113 PyObject **p;
114 if (!PyTuple_Check(op) || Py_REFCNT(op) != 1) {
115 Py_XDECREF(newitem);
116 PyErr_BadInternalCall();
117 return -1;
118 }
119 if (i < 0 || i >= Py_SIZE(op)) {
120 Py_XDECREF(newitem);
121 PyErr_SetString(PyExc_IndexError,
122 "tuple assignment index out of range");
123 return -1;
124 }
125 p = ((PyTupleObject *)op) -> ob_item + i;
126 Py_XSETREF(*p, newitem);
127 return 0;
128 }
129
130 void
131 _PyTuple_MaybeUntrack(PyObject *op)
132 {
133 PyTupleObject *t;
134 Py_ssize_t i, n;
135
136 if (!PyTuple_CheckExact(op) || !_PyObject_GC_IS_TRACKED(op))
137 return;
138 t = (PyTupleObject *) op;
139 n = Py_SIZE(t);
140 for (i = 0; i < n; i++) {
141 PyObject *elt = PyTuple_GET_ITEM(t, i);
142 /* Tuple with NULL elements aren't
143 fully constructed, don't untrack
144 them yet. */
145 if (!elt ||
146 _PyObject_GC_MAY_BE_TRACKED(elt))
147 return;
148 }
149 _PyObject_GC_UNTRACK(op);
150 }
151
152 PyObject *
153 PyTuple_Pack(Py_ssize_t n, ...)
154 {
155 Py_ssize_t i;
156 PyObject *o;
157 PyObject **items;
158 va_list vargs;
159
160 if (n == 0) {
161 return tuple_get_empty();
162 }
163
164 va_start(vargs, n);
165 PyTupleObject *result = tuple_alloc(n);
166 if (result == NULL) {
167 va_end(vargs);
168 return NULL;
169 }
170 items = result->ob_item;
171 for (i = 0; i < n; i++) {
172 o = va_arg(vargs, PyObject *);
173 items[i] = Py_NewRef(o);
174 }
175 va_end(vargs);
176 _PyObject_GC_TRACK(result);
177 return (PyObject *)result;
178 }
179
180
181 /* Methods */
182
static void
tupledealloc(PyTupleObject *op)
{
    /* tp_dealloc: release every item, then recycle the tuple itself,
       preferring the per-size freelist over the allocator. */
    if (Py_SIZE(op) == 0) {
        /* The empty tuple is statically allocated. */
        if (op == &_Py_SINGLETON(tuple_empty)) {
#ifdef Py_DEBUG
            _Py_FatalRefcountError("deallocating the empty tuple singleton");
#else
            return;
#endif
        }
#ifdef Py_DEBUG
        /* tuple subclasses have their own empty instances. */
        assert(!PyTuple_CheckExact(op));
#endif
    }

    PyObject_GC_UnTrack(op);
    /* Trashcan protects against C stack overflow from deeply nested
       tuples being torn down recursively. */
    Py_TRASHCAN_BEGIN(op, tupledealloc)

    Py_ssize_t i = Py_SIZE(op);
    while (--i >= 0) {
        Py_XDECREF(op->ob_item[i]);
    }
    // This will abort on the empty singleton (if there is one).
    if (!maybe_freelist_push(op)) {
        Py_TYPE(op)->tp_free((PyObject *)op);
    }

    Py_TRASHCAN_END
}
215
static PyObject *
tuplerepr(PyTupleObject *v)
{
    /* tp_repr: build "(item1, item2, ...)" — with a trailing comma for
       1-tuples — using the unicode writer to minimize reallocations. */
    Py_ssize_t i, n;
    _PyUnicodeWriter writer;

    n = Py_SIZE(v);
    if (n == 0)
        return PyUnicode_FromString("()");

    /* While not mutable, it is still possible to end up with a cycle in a
       tuple through an object that stores itself within a tuple (and thus
       infinitely asks for the repr of itself). This should only be
       possible within a type. */
    i = Py_ReprEnter((PyObject *)v);
    if (i != 0) {
        return i > 0 ? PyUnicode_FromString("(...)") : NULL;
    }

    _PyUnicodeWriter_Init(&writer);
    writer.overallocate = 1;
    if (Py_SIZE(v) > 1) {
        /* "(" + "1" + ", 2" * (len - 1) + ")" */
        writer.min_length = 1 + 1 + (2 + 1) * (Py_SIZE(v) - 1) + 1;
    }
    else {
        /* "(1,)" */
        writer.min_length = 4;
    }

    if (_PyUnicodeWriter_WriteChar(&writer, '(') < 0)
        goto error;

    /* Do repr() on each element. */
    for (i = 0; i < n; ++i) {
        PyObject *s;

        if (i > 0) {
            if (_PyUnicodeWriter_WriteASCIIString(&writer, ", ", 2) < 0)
                goto error;
        }

        s = PyObject_Repr(v->ob_item[i]);
        if (s == NULL)
            goto error;

        if (_PyUnicodeWriter_WriteStr(&writer, s) < 0) {
            Py_DECREF(s);
            goto error;
        }
        Py_DECREF(s);
    }

    /* The remaining output has a known exact size; stop overallocating. */
    writer.overallocate = 0;
    if (n > 1) {
        if (_PyUnicodeWriter_WriteChar(&writer, ')') < 0)
            goto error;
    }
    else {
        if (_PyUnicodeWriter_WriteASCIIString(&writer, ",)", 2) < 0)
            goto error;
    }

    Py_ReprLeave((PyObject *)v);
    return _PyUnicodeWriter_Finish(&writer);

error:
    _PyUnicodeWriter_Dealloc(&writer);
    Py_ReprLeave((PyObject *)v);
    return NULL;
}
287
288
289 /* Hash for tuples. This is a slightly simplified version of the xxHash
290 non-cryptographic hash:
291 - we do not use any parallelism, there is only 1 accumulator.
292 - we drop the final mixing since this is just a permutation of the
293 output space: it does not help against collisions.
294 - at the end, we mangle the length with a single constant.
295 For the xxHash specification, see
296 https://github.com/Cyan4973/xxHash/blob/master/doc/xxhash_spec.md
297
298 Below are the official constants from the xxHash specification. Optimizing
299 compilers should emit a single "rotate" instruction for the
300 _PyHASH_XXROTATE() expansion. If that doesn't happen for some important
301 platform, the macro could be changed to expand to a platform-specific rotate
302 spelling instead.
303 */
304 #if SIZEOF_PY_UHASH_T > 4
305 #define _PyHASH_XXPRIME_1 ((Py_uhash_t)11400714785074694791ULL)
306 #define _PyHASH_XXPRIME_2 ((Py_uhash_t)14029467366897019727ULL)
307 #define _PyHASH_XXPRIME_5 ((Py_uhash_t)2870177450012600261ULL)
308 #define _PyHASH_XXROTATE(x) ((x << 31) | (x >> 33)) /* Rotate left 31 bits */
309 #else
310 #define _PyHASH_XXPRIME_1 ((Py_uhash_t)2654435761UL)
311 #define _PyHASH_XXPRIME_2 ((Py_uhash_t)2246822519UL)
312 #define _PyHASH_XXPRIME_5 ((Py_uhash_t)374761393UL)
313 #define _PyHASH_XXROTATE(x) ((x << 13) | (x >> 19)) /* Rotate left 13 bits */
314 #endif
315
316 /* Tests have shown that it's not worth to cache the hash value, see
317 https://bugs.python.org/issue9685 */
static Py_hash_t
tuplehash(PyTupleObject *v)
{
    /* tp_hash: combine the item hashes with the xxHash-style scheme
       described in the comment block above (_PyHASH_XXPRIME_* constants). */
    Py_ssize_t i, len = Py_SIZE(v);
    PyObject **item = v->ob_item;

    Py_uhash_t acc = _PyHASH_XXPRIME_5;
    for (i = 0; i < len; i++) {
        Py_uhash_t lane = PyObject_Hash(item[i]);
        /* (Py_uhash_t)-1 here means PyObject_Hash() reported an error. */
        if (lane == (Py_uhash_t)-1) {
            return -1;
        }
        acc += lane * _PyHASH_XXPRIME_2;
        acc = _PyHASH_XXROTATE(acc);
        acc *= _PyHASH_XXPRIME_1;
    }

    /* Add input length, mangled to keep the historical value of hash(()). */
    acc += len ^ (_PyHASH_XXPRIME_5 ^ 3527539UL);

    /* -1 is reserved as the error indicator; remap it. */
    if (acc == (Py_uhash_t)-1) {
        return 1546275796;
    }
    return acc;
}
343
344 static Py_ssize_t
345 tuplelength(PyTupleObject *a)
346 {
347 return Py_SIZE(a);
348 }
349
350 static int
351 tuplecontains(PyTupleObject *a, PyObject *el)
352 {
353 Py_ssize_t i;
354 int cmp;
355
356 for (i = 0, cmp = 0 ; cmp == 0 && i < Py_SIZE(a); ++i)
357 cmp = PyObject_RichCompareBool(PyTuple_GET_ITEM(a, i), el, Py_EQ);
358 return cmp;
359 }
360
361 static PyObject *
362 tupleitem(PyTupleObject *a, Py_ssize_t i)
363 {
364 if (i < 0 || i >= Py_SIZE(a)) {
365 PyErr_SetString(PyExc_IndexError, "tuple index out of range");
366 return NULL;
367 }
368 return Py_NewRef(a->ob_item[i]);
369 }
370
371 PyObject *
372 _PyTuple_FromArray(PyObject *const *src, Py_ssize_t n)
373 {
374 if (n == 0) {
375 return tuple_get_empty();
376 }
377
378 PyTupleObject *tuple = tuple_alloc(n);
379 if (tuple == NULL) {
380 return NULL;
381 }
382 PyObject **dst = tuple->ob_item;
383 for (Py_ssize_t i = 0; i < n; i++) {
384 PyObject *item = src[i];
385 dst[i] = Py_NewRef(item);
386 }
387 _PyObject_GC_TRACK(tuple);
388 return (PyObject *)tuple;
389 }
390
391 PyObject *
392 _PyTuple_FromArraySteal(PyObject *const *src, Py_ssize_t n)
393 {
394 if (n == 0) {
395 return tuple_get_empty();
396 }
397 PyTupleObject *tuple = tuple_alloc(n);
398 if (tuple == NULL) {
399 for (Py_ssize_t i = 0; i < n; i++) {
400 Py_DECREF(src[i]);
401 }
402 return NULL;
403 }
404 PyObject **dst = tuple->ob_item;
405 for (Py_ssize_t i = 0; i < n; i++) {
406 PyObject *item = src[i];
407 dst[i] = item;
408 }
409 _PyObject_GC_TRACK(tuple);
410 return (PyObject *)tuple;
411 }
412
413 static PyObject *
414 tupleslice(PyTupleObject *a, Py_ssize_t ilow,
415 Py_ssize_t ihigh)
416 {
417 if (ilow < 0)
418 ilow = 0;
419 if (ihigh > Py_SIZE(a))
420 ihigh = Py_SIZE(a);
421 if (ihigh < ilow)
422 ihigh = ilow;
423 if (ilow == 0 && ihigh == Py_SIZE(a) && PyTuple_CheckExact(a)) {
424 return Py_NewRef(a);
425 }
426 return _PyTuple_FromArray(a->ob_item + ilow, ihigh - ilow);
427 }
428
429 PyObject *
430 PyTuple_GetSlice(PyObject *op, Py_ssize_t i, Py_ssize_t j)
431 {
432 if (op == NULL || !PyTuple_Check(op)) {
433 PyErr_BadInternalCall();
434 return NULL;
435 }
436 return tupleslice((PyTupleObject *)op, i, j);
437 }
438
static PyObject *
tupleconcat(PyTupleObject *a, PyObject *bb)
{
    /* sq_concat: return a + bb as a new tuple.  When one operand is
       empty and the other is an exact tuple, the other operand is
       returned directly (tuples are immutable, so sharing is safe). */
    Py_ssize_t size;
    Py_ssize_t i;
    PyObject **src, **dest;
    PyTupleObject *np;
    if (Py_SIZE(a) == 0 && PyTuple_CheckExact(bb)) {
        return Py_NewRef(bb);
    }
    if (!PyTuple_Check(bb)) {
        PyErr_Format(PyExc_TypeError,
                "can only concatenate tuple (not \"%.200s\") to tuple",
                Py_TYPE(bb)->tp_name);
        return NULL;
    }
    PyTupleObject *b = (PyTupleObject *)bb;

    if (Py_SIZE(b) == 0 && PyTuple_CheckExact(a)) {
        return Py_NewRef(a);
    }
    /* Tuples this large cannot exist, so the addition cannot overflow. */
    assert((size_t)Py_SIZE(a) + (size_t)Py_SIZE(b) < PY_SSIZE_T_MAX);
    size = Py_SIZE(a) + Py_SIZE(b);
    if (size == 0) {
        return tuple_get_empty();
    }

    np = tuple_alloc(size);
    if (np == NULL) {
        return NULL;
    }
    /* Copy a's items, then b's, taking a new reference to each. */
    src = a->ob_item;
    dest = np->ob_item;
    for (i = 0; i < Py_SIZE(a); i++) {
        PyObject *v = src[i];
        dest[i] = Py_NewRef(v);
    }
    src = b->ob_item;
    dest = np->ob_item + Py_SIZE(a);
    for (i = 0; i < Py_SIZE(b); i++) {
        PyObject *v = src[i];
        dest[i] = Py_NewRef(v);
    }
    _PyObject_GC_TRACK(np);
    return (PyObject *)np;
}
485
static PyObject *
tuplerepeat(PyTupleObject *a, Py_ssize_t n)
{
    /* sq_repeat: return a * n as a new tuple. */
    const Py_ssize_t input_size = Py_SIZE(a);
    if (input_size == 0 || n == 1) {
        if (PyTuple_CheckExact(a)) {
            /* Since tuples are immutable, we can return a shared
               copy in this case */
            return Py_NewRef(a);
        }
    }
    if (input_size == 0 || n <= 0) {
        return tuple_get_empty();
    }
    assert(n>0);

    /* Guard the multiplication below against overflow. */
    if (input_size > PY_SSIZE_T_MAX / n)
        return PyErr_NoMemory();
    Py_ssize_t output_size = input_size * n;

    PyTupleObject *np = tuple_alloc(output_size);
    if (np == NULL)
        return NULL;

    PyObject **dest = np->ob_item;
    if (input_size == 1) {
        /* Single-item fast path: bump the item's refcount by n once
           and fan the pointer out across the whole buffer. */
        PyObject *elem = a->ob_item[0];
        _Py_RefcntAdd(elem, n);
        PyObject **dest_end = dest + output_size;
        while (dest < dest_end) {
            *dest++ = elem;
        }
    }
    else {
        /* Write one copy of the items (adding n to each refcount), then
           let _Py_RefcntAdd/_Py_memory_repeat replicate that prefix
           across the rest of the buffer. */
        PyObject **src = a->ob_item;
        PyObject **src_end = src + input_size;
        while (src < src_end) {
            _Py_RefcntAdd(*src, n);
            *dest++ = *src++;
        }

        _Py_memory_repeat((char *)np->ob_item, sizeof(PyObject *)*output_size,
                          sizeof(PyObject *)*input_size);
    }
    _PyObject_GC_TRACK(np);
    return (PyObject *) np;
}
533
534 /*[clinic input]
535 tuple.index
536
537 value: object
538 start: slice_index(accept={int}) = 0
539 stop: slice_index(accept={int}, c_default="PY_SSIZE_T_MAX") = sys.maxsize
540 /
541
542 Return first index of value.
543
544 Raises ValueError if the value is not present.
545 [clinic start generated code]*/
546
547 static PyObject *
548 tuple_index_impl(PyTupleObject *self, PyObject *value, Py_ssize_t start,
549 Py_ssize_t stop)
550 /*[clinic end generated code: output=07b6f9f3cb5c33eb input=fb39e9874a21fe3f]*/
551 {
552 Py_ssize_t i;
553
554 if (start < 0) {
555 start += Py_SIZE(self);
556 if (start < 0)
557 start = 0;
558 }
559 if (stop < 0) {
560 stop += Py_SIZE(self);
561 }
562 else if (stop > Py_SIZE(self)) {
563 stop = Py_SIZE(self);
564 }
565 for (i = start; i < stop; i++) {
566 int cmp = PyObject_RichCompareBool(self->ob_item[i], value, Py_EQ);
567 if (cmp > 0)
568 return PyLong_FromSsize_t(i);
569 else if (cmp < 0)
570 return NULL;
571 }
572 PyErr_SetString(PyExc_ValueError, "tuple.index(x): x not in tuple");
573 return NULL;
574 }
575
576 /*[clinic input]
577 tuple.count
578
579 value: object
580 /
581
582 Return number of occurrences of value.
583 [clinic start generated code]*/
584
585 static PyObject *
586 tuple_count(PyTupleObject *self, PyObject *value)
587 /*[clinic end generated code: output=aa927affc5a97605 input=531721aff65bd772]*/
588 {
589 Py_ssize_t count = 0;
590 Py_ssize_t i;
591
592 for (i = 0; i < Py_SIZE(self); i++) {
593 int cmp = PyObject_RichCompareBool(self->ob_item[i], value, Py_EQ);
594 if (cmp > 0)
595 count++;
596 else if (cmp < 0)
597 return NULL;
598 }
599 return PyLong_FromSsize_t(count);
600 }
601
602 static int
603 tupletraverse(PyTupleObject *o, visitproc visit, void *arg)
604 {
605 Py_ssize_t i;
606
607 for (i = Py_SIZE(o); --i >= 0; )
608 Py_VISIT(o->ob_item[i]);
609 return 0;
610 }
611
static PyObject *
tuplerichcompare(PyObject *v, PyObject *w, int op)
{
    /* tp_richcompare: lexicographic comparison of two tuples — find the
       first differing pair, then decide from it (or from the lengths
       when one tuple is a prefix of the other). */
    PyTupleObject *vt, *wt;
    Py_ssize_t i;
    Py_ssize_t vlen, wlen;

    if (!PyTuple_Check(v) || !PyTuple_Check(w))
        Py_RETURN_NOTIMPLEMENTED;

    vt = (PyTupleObject *)v;
    wt = (PyTupleObject *)w;

    vlen = Py_SIZE(vt);
    wlen = Py_SIZE(wt);

    /* Note:  the corresponding code for lists has an "early out" test
     * here when op is EQ or NE and the lengths differ.  That pays there,
     * but Tim was unable to find any real code where EQ/NE tuple
     * compares don't have the same length, so testing for it here would
     * have cost without benefit.
     */

    /* Search for the first index where items are different.
     * Note that because tuples are immutable, it's safe to reuse
     * vlen and wlen across the comparison calls.
     */
    for (i = 0; i < vlen && i < wlen; i++) {
        int k = PyObject_RichCompareBool(vt->ob_item[i],
                                         wt->ob_item[i], Py_EQ);
        if (k < 0)
            return NULL;
        if (!k)
            break;
    }

    if (i >= vlen || i >= wlen) {
        /* No more items to compare -- compare sizes */
        Py_RETURN_RICHCOMPARE(vlen, wlen, op);
    }

    /* We have an item that differs -- shortcuts for EQ/NE */
    if (op == Py_EQ) {
        Py_RETURN_FALSE;
    }
    if (op == Py_NE) {
        Py_RETURN_TRUE;
    }

    /* Compare the final item again using the proper operator */
    return PyObject_RichCompare(vt->ob_item[i], wt->ob_item[i], op);
}
664
665 static PyObject *
666 tuple_subtype_new(PyTypeObject *type, PyObject *iterable);
667
668 /*[clinic input]
669 @classmethod
670 tuple.__new__ as tuple_new
671 iterable: object(c_default="NULL") = ()
672 /
673
674 Built-in immutable sequence.
675
676 If no argument is given, the constructor returns an empty tuple.
677 If iterable is specified the tuple is initialized from iterable's items.
678
679 If the argument is a tuple, the return value is the same object.
680 [clinic start generated code]*/
681
682 static PyObject *
683 tuple_new_impl(PyTypeObject *type, PyObject *iterable)
684 /*[clinic end generated code: output=4546d9f0d469bce7 input=86963bcde633b5a2]*/
685 {
686 if (type != &PyTuple_Type)
687 return tuple_subtype_new(type, iterable);
688
689 if (iterable == NULL) {
690 return tuple_get_empty();
691 }
692 else {
693 return PySequence_Tuple(iterable);
694 }
695 }
696
697 static PyObject *
698 tuple_vectorcall(PyObject *type, PyObject * const*args,
699 size_t nargsf, PyObject *kwnames)
700 {
701 if (!_PyArg_NoKwnames("tuple", kwnames)) {
702 return NULL;
703 }
704
705 Py_ssize_t nargs = PyVectorcall_NARGS(nargsf);
706 if (!_PyArg_CheckPositional("tuple", nargs, 0, 1)) {
707 return NULL;
708 }
709
710 if (nargs) {
711 return tuple_new_impl(_PyType_CAST(type), args[0]);
712 }
713 else {
714 return tuple_get_empty();
715 }
716 }
717
static PyObject *
tuple_subtype_new(PyTypeObject *type, PyObject *iterable)
{
    /* Construct an instance of a tuple subclass: build a plain tuple
       from iterable first, then copy its items into an instance
       allocated with the subclass's tp_alloc. */
    PyObject *tmp, *newobj, *item;
    Py_ssize_t i, n;

    assert(PyType_IsSubtype(type, &PyTuple_Type));
    // tuple subclasses must implement the GC protocol
    assert(_PyType_IS_GC(type));

    tmp = tuple_new_impl(&PyTuple_Type, iterable);
    if (tmp == NULL)
        return NULL;
    assert(PyTuple_Check(tmp));
    /* This may allocate an empty tuple that is not the global one. */
    newobj = type->tp_alloc(type, n = PyTuple_GET_SIZE(tmp));
    if (newobj == NULL) {
        Py_DECREF(tmp);
        return NULL;
    }
    for (i = 0; i < n; i++) {
        item = PyTuple_GET_ITEM(tmp, i);
        PyTuple_SET_ITEM(newobj, i, Py_NewRef(item));
    }
    Py_DECREF(tmp);

    // Don't track if a subclass tp_alloc is PyType_GenericAlloc()
    if (!_PyObject_GC_IS_TRACKED(newobj)) {
        _PyObject_GC_TRACK(newobj);
    }
    return newobj;
}
750
/* Sequence protocol table: len(), +, *, indexing and `in`.
   Item assignment slots are 0 because tuples are immutable. */
static PySequenceMethods tuple_as_sequence = {
    (lenfunc)tuplelength,                       /* sq_length */
    (binaryfunc)tupleconcat,                    /* sq_concat */
    (ssizeargfunc)tuplerepeat,                  /* sq_repeat */
    (ssizeargfunc)tupleitem,                    /* sq_item */
    0,                                          /* sq_slice */
    0,                                          /* sq_ass_item */
    0,                                          /* sq_ass_slice */
    (objobjproc)tuplecontains,                  /* sq_contains */
};
761
static PyObject*
tuplesubscript(PyTupleObject* self, PyObject* item)
{
    /* mp_subscript: handle both t[i] (any object supporting __index__,
       with negative-index normalization) and t[i:j:k] slicing. */
    if (_PyIndex_Check(item)) {
        Py_ssize_t i = PyNumber_AsSsize_t(item, PyExc_IndexError);
        if (i == -1 && PyErr_Occurred())
            return NULL;
        if (i < 0)
            i += PyTuple_GET_SIZE(self);
        return tupleitem(self, i);
    }
    else if (PySlice_Check(item)) {
        Py_ssize_t start, stop, step, slicelength, i;
        size_t cur;
        PyObject* it;
        PyObject **src, **dest;

        if (PySlice_Unpack(item, &start, &stop, &step) < 0) {
            return NULL;
        }
        slicelength = PySlice_AdjustIndices(PyTuple_GET_SIZE(self), &start,
                                            &stop, step);

        if (slicelength <= 0) {
            return tuple_get_empty();
        }
        else if (start == 0 && step == 1 &&
                 slicelength == PyTuple_GET_SIZE(self) &&
                 PyTuple_CheckExact(self)) {
            /* Full slice of an exact tuple: share the object itself. */
            return Py_NewRef(self);
        }
        else {
            PyTupleObject* result = tuple_alloc(slicelength);
            if (!result) return NULL;

            /* Copy every slicelength-th item, taking new references. */
            src = self->ob_item;
            dest = result->ob_item;
            for (cur = start, i = 0; i < slicelength;
                 cur += step, i++) {
                it = Py_NewRef(src[cur]);
                dest[i] = it;
            }

            _PyObject_GC_TRACK(result);
            return (PyObject *)result;
        }
    }
    else {
        PyErr_Format(PyExc_TypeError,
                     "tuple indices must be integers or slices, not %.200s",
                     Py_TYPE(item)->tp_name);
        return NULL;
    }
}
816
817 /*[clinic input]
818 tuple.__getnewargs__
819 [clinic start generated code]*/
820
821 static PyObject *
822 tuple___getnewargs___impl(PyTupleObject *self)
823 /*[clinic end generated code: output=25e06e3ee56027e2 input=1aeb4b286a21639a]*/
824 {
825 return Py_BuildValue("(N)", tupleslice(self, 0, Py_SIZE(self)));
826 }
827
/* Method table: clinic-generated entries plus PEP 585 generic-alias
   support for tuple[...] syntax. */
static PyMethodDef tuple_methods[] = {
    TUPLE___GETNEWARGS___METHODDEF
    TUPLE_INDEX_METHODDEF
    TUPLE_COUNT_METHODDEF
    {"__class_getitem__", Py_GenericAlias, METH_O|METH_CLASS, PyDoc_STR("See PEP 585")},
    {NULL, NULL}                        /* sentinel */
};
835
/* Mapping protocol: len() and the subscript operator (int or slice).
   mp_ass_subscript is 0 because tuples are immutable. */
static PyMappingMethods tuple_as_mapping = {
    (lenfunc)tuplelength,               /* mp_length */
    (binaryfunc)tuplesubscript,         /* mp_subscript */
    0                                   /* mp_ass_subscript */
};
841
842 static PyObject *tuple_iter(PyObject *seq);
843
/* The tuple type object.  tp_basicsize subtracts one PyObject* because
   the items are allocated as the variable-size part (tp_itemsize). */
PyTypeObject PyTuple_Type = {
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
    "tuple",
    sizeof(PyTupleObject) - sizeof(PyObject *),
    sizeof(PyObject *),
    (destructor)tupledealloc,                   /* tp_dealloc */
    0,                                          /* tp_vectorcall_offset */
    0,                                          /* tp_getattr */
    0,                                          /* tp_setattr */
    0,                                          /* tp_as_async */
    (reprfunc)tuplerepr,                        /* tp_repr */
    0,                                          /* tp_as_number */
    &tuple_as_sequence,                         /* tp_as_sequence */
    &tuple_as_mapping,                          /* tp_as_mapping */
    (hashfunc)tuplehash,                        /* tp_hash */
    0,                                          /* tp_call */
    0,                                          /* tp_str */
    PyObject_GenericGetAttr,                    /* tp_getattro */
    0,                                          /* tp_setattro */
    0,                                          /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
        Py_TPFLAGS_BASETYPE | Py_TPFLAGS_TUPLE_SUBCLASS |
        _Py_TPFLAGS_MATCH_SELF | Py_TPFLAGS_SEQUENCE,  /* tp_flags */
    tuple_new__doc__,                           /* tp_doc */
    (traverseproc)tupletraverse,                /* tp_traverse */
    0,                                          /* tp_clear */
    tuplerichcompare,                           /* tp_richcompare */
    0,                                          /* tp_weaklistoffset */
    tuple_iter,                                 /* tp_iter */
    0,                                          /* tp_iternext */
    tuple_methods,                              /* tp_methods */
    0,                                          /* tp_members */
    0,                                          /* tp_getset */
    0,                                          /* tp_base */
    0,                                          /* tp_dict */
    0,                                          /* tp_descr_get */
    0,                                          /* tp_descr_set */
    0,                                          /* tp_dictoffset */
    0,                                          /* tp_init */
    0,                                          /* tp_alloc */
    tuple_new,                                  /* tp_new */
    PyObject_GC_Del,                            /* tp_free */
    .tp_vectorcall = tuple_vectorcall,
};
888
889 /* The following function breaks the notion that tuples are immutable:
890 it changes the size of a tuple. We get away with this only if there
891 is only one module referencing the object. You can also think of it
892 as creating a new tuple object and destroying the old one, only more
893 efficiently. In any case, don't use this if the tuple may already be
894 known to some other part of the code. */
895
int
_PyTuple_Resize(PyObject **pv, Py_ssize_t newsize)
{
    /* Resize *pv in place to newsize items.  Only legal when *pv is an
       exact tuple that is empty or holds a single reference (see the
       comment above).  On failure, *pv is set to NULL, the original
       reference is released, and -1 is returned. */
    PyTupleObject *v;
    PyTupleObject *sv;
    Py_ssize_t i;
    Py_ssize_t oldsize;

    v = (PyTupleObject *) *pv;
    if (v == NULL || !Py_IS_TYPE(v, &PyTuple_Type) ||
        (Py_SIZE(v) != 0 && Py_REFCNT(v) != 1)) {
        *pv = 0;
        Py_XDECREF(v);
        PyErr_BadInternalCall();
        return -1;
    }

    oldsize = Py_SIZE(v);
    if (oldsize == newsize) {
        return 0;
    }
    if (newsize == 0) {
        /* Shrinking to zero: swap in the shared empty tuple. */
        Py_DECREF(v);
        *pv = tuple_get_empty();
        return 0;
    }
    if (oldsize == 0) {
#ifdef Py_DEBUG
        assert(v == &_Py_SINGLETON(tuple_empty));
#endif
        /* The empty tuple is statically allocated so we never
           resize it in-place. */
        Py_DECREF(v);
        *pv = PyTuple_New(newsize);
        return *pv == NULL ? -1 : 0;
    }

    /* Untrack (and, under Py_TRACE_REFS, unregister) the object while
       PyObject_GC_Resize() may move it in memory. */
    if (_PyObject_GC_IS_TRACKED(v)) {
        _PyObject_GC_UNTRACK(v);
    }
#ifdef Py_TRACE_REFS
    _Py_ForgetReference((PyObject *) v);
#endif
    /* DECREF items deleted by shrinkage */
    for (i = newsize; i < oldsize; i++) {
        Py_CLEAR(v->ob_item[i]);
    }
    sv = PyObject_GC_Resize(PyTupleObject, v, newsize);
    if (sv == NULL) {
        *pv = NULL;
#ifdef Py_REF_DEBUG
        _Py_DecRefTotal(_PyInterpreterState_GET());
#endif
        PyObject_GC_Del(v);
        return -1;
    }
    _Py_NewReferenceNoTotal((PyObject *) sv);
    /* Zero out items added by growing */
    if (newsize > oldsize)
        memset(&sv->ob_item[oldsize], 0,
               sizeof(*sv->ob_item) * (newsize - oldsize));
    *pv = (PyObject *) sv;
    _PyObject_GC_TRACK(sv);
    return 0;
}
961
962
963 static void maybe_freelist_clear(PyInterpreterState *, int);
964
void
_PyTuple_Fini(PyInterpreterState *interp)
{
    /* Interpreter finalization: empty the freelists and disable them
       (numfree entries are set to -1; see FREELIST_FINALIZED). */
    maybe_freelist_clear(interp, 1);
}
970
void
_PyTuple_ClearFreeList(PyInterpreterState *interp)
{
    /* Release all cached tuples but keep the freelists usable afterwards. */
    maybe_freelist_clear(interp, 0);
}
976
977 /*********************** Tuple Iterator **************************/
978
979
static void
tupleiter_dealloc(_PyTupleIterObject *it)
{
    /* tp_dealloc: untrack from the GC first, then drop the (possibly
       already cleared) reference to the underlying tuple. */
    _PyObject_GC_UNTRACK(it);
    Py_XDECREF(it->it_seq);
    PyObject_GC_Del(it);
}
987
static int
tupleiter_traverse(_PyTupleIterObject *it, visitproc visit, void *arg)
{
    /* tp_traverse: the underlying tuple is the only reference held. */
    Py_VISIT(it->it_seq);
    return 0;
}
994
995 static PyObject *
996 tupleiter_next(_PyTupleIterObject *it)
997 {
998 PyTupleObject *seq;
999 PyObject *item;
1000
1001 assert(it != NULL);
1002 seq = it->it_seq;
1003 if (seq == NULL)
1004 return NULL;
1005 assert(PyTuple_Check(seq));
1006
1007 if (it->it_index < PyTuple_GET_SIZE(seq)) {
1008 item = PyTuple_GET_ITEM(seq, it->it_index);
1009 ++it->it_index;
1010 return Py_NewRef(item);
1011 }
1012
1013 it->it_seq = NULL;
1014 Py_DECREF(seq);
1015 return NULL;
1016 }
1017
1018 static PyObject *
1019 tupleiter_len(_PyTupleIterObject *it, PyObject *Py_UNUSED(ignored))
1020 {
1021 Py_ssize_t len = 0;
1022 if (it->it_seq)
1023 len = PyTuple_GET_SIZE(it->it_seq) - it->it_index;
1024 return PyLong_FromSsize_t(len);
1025 }
1026
/* Docstring for the iterator's __length_hint__ method. */
PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");
1028
static PyObject *
tupleiter_reduce(_PyTupleIterObject *it, PyObject *Py_UNUSED(ignored))
{
    /* __reduce__: pickle as (iter, (seq,), index), or as (iter, ((),))
       once the iterator is exhausted. */
    PyObject *iter = _PyEval_GetBuiltin(&_Py_ID(iter));

    /* _PyEval_GetBuiltin can invoke arbitrary code,
     * call must be before access of iterator pointers.
     * see issue #101765 */

    if (it->it_seq)
        return Py_BuildValue("N(O)n", iter, it->it_seq, it->it_index);
    else
        return Py_BuildValue("N(())", iter);
}
1043
1044 static PyObject *
1045 tupleiter_setstate(_PyTupleIterObject *it, PyObject *state)
1046 {
1047 Py_ssize_t index = PyLong_AsSsize_t(state);
1048 if (index == -1 && PyErr_Occurred())
1049 return NULL;
1050 if (it->it_seq != NULL) {
1051 if (index < 0)
1052 index = 0;
1053 else if (index > PyTuple_GET_SIZE(it->it_seq))
1054 index = PyTuple_GET_SIZE(it->it_seq); /* exhausted iterator */
1055 it->it_index = index;
1056 }
1057 Py_RETURN_NONE;
1058 }
1059
/* Docstrings for the pickling helpers below. */
PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");

/* Iterator method table: length hint plus pickle support. */
static PyMethodDef tupleiter_methods[] = {
    {"__length_hint__", (PyCFunction)tupleiter_len, METH_NOARGS, length_hint_doc},
    {"__reduce__", (PyCFunction)tupleiter_reduce, METH_NOARGS, reduce_doc},
    {"__setstate__", (PyCFunction)tupleiter_setstate, METH_O, setstate_doc},
    {NULL, NULL}                        /* sentinel */
};
1069
/* Type object for the tuple iterator returned by tuple_iter(). */
PyTypeObject PyTupleIter_Type = {
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
    "tuple_iterator",                           /* tp_name */
    sizeof(_PyTupleIterObject),                 /* tp_basicsize */
    0,                                          /* tp_itemsize */
    /* methods */
    (destructor)tupleiter_dealloc,              /* tp_dealloc */
    0,                                          /* tp_vectorcall_offset */
    0,                                          /* tp_getattr */
    0,                                          /* tp_setattr */
    0,                                          /* tp_as_async */
    0,                                          /* tp_repr */
    0,                                          /* tp_as_number */
    0,                                          /* tp_as_sequence */
    0,                                          /* tp_as_mapping */
    0,                                          /* tp_hash */
    0,                                          /* tp_call */
    0,                                          /* tp_str */
    PyObject_GenericGetAttr,                    /* tp_getattro */
    0,                                          /* tp_setattro */
    0,                                          /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,    /* tp_flags */
    0,                                          /* tp_doc */
    (traverseproc)tupleiter_traverse,           /* tp_traverse */
    0,                                          /* tp_clear */
    0,                                          /* tp_richcompare */
    0,                                          /* tp_weaklistoffset */
    PyObject_SelfIter,                          /* tp_iter */
    (iternextfunc)tupleiter_next,               /* tp_iternext */
    tupleiter_methods,                          /* tp_methods */
    0,                                          /* tp_members */
};
1102
1103 static PyObject *
1104 tuple_iter(PyObject *seq)
1105 {
1106 _PyTupleIterObject *it;
1107
1108 if (!PyTuple_Check(seq)) {
1109 PyErr_BadInternalCall();
1110 return NULL;
1111 }
1112 it = PyObject_GC_New(_PyTupleIterObject, &PyTupleIter_Type);
1113 if (it == NULL)
1114 return NULL;
1115 it->it_index = 0;
1116 it->it_seq = (PyTupleObject *)Py_NewRef(seq);
1117 _PyObject_GC_TRACK(it);
1118 return (PyObject *)it;
1119 }
1120
1121
1122 /*************
1123 * freelists *
1124 *************/
1125
1126 #define STATE (interp->tuple)
1127 #define FREELIST_FINALIZED (STATE.numfree[0] < 0)
1128
static inline PyTupleObject *
maybe_freelist_pop(Py_ssize_t size)
{
    /* Try to reuse a previously freed tuple of exactly `size` items.
       Returns NULL when freelists are compiled out, size is 0 or too
       large, or the matching freelist is empty. */
#if PyTuple_NFREELISTS > 0
    PyInterpreterState *interp = _PyInterpreterState_GET();
#ifdef Py_DEBUG
    /* maybe_freelist_pop() must not be called after maybe_freelist_fini(). */
    assert(!FREELIST_FINALIZED);
#endif
    if (size == 0) {
        return NULL;
    }
    assert(size > 0);
    if (size < PyTuple_MAXSAVESIZE) {
        Py_ssize_t index = size - 1;
        PyTupleObject *op = STATE.free_list[index];
        if (op != NULL) {
            /* op is the head of a linked list, with the first item
               pointing to the next node.  Here we pop off the old head. */
            STATE.free_list[index] = (PyTupleObject *) op->ob_item[0];
            STATE.numfree[index]--;
            /* Inlined _PyObject_InitVar() without _PyType_HasFeature() test */
#ifdef Py_TRACE_REFS
            /* maybe_freelist_push() ensures these were already set. */
            // XXX Can we drop these?  See commit 68055ce6fe01 (GvR, Dec 1998).
            Py_SET_SIZE(op, size);
            Py_SET_TYPE(op, &PyTuple_Type);
#endif
            _Py_NewReference((PyObject *)op);
            /* END inlined _PyObject_InitVar() */
            OBJECT_STAT_INC(from_freelist);
            return op;
        }
    }
#endif
    return NULL;
}
1166
static inline int
maybe_freelist_push(PyTupleObject *op)
{
    /* Try to cache op on the freelist for its size.  Returns 1 when the
       tuple was cached (caller must NOT free it), 0 when the caller
       still owns the memory (freelists compiled out, empty tuple,
       oversized, freelist full, or op is a subclass instance). */
#if PyTuple_NFREELISTS > 0
    PyInterpreterState *interp = _PyInterpreterState_GET();
#ifdef Py_DEBUG
    /* maybe_freelist_push() must not be called after maybe_freelist_fini(). */
    assert(!FREELIST_FINALIZED);
#endif
    if (Py_SIZE(op) == 0) {
        return 0;
    }
    Py_ssize_t index = Py_SIZE(op) - 1;
    if (index < PyTuple_NFREELISTS
        && STATE.numfree[index] < PyTuple_MAXFREELIST
        && Py_IS_TYPE(op, &PyTuple_Type))
    {
        /* op is the head of a linked list, with the first item
           pointing to the next node.  Here we set op as the new head. */
        op->ob_item[0] = (PyObject *) STATE.free_list[index];
        STATE.free_list[index] = op;
        STATE.numfree[index]++;
        OBJECT_STAT_INC(to_freelist);
        return 1;
    }
#endif
    return 0;
}
1195
static void
maybe_freelist_clear(PyInterpreterState *interp, int fini)
{
    /* Release every tuple cached in the freelists.  With fini != 0 the
       lists are additionally disabled by setting numfree to -1 (checked
       by FREELIST_FINALIZED in the debug asserts above). */
#if PyTuple_NFREELISTS > 0
    for (Py_ssize_t i = 0; i < PyTuple_NFREELISTS; i++) {
        PyTupleObject *p = STATE.free_list[i];
        STATE.free_list[i] = NULL;
        STATE.numfree[i] = fini ? -1 : 0;
        /* Walk the linked list threaded through ob_item[0]. */
        while (p) {
            PyTupleObject *q = p;
            p = (PyTupleObject *)(p->ob_item[0]);
            PyObject_GC_Del(q);
        }
    }
#endif
}
1212
/* Print summary info about the state of the optimized allocator */
void
_PyTuple_DebugMallocStats(FILE *out)
{
#if PyTuple_NFREELISTS > 0
    PyInterpreterState *interp = _PyInterpreterState_GET();
    for (int idx = 0; idx < PyTuple_NFREELISTS; idx++) {
        int tuple_len = idx + 1;
        char label[128];
        PyOS_snprintf(label, sizeof(label),
                      "free %d-sized PyTupleObject", tuple_len);
        _PyDebugAllocatorStats(out, label, STATE.numfree[idx],
                               _PyObject_VAR_SIZE(&PyTuple_Type, tuple_len));
    }
#endif
}
1229
1230 #undef STATE
1231 #undef FREELIST_FINALIZED