1 2/* Tuple object implementation */ 3 4#include "Python.h" 5#include "pycore_abstract.h" // _PyIndex_Check() 6#include "pycore_gc.h" // _PyObject_GC_IS_TRACKED() 7#include "pycore_initconfig.h" // _PyStatus_OK() 8#include "pycore_object.h" // _PyObject_GC_TRACK(), _Py_FatalRefcountError() 9 10/*[clinic input] 11class tuple "PyTupleObject *" "&PyTuple_Type" 12[clinic start generated code]*/ 13/*[clinic end generated code: output=da39a3ee5e6b4b0d input=f051ba3cfdf9a189]*/ 14 15#include "clinic/tupleobject.c.h" 16 17 18static inline PyTupleObject * maybe_freelist_pop(Py_ssize_t); 19static inline int maybe_freelist_push(PyTupleObject *); 20 21 22/* Allocate an uninitialized tuple object. Before making it public, following 23 steps must be done: 24 25 - Initialize its items. 26 - Call _PyObject_GC_TRACK() on it. 27 28 Because the empty tuple is always reused and it's already tracked by GC, 29 this function must not be called with size == 0 (unless from PyTuple_New() 30 which wraps this function). 31*/ 32static PyTupleObject * 33tuple_alloc(Py_ssize_t size) 34{ 35 if (size < 0) { 36 PyErr_BadInternalCall(); 37 return NULL; 38 } 39#ifdef Py_DEBUG 40 assert(size != 0); // The empty tuple is statically allocated. 41#endif 42 43 PyTupleObject *op = maybe_freelist_pop(size); 44 if (op == NULL) { 45 /* Check for overflow */ 46 if ((size_t)size > ((size_t)PY_SSIZE_T_MAX - (sizeof(PyTupleObject) - 47 sizeof(PyObject *))) / sizeof(PyObject *)) { 48 return (PyTupleObject *)PyErr_NoMemory(); 49 } 50 op = PyObject_GC_NewVar(PyTupleObject, &PyTuple_Type, size); 51 if (op == NULL) 52 return NULL; 53 } 54 return op; 55} 56 57// The empty tuple singleton is not tracked by the GC. 58// It does not contain any Python object. 59// Note that tuple subclasses have their own empty instances. 

/* Return a new strong reference to the shared empty tuple singleton. */
static inline PyObject *
tuple_get_empty(void)
{
    Py_INCREF(&_Py_SINGLETON(tuple_empty));
    return (PyObject *)&_Py_SINGLETON(tuple_empty);
}

PyObject *
PyTuple_New(Py_ssize_t size)
{
    PyTupleObject *op;
    if (size == 0) {
        return tuple_get_empty();
    }
    op = tuple_alloc(size);
    if (op == NULL) {
        return NULL;
    }
    /* Items start out NULL; the caller must fill them in before the tuple
       is used (tupledealloc tolerates NULL slots via Py_XDECREF). */
    for (Py_ssize_t i = 0; i < size; i++) {
        op->ob_item[i] = NULL;
    }
    _PyObject_GC_TRACK(op);
    return (PyObject *) op;
}

Py_ssize_t
PyTuple_Size(PyObject *op)
{
    if (!PyTuple_Check(op)) {
        PyErr_BadInternalCall();
        return -1;
    }
    else
        return Py_SIZE(op);
}

PyObject *
PyTuple_GetItem(PyObject *op, Py_ssize_t i)
{
    if (!PyTuple_Check(op)) {
        PyErr_BadInternalCall();
        return NULL;
    }
    if (i < 0 || i >= Py_SIZE(op)) {
        PyErr_SetString(PyExc_IndexError, "tuple index out of range");
        return NULL;
    }
    /* Borrowed reference, per the documented PyTuple_GetItem() contract. */
    return ((PyTupleObject *)op) -> ob_item[i];
}

int
PyTuple_SetItem(PyObject *op, Py_ssize_t i, PyObject *newitem)
{
    PyObject **p;
    /* Steals a reference to newitem, even on error.  Only legal while the
       tuple has a single reference, i.e. is still being constructed. */
    if (!PyTuple_Check(op) || Py_REFCNT(op) != 1) {
        Py_XDECREF(newitem);
        PyErr_BadInternalCall();
        return -1;
    }
    if (i < 0 || i >= Py_SIZE(op)) {
        Py_XDECREF(newitem);
        PyErr_SetString(PyExc_IndexError,
                        "tuple assignment index out of range");
        return -1;
    }
    p = ((PyTupleObject *)op) -> ob_item + i;
    /* Py_XSETREF decrefs the old slot value after storing the new one. */
    Py_XSETREF(*p, newitem);
    return 0;
}

/* Untrack a fully-constructed tuple from the GC if none of its items can
   participate in a reference cycle. */
void
_PyTuple_MaybeUntrack(PyObject *op)
{
    PyTupleObject *t;
    Py_ssize_t i, n;

    if (!PyTuple_CheckExact(op) || !_PyObject_GC_IS_TRACKED(op))
        return;
    t = (PyTupleObject *) op;
    n = Py_SIZE(t);
    for (i = 0; i < n; i++) {
        PyObject *elt = PyTuple_GET_ITEM(t, i);
        /* Tuple with NULL elements aren't
           fully constructed, don't untrack
           them yet. */
        if (!elt ||
            _PyObject_GC_MAY_BE_TRACKED(elt))
            return;
    }
    _PyObject_GC_UNTRACK(op);
}

PyObject *
PyTuple_Pack(Py_ssize_t n, ...)
{
    Py_ssize_t i;
    PyObject *o;
    PyObject **items;
    va_list vargs;

    if (n == 0) {
        return tuple_get_empty();
    }

    va_start(vargs, n);
    PyTupleObject *result = tuple_alloc(n);
    if (result == NULL) {
        va_end(vargs);
        return NULL;
    }
    items = result->ob_item;
    for (i = 0; i < n; i++) {
        o = va_arg(vargs, PyObject *);
        Py_INCREF(o);
        items[i] = o;
    }
    va_end(vargs);
    _PyObject_GC_TRACK(result);
    return (PyObject *)result;
}


/* Methods */

static void
tupledealloc(PyTupleObject *op)
{
    if (Py_SIZE(op) == 0) {
        /* The empty tuple is statically allocated. */
        if (op == &_Py_SINGLETON(tuple_empty)) {
#ifdef Py_DEBUG
            _Py_FatalRefcountError("deallocating the empty tuple singleton");
#else
            return;
#endif
        }
#ifdef Py_DEBUG
        /* tuple subclasses have their own empty instances. */
        assert(!PyTuple_CheckExact(op));
#endif
    }

    PyObject_GC_UnTrack(op);
    /* Trashcan protocol guards against stack overflow from deeply nested
       tuple deallocation chains. */
    Py_TRASHCAN_BEGIN(op, tupledealloc)

    Py_ssize_t i = Py_SIZE(op);
    while (--i >= 0) {
        Py_XDECREF(op->ob_item[i]);
    }
    // This will abort on the empty singleton (if there is one).
    if (!maybe_freelist_push(op)) {
        Py_TYPE(op)->tp_free((PyObject *)op);
    }

    Py_TRASHCAN_END
}

static PyObject *
tuplerepr(PyTupleObject *v)
{
    Py_ssize_t i, n;
    _PyUnicodeWriter writer;

    n = Py_SIZE(v);
    if (n == 0)
        return PyUnicode_FromString("()");

    /* While not mutable, it is still possible to end up with a cycle in a
       tuple through an object that stores itself within a tuple (and thus
       infinitely asks for the repr of itself). This should only be
       possible within a type. */
    i = Py_ReprEnter((PyObject *)v);
    if (i != 0) {
        return i > 0 ? PyUnicode_FromString("(...)") : NULL;
    }

    _PyUnicodeWriter_Init(&writer);
    writer.overallocate = 1;
    if (Py_SIZE(v) > 1) {
        /* "(" + "1" + ", 2" * (len - 1) + ")" */
        writer.min_length = 1 + 1 + (2 + 1) * (Py_SIZE(v) - 1) + 1;
    }
    else {
        /* "(1,)" */
        writer.min_length = 4;
    }

    if (_PyUnicodeWriter_WriteChar(&writer, '(') < 0)
        goto error;

    /* Do repr() on each element. */
    for (i = 0; i < n; ++i) {
        PyObject *s;

        if (i > 0) {
            if (_PyUnicodeWriter_WriteASCIIString(&writer, ", ", 2) < 0)
                goto error;
        }

        s = PyObject_Repr(v->ob_item[i]);
        if (s == NULL)
            goto error;

        if (_PyUnicodeWriter_WriteStr(&writer, s) < 0) {
            Py_DECREF(s);
            goto error;
        }
        Py_DECREF(s);
    }

    writer.overallocate = 0;
    if (n > 1) {
        if (_PyUnicodeWriter_WriteChar(&writer, ')') < 0)
            goto error;
    }
    else {
        /* Single-element tuple needs the trailing comma: "(1,)". */
        if (_PyUnicodeWriter_WriteASCIIString(&writer, ",)", 2) < 0)
            goto error;
    }

    Py_ReprLeave((PyObject *)v);
    return _PyUnicodeWriter_Finish(&writer);

error:
    _PyUnicodeWriter_Dealloc(&writer);
    Py_ReprLeave((PyObject *)v);
    return NULL;
}


/* Hash for tuples. This is a slightly simplified version of the xxHash
   non-cryptographic hash:
   - we do not use any parallellism, there is only 1 accumulator.
   - we drop the final mixing since this is just a permutation of the
     output space: it does not help against collisions.
   - at the end, we mangle the length with a single constant.
   For the xxHash specification, see
   https://github.com/Cyan4973/xxHash/blob/master/doc/xxhash_spec.md

   Below are the official constants from the xxHash specification. Optimizing
   compilers should emit a single "rotate" instruction for the
   _PyHASH_XXROTATE() expansion. If that doesn't happen for some important
   platform, the macro could be changed to expand to a platform-specific rotate
   spelling instead.
*/
#if SIZEOF_PY_UHASH_T > 4
#define _PyHASH_XXPRIME_1 ((Py_uhash_t)11400714785074694791ULL)
#define _PyHASH_XXPRIME_2 ((Py_uhash_t)14029467366897019727ULL)
#define _PyHASH_XXPRIME_5 ((Py_uhash_t)2870177450012600261ULL)
#define _PyHASH_XXROTATE(x) ((x << 31) | (x >> 33))  /* Rotate left 31 bits */
#else
#define _PyHASH_XXPRIME_1 ((Py_uhash_t)2654435761UL)
#define _PyHASH_XXPRIME_2 ((Py_uhash_t)2246822519UL)
#define _PyHASH_XXPRIME_5 ((Py_uhash_t)374761393UL)
#define _PyHASH_XXROTATE(x) ((x << 13) | (x >> 19))  /* Rotate left 13 bits */
#endif

/* Tests have shown that it's not worth to cache the hash value, see
   https://bugs.python.org/issue9685 */
static Py_hash_t
tuplehash(PyTupleObject *v)
{
    Py_ssize_t i, len = Py_SIZE(v);
    PyObject **item = v->ob_item;

    Py_uhash_t acc = _PyHASH_XXPRIME_5;
    for (i = 0; i < len; i++) {
        Py_uhash_t lane = PyObject_Hash(item[i]);
        /* PyObject_Hash() signals an error with -1. */
        if (lane == (Py_uhash_t)-1) {
            return -1;
        }
        acc += lane * _PyHASH_XXPRIME_2;
        acc = _PyHASH_XXROTATE(acc);
        acc *= _PyHASH_XXPRIME_1;
    }

    /* Add input length, mangled to keep the historical value of hash(()). */
    acc += len ^ (_PyHASH_XXPRIME_5 ^ 3527539UL);

    /* -1 is reserved as an error return value for hash functions. */
    if (acc == (Py_uhash_t)-1) {
        return 1546275796;
    }
    return acc;
}

static Py_ssize_t
tuplelength(PyTupleObject *a)
{
    return Py_SIZE(a);
}

static int
tuplecontains(PyTupleObject *a, PyObject *el)
{
    Py_ssize_t i;
    int cmp;

    /* Stop on the first match (cmp > 0) or comparison error (cmp < 0). */
    for (i = 0, cmp = 0 ; cmp == 0 && i < Py_SIZE(a); ++i)
        cmp = PyObject_RichCompareBool(PyTuple_GET_ITEM(a, i), el, Py_EQ);
    return cmp;
}

static PyObject *
tupleitem(PyTupleObject *a, Py_ssize_t i)
{
    if (i < 0 || i >= Py_SIZE(a)) {
        PyErr_SetString(PyExc_IndexError, "tuple index out of range");
        return NULL;
    }
    Py_INCREF(a->ob_item[i]);
    return a->ob_item[i];
}

/* Build a tuple from an array of objects, taking new references. */
PyObject *
_PyTuple_FromArray(PyObject *const *src, Py_ssize_t n)
{
    if (n == 0) {
        return tuple_get_empty();
    }

    PyTupleObject *tuple = tuple_alloc(n);
    if (tuple == NULL) {
        return NULL;
    }
    PyObject **dst = tuple->ob_item;
    for (Py_ssize_t i = 0; i < n; i++) {
        PyObject *item = src[i];
        Py_INCREF(item);
        dst[i] = item;
    }
    _PyObject_GC_TRACK(tuple);
    return (PyObject *)tuple;
}

/* Like _PyTuple_FromArray(), but steals the array's references — and
   consumes them even on allocation failure. */
PyObject *
_PyTuple_FromArraySteal(PyObject *const *src, Py_ssize_t n)
{
    if (n == 0) {
        return tuple_get_empty();
    }
    PyTupleObject *tuple = tuple_alloc(n);
    if (tuple == NULL) {
        for (Py_ssize_t i = 0; i < n; i++) {
            Py_DECREF(src[i]);
        }
        return NULL;
    }
    PyObject **dst = tuple->ob_item;
    for (Py_ssize_t i = 0; i < n; i++) {
        PyObject *item = src[i];
        dst[i] = item;
    }
    _PyObject_GC_TRACK(tuple);
    return (PyObject *)tuple;
}

static PyObject *
tupleslice(PyTupleObject *a, Py_ssize_t ilow,
           Py_ssize_t ihigh)
{
    /* Clamp the bounds rather than raising, matching slice semantics. */
    if (ilow < 0)
        ilow = 0;
    if (ihigh > Py_SIZE(a))
        ihigh = Py_SIZE(a);
    if (ihigh < ilow)
        ihigh = ilow;
    /* Full slice of an exact tuple: immutable, so just share it. */
    if (ilow == 0 && ihigh == Py_SIZE(a) && PyTuple_CheckExact(a)) {
        Py_INCREF(a);
        return (PyObject *)a;
    }
    return _PyTuple_FromArray(a->ob_item + ilow, ihigh - ilow);
}

PyObject *
PyTuple_GetSlice(PyObject *op, Py_ssize_t i, Py_ssize_t j)
{
    if (op == NULL || !PyTuple_Check(op)) {
        PyErr_BadInternalCall();
        return NULL;
    }
    return tupleslice((PyTupleObject *)op, i, j);
}

static PyObject *
tupleconcat(PyTupleObject *a, PyObject *bb)
{
    Py_ssize_t size;
    Py_ssize_t i;
    PyObject **src, **dest;
    PyTupleObject *np;
    /* Either operand being an empty exact tuple lets us share the other. */
    if (Py_SIZE(a) == 0 && PyTuple_CheckExact(bb)) {
        Py_INCREF(bb);
        return bb;
    }
    if (!PyTuple_Check(bb)) {
        PyErr_Format(PyExc_TypeError,
                     "can only concatenate tuple (not \"%.200s\") to tuple",
                     Py_TYPE(bb)->tp_name);
        return NULL;
    }
    PyTupleObject *b = (PyTupleObject *)bb;

    if (Py_SIZE(b) == 0 && PyTuple_CheckExact(a)) {
        Py_INCREF(a);
        return (PyObject *)a;
    }
    assert((size_t)Py_SIZE(a) + (size_t)Py_SIZE(b) < PY_SSIZE_T_MAX);
    size = Py_SIZE(a) + Py_SIZE(b);
    if (size == 0) {
        return tuple_get_empty();
    }

    np = tuple_alloc(size);
    if (np == NULL) {
        return NULL;
    }
    src = a->ob_item;
    dest = np->ob_item;
    for (i = 0; i < Py_SIZE(a); i++) {
        PyObject *v = src[i];
        Py_INCREF(v);
        dest[i] = v;
    }
    src = b->ob_item;
    dest = np->ob_item + Py_SIZE(a);
    for (i = 0; i < Py_SIZE(b); i++) {
        PyObject *v = src[i];
        Py_INCREF(v);
        dest[i] = v;
    }
    _PyObject_GC_TRACK(np);
    return (PyObject *)np;
}

static PyObject *
tuplerepeat(PyTupleObject *a, Py_ssize_t n)
{
    Py_ssize_t size;
    PyTupleObject *np;
    if (Py_SIZE(a) == 0 || n == 1) {
        if (PyTuple_CheckExact(a)) {
            /* Since tuples are immutable, we can return a shared
               copy in this case */
            Py_INCREF(a);
            return (PyObject *)a;
        }
    }
    if (Py_SIZE(a) == 0 || n <= 0) {
        return tuple_get_empty();
    }
    /* Guard the size multiplication below against overflow. */
    if (n > PY_SSIZE_T_MAX / Py_SIZE(a))
        return PyErr_NoMemory();
    size = Py_SIZE(a) * n;
    np = tuple_alloc(size);
    if (np == NULL)
        return NULL;
    PyObject **dest = np->ob_item;
    PyObject **dest_end = dest + size;
    if (Py_SIZE(a) == 1) {
        PyObject *elem = a->ob_item[0];
        /* Bump the refcount once by n instead of n separate INCREFs. */
        Py_SET_REFCNT(elem, Py_REFCNT(elem) + n);
#ifdef Py_REF_DEBUG
        _Py_RefTotal += n;
#endif
        while (dest < dest_end) {
            *dest++ = elem;
        }
    }
    else {
        PyObject **src = a->ob_item;
        PyObject **src_end = src + Py_SIZE(a);
        while (src < src_end) {
            Py_SET_REFCNT(*src, Py_REFCNT(*src) + n);
#ifdef Py_REF_DEBUG
            _Py_RefTotal += n;
#endif
            *dest++ = *src++;
        }
        // Now src chases after dest in the same buffer
        src = np->ob_item;
        while (dest < dest_end) {
            *dest++ = *src++;
        }
    }
    _PyObject_GC_TRACK(np);
    return (PyObject *) np;
}

/*[clinic input]
tuple.index

    value: object
    start: slice_index(accept={int}) = 0
    stop: slice_index(accept={int}, c_default="PY_SSIZE_T_MAX") = sys.maxsize
    /

Return first index of value.

Raises ValueError if the value is not present.
[clinic start generated code]*/

static PyObject *
tuple_index_impl(PyTupleObject *self, PyObject *value, Py_ssize_t start,
                 Py_ssize_t stop)
/*[clinic end generated code: output=07b6f9f3cb5c33eb input=fb39e9874a21fe3f]*/
{
    Py_ssize_t i;

    /* Negative start/stop count from the end, as in slicing. */
    if (start < 0) {
        start += Py_SIZE(self);
        if (start < 0)
            start = 0;
    }
    if (stop < 0) {
        stop += Py_SIZE(self);
    }
    else if (stop > Py_SIZE(self)) {
        stop = Py_SIZE(self);
    }
    for (i = start; i < stop; i++) {
        int cmp = PyObject_RichCompareBool(self->ob_item[i], value, Py_EQ);
        if (cmp > 0)
            return PyLong_FromSsize_t(i);
        else if (cmp < 0)
            return NULL;
    }
    PyErr_SetString(PyExc_ValueError, "tuple.index(x): x not in tuple");
    return NULL;
}

/*[clinic input]
tuple.count

     value: object
     /

Return number of occurrences of value.
[clinic start generated code]*/

static PyObject *
tuple_count(PyTupleObject *self, PyObject *value)
/*[clinic end generated code: output=aa927affc5a97605 input=531721aff65bd772]*/
{
    Py_ssize_t count = 0;
    Py_ssize_t i;

    for (i = 0; i < Py_SIZE(self); i++) {
        int cmp = PyObject_RichCompareBool(self->ob_item[i], value, Py_EQ);
        if (cmp > 0)
            count++;
        else if (cmp < 0)
            return NULL;
    }
    return PyLong_FromSsize_t(count);
}

static int
tupletraverse(PyTupleObject *o, visitproc visit, void *arg)
{
    Py_ssize_t i;

    for (i = Py_SIZE(o); --i >= 0; )
        Py_VISIT(o->ob_item[i]);
    return 0;
}

static PyObject *
tuplerichcompare(PyObject *v, PyObject *w, int op)
{
    PyTupleObject *vt, *wt;
    Py_ssize_t i;
    Py_ssize_t vlen, wlen;

    if (!PyTuple_Check(v) || !PyTuple_Check(w))
        Py_RETURN_NOTIMPLEMENTED;

    vt = (PyTupleObject *)v;
    wt = (PyTupleObject *)w;

    vlen = Py_SIZE(vt);
    wlen = Py_SIZE(wt);

    /* Note:  the corresponding code for lists has an "early out" test
     * here when op is EQ or NE and the lengths differ.  That pays there,
     * but Tim was unable to find any real code where EQ/NE tuple
     * compares don't have the same length, so testing for it here would
     * have cost without benefit.
     */

    /* Search for the first index where items are different.
     * Note that because tuples are immutable, it's safe to reuse
     * vlen and wlen across the comparison calls.
     */
    for (i = 0; i < vlen && i < wlen; i++) {
        int k = PyObject_RichCompareBool(vt->ob_item[i],
                                         wt->ob_item[i], Py_EQ);
        if (k < 0)
            return NULL;
        if (!k)
            break;
    }

    if (i >= vlen || i >= wlen) {
        /* No more items to compare -- compare sizes */
        Py_RETURN_RICHCOMPARE(vlen, wlen, op);
    }

    /* We have an item that differs -- shortcuts for EQ/NE */
    if (op == Py_EQ) {
        Py_RETURN_FALSE;
    }
    if (op == Py_NE) {
        Py_RETURN_TRUE;
    }

    /* Compare the final item again using the proper operator */
    return PyObject_RichCompare(vt->ob_item[i], wt->ob_item[i], op);
}

static PyObject *
tuple_subtype_new(PyTypeObject *type, PyObject *iterable);

/*[clinic input]
@classmethod
tuple.__new__ as tuple_new
    iterable: object(c_default="NULL") = ()
    /

Built-in immutable sequence.

If no argument is given, the constructor returns an empty tuple.
If iterable is specified the tuple is initialized from iterable's items.

If the argument is a tuple, the return value is the same object.
[clinic start generated code]*/

static PyObject *
tuple_new_impl(PyTypeObject *type, PyObject *iterable)
/*[clinic end generated code: output=4546d9f0d469bce7 input=86963bcde633b5a2]*/
{
    if (type != &PyTuple_Type)
        return tuple_subtype_new(type, iterable);

    if (iterable == NULL) {
        return tuple_get_empty();
    }
    else {
        return PySequence_Tuple(iterable);
    }
}

static PyObject *
tuple_vectorcall(PyObject *type, PyObject * const*args,
                 size_t nargsf, PyObject *kwnames)
{
    if (!_PyArg_NoKwnames("tuple", kwnames)) {
        return NULL;
    }

    Py_ssize_t nargs = PyVectorcall_NARGS(nargsf);
    if (!_PyArg_CheckPositional("tuple", nargs, 0, 1)) {
        return NULL;
    }

    if (nargs) {
        return tuple_new_impl(_PyType_CAST(type), args[0]);
    }
    else {
        return tuple_get_empty();
    }
}

static PyObject *
tuple_subtype_new(PyTypeObject *type, PyObject *iterable)
{
    PyObject *tmp, *newobj, *item;
    Py_ssize_t i, n;

    assert(PyType_IsSubtype(type, &PyTuple_Type));
    // tuple subclasses must implement the GC protocol
    assert(_PyType_IS_GC(type));

    /* First build an exact tuple, then copy its items into the subclass
       instance. */
    tmp = tuple_new_impl(&PyTuple_Type, iterable);
    if (tmp == NULL)
        return NULL;
    assert(PyTuple_Check(tmp));
    /* This may allocate an empty tuple that is not the global one. */
    newobj = type->tp_alloc(type, n = PyTuple_GET_SIZE(tmp));
    if (newobj == NULL) {
        Py_DECREF(tmp);
        return NULL;
    }
    for (i = 0; i < n; i++) {
        item = PyTuple_GET_ITEM(tmp, i);
        Py_INCREF(item);
        PyTuple_SET_ITEM(newobj, i, item);
    }
    Py_DECREF(tmp);

    // Don't track if a subclass tp_alloc is PyType_GenericAlloc()
    if (!_PyObject_GC_IS_TRACKED(newobj)) {
        _PyObject_GC_TRACK(newobj);
    }
    return newobj;
}

static PySequenceMethods tuple_as_sequence = {
    (lenfunc)tuplelength,                       /* sq_length */
    (binaryfunc)tupleconcat,                    /* sq_concat */
    (ssizeargfunc)tuplerepeat,                  /* sq_repeat */
    (ssizeargfunc)tupleitem,                    /* sq_item */
    0,                                          /* sq_slice */
    0,                                          /* sq_ass_item */
    0,                                          /* sq_ass_slice */
    (objobjproc)tuplecontains,                  /* sq_contains */
};

static PyObject*
tuplesubscript(PyTupleObject* self, PyObject* item)
{
    /* Handles both integer indices and slice objects (tuple[i], tuple[i:j]). */
    if (_PyIndex_Check(item)) {
        Py_ssize_t i = PyNumber_AsSsize_t(item, PyExc_IndexError);
        if (i == -1 && PyErr_Occurred())
            return NULL;
        if (i < 0)
            i += PyTuple_GET_SIZE(self);
        return tupleitem(self, i);
    }
    else if (PySlice_Check(item)) {
        Py_ssize_t start, stop, step, slicelength, i;
        size_t cur;
        PyObject* it;
        PyObject **src, **dest;

        if (PySlice_Unpack(item, &start, &stop, &step) < 0) {
            return NULL;
        }
        slicelength = PySlice_AdjustIndices(PyTuple_GET_SIZE(self), &start,
                                            &stop, step);

        if (slicelength <= 0) {
            return tuple_get_empty();
        }
        else if (start == 0 && step == 1 &&
                 slicelength == PyTuple_GET_SIZE(self) &&
                 PyTuple_CheckExact(self)) {
            /* Full forward slice of an exact tuple: share it. */
            Py_INCREF(self);
            return (PyObject *)self;
        }
        else {
            PyTupleObject* result = tuple_alloc(slicelength);
            if (!result) return NULL;

            src = self->ob_item;
            dest = result->ob_item;
            for (cur = start, i = 0; i < slicelength;
                 cur += step, i++) {
                it = src[cur];
                Py_INCREF(it);
                dest[i] = it;
            }

            _PyObject_GC_TRACK(result);
            return (PyObject *)result;
        }
    }
    else {
        PyErr_Format(PyExc_TypeError,
                     "tuple indices must be integers or slices, not %.200s",
                     Py_TYPE(item)->tp_name);
        return NULL;
    }
}

/*[clinic input]
tuple.__getnewargs__
[clinic start generated code]*/

static PyObject *
tuple___getnewargs___impl(PyTupleObject *self)
/*[clinic end generated code: output=25e06e3ee56027e2 input=1aeb4b286a21639a]*/
{
    return Py_BuildValue("(N)", tupleslice(self, 0, Py_SIZE(self)));
}

static PyMethodDef tuple_methods[] = {
    TUPLE___GETNEWARGS___METHODDEF
    TUPLE_INDEX_METHODDEF
    TUPLE_COUNT_METHODDEF
    {"__class_getitem__", Py_GenericAlias, METH_O|METH_CLASS, PyDoc_STR("See PEP 585")},
    {NULL,              NULL}           /* sentinel */
};

static PyMappingMethods tuple_as_mapping = {
    (lenfunc)tuplelength,
    (binaryfunc)tuplesubscript,
    0
};

static PyObject *tuple_iter(PyObject *seq);

PyTypeObject PyTuple_Type = {
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
    "tuple",
    sizeof(PyTupleObject) - sizeof(PyObject *),
    sizeof(PyObject *),
    (destructor)tupledealloc,                   /* tp_dealloc */
    0,                                          /* tp_vectorcall_offset */
    0,                                          /* tp_getattr */
    0,                                          /* tp_setattr */
    0,                                          /* tp_as_async */
    (reprfunc)tuplerepr,                        /* tp_repr */
    0,                                          /* tp_as_number */
    &tuple_as_sequence,                         /* tp_as_sequence */
    &tuple_as_mapping,                          /* tp_as_mapping */
    (hashfunc)tuplehash,                        /* tp_hash */
    0,                                          /* tp_call */
    0,                                          /* tp_str */
    PyObject_GenericGetAttr,                    /* tp_getattro */
    0,                                          /* tp_setattro */
    0,                                          /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
        Py_TPFLAGS_BASETYPE | Py_TPFLAGS_TUPLE_SUBCLASS |
        _Py_TPFLAGS_MATCH_SELF | Py_TPFLAGS_SEQUENCE,  /* tp_flags */
    tuple_new__doc__,                           /* tp_doc */
    (traverseproc)tupletraverse,                /* tp_traverse */
    0,                                          /* tp_clear */
    tuplerichcompare,                           /* tp_richcompare */
    0,                                          /* tp_weaklistoffset */
    tuple_iter,                                 /* tp_iter */
    0,                                          /* tp_iternext */
    tuple_methods,                              /* tp_methods */
    0,                                          /* tp_members */
    0,                                          /* tp_getset */
    0,                                          /* tp_base */
    0,                                          /* tp_dict */
    0,                                          /* tp_descr_get */
    0,                                          /* tp_descr_set */
    0,                                          /* tp_dictoffset */
    0,                                          /* tp_init */
    0,                                          /* tp_alloc */
    tuple_new,                                  /* tp_new */
    PyObject_GC_Del,                            /* tp_free */
    .tp_vectorcall = tuple_vectorcall,
};

/* The following function breaks the notion that tuples are immutable:
   it changes the size of a tuple.  We get away with this only if there
   is only one module referencing the object.  You can also think of it
   as creating a new tuple object and destroying the old one, only more
   efficiently.  In any case, don't use this if the tuple may already be
   known to some other part of the code. */

int
_PyTuple_Resize(PyObject **pv, Py_ssize_t newsize)
{
    PyTupleObject *v;
    PyTupleObject *sv;
    Py_ssize_t i;
    Py_ssize_t oldsize;

    v = (PyTupleObject *) *pv;
    if (v == NULL || !Py_IS_TYPE(v, &PyTuple_Type) ||
        (Py_SIZE(v) != 0 && Py_REFCNT(v) != 1)) {
        *pv = 0;
        Py_XDECREF(v);
        PyErr_BadInternalCall();
        return -1;
    }

    oldsize = Py_SIZE(v);
    if (oldsize == newsize) {
        return 0;
    }
    if (newsize == 0) {
        Py_DECREF(v);
        *pv = tuple_get_empty();
        return 0;
    }
    if (oldsize == 0) {
#ifdef Py_DEBUG
        assert(v == &_Py_SINGLETON(tuple_empty));
#endif
        /* The empty tuple is statically allocated so we never
           resize it in-place. */
        Py_DECREF(v);
        *pv = PyTuple_New(newsize);
        return *pv == NULL ? -1 : 0;
    }

    /* XXX UNREF/NEWREF interface should be more symmetrical */
#ifdef Py_REF_DEBUG
    _Py_RefTotal--;
#endif
    /* Untrack before the realloc: the object's address may change. */
    if (_PyObject_GC_IS_TRACKED(v)) {
        _PyObject_GC_UNTRACK(v);
    }
#ifdef Py_TRACE_REFS
    _Py_ForgetReference((PyObject *) v);
#endif
    /* DECREF items deleted by shrinkage */
    for (i = newsize; i < oldsize; i++) {
        Py_CLEAR(v->ob_item[i]);
    }
    sv = PyObject_GC_Resize(PyTupleObject, v, newsize);
    if (sv == NULL) {
        *pv = NULL;
        PyObject_GC_Del(v);
        return -1;
    }
    _Py_NewReference((PyObject *) sv);
    /* Zero out items added by growing */
    if (newsize > oldsize)
        memset(&sv->ob_item[oldsize], 0,
               sizeof(*sv->ob_item) * (newsize - oldsize));
    *pv = (PyObject *) sv;
    _PyObject_GC_TRACK(sv);
    return 0;
}


PyStatus
_PyTuple_InitTypes(PyInterpreterState *interp)
{
    if (!_Py_IsMainInterpreter(interp)) {
        return _PyStatus_OK();
    }

    if (PyType_Ready(&PyTuple_Type) < 0) {
        return _PyStatus_ERR("Can't initialize tuple type");
    }

    if (PyType_Ready(&PyTupleIter_Type) < 0) {
        return _PyStatus_ERR("Can't initialize tuple iterator type");
    }

    return _PyStatus_OK();
}

static void maybe_freelist_clear(PyInterpreterState *, int);

void
_PyTuple_Fini(PyInterpreterState *interp)
{
    maybe_freelist_clear(interp, 1);
}

void
_PyTuple_ClearFreeList(PyInterpreterState *interp)
{
    maybe_freelist_clear(interp, 0);
}

/*********************** Tuple Iterator **************************/

typedef struct {
    PyObject_HEAD
    Py_ssize_t it_index;      // next index to yield
    PyTupleObject *it_seq;    /* Set to NULL when iterator is exhausted */
} tupleiterobject;

static void
tupleiter_dealloc(tupleiterobject *it)
{
    _PyObject_GC_UNTRACK(it);
    Py_XDECREF(it->it_seq);
    PyObject_GC_Del(it);
}

static int
tupleiter_traverse(tupleiterobject *it, visitproc visit, void *arg)
{
    Py_VISIT(it->it_seq);
    return 0;
}

static PyObject *
tupleiter_next(tupleiterobject *it)
{
    PyTupleObject *seq;
    PyObject *item;

    assert(it != NULL);
    seq = it->it_seq;
    if (seq == NULL)
        return NULL;
    assert(PyTuple_Check(seq));

    if (it->it_index < PyTuple_GET_SIZE(seq)) {
        item = PyTuple_GET_ITEM(seq, it->it_index);
        ++it->it_index;
        Py_INCREF(item);
        return item;
    }

    /* Exhausted: drop the reference to the tuple. */
    it->it_seq = NULL;
    Py_DECREF(seq);
    return NULL;
}

static PyObject *
tupleiter_len(tupleiterobject *it, PyObject *Py_UNUSED(ignored))
{
    Py_ssize_t len = 0;
    if (it->it_seq)
        len = PyTuple_GET_SIZE(it->it_seq) - it->it_index;
    return PyLong_FromSsize_t(len);
}

PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");

static PyObject *
tupleiter_reduce(tupleiterobject *it, PyObject *Py_UNUSED(ignored))
{
    PyObject *iter = _PyEval_GetBuiltin(&_Py_ID(iter));

    /* _PyEval_GetBuiltin can invoke arbitrary code,
     * call must be before access of iterator pointers.
     * see issue #101765 */

    if (it->it_seq)
        return Py_BuildValue("N(O)n", iter, it->it_seq, it->it_index);
    else
        return Py_BuildValue("N(())", iter);
}

static PyObject *
tupleiter_setstate(tupleiterobject *it, PyObject *state)
{
    Py_ssize_t index = PyLong_AsSsize_t(state);
    if (index == -1 && PyErr_Occurred())
        return NULL;
    if (it->it_seq != NULL) {
        /* Clamp out-of-range indices instead of raising. */
        if (index < 0)
            index = 0;
        else if (index > PyTuple_GET_SIZE(it->it_seq))
            index = PyTuple_GET_SIZE(it->it_seq); /* exhausted iterator */
        it->it_index = index;
    }
    Py_RETURN_NONE;
}

PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");

static PyMethodDef tupleiter_methods[] = {
    {"__length_hint__", (PyCFunction)tupleiter_len, METH_NOARGS, length_hint_doc},
    {"__reduce__", (PyCFunction)tupleiter_reduce, METH_NOARGS, reduce_doc},
    {"__setstate__", (PyCFunction)tupleiter_setstate, METH_O, setstate_doc},
    {NULL,              NULL}           /* sentinel */
};

PyTypeObject PyTupleIter_Type = {
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
    "tuple_iterator",                           /* tp_name */
    sizeof(tupleiterobject),                    /* tp_basicsize */
    0,                                          /* tp_itemsize */
    /* methods */
    (destructor)tupleiter_dealloc,              /* tp_dealloc */
    0,                                          /* tp_vectorcall_offset */
    0,                                          /* tp_getattr */
    0,                                          /* tp_setattr */
    0,                                          /* tp_as_async */
    0,                                          /* tp_repr */
    0,                                          /* tp_as_number */
    0,                                          /* tp_as_sequence */
    0,                                          /* tp_as_mapping */
    0,                                          /* tp_hash */
    0,                                          /* tp_call */
    0,                                          /* tp_str */
    PyObject_GenericGetAttr,                    /* tp_getattro */
    0,                                          /* tp_setattro */
    0,                                          /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,/* tp_flags */
    0,                                          /* tp_doc */
    (traverseproc)tupleiter_traverse,           /* tp_traverse */
    0,                                          /* tp_clear */
    0,                                          /* tp_richcompare */
    0,                                          /* tp_weaklistoffset */
    PyObject_SelfIter,                          /* tp_iter */
    (iternextfunc)tupleiter_next,               /* tp_iternext */
    tupleiter_methods,                          /* tp_methods */
    0,
};

static PyObject *
tuple_iter(PyObject *seq)
{
    tupleiterobject *it;

    if (!PyTuple_Check(seq)) {
        PyErr_BadInternalCall();
        return NULL;
    }
    it = PyObject_GC_New(tupleiterobject, &PyTupleIter_Type);
    if (it == NULL)
        return NULL;
    it->it_index = 0;
    Py_INCREF(seq);
    it->it_seq = (PyTupleObject *)seq;
    _PyObject_GC_TRACK(it);
    return (PyObject *)it;
}


/*************
 * freelists *
 *************/

#define STATE (interp->tuple)
#define FREELIST_FINALIZED (STATE.numfree[0] < 0)

static inline PyTupleObject *
maybe_freelist_pop(Py_ssize_t size)
{
#if PyTuple_NFREELISTS > 0
    PyInterpreterState *interp = _PyInterpreterState_GET();
#ifdef Py_DEBUG
    /* maybe_freelist_pop() must not be called after maybe_freelist_fini(). */
    assert(!FREELIST_FINALIZED);
#endif
    if (size == 0) {
        return NULL;
    }
    assert(size > 0);
    if (size < PyTuple_MAXSAVESIZE) {
        Py_ssize_t index = size - 1;
        PyTupleObject *op = STATE.free_list[index];
        if (op != NULL) {
            /* op is the head of a linked list, with the first item
               pointing to the next node.  Here we pop off the old head. */
            STATE.free_list[index] = (PyTupleObject *) op->ob_item[0];
            STATE.numfree[index]--;
            /* Inlined _PyObject_InitVar() without _PyType_HasFeature() test */
#ifdef Py_TRACE_REFS
            /* maybe_freelist_push() ensures these were already set. */
            // XXX Can we drop these?  See commit 68055ce6fe01 (GvR, Dec 1998).
            Py_SET_SIZE(op, size);
            Py_SET_TYPE(op, &PyTuple_Type);
#endif
            _Py_NewReference((PyObject *)op);
            /* END inlined _PyObject_InitVar() */
            OBJECT_STAT_INC(from_freelist);
            return op;
        }
    }
#endif
    return NULL;
}

static inline int
maybe_freelist_push(PyTupleObject *op)
{
#if PyTuple_NFREELISTS > 0
    PyInterpreterState *interp = _PyInterpreterState_GET();
#ifdef Py_DEBUG
    /* maybe_freelist_push() must not be called after maybe_freelist_fini(). */
    assert(!FREELIST_FINALIZED);
#endif
    if (Py_SIZE(op) == 0) {
        return 0;
    }
    Py_ssize_t index = Py_SIZE(op) - 1;
    if (index < PyTuple_NFREELISTS
        && STATE.numfree[index] < PyTuple_MAXFREELIST
        && Py_IS_TYPE(op, &PyTuple_Type))
    {
        /* op is the head of a linked list, with the first item
           pointing to the next node.  Here we set op as the new head. */
        op->ob_item[0] = (PyObject *) STATE.free_list[index];
        STATE.free_list[index] = op;
        STATE.numfree[index]++;
        OBJECT_STAT_INC(to_freelist);
        return 1;
    }
#endif
    return 0;
}

static void
maybe_freelist_clear(PyInterpreterState *interp, int fini)
{
#if PyTuple_NFREELISTS > 0
    for (Py_ssize_t i = 0; i < PyTuple_NFREELISTS; i++) {
        PyTupleObject *p = STATE.free_list[i];
        STATE.free_list[i] = NULL;
        /* A negative count marks the freelist as finalized (see
           FREELIST_FINALIZED). */
        STATE.numfree[i] = fini ? -1 : 0;
        while (p) {
            PyTupleObject *q = p;
            p = (PyTupleObject *)(p->ob_item[0]);
            PyObject_GC_Del(q);
        }
    }
#endif
}

/* Print summary info about the state of the optimized allocator */
void
_PyTuple_DebugMallocStats(FILE *out)
{
#if PyTuple_NFREELISTS > 0
    PyInterpreterState *interp = _PyInterpreterState_GET();
    for (int i = 0; i < PyTuple_NFREELISTS; i++) {
        int len = i + 1;
        char buf[128];
        PyOS_snprintf(buf, sizeof(buf),
                      "free %d-sized PyTupleObject", len);
        _PyDebugAllocatorStats(out, buf, STATE.numfree[i],
                               _PyObject_VAR_SIZE(&PyTuple_Type, len));
    }
#endif
}

#undef STATE
#undef FREELIST_FINALIZED