
Commit 8298469

gh-142472: Clean-up _PyStackRef functions
This combines most _PyStackRef functions and macros between the free threaded and default builds.

- Remove Py_TAG_DEFERRED (same as Py_TAG_REFCNT)
- Remove PyStackRef_IsDeferred (same as !PyStackRef_RefcountOnObject)
1 parent b20722c commit 8298469

5 files changed: +58, -201 lines
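Background for the change described above: a _PyStackRef packs a PyObject pointer together with tag bits in a single word, and the low Py_TAG_REFCNT bit records that the reference does not own a reference count (immortal, deferred, or otherwise borrowed). Because Py_TAG_DEFERRED was defined as an alias for Py_TAG_REFCNT, PyStackRef_IsDeferred tested the same bit that PyStackRef_RefcountOnObject tests, just with the opposite polarity, which is what makes the two removals safe. A minimal standalone sketch of that invariant — the StackRef/TAG_* names below are simplified stand-ins, not the real CPython definitions:

#include <assert.h>
#include <stdint.h>

/* Simplified stand-ins for _PyStackRef and Py_TAG_REFCNT / Py_TAG_DEFERRED. */
typedef struct { uintptr_t bits; } StackRef;
#define TAG_REFCNT   ((uintptr_t)1)
#define TAG_DEFERRED TAG_REFCNT          /* the removed alias was literally this */

static int refcount_on_object(StackRef r) { return (r.bits & TAG_REFCNT) == 0; }
static int is_deferred(StackRef r)        { return (r.bits & TAG_DEFERRED) != 0; }

int main(void)
{
    uintptr_t fake_obj = (uintptr_t)1 << 4;                  /* stands in for an aligned PyObject* */
    StackRef owning   = { .bits = fake_obj };                /* reference owns a refcount */
    StackRef deferred = { .bits = fake_obj | TAG_DEFERRED }; /* borrowed / deferred */

    /* IsDeferred was always the negation of RefcountOnObject, so only one is needed. */
    assert(!is_deferred(owning)   &&  refcount_on_object(owning));
    assert( is_deferred(deferred) && !refcount_on_object(deferred));
    return 0;
}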

Include/internal/pycore_object.h

Lines changed: 5 additions & 0 deletions
@@ -496,6 +496,9 @@ static inline void Py_DECREF_MORTAL_SPECIALIZED(PyObject *op, destructor destruct
 #define Py_DECREF_MORTAL_SPECIALIZED(op, destruct) Py_DECREF_MORTAL_SPECIALIZED(_PyObject_CAST(op), destruct)
 
 #endif
+#else // Py_GIL_DISABLED
+# define Py_DECREF_MORTAL(op) Py_DECREF(op)
+# define Py_DECREF_MORTAL_SPECIALIZED(op, destruct) Py_DECREF(op)
 #endif
 
 /* Inline functions trading binary compatibility for speed:
@@ -1045,6 +1048,8 @@ static inline Py_ALWAYS_INLINE void _Py_INCREF_MORTAL(PyObject *op)
 }
 #endif
 }
+#else
+# define _Py_INCREF_MORTAL(op) Py_INCREF(op)
 #endif
 
 /* Utility for the tp_traverse slot of mutable heap types that have no other
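With the fallbacks added above, callers can use the mortal-only helpers unconditionally: under Py_GIL_DISABLED they now expand to plain Py_DECREF/Py_INCREF. A small sketch of the same build-switch pattern in isolation — the Obj/OBJ_* names and the TOY_GIL_DISABLED switch are illustrative, not CPython's:

#include <assert.h>
#include <stdint.h>

/* Toy refcounted object; "immortal" objects are never decref'd. */
typedef struct { intptr_t refcnt; int immortal; } Obj;

#define OBJ_DECREF(o) do { if (!(o)->immortal) (o)->refcnt--; } while (0)

#ifdef TOY_GIL_DISABLED
/* Free-threaded build: no separate mortal fast path, fall back (as in the diff). */
# define OBJ_DECREF_MORTAL(o) OBJ_DECREF(o)
#else
/* Default build: the caller guarantees the object is mortal, so skip the check. */
# define OBJ_DECREF_MORTAL(o) do { assert(!(o)->immortal); (o)->refcnt--; } while (0)
#endif

int main(void)
{
    Obj o = { .refcnt = 2, .immortal = 0 };
    OBJ_DECREF_MORTAL(&o);        /* one spelling, either build */
    assert(o.refcnt == 1);
    return 0;
}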

Include/internal/pycore_stackref.h

Lines changed: 49 additions & 196 deletions
@@ -446,184 +446,6 @@ PyStackRef_IncrementTaggedIntNoOverflow(_PyStackRef ref)
     return (_PyStackRef){ .bits = ref.bits + (1 << Py_TAGGED_SHIFT) };
 }
 
-#define PyStackRef_IsDeferredOrTaggedInt(ref) (((ref).bits & Py_TAG_REFCNT) != 0)
-
-#ifdef Py_GIL_DISABLED
-
-#define Py_TAG_DEFERRED Py_TAG_REFCNT
-
-#define Py_TAG_PTR ((uintptr_t)0)
-
-
-static const _PyStackRef PyStackRef_NULL = { .bits = Py_TAG_DEFERRED};
-
-#define PyStackRef_IsNull(stackref) ((stackref).bits == PyStackRef_NULL.bits)
-#define PyStackRef_True ((_PyStackRef){.bits = ((uintptr_t)&_Py_TrueStruct) | Py_TAG_DEFERRED })
-#define PyStackRef_False ((_PyStackRef){.bits = ((uintptr_t)&_Py_FalseStruct) | Py_TAG_DEFERRED })
-#define PyStackRef_None ((_PyStackRef){.bits = ((uintptr_t)&_Py_NoneStruct) | Py_TAG_DEFERRED })
-
-// Checks that mask out the deferred bit in the free threading build.
-#define PyStackRef_IsNone(ref) (PyStackRef_AsPyObjectBorrow(ref) == Py_None)
-#define PyStackRef_IsTrue(ref) (PyStackRef_AsPyObjectBorrow(ref) == Py_True)
-#define PyStackRef_IsFalse(ref) (PyStackRef_AsPyObjectBorrow(ref) == Py_False)
-
-#define PyStackRef_IsNullOrInt(stackref) (PyStackRef_IsNull(stackref) || PyStackRef_IsTaggedInt(stackref))
-
-static inline PyObject *
-PyStackRef_AsPyObjectBorrow(_PyStackRef stackref)
-{
-    assert(!PyStackRef_IsTaggedInt(stackref));
-    PyObject *cleared = ((PyObject *)((stackref).bits & (~Py_TAG_BITS)));
-    return cleared;
-}
-
-#define PyStackRef_IsDeferred(ref) (((ref).bits & Py_TAG_BITS) == Py_TAG_DEFERRED)
-
-static inline PyObject *
-PyStackRef_AsPyObjectSteal(_PyStackRef stackref)
-{
-    assert(!PyStackRef_IsNull(stackref));
-    if (PyStackRef_IsDeferred(stackref)) {
-        return Py_NewRef(PyStackRef_AsPyObjectBorrow(stackref));
-    }
-    return PyStackRef_AsPyObjectBorrow(stackref);
-}
-
-static inline _PyStackRef
-_PyStackRef_FromPyObjectSteal(PyObject *obj)
-{
-    assert(obj != NULL);
-    // Make sure we don't take an already tagged value.
-    assert(((uintptr_t)obj & Py_TAG_BITS) == 0);
-    return (_PyStackRef){ .bits = (uintptr_t)obj };
-}
-# define PyStackRef_FromPyObjectSteal(obj) _PyStackRef_FromPyObjectSteal(_PyObject_CAST(obj))
-
-static inline bool
-PyStackRef_IsHeapSafe(_PyStackRef stackref)
-{
-    if (PyStackRef_IsDeferred(stackref)) {
-        PyObject *obj = PyStackRef_AsPyObjectBorrow(stackref);
-        return obj == NULL || _Py_IsImmortal(obj) || _PyObject_HasDeferredRefcount(obj);
-    }
-    return true;
-}
-
-static inline _PyStackRef
-PyStackRef_MakeHeapSafe(_PyStackRef stackref)
-{
-    if (PyStackRef_IsHeapSafe(stackref)) {
-        return stackref;
-    }
-    PyObject *obj = PyStackRef_AsPyObjectBorrow(stackref);
-    return (_PyStackRef){ .bits = (uintptr_t)(Py_NewRef(obj)) | Py_TAG_PTR };
-}
-
-static inline _PyStackRef
-PyStackRef_FromPyObjectStealMortal(PyObject *obj)
-{
-    assert(obj != NULL);
-    assert(!_Py_IsImmortal(obj));
-    // Make sure we don't take an already tagged value.
-    assert(((uintptr_t)obj & Py_TAG_BITS) == 0);
-    return (_PyStackRef){ .bits = (uintptr_t)obj };
-}
-
-static inline _PyStackRef
-PyStackRef_FromPyObjectNew(PyObject *obj)
-{
-    // Make sure we don't take an already tagged value.
-    assert(((uintptr_t)obj & Py_TAG_BITS) == 0);
-    assert(obj != NULL);
-    if (_PyObject_HasDeferredRefcount(obj)) {
-        return (_PyStackRef){ .bits = (uintptr_t)obj | Py_TAG_DEFERRED };
-    }
-    else {
-        return (_PyStackRef){ .bits = (uintptr_t)(Py_NewRef(obj)) | Py_TAG_PTR };
-    }
-}
-#define PyStackRef_FromPyObjectNew(obj) PyStackRef_FromPyObjectNew(_PyObject_CAST(obj))
-
-static inline _PyStackRef
-PyStackRef_FromPyObjectBorrow(PyObject *obj)
-{
-    // Make sure we don't take an already tagged value.
-    assert(((uintptr_t)obj & Py_TAG_BITS) == 0);
-    assert(obj != NULL);
-    return (_PyStackRef){ .bits = (uintptr_t)obj | Py_TAG_DEFERRED };
-}
-#define PyStackRef_FromPyObjectBorrow(obj) PyStackRef_FromPyObjectBorrow(_PyObject_CAST(obj))
-
-#define PyStackRef_CLOSE(REF) \
-    do { \
-        _PyStackRef _close_tmp = (REF); \
-        assert(!PyStackRef_IsNull(_close_tmp)); \
-        if (!PyStackRef_IsDeferredOrTaggedInt(_close_tmp)) { \
-            Py_DECREF(PyStackRef_AsPyObjectBorrow(_close_tmp)); \
-        } \
-    } while (0)
-
-static inline void
-PyStackRef_CLOSE_SPECIALIZED(_PyStackRef ref, destructor destruct)
-{
-    (void)destruct;
-    PyStackRef_CLOSE(ref);
-}
-
-static inline int
-PyStackRef_RefcountOnObject(_PyStackRef ref)
-{
-    return (ref.bits & Py_TAG_REFCNT) == 0;
-}
-
-static inline _PyStackRef
-PyStackRef_DUP(_PyStackRef stackref)
-{
-    assert(!PyStackRef_IsNull(stackref));
-    if (PyStackRef_IsDeferredOrTaggedInt(stackref)) {
-        return stackref;
-    }
-    Py_INCREF(PyStackRef_AsPyObjectBorrow(stackref));
-    return stackref;
-}
-
-static inline _PyStackRef
-PyStackRef_Borrow(_PyStackRef stackref)
-{
-    return (_PyStackRef){ .bits = stackref.bits | Py_TAG_DEFERRED };
-}
-
-// Convert a possibly deferred reference to a strong reference.
-static inline _PyStackRef
-PyStackRef_AsStrongReference(_PyStackRef stackref)
-{
-    return PyStackRef_FromPyObjectSteal(PyStackRef_AsPyObjectSteal(stackref));
-}
-
-#define PyStackRef_XCLOSE(stackref) \
-    do { \
-        _PyStackRef _tmp = (stackref); \
-        if (!PyStackRef_IsNull(_tmp)) { \
-            PyStackRef_CLOSE(_tmp); \
-        } \
-    } while (0);
-
-#define PyStackRef_CLEAR(op) \
-    do { \
-        _PyStackRef *_tmp_op_ptr = &(op); \
-        _PyStackRef _tmp_old_op = (*_tmp_op_ptr); \
-        if (!PyStackRef_IsNull(_tmp_old_op)) { \
-            *_tmp_op_ptr = PyStackRef_NULL; \
-            PyStackRef_CLOSE(_tmp_old_op); \
-        } \
-    } while (0)
-
-#define PyStackRef_FromPyObjectNewMortal PyStackRef_FromPyObjectNew
-
-#else // Py_GIL_DISABLED
-
-// With GIL
-
 /* References to immortal objects always have their tag bit set to Py_TAG_REFCNT
  * as they can (must) have their reclamation deferred */
 
@@ -642,13 +464,24 @@ static const _PyStackRef PyStackRef_NULL = { .bits = PyStackRef_NULL_BITS };
 #define PyStackRef_False ((_PyStackRef){.bits = ((uintptr_t)&_Py_FalseStruct) | Py_TAG_REFCNT })
 #define PyStackRef_None ((_PyStackRef){.bits = ((uintptr_t)&_Py_NoneStruct) | Py_TAG_REFCNT })
 
+#ifdef Py_GIL_DISABLED
+// Checks that mask out the deferred bit in the free threading build.
+#define PyStackRef_IsNone(REF) (((REF).bits & ~Py_TAG_REFCNT) == (uintptr_t)&_Py_NoneStruct)
+#define PyStackRef_IsTrue(REF) (((REF).bits & ~Py_TAG_REFCNT) == (uintptr_t)&_Py_TrueStruct)
+#define PyStackRef_IsFalse(REF) (((REF).bits & ~Py_TAG_REFCNT) == (uintptr_t)&_Py_FalseStruct)
+#else
 #define PyStackRef_IsTrue(REF) ((REF).bits == (((uintptr_t)&_Py_TrueStruct) | Py_TAG_REFCNT))
 #define PyStackRef_IsFalse(REF) ((REF).bits == (((uintptr_t)&_Py_FalseStruct) | Py_TAG_REFCNT))
 #define PyStackRef_IsNone(REF) ((REF).bits == (((uintptr_t)&_Py_NoneStruct) | Py_TAG_REFCNT))
+#endif
 
-#ifdef Py_DEBUG
+#define PyStackRef_IsNullOrInt(stackref) (PyStackRef_IsNull(stackref) || PyStackRef_IsTaggedInt(stackref))
+
+#if defined(Py_DEBUG) && !defined(Py_GIL_DISABLED)
 
-static inline void PyStackRef_CheckValid(_PyStackRef ref) {
+static inline void
+PyStackRef_CheckValid(_PyStackRef ref)
+{
     assert(ref.bits != 0);
     int tag = ref.bits & Py_TAG_BITS;
     PyObject *obj = BITS_TO_PTR_MASKED(ref);
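The free-threaded variants added in the hunk just above mask out Py_TAG_REFCNT before comparing against the singleton's address, so the identity check succeeds whether or not the reference happens to carry the deferred bit. A standalone sketch of that masking trick — toy_none and STACKREF_IS_NONE are made-up stand-ins for _Py_NoneStruct and PyStackRef_IsNone:

#include <assert.h>
#include <stdint.h>

#define TAG_REFCNT ((uintptr_t)1)
typedef struct { uintptr_t bits; } StackRef;

static long toy_none;   /* stands in for _Py_NoneStruct; assumed at least 2-byte aligned */

/* Free-threaded style check: strip the tag bit, then compare addresses. */
#define STACKREF_IS_NONE(ref) (((ref).bits & ~TAG_REFCNT) == (uintptr_t)&toy_none)

int main(void)
{
    StackRef untagged = { .bits = (uintptr_t)&toy_none };              /* no tag bit */
    StackRef tagged   = { .bits = (uintptr_t)&toy_none | TAG_REFCNT }; /* deferred bit set */

    /* Both spellings of "a reference to None" pass the same check. */
    assert(STACKREF_IS_NONE(untagged));
    assert(STACKREF_IS_NONE(tagged));
    return 0;
}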
@@ -694,39 +527,45 @@ PyStackRef_Borrow(_PyStackRef ref)
 {
     return (_PyStackRef){ .bits = ref.bits | Py_TAG_REFCNT };
 }
-#endif
 
 static inline PyObject *
 PyStackRef_AsPyObjectSteal(_PyStackRef ref)
 {
+    assert(!PyStackRef_IsNull(ref));
+    assert(!PyStackRef_IsTaggedInt(ref));
     if (PyStackRef_RefcountOnObject(ref)) {
         return BITS_TO_PTR(ref);
     }
     else {
         return Py_NewRef(BITS_TO_PTR_MASKED(ref));
     }
 }
+#endif
 
 static inline _PyStackRef
 PyStackRef_FromPyObjectSteal(PyObject *obj)
 {
     assert(obj != NULL);
-#if SIZEOF_VOID_P > 4
-    unsigned int tag = obj->ob_flags & Py_TAG_REFCNT;
+#ifdef Py_GIL_DISABLED
+    return (_PyStackRef){ .bits = (uintptr_t)obj };
 #else
+# if SIZEOF_VOID_P > 4
+    unsigned int tag = obj->ob_flags & Py_TAG_REFCNT;
+# else
     unsigned int tag = _Py_IsImmortal(obj) ? Py_TAG_REFCNT : 0;
-#endif
+# endif
     _PyStackRef ref = ((_PyStackRef){.bits = ((uintptr_t)(obj)) | tag});
     PyStackRef_CheckValid(ref);
     return ref;
+#endif
 }
 
 static inline _PyStackRef
 PyStackRef_FromPyObjectStealMortal(PyObject *obj)
 {
     assert(obj != NULL);
     assert(!_Py_IsImmortal(obj));
-    _PyStackRef ref = ((_PyStackRef){.bits = ((uintptr_t)(obj)) });
+    _PyStackRef ref = (_PyStackRef){ .bits = (uintptr_t)obj };
     PyStackRef_CheckValid(ref);
    return ref;
 }
@@ -735,9 +574,15 @@ static inline _PyStackRef
 _PyStackRef_FromPyObjectNew(PyObject *obj)
 {
     assert(obj != NULL);
+#ifdef Py_GIL_DISABLED
+    if (_PyObject_HasDeferredRefcount(obj)) {
+        return (_PyStackRef){ .bits = (uintptr_t)obj | Py_TAG_REFCNT };
+    }
+#else
     if (_Py_IsImmortal(obj)) {
-        return (_PyStackRef){ .bits = ((uintptr_t)obj) | Py_TAG_REFCNT};
+        return (_PyStackRef){ .bits = (uintptr_t)obj | Py_TAG_REFCNT };
     }
+#endif
     _Py_INCREF_MORTAL(obj);
     _PyStackRef ref = (_PyStackRef){ .bits = (uintptr_t)obj };
     PyStackRef_CheckValid(ref);
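After this hunk a single definition of _PyStackRef_FromPyObjectNew covers both builds: objects with deferred reference counting (free-threaded build) or immortal objects (default build) yield a borrowed reference tagged with Py_TAG_REFCNT, and everything else gets a real incref plus an untagged reference. A toy restatement of that decision — Obj, from_object_new and TOY_GIL_DISABLED are illustrative stand-ins, not the CPython API:

#include <assert.h>
#include <stdbool.h>
#include <stdint.h>

#define TAG_REFCNT ((uintptr_t)1)
typedef struct { uintptr_t bits; } StackRef;

/* Toy object; the first member keeps the address aligned, as PyObject is. */
typedef struct { intptr_t refcnt; bool immortal; bool deferred_rc; } Obj;

static StackRef from_object_new(Obj *op)
{
#ifdef TOY_GIL_DISABLED
    if (op->deferred_rc) {                       /* borrow: tag, no incref */
        return (StackRef){ .bits = (uintptr_t)op | TAG_REFCNT };
    }
#else
    if (op->immortal) {                          /* borrow: tag, no incref */
        return (StackRef){ .bits = (uintptr_t)op | TAG_REFCNT };
    }
#endif
    op->refcnt++;                                /* own a real reference */
    return (StackRef){ .bits = (uintptr_t)op };
}

int main(void)
{
    Obj mortal = { .refcnt = 1 };
    StackRef r = from_object_new(&mortal);
    assert((r.bits & TAG_REFCNT) == 0 && mortal.refcnt == 2);   /* owned reference */

#ifndef TOY_GIL_DISABLED
    Obj immortal = { .refcnt = 1, .immortal = true };
    StackRef s = from_object_new(&immortal);
    assert((s.bits & TAG_REFCNT) != 0 && immortal.refcnt == 1); /* borrowed, untouched */
#endif
    return 0;
}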
@@ -760,6 +605,7 @@ _PyStackRef_FromPyObjectNewMortal(PyObject *obj)
 static inline _PyStackRef
 PyStackRef_FromPyObjectBorrow(PyObject *obj)
 {
+    assert(obj != NULL);
     return (_PyStackRef){ .bits = (uintptr_t)obj | Py_TAG_REFCNT};
 }
 
@@ -782,7 +628,15 @@ PyStackRef_DUP(_PyStackRef ref)
 static inline bool
 PyStackRef_IsHeapSafe(_PyStackRef ref)
 {
-    return (ref.bits & Py_TAG_BITS) != Py_TAG_REFCNT || ref.bits == PyStackRef_NULL_BITS || _Py_IsImmortal(BITS_TO_PTR_MASKED(ref));
+    if ((ref.bits & Py_TAG_BITS) != Py_TAG_REFCNT) {
+        return true;
+    }
+    PyObject *obj = BITS_TO_PTR_MASKED(ref);
+#ifdef Py_GIL_DISABLED
+    return obj == NULL || _PyObject_HasDeferredRefcount(obj);
+#else
+    return obj == NULL || _Py_IsImmortal(obj);
+#endif
 }
 
 static inline _PyStackRef
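The rewritten PyStackRef_IsHeapSafe above spells out the rule in one build-independent function: a reference that owns its refcount is always safe to move to the heap, while a Py_TAG_REFCNT-tagged borrow is only safe if the object is NULL, has deferred reference counting (free-threaded build) or is immortal (default build). A toy restatement of that decision — the Obj fields and is_heap_safe below are simplified stand-ins:

#include <stdbool.h>
#include <stdint.h>

#define TAG_REFCNT ((uintptr_t)1)
#define TAG_BITS   ((uintptr_t)3)
typedef struct { uintptr_t bits; } StackRef;

/* Toy object; the first member keeps the address aligned, as PyObject is. */
typedef struct { intptr_t refcnt; bool immortal; bool deferred_rc; } Obj;

static bool is_heap_safe(StackRef ref)
{
    if ((ref.bits & TAG_BITS) != TAG_REFCNT) {
        return true;                              /* owns a refcount (or tagged int) */
    }
    Obj *obj = (Obj *)(ref.bits & ~TAG_BITS);
#ifdef TOY_GIL_DISABLED
    return obj == NULL || obj->deferred_rc;       /* free-threaded criterion */
#else
    return obj == NULL || obj->immortal;          /* default-build criterion */
#endif
}

int main(void)
{
    Obj o = { .refcnt = 1 };
    StackRef owned    = { .bits = (uintptr_t)&o };              /* always heap safe */
    StackRef borrowed = { .bits = (uintptr_t)&o | TAG_REFCNT }; /* plain mortal borrow: not safe */
    return (is_heap_safe(owned) && !is_heap_safe(borrowed)) ? 0 : 1;
}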
@@ -798,6 +652,13 @@ PyStackRef_MakeHeapSafe(_PyStackRef ref)
     return ref;
 }
 
+// Convert a possibly deferred reference to a strong reference.
+static inline _PyStackRef
+PyStackRef_AsStrongReference(_PyStackRef stackref)
+{
+    return PyStackRef_FromPyObjectSteal(PyStackRef_AsPyObjectSteal(stackref));
+}
+
 #ifdef _WIN32
 #define PyStackRef_CLOSE(REF) \
 do { \
@@ -815,12 +676,6 @@ PyStackRef_CLOSE(_PyStackRef ref)
 }
 #endif
 
-static inline bool
-PyStackRef_IsNullOrInt(_PyStackRef ref)
-{
-    return PyStackRef_IsNull(ref) || PyStackRef_IsTaggedInt(ref);
-}
-
 static inline void
 PyStackRef_CLOSE_SPECIALIZED(_PyStackRef ref, destructor destruct)
 {
@@ -853,8 +708,6 @@ PyStackRef_XCLOSE(_PyStackRef ref)
 } while (0)
 
 
-#endif // Py_GIL_DISABLED
-
 // Note: this is a macro because MSVC (Windows) has trouble inlining it.
 
 #define PyStackRef_Is(a, b) (((a).bits & (~Py_TAG_REFCNT)) == ((b).bits & (~Py_TAG_REFCNT)))
@@ -928,7 +781,7 @@ static inline int
 _Py_TryIncrefCompareStackRef(PyObject **src, PyObject *op, _PyStackRef *out)
 {
     if (_PyObject_HasDeferredRefcount(op)) {
-        *out = (_PyStackRef){ .bits = (uintptr_t)op | Py_TAG_DEFERRED };
+        *out = (_PyStackRef){ .bits = (uintptr_t)op | Py_TAG_REFCNT };
         return 1;
     }
     if (_Py_TryIncrefCompare(src, op)) {

InternalDocs/stackrefs.md

Lines changed: 0 additions & 1 deletion
@@ -64,7 +64,6 @@ these values. Type checks use `PyStackRef_IsTaggedInt` and `PyStackRef_LongCheck
 
 ## Free threading considerations
 
-With `Py_GIL_DISABLED`, `Py_TAG_DEFERRED` is an alias for `Py_TAG_REFCNT`.
 Objects that support deferred reference counting can be pushed to the evaluation
 stack and stored in local variables without directly incrementing the reference
 count because they are only freed during cyclic garbage collection. This avoids

Objects/dictobject.c

Lines changed: 1 addition & 1 deletion
@@ -1599,7 +1599,7 @@ lookup_threadsafe_unicode(PyDictKeysObject *dk, PyObject *key, Py_hash_t hash, _
         return DKIX_EMPTY;
     }
     if (_PyObject_HasDeferredRefcount(value)) {
-        *value_addr = (_PyStackRef){ .bits = (uintptr_t)value | Py_TAG_DEFERRED };
+        *value_addr = (_PyStackRef){ .bits = (uintptr_t)value | Py_TAG_REFCNT };
         return ix;
     }
     if (_Py_TryIncrefCompare(addr_of_value, value)) {
