
Commit 68a856e

Merge in the main branch

2 parents 9c13f77 + da1d468

File tree

19 files changed: +147 −69 lines

Doc/c-api/import.rst

Lines changed: 7 additions & 0 deletions
@@ -314,6 +314,13 @@ Importing Modules
    initialization.
 
 
+.. c:var:: struct _inittab *PyImport_Inittab
+
+   The table of built-in modules used by Python initialization. Do not use this directly;
+   use :c:func:`PyImport_AppendInittab` and :c:func:`PyImport_ExtendInittab`
+   instead.
+
+
 .. c:function:: PyObject* PyImport_ImportModuleAttr(PyObject *mod_name, PyObject *attr_name)
 
    Import the module *mod_name* and get its attribute *attr_name*.
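
Note: the modules registered in this table (including any appended by an embedding application before initialization) are the interpreter's built-in modules, which Python code can inspect read-only via ``sys.builtin_module_names``. A minimal sketch of that Python-level view:

    # Read-only Python-level view of the modules compiled into (or appended to)
    # the interpreter's inittab before initialization.
    import sys

    print('sys' in sys.builtin_module_names)   # True: 'sys' is always built in
    print(len(sys.builtin_module_names), 'built-in modules in this interpreter')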

Doc/c-api/veryhigh.rst

Lines changed: 40 additions & 0 deletions
@@ -396,3 +396,43 @@ Available start symbols
    * :pep:`484`
 
    .. versionadded:: 3.8
+
+
+Stack Effects
+^^^^^^^^^^^^^
+
+.. seealso::
+   :py:func:`dis.stack_effect`
+
+
+.. c:macro:: PY_INVALID_STACK_EFFECT
+
+   Sentinel value representing an invalid stack effect.
+
+   This is currently equivalent to ``INT_MAX``.
+
+   .. versionadded:: 3.8
+
+
+.. c:function:: int PyCompile_OpcodeStackEffect(int opcode, int oparg)
+
+   Compute the stack effect of *opcode* with argument *oparg*.
+
+   On success, this function returns the stack effect; on failure, it returns
+   :c:macro:`PY_INVALID_STACK_EFFECT`.
+
+   .. versionadded:: 3.4
+
+
+.. c:function:: int PyCompile_OpcodeStackEffectWithJump(int opcode, int oparg, int jump)
+
+   Similar to :c:func:`PyCompile_OpcodeStackEffect`, but additionally accepts a
+   *jump* argument.
+
+   If *jump* is ``0``, the result does not include the stack effect of jumping;
+   if *jump* is ``1`` or ``-1``, it does.
+
+   On success, this function returns the stack effect; on failure, it returns
+   :c:macro:`PY_INVALID_STACK_EFFECT`.
+
+   .. versionadded:: 3.8
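
These new C functions mirror :py:func:`dis.stack_effect`, which the added docs reference. A quick Python-level sketch of the same queries (opcode names assume a recent CPython):

    import dis

    # How each opcode changes the depth of the evaluation stack.
    print(dis.stack_effect(dis.opmap["POP_TOP"]))          # -1: pops one item
    print(dis.stack_effect(dis.opmap["LOAD_CONST"], 0))    # +1: pushes one item

    # Conditional jumps can be queried for either path via the keyword-only
    # 'jump' argument (True = jump taken, False = fall through).
    op = dis.opmap["POP_JUMP_IF_FALSE"]
    print(dis.stack_effect(op, 0, jump=True))
    print(dis.stack_effect(op, 0, jump=False))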

Doc/extending/extending.rst

Lines changed: 1 addition & 1 deletion
@@ -426,7 +426,7 @@ A pointer to the module definition must be returned via :c:func:`PyModuleDef_Ini
 so that the import machinery can create the module and store it in ``sys.modules``.
 
 When embedding Python, the :c:func:`!PyInit_spam` function is not called
-automatically unless there's an entry in the :c:data:`!PyImport_Inittab` table.
+automatically unless there's an entry in the :c:data:`PyImport_Inittab` table.
 To add the module to the initialization table, use :c:func:`PyImport_AppendInittab`,
 optionally followed by an import of the module::
 

Include/internal/pycore_initconfig.h

Lines changed: 2 additions & 4 deletions
@@ -153,10 +153,8 @@ typedef enum {
 } _PyConfigInitEnum;
 
 typedef enum {
-    /* For now, this means the GIL is enabled.
-
-       gh-116329: This will eventually change to "the GIL is disabled but can
-       be re-enabled by loading an incompatible extension module." */
+    /* In free threaded builds, this means that the GIL is disabled at startup,
+       but may be enabled by loading an incompatible extension module. */
     _PyConfig_GIL_DEFAULT = -1,
 
     /* The GIL has been forced off or on, and will not be affected by module loading. */

Lib/asyncio/streams.py

Lines changed: 10 additions & 22 deletions
@@ -667,8 +667,7 @@ async def readuntil(self, separator=b'\n'):
             # adds data which makes separator be found. That's why we check for
             # EOF *after* inspecting the buffer.
             if self._eof:
-                chunk = bytes(self._buffer)
-                self._buffer.clear()
+                chunk = self._buffer.take_bytes()
                 raise exceptions.IncompleteReadError(chunk, None)
 
             # _wait_for_data() will resume reading if stream was paused.
@@ -678,10 +677,9 @@ async def readuntil(self, separator=b'\n'):
             raise exceptions.LimitOverrunError(
                 'Separator is found, but chunk is longer than limit', match_start)
 
-        chunk = self._buffer[:match_end]
-        del self._buffer[:match_end]
+        chunk = self._buffer.take_bytes(match_end)
         self._maybe_resume_transport()
-        return bytes(chunk)
+        return chunk
 
     async def read(self, n=-1):
         """Read up to `n` bytes from the stream.
@@ -716,20 +714,16 @@ async def read(self, n=-1):
            # collect everything in self._buffer, but that would
            # deadlock if the subprocess sends more than self.limit
            # bytes. So just call self.read(self._limit) until EOF.
-            blocks = []
-            while True:
-                block = await self.read(self._limit)
-                if not block:
-                    break
-                blocks.append(block)
-            return b''.join(blocks)
+            joined = bytearray()
+            while block := await self.read(self._limit):
+                joined += block
+            return joined.take_bytes()
 
         if not self._buffer and not self._eof:
             await self._wait_for_data('read')
 
        # This will work right even if buffer is less than n bytes
-        data = bytes(memoryview(self._buffer)[:n])
-        del self._buffer[:n]
+        data = self._buffer.take_bytes(min(len(self._buffer), n))
 
         self._maybe_resume_transport()
         return data
@@ -760,18 +754,12 @@ async def readexactly(self, n):
 
         while len(self._buffer) < n:
             if self._eof:
-                incomplete = bytes(self._buffer)
-                self._buffer.clear()
+                incomplete = self._buffer.take_bytes()
                 raise exceptions.IncompleteReadError(incomplete, n)
 
             await self._wait_for_data('readexactly')
 
-        if len(self._buffer) == n:
-            data = bytes(self._buffer)
-            self._buffer.clear()
-        else:
-            data = bytes(memoryview(self._buffer)[:n])
-            del self._buffer[:n]
+        data = self._buffer.take_bytes(n)
         self._maybe_resume_transport()
         return data
 
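
All of these asyncio changes route through what appears to be a new ``bytearray.take_bytes()`` method, which detaches the buffered data (or a prefix of it) as ``bytes`` in one step instead of copying and then clearing. A minimal sketch of the behaviour implied by the removed lines above; the ``take_bytes`` helper here is hypothetical and only mirrors the old copy-and-clear idiom:

    # Hypothetical stand-in for the take_bytes() calls above; it reproduces the
    # removed copy-and-clear idiom so the before/after behaviour can be compared.
    def take_bytes(buffer: bytearray, n: int | None = None) -> bytes:
        if n is None:
            data = bytes(buffer)   # copy everything out ...
            buffer.clear()         # ... then empty the buffer
        else:
            data = bytes(memoryview(buffer)[:n])  # copy the first n bytes
            del buffer[:n]                        # and drop them from the buffer
        return data

    buf = bytearray(b"hello world")
    assert take_bytes(buf, 5) == b"hello"
    assert take_bytes(buf) == b" world"
    assert not buf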

Lib/pdb.py

Lines changed: 4 additions & 2 deletions
@@ -654,7 +654,7 @@ def _show_display(self):
 
     def _get_tb_and_exceptions(self, tb_or_exc):
         """
-        Given a tracecack or an exception, return a tuple of chained exceptions
+        Given a traceback or an exception, return a tuple of chained exceptions
         and current traceback to inspect.
 
         This will deal with selecting the right ``__cause__`` or ``__context__``
@@ -2429,7 +2429,9 @@ def print_stack_trace(self, count=None):
         except KeyboardInterrupt:
             pass
 
-    def print_stack_entry(self, frame_lineno, prompt_prefix=line_prefix):
+    def print_stack_entry(self, frame_lineno, prompt_prefix=None):
+        if prompt_prefix is None:
+            prompt_prefix = line_prefix
         frame, lineno = frame_lineno
         if frame is self.curframe:
             prefix = '> '
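
The ``print_stack_entry`` change swaps a module-level default for the usual ``None``-sentinel pattern: a default argument is evaluated once, at function definition time, so ``prompt_prefix=line_prefix`` captures whatever ``line_prefix`` was at import and never sees later changes. A small sketch (names are illustrative, not pdb's):

    # Why the default moved to None: defaults bind at definition time.
    line_prefix = '\n-> '

    def entry_bound_at_def(prefix=line_prefix):
        return prefix

    def entry_bound_at_call(prefix=None):
        if prefix is None:
            prefix = line_prefix   # looked up on each call, so updates are seen
        return prefix

    line_prefix = '>> '
    print(repr(entry_bound_at_def()))    # '\n-> '  (stale value from definition time)
    print(repr(entry_bound_at_call()))   # '>> '    (current value)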

Lib/test/test_capi/test_opt.py

Lines changed: 32 additions & 0 deletions
@@ -2662,6 +2662,38 @@ def f():
                         f" {executor} at offset {idx} rather"
                         f" than expected _EXIT_TRACE")
 
+    def test_enter_executor_valid_op_arg(self):
+        script_helper.assert_python_ok("-c", textwrap.dedent("""
+        import sys
+        sys.setrecursionlimit(30) # reduce time of the run
+
+        str_v1 = ''
+        tuple_v2 = (None, None, None, None, None)
+        small_int_v3 = 4
+
+        def f1():
+
+            for _ in range(10):
+                abs(0)
+
+            tuple_v2[small_int_v3]
+            tuple_v2[small_int_v3]
+            tuple_v2[small_int_v3]
+
+            def recursive_wrapper_4569():
+                str_v1 > str_v1
+                str_v1 > str_v1
+                str_v1 > str_v1
+                recursive_wrapper_4569()
+
+            recursive_wrapper_4569()
+
+        for i_f1 in range(19000):
+            try:
+                f1()
+            except RecursionError:
+                pass
+        """))
 
 
 def global_identity(x):

Lib/test/test_free_threading/test_monitoring.py

Lines changed: 6 additions & 23 deletions
@@ -35,10 +35,10 @@ def work(self, n, funcs):
             return n
         return self.work(n - 1, funcs) + self.work(n - 2, funcs)
 
-    def start_work(self, n, funcs):
+    def start_work(self, n, funcs, barrier):
         # With the GIL builds we need to make sure that the hooks have
         # a chance to run as it's possible to run w/o releasing the GIL.
-        time.sleep(0.1)
+        barrier.wait()
         self.work(n, funcs)
 
     def after_test(self):
@@ -53,14 +53,16 @@ def test_instrumentation(self):
             exec("def f(): pass", x)
             funcs.append(x["f"])
 
+        barrier = Barrier(self.thread_count + 1)
         threads = []
        for i in range(self.thread_count):
            # Each thread gets a copy of the func list to avoid contention
-            t = Thread(target=self.start_work, args=(self.fib, list(funcs)))
+            t = Thread(target=self.start_work, args=(self.fib, list(funcs), barrier))
             t.start()
             threads.append(t)
 
         self.after_threads()
+        barrier.wait()
 
         while True:
             any_alive = False
@@ -120,7 +122,6 @@ class MonitoringMultiThreaded(
     def setUp(self):
         super().setUp()
         self.set = False
-        self.called = False
         monitoring.register_callback(
             self.tool_id, monitoring.events.LINE, self.callback
         )
@@ -130,10 +131,7 @@ def tearDown(self):
         super().tearDown()
 
     def callback(self, *args):
-        self.called = True
-
-    def after_test(self):
-        self.assertTrue(self.called)
+        pass
 
     def during_threads(self):
         if self.set:
@@ -151,16 +149,11 @@ class SetTraceMultiThreaded(InstrumentationMultiThreadedMixin, TestCase):
 
     def setUp(self):
         self.set = False
-        self.called = False
-
-    def after_test(self):
-        self.assertTrue(self.called)
 
     def tearDown(self):
         sys.settrace(None)
 
     def trace_func(self, frame, event, arg):
-        self.called = True
         return self.trace_func
 
     def during_threads(self):
@@ -177,16 +170,11 @@ class SetProfileMultiThreaded(InstrumentationMultiThreadedMixin, TestCase):
 
     def setUp(self):
         self.set = False
-        self.called = False
-
-    def after_test(self):
-        self.assertTrue(self.called)
 
     def tearDown(self):
         sys.setprofile(None)
 
     def trace_func(self, frame, event, arg):
-        self.called = True
         return self.trace_func
 
     def during_threads(self):
@@ -203,16 +191,11 @@ class SetProfileAllThreadsMultiThreaded(InstrumentationMultiThreadedMixin, TestC
 
     def setUp(self):
         self.set = False
-        self.called = False
-
-    def after_test(self):
-        self.assertTrue(self.called)
 
     def tearDown(self):
         threading.setprofile_all_threads(None)
 
     def trace_func(self, frame, event, arg):
-        self.called = True
         return self.trace_func
 
     def during_threads(self):
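
These test changes replace a fixed ``time.sleep(0.1)`` with a ``threading.Barrier`` sized for all workers plus the main thread, so the workers only start once the main thread has finished setting up the instrumentation. A minimal sketch of that pattern with hypothetical worker code:

    import threading

    THREADS = 4
    barrier = threading.Barrier(THREADS + 1)   # all workers + the main thread
    results = []

    def worker(n):
        barrier.wait()            # block until the main thread is ready too
        results.append(n * n)     # stand-in for the instrumented work

    threads = [threading.Thread(target=worker, args=(i,)) for i in range(THREADS)]
    for t in threads:
        t.start()

    # ... main thread installs hooks / instrumentation here ...
    barrier.wait()                # last party arrives; every worker proceeds
    for t in threads:
        t.join()

    print(sorted(results))        # [0, 1, 4, 9]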

Lib/test/test_hashlib.py

Lines changed: 18 additions & 9 deletions
@@ -40,12 +40,15 @@
 openssl_hashlib = import_fresh_module('hashlib', fresh=['_hashlib'])
 
 try:
-    from _hashlib import HASH, HASHXOF, openssl_md_meth_names, get_fips_mode
+    import _hashlib
 except ImportError:
-    HASH = None
-    HASHXOF = None
-    openssl_md_meth_names = frozenset()
-
+    _hashlib = None
+# The extension module may exist but only define some of these. gh-141907
+HASH = getattr(_hashlib, 'HASH', None)
+HASHXOF = getattr(_hashlib, 'HASHXOF', None)
+openssl_md_meth_names = getattr(_hashlib, 'openssl_md_meth_names', frozenset())
+get_fips_mode = getattr(_hashlib, 'get_fips_mode', None)
+if not get_fips_mode:
     def get_fips_mode():
         return 0
 
@@ -631,9 +634,14 @@ def check_sha3(self, name, capacity, rate, suffix):
         constructors = self.constructors_to_test[name]
         for hash_object_constructor in constructors:
             m = hash_object_constructor()
-            if HASH is not None and isinstance(m, HASH):
-                # _hashopenssl's variant does not have extra SHA3 attributes
-                continue
+            if name.startswith('shake_'):
+                if HASHXOF is not None and isinstance(m, HASHXOF):
+                    # _hashopenssl's variant does not have extra SHA3 attributes
+                    continue
+            else:
+                if HASH is not None and isinstance(m, HASH):
+                    # _hashopenssl's variant does not have extra SHA3 attributes
+                    continue
             self.assertEqual(capacity + rate, 1600)
             self.assertEqual(m._capacity_bits, capacity)
             self.assertEqual(m._rate_bits, rate)
@@ -1156,7 +1164,8 @@ def test_disallow_instantiation(self):
     def test_hash_disallow_instantiation(self):
         # internal types like _hashlib.HASH are not constructable
         support.check_disallow_instantiation(self, HASH)
-        support.check_disallow_instantiation(self, HASHXOF)
+        if HASHXOF is not None:
+            support.check_disallow_instantiation(self, HASHXOF)
 
     def test_readonly_types(self):
         for algorithm, constructors in self.constructors_to_test.items():
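
The hashlib test now treats every ``_hashlib`` attribute as optional: ``getattr`` with a default covers both a missing extension module and a partially built one, because ``getattr(None, name, default)`` simply returns the default. A small sketch of the same pattern with a hypothetical optional module:

    try:
        import _some_accelerator          # hypothetical optional extension
    except ImportError:
        _some_accelerator = None

    # A missing module and a missing attribute collapse into the same fallback path.
    fast_hash = getattr(_some_accelerator, 'fast_hash', None)
    if fast_hash is None:
        def fast_hash(data):              # pure-Python fallback
            return hash(data)

    print(fast_hash(b"abc"))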

Lib/test/test_monitoring.py

Lines changed: 3 additions & 3 deletions
@@ -12,10 +12,10 @@
 import unittest
 
 import test.support
-from test.support import requires_specialization_ft, script_helper
+from test.support import import_helper, requires_specialization_ft, script_helper
 
-_testcapi = test.support.import_helper.import_module("_testcapi")
-_testinternalcapi = test.support.import_helper.import_module("_testinternalcapi")
+_testcapi = import_helper.import_module("_testcapi")
+_testinternalcapi = import_helper.import_module("_testinternalcapi")
 
 PAIR = (0,1)
 
