Skip to content

Commit a3e98a7

Browse files
gpshead and claude committed
Improve test_pipeline_large_data_with_stderr to use large stderr
Update the test to write 64KB to stderr from each process (128KB total) instead of just small status messages. This better tests that the multiplexed I/O handles concurrent large data on both stdout and stderr without deadlocking. Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
1 parent e22d1da commit a3e98a7

File tree

1 file changed

+21
-11
lines changed

1 file changed

+21
-11
lines changed

Lib/test/test_subprocess.py

Lines changed: 21 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -2256,36 +2256,46 @@ def test_pipeline_large_data_three_stages(self):
22562256
self.assertGreater(output_len, len(large_data))
22572257

22582258
def test_pipeline_large_data_with_stderr(self):
    """Test large data with large stderr output from multiple processes.

    Ensures stderr collection doesn't interfere with the main data flow
    and doesn't cause deadlocks when multiple processes write large
    amounts to stderr concurrently with stdin/stdout data flow.
    """
    # 64KB of data through the pipeline
    data_size = 64 * 1024
    large_data = 'z' * data_size
    # Each process writes 64KB to stderr as well
    stderr_size = 64 * 1024

    # Stage 1: flood stderr, then pass stdin through to stdout unchanged.
    stage1_src = f'''
import sys
# Write large stderr output
sys.stderr.write("E" * {stderr_size})
sys.stderr.write("\\nstage1 done\\n")
# Pass through stdin to stdout
data = sys.stdin.read()
print(data)
'''
    # Stage 2: flood stderr, then report how many bytes arrived on stdin.
    stage2_src = f'''
import sys
# Write large stderr output
sys.stderr.write("F" * {stderr_size})
sys.stderr.write("\\nstage2 done\\n")
# Count input size
data = sys.stdin.read()
print(len(data.strip()))
'''

    result = subprocess.run_pipeline(
        [sys.executable, '-c', stage1_src],
        [sys.executable, '-c', stage2_src],
        input=large_data, capture_output=True, text=True, timeout=30
    )

    # The full payload must survive the pipeline despite the stderr noise.
    self.assertEqual(result.stdout.strip(), str(data_size))
    # Verify both processes wrote to stderr
    self.assertIn('stage1 done', result.stderr)
    self.assertIn('stage2 done', result.stderr)
    # Verify large stderr was captured (at least most of it)
    self.assertGreater(len(result.stderr), stderr_size)
    self.assertEqual(result.returncodes, [0, 0])
22902300

22912301

0 commit comments

Comments
 (0)