@@ -119,13 +119,13 @@ _PyOptimizer_Optimize(
     _PyInterpreterFrame *frame, PyThreadState *tstate)
 {
     PyInterpreterState *interp = _PyInterpreterState_GET();
-    int chain_depth = tstate->interp->jit_state.initial_chain_depth;
+    int chain_depth = tstate->interp->jit_state.initial_state.chain_depth;
     assert(interp->jit);
     assert(!interp->compiling);
-    assert(tstate->interp->jit_state.initial_stack_depth >= 0);
+    assert(tstate->interp->jit_state.initial_state.stack_depth >= 0);
 #ifndef Py_GIL_DISABLED
     // Trace got stomped on by another thread.
-    if (tstate->interp->jit_state.initial_func == NULL) {
+    if (tstate->interp->jit_state.initial_state.func == NULL) {
         return 0;
     }
     interp->compiling = true;
@@ -135,8 +135,8 @@ _PyOptimizer_Optimize(
     // this is true, since a deopt won't infinitely re-enter the executor:
     chain_depth %= MAX_CHAIN_DEPTH;
     bool progress_needed = chain_depth == 0;
-    PyCodeObject *code = (PyCodeObject *)tstate->interp->jit_state.initial_code;
-    _Py_CODEUNIT *start = tstate->interp->jit_state.start_instr;
+    PyCodeObject *code = (PyCodeObject *)tstate->interp->jit_state.initial_state.code;
+    _Py_CODEUNIT *start = tstate->interp->jit_state.initial_state.start_instr;
     if (progress_needed && !has_space_for_executor(code, start)) {
         interp->compiling = false;
         return 0;
@@ -171,9 +171,9 @@ _PyOptimizer_Optimize(
     else {
         executor->vm_data.code = NULL;
     }
-    _PyExitData *prev_exit = tstate->interp->jit_state.prev_exit;
-    if (prev_exit != NULL) {
-        prev_exit->executor = executor;
+    _PyExitData *exit = tstate->interp->jit_state.initial_state.exit;
+    if (exit != NULL) {
+        exit->executor = executor;
     }
     executor->vm_data.chain_depth = chain_depth;
     assert(executor->vm_data.valid);
@@ -569,7 +569,7 @@ _PyJit_translate_single_bytecode_to_trace(
     if (old_code == NULL) {
         return 0;
     }
-    bool progress_needed = (tstate->interp->jit_state.initial_chain_depth % MAX_CHAIN_DEPTH) == 0;;
+    bool progress_needed = (tstate->interp->jit_state.initial_state.chain_depth % MAX_CHAIN_DEPTH) == 0;
     _PyBloomFilter *dependencies = &tstate->interp->jit_state.dependencies;
     _Py_BloomFilter_Add(dependencies, old_code);
     int trace_length = tstate->interp->jit_state.code_curr_size;
@@ -748,8 +748,8 @@ _PyJit_translate_single_bytecode_to_trace(
             _Py_FALLTHROUGH;
         case JUMP_BACKWARD_NO_INTERRUPT:
         {
-            if ((next_instr != tstate->interp->jit_state.close_loop_instr) &&
-                (next_instr != tstate->interp->jit_state.start_instr) &&
+            if ((next_instr != tstate->interp->jit_state.initial_state.close_loop_instr) &&
+                (next_instr != tstate->interp->jit_state.initial_state.start_instr) &&
                 tstate->interp->jit_state.code_curr_size > 5 &&
                 // These are coroutines, and we want to unroll those usually.
                 opcode != JUMP_BACKWARD_NO_INTERRUPT) {
@@ -760,7 +760,8 @@ _PyJit_translate_single_bytecode_to_trace(
                 OPT_STAT_INC(inner_loop);
                 ADD_TO_TRACE(_EXIT_TRACE, 0, 0, target);
                 trace[trace_length - 1].operand1 = true;  // is_control_flow
-                DPRINTF(2, "JUMP_BACKWARD not to top ends trace %p %p %p\n", next_instr, tstate->interp->jit_state.close_loop_instr, tstate->interp->jit_state.start_instr);
+                DPRINTF(2, "JUMP_BACKWARD not to top ends trace %p %p %p\n", next_instr,
+                    tstate->interp->jit_state.initial_state.close_loop_instr, tstate->interp->jit_state.initial_state.start_instr);
                 goto done;
             }
             break;
@@ -915,7 +916,8 @@ _PyJit_translate_single_bytecode_to_trace(
         }
     }
     // Loop back to the start
-    int is_first_instr = tstate->interp->jit_state.close_loop_instr == next_instr || tstate->interp->jit_state.start_instr == next_instr;
+    int is_first_instr = tstate->interp->jit_state.initial_state.close_loop_instr == next_instr ||
+        tstate->interp->jit_state.initial_state.start_instr == next_instr;
     if (is_first_instr && tstate->interp->jit_state.code_curr_size > 5) {
         if (needs_guard_ip) {
             ADD_TO_TRACE(_SET_IP, 0, (uintptr_t)next_instr, 0);
@@ -985,13 +987,13 @@ _PyJit_TryInitializeTracing(
     tstate->interp->jit_state.code_curr_size = 2;
 
     tstate->interp->jit_state.code_max_size = UOP_MAX_TRACE_LENGTH;
-    tstate->interp->jit_state.start_instr = start_instr;
-    tstate->interp->jit_state.close_loop_instr = close_loop_instr;
-    tstate->interp->jit_state.initial_code = (PyCodeObject *)Py_NewRef(code);
-    tstate->interp->jit_state.initial_func = (PyFunctionObject *)Py_XNewRef(PyStackRef_AsPyObjectBorrow(frame->f_funcobj));
-    tstate->interp->jit_state.prev_exit = exit;
-    tstate->interp->jit_state.initial_stack_depth = curr_stackdepth;
-    tstate->interp->jit_state.initial_chain_depth = chain_depth;
+    tstate->interp->jit_state.initial_state.start_instr = start_instr;
+    tstate->interp->jit_state.initial_state.close_loop_instr = close_loop_instr;
+    tstate->interp->jit_state.initial_state.code = (PyCodeObject *)Py_NewRef(code);
+    tstate->interp->jit_state.initial_state.func = (PyFunctionObject *)Py_XNewRef(PyStackRef_AsPyObjectBorrow(frame->f_funcobj));
+    tstate->interp->jit_state.initial_state.exit = exit;
+    tstate->interp->jit_state.initial_state.stack_depth = curr_stackdepth;
+    tstate->interp->jit_state.initial_state.chain_depth = chain_depth;
     tstate->interp->jit_state.prev_instr_frame = frame;
     tstate->interp->jit_state.dependencies_still_valid = true;
     tstate->interp->jit_state.specialize_counter = 0;
@@ -1002,7 +1004,7 @@ _PyJit_TryInitializeTracing(
     tstate->interp->jit_state.prev_instr_stacklevel = curr_stackdepth;
     tstate->interp->jit_state.prev_instr_is_super = false;
     assert(curr_instr->op.code == JUMP_BACKWARD_JIT || (exit != NULL));
-    tstate->interp->jit_state.jump_backward_instr = curr_instr;
+    tstate->interp->jit_state.initial_state.jump_backward_instr = curr_instr;
     assert(curr_instr->op.code == JUMP_BACKWARD_JIT || (exit != NULL));
     _Py_BloomFilter_Init(&tstate->interp->jit_state.dependencies);
     return 1;
@@ -1011,8 +1013,8 @@ _PyJit_TryInitializeTracing(
 void
 _PyJit_FinalizeTracing(PyThreadState *tstate)
 {
-    Py_CLEAR(tstate->interp->jit_state.initial_code);
-    Py_CLEAR(tstate->interp->jit_state.initial_func);
+    Py_CLEAR(tstate->interp->jit_state.initial_state.code);
+    Py_CLEAR(tstate->interp->jit_state.initial_state.func);
     Py_CLEAR(tstate->interp->jit_state.prev_instr_code);
     tstate->interp->jit_state.code_curr_size = 2;
     tstate->interp->jit_state.code_max_size = UOP_MAX_TRACE_LENGTH - 1;
@@ -1335,7 +1337,7 @@ uop_optimize(
     // It is the optimizer's responsibility to add the dependencies it requires on its own.
     _PyBloomFilter new_dependencies;
     _Py_BloomFilter_Init(&new_dependencies);
-    _Py_BloomFilter_Add(&new_dependencies, tstate->interp->jit_state.initial_code);
+    _Py_BloomFilter_Add(&new_dependencies, tstate->interp->jit_state.initial_state.code);
     PyInterpreterState *interp = _PyInterpreterState_GET();
     _PyUOpInstruction *buffer = interp->jit_state.code_buffer;
     OPT_STAT_INC(attempts);
@@ -1344,7 +1346,7 @@ uop_optimize(
     if (env_var == NULL || *env_var == '\0' || *env_var > '0') {
         is_noopt = false;
     }
-    int curr_stackentries = tstate->interp->jit_state.initial_stack_depth;
+    int curr_stackentries = tstate->interp->jit_state.initial_state.stack_depth;
     int length = interp->jit_state.code_curr_size;
     // Trace too short, don't bother.
     if (length <= 5) {
@@ -1354,7 +1356,7 @@ uop_optimize(
     assert(length < UOP_MAX_TRACE_LENGTH);
     OPT_STAT_INC(traces_created);
     if (!is_noopt) {
-        length = _Py_uop_analyze_and_optimize(tstate->interp->jit_state.initial_func, buffer,
+        length = _Py_uop_analyze_and_optimize(tstate->interp->jit_state.initial_state.func, buffer,
                                               length,
                                               curr_stackentries, &new_dependencies);
         if (length <= 0) {
@@ -1379,7 +1381,8 @@ uop_optimize(
     OPT_HIST(effective_trace_length(buffer, length), optimized_trace_length_hist);
     length = prepare_for_execution(buffer, length);
     assert(length <= UOP_MAX_TRACE_LENGTH);
-    _PyExecutorObject *executor = make_executor_from_uops(buffer, length, &new_dependencies, tstate->interp->jit_state.initial_chain_depth);
+    _PyExecutorObject *executor = make_executor_from_uops(
+        buffer, length, &new_dependencies, tstate->interp->jit_state.initial_state.chain_depth);
     if (executor == NULL) {
         return -1;
     }
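
Taken together, the hunks above fold the flat per-trace fields of `jit_state` (`initial_code`, `initial_func`, `initial_stack_depth`, `initial_chain_depth`, `start_instr`, `close_loop_instr`, `prev_exit`, `jump_backward_instr`) into a single nested `initial_state` member that records everything captured when tracing begins. The struct's actual declaration is not part of this diff; the sketch below is a hypothetical reconstruction inferred only from the accesses shown here, and the type name `_PyJitTraceInitialState` is made up for illustration.

```c
/* Hypothetical sketch, not the real declaration: field names and types are
 * inferred from how initial_state is read and written in the hunks above. */
typedef struct {
    _Py_CODEUNIT *start_instr;          // first instruction of the traced region
    _Py_CODEUNIT *close_loop_instr;     // instruction that closes the loop back to the top
    _Py_CODEUNIT *jump_backward_instr;  // the JUMP_BACKWARD_JIT that triggered tracing
    PyCodeObject *code;                 // strong reference; cleared in _PyJit_FinalizeTracing
    PyFunctionObject *func;             // strong reference, may be NULL (Py_XNewRef); cleared likewise
    _PyExitData *exit;                  // side exit to patch with the new executor, or NULL
    int stack_depth;                    // operand-stack depth when tracing started (asserted >= 0)
    int chain_depth;                    // executor chain depth, taken modulo MAX_CHAIN_DEPTH
} _PyJitTraceInitialState;              // hypothetical name
```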