From 7b1e0a6096d5da629732bc95f2b991efdc8fa4f1 Mon Sep 17 00:00:00 2001 From: Randy Stauner Date: Mon, 10 Nov 2025 10:56:25 -0700 Subject: [PATCH 01/10] ZJIT: Define jit_compile_exception for ZJIT even without YJIT Seems like an oversight --- vm.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vm.c b/vm.c index 4bd6147fc504e9..0452c4caddcc80 100644 --- a/vm.c +++ b/vm.c @@ -584,7 +584,7 @@ jit_exec(rb_execution_context_t *ec) return Qundef; } -#if USE_YJIT +#if USE_YJIT || USE_ZJIT // Generate JIT code that supports the following kind of ISEQ entry: // * The first ISEQ pushed by vm_exec_handle_exception. The frame would // point to a location specified by a catch table, and it doesn't have From 3ddb5f99a9c472ea7273c3d6c99650eb1c8914f6 Mon Sep 17 00:00:00 2001 From: Randy Stauner Date: Mon, 10 Nov 2025 10:58:21 -0700 Subject: [PATCH 02/10] Allow --jit to mean zjit if yjit isn't defined The --help output suggests this should work as ZJIT is labeled as the default if YJIT isn't enabled. --- ruby.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ruby.c b/ruby.c index c319fd1237a132..872a317e3bbf7e 100644 --- a/ruby.c +++ b/ruby.c @@ -1473,7 +1473,7 @@ proc_long_options(ruby_cmdline_options_t *opt, const char *s, long argc, char ** ruby_verbose = Qtrue; } else if (strcmp("jit", s) == 0) { -#if USE_YJIT +#if USE_YJIT || USE_ZJIT FEATURE_SET(opt->features, FEATURE_BIT(jit)); #else rb_warn("Ruby was built without JIT support"); From f0541312883c27c7b98e004ebedc4d9e1874a147 Mon Sep 17 00:00:00 2001 From: Takashi Kokubun Date: Mon, 10 Nov 2025 13:47:56 -0800 Subject: [PATCH 03/10] ZJIT: Add patch_point_count stat (#15100) --- zjit.rb | 1 + zjit/src/invariants.rs | 55 +++++++++++++++++++++++++++--------------- zjit/src/stats.rs | 7 ++++++ 3 files changed, 44 insertions(+), 19 deletions(-) diff --git a/zjit.rb b/zjit.rb index c59cce41dd7d70..29f43acc5bca7e 100644 --- a/zjit.rb +++ b/zjit.rb @@ -191,6 +191,7 @@ def stats_string :dynamic_getivar_count, :dynamic_setivar_count, + :patch_point_count, :compiled_iseq_count, :failed_iseq_count, diff --git a/zjit/src/invariants.rs b/zjit/src/invariants.rs index f1adc7b32cfdb9..2855d7d592abe2 100644 --- a/zjit/src/invariants.rs +++ b/zjit/src/invariants.rs @@ -2,10 +2,10 @@ use std::{collections::{HashMap, HashSet}, mem}; -use crate::{backend::lir::{asm_comment, Assembler}, cruby::{iseq_name, rb_callable_method_entry_t, rb_gc_location, ruby_basic_operators, src_loc, with_vm_lock, IseqPtr, RedefinitionFlag, ID, VALUE}, hir::Invariant, options::debug, state::{zjit_enabled_p, ZJITState}, virtualmem::CodePtr}; +use crate::{backend::lir::{Assembler, asm_comment}, cast::IntoU64, cruby::{ID, IseqPtr, RedefinitionFlag, VALUE, iseq_name, rb_callable_method_entry_t, rb_gc_location, ruby_basic_operators, src_loc, with_vm_lock}, hir::Invariant, options::debug, state::{ZJITState, zjit_enabled_p}, stats::{decr_counter_by, incr_counter}, virtualmem::CodePtr}; use crate::payload::IseqPayload; use crate::stats::with_time_stat; -use crate::stats::Counter::invalidation_time_ns; +use crate::stats::Counter::{invalidation_time_ns, patch_point_count}; use crate::gc::remove_gc_offsets; macro_rules! 
compile_patch_points { @@ -36,6 +36,23 @@ struct PatchPoint { payload_ptr: *mut IseqPayload, } +impl PatchPoint { + /// PatchPointer constructor, which also increments `patch_point_count` + fn new(patch_point_ptr: CodePtr, side_exit_ptr: CodePtr, payload_ptr: *mut IseqPayload) -> PatchPoint { + incr_counter!(patch_point_count); + Self { + patch_point_ptr, + side_exit_ptr, + payload_ptr, + } + } + + /// Decrease `patch_point_count` by the size of a given `HashSet` + fn decr_counter(patch_points: HashSet) { + decr_counter_by(patch_point_count, patch_points.len().as_u64()); + } +} + /// Used to track all of the various block references that contain assumptions /// about the state of the virtual machine. #[derive(Default)] @@ -83,17 +100,17 @@ impl Invariants { // generated code referencing the ISEQ are unreachable. We mark the ISEQs baked into // generated code. self.ep_escape_iseqs.remove(&iseq); - self.no_ep_escape_iseq_patch_points.remove(&iseq); + self.no_ep_escape_iseq_patch_points.remove(&iseq).map(PatchPoint::decr_counter); } /// Forget a CME when freeing it. See [Self::forget_iseq] for reasoning. pub fn forget_cme(&mut self, cme: *const rb_callable_method_entry_t) { - self.cme_patch_points.remove(&cme); + self.cme_patch_points.remove(&cme).map(PatchPoint::decr_counter); } /// Forget a class when freeing it. See [Self::forget_iseq] for reasoning. pub fn forget_klass(&mut self, klass: VALUE) { - self.no_singleton_class_patch_points.remove(&klass); + self.no_singleton_class_patch_points.remove(&klass).map(PatchPoint::decr_counter); } /// Update ISEQ references in Invariants::ep_escape_iseqs @@ -198,11 +215,11 @@ pub fn track_no_ep_escape_assumption( payload_ptr: *mut IseqPayload, ) { let invariants = ZJITState::get_invariants(); - invariants.no_ep_escape_iseq_patch_points.entry(iseq).or_default().insert(PatchPoint { + invariants.no_ep_escape_iseq_patch_points.entry(iseq).or_default().insert(PatchPoint::new( patch_point_ptr, side_exit_ptr, payload_ptr, - }); + )); } /// Returns true if a given ISEQ has previously escaped environment pointer. @@ -219,11 +236,11 @@ pub fn track_bop_assumption( payload_ptr: *mut IseqPayload, ) { let invariants = ZJITState::get_invariants(); - invariants.bop_patch_points.entry((klass, bop)).or_default().insert(PatchPoint { + invariants.bop_patch_points.entry((klass, bop)).or_default().insert(PatchPoint::new( patch_point_ptr, side_exit_ptr, payload_ptr, - }); + )); } /// Track a patch point for a callable method entry (CME). @@ -234,11 +251,11 @@ pub fn track_cme_assumption( payload_ptr: *mut IseqPayload, ) { let invariants = ZJITState::get_invariants(); - invariants.cme_patch_points.entry(cme).or_default().insert(PatchPoint { + invariants.cme_patch_points.entry(cme).or_default().insert(PatchPoint::new( patch_point_ptr, side_exit_ptr, payload_ptr, - }); + )); } /// Track a patch point for each constant name in a constant path assumption. 
@@ -257,11 +274,11 @@ pub fn track_stable_constant_names_assumption( break; } - invariants.constant_state_patch_points.entry(id).or_default().insert(PatchPoint { + invariants.constant_state_patch_points.entry(id).or_default().insert(PatchPoint::new( patch_point_ptr, side_exit_ptr, payload_ptr, - }); + )); idx += 1; } @@ -275,11 +292,11 @@ pub fn track_no_singleton_class_assumption( payload_ptr: *mut IseqPayload, ) { let invariants = ZJITState::get_invariants(); - invariants.no_singleton_class_patch_points.entry(klass).or_default().insert(PatchPoint { + invariants.no_singleton_class_patch_points.entry(klass).or_default().insert(PatchPoint::new( patch_point_ptr, side_exit_ptr, payload_ptr, - }); + )); } /// Called when a method is redefined. Invalidates all JIT code that depends on the CME. @@ -330,11 +347,11 @@ pub extern "C" fn rb_zjit_constant_state_changed(id: ID) { /// Track the JIT code that assumes that the interpreter is running with only one ractor pub fn track_single_ractor_assumption(patch_point_ptr: CodePtr, side_exit_ptr: CodePtr, payload_ptr: *mut IseqPayload) { let invariants = ZJITState::get_invariants(); - invariants.single_ractor_patch_points.insert(PatchPoint { + invariants.single_ractor_patch_points.insert(PatchPoint::new( patch_point_ptr, side_exit_ptr, payload_ptr, - }); + )); } /// Callback for when Ruby is about to spawn a ractor. In that case we need to @@ -359,11 +376,11 @@ pub extern "C" fn rb_zjit_before_ractor_spawn() { pub fn track_no_trace_point_assumption(patch_point_ptr: CodePtr, side_exit_ptr: CodePtr, payload_ptr: *mut IseqPayload) { let invariants = ZJITState::get_invariants(); - invariants.no_trace_point_patch_points.insert(PatchPoint { + invariants.no_trace_point_patch_points.insert(PatchPoint::new( patch_point_ptr, side_exit_ptr, payload_ptr, - }); + )); } #[unsafe(no_mangle)] diff --git a/zjit/src/stats.rs b/zjit/src/stats.rs index a2105ae27e16dc..85e233a43f6756 100644 --- a/zjit/src/stats.rs +++ b/zjit/src/stats.rs @@ -114,6 +114,7 @@ macro_rules! make_counters { make_counters! { // Default counters that are available without --zjit-stats default { + patch_point_count, compiled_iseq_count, failed_iseq_count, @@ -307,6 +308,12 @@ pub fn incr_counter_by(counter: Counter, amount: u64) { unsafe { *ptr += amount; } } +/// Decrease a counter by a specified amount +pub fn decr_counter_by(counter: Counter, amount: u64) { + let ptr = counter_ptr(counter); + unsafe { *ptr -= amount; } +} + /// Increment a counter by its identifier macro_rules! incr_counter { ($counter_name:ident) => { From c7f0a9c4cd3ec65a06c6e51252af7a45afa9d358 Mon Sep 17 00:00:00 2001 From: Peter Zhu Date: Sun, 9 Nov 2025 17:35:17 -0500 Subject: [PATCH 04/10] Fix memory leak in subclasses when freeing classext We don't decrement the super and module subclasses count for iclasses that are having their classext replaced. This causes the reference count to be incorrect and leak memory. 
The following script demonstrates the memory leak: module Foo refine(Object) do define_method(:<=) {} end end class Bar include Comparable end With RUBY_FREE_AT_EXIT and ASAN, we can see many memory leaks, including: Direct leak of 16 byte(s) in 1 object(s) allocated from: #0 0x599f715adca2 in calloc (miniruby+0x64ca2) #1 0x599f716bd779 in calloc1 gc/default/default.c:1495:12 #2 0x599f716d1370 in rb_gc_impl_calloc gc/default/default.c:8216:5 #3 0x599f716b8ab1 in ruby_xcalloc_body gc.c:5221:12 #4 0x599f716b269c in ruby_xcalloc gc.c:5215:34 #5 0x599f715eab23 in class_alloc0 class.c:790:22 #6 0x599f715e4bec in class_alloc class.c:836:12 #7 0x599f715e60c9 in module_new class.c:1693:17 #8 0x599f715e60a2 in rb_module_new class.c:1701:12 #9 0x599f715e6303 in rb_define_module class.c:1733:14 #10 0x599f715ebc5f in Init_Comparable compar.c:315:22 #11 0x599f716e35f5 in rb_call_inits inits.c:32:5 #12 0x599f7169cbfd in ruby_setup eval.c:88:9 #13 0x599f7169cdac in ruby_init eval.c:100:17 #14 0x599f715b0fa9 in rb_main main.c:41:5 #15 0x599f715b0f59 in main main.c:62:12 #16 0x739b2f02a1c9 in __libc_start_call_main csu/../sysdeps/nptl/libc_start_call_main.h:58:16 #17 0x739b2f02a28a in __libc_start_main csu/../csu/libc-start.c:360:3 #18 0x599f7157c424 in _start (miniruby+0x33424) --- class.c | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/class.c b/class.c index 23e1188bb2f369..770ace99d148fe 100644 --- a/class.c +++ b/class.c @@ -695,14 +695,14 @@ rb_class_classext_free_subclasses(rb_classext_t *ext, VALUE klass, bool replacin rb_box_subclasses_ref_dec(anchor->box_subclasses); xfree(anchor); - if (!replacing && RCLASSEXT_BOX_SUPER_SUBCLASSES(ext)) { + if (RCLASSEXT_BOX_SUPER_SUBCLASSES(ext)) { rb_box_subclasses_t *box_sub = RCLASSEXT_BOX_SUPER_SUBCLASSES(ext); - remove_class_from_subclasses(box_sub->tbl, box_id, klass); + if (!replacing) remove_class_from_subclasses(box_sub->tbl, box_id, klass); rb_box_subclasses_ref_dec(box_sub); } - if (!replacing && RCLASSEXT_BOX_MODULE_SUBCLASSES(ext)) { + if (RCLASSEXT_BOX_MODULE_SUBCLASSES(ext)) { rb_box_subclasses_t *box_sub = RCLASSEXT_BOX_MODULE_SUBCLASSES(ext); - remove_class_from_subclasses(box_sub->tbl, box_id, klass); + if (!replacing) remove_class_from_subclasses(box_sub->tbl, box_id, klass); rb_box_subclasses_ref_dec(box_sub); } } From f95aa5b2a9d559d6deda1b0aa53aee8198ab3168 Mon Sep 17 00:00:00 2001 From: Randy Stauner Date: Mon, 10 Nov 2025 16:16:31 -0700 Subject: [PATCH 05/10] ZJIT: Rename not_optimized_instruction to uncategorized_instruction (#15130) Make it more obvious that this hasn't been handled and could be broken down more. 
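As a rough usage sketch (not verified output; the flags, the exact key names, and the stats helper below are assumed from the surrounding diff rather than confirmed), the renamed counters would surface under the new prefix when stats are enabled:

    # Run any workload with ZJIT stats and read the dump printed at exit,
    # looking for the "uncategorized_fallback_yarv_insn_*" counters.
    ruby --zjit --zjit-stats -e '10_000.times { |i| i.to_s }'

    # Or, assuming a stats hash API analogous to YJIT's, inspect them programmatically:
    RubyVM::ZJIT.stats.select { |key, _| key.start_with?('uncategorized_fallback_yarv_insn_') }
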
--- zjit.rb | 2 +- zjit/src/codegen.rs | 2 +- zjit/src/hir.rs | 14 +++++++------- zjit/src/stats.rs | 8 ++++---- 4 files changed, 13 insertions(+), 13 deletions(-) diff --git a/zjit.rb b/zjit.rb index 29f43acc5bca7e..9728551c3fd0f4 100644 --- a/zjit.rb +++ b/zjit.rb @@ -162,7 +162,7 @@ def stats_string # Show fallback counters, ordered by the typical amount of fallbacks for the prefix at the time print_counters_with_prefix(prefix: 'unspecialized_send_def_type_', prompt: 'not optimized method types for send', buf:, stats:, limit: 20) print_counters_with_prefix(prefix: 'unspecialized_send_without_block_def_type_', prompt: 'not optimized method types for send_without_block', buf:, stats:, limit: 20) - print_counters_with_prefix(prefix: 'not_optimized_yarv_insn_', prompt: 'not optimized instructions', buf:, stats:, limit: 20) + print_counters_with_prefix(prefix: 'uncategorized_fallback_yarv_insn_', prompt: 'instructions with uncategorized fallback reason', buf:, stats:, limit: 20) print_counters_with_prefix(prefix: 'send_fallback_', prompt: 'send fallback reasons', buf:, stats:, limit: 20) print_counters_with_prefix(prefix: 'invokeblock_handler_', prompt: 'invokeblock handler', buf:, stats:, limit: 10) diff --git a/zjit/src/codegen.rs b/zjit/src/codegen.rs index 58c396ed9b7ce4..0fc0b1ce47d367 100644 --- a/zjit/src/codegen.rs +++ b/zjit/src/codegen.rs @@ -1828,7 +1828,7 @@ fn gen_incr_send_fallback_counter(asm: &mut Assembler, reason: SendFallbackReaso use SendFallbackReason::*; match reason { - NotOptimizedInstruction(opcode) => { + Uncategorized(opcode) => { gen_incr_counter_ptr(asm, send_fallback_counter_ptr_for_opcode(opcode)); } SendWithoutBlockNotOptimizedMethodType(method_type) => { diff --git a/zjit/src/hir.rs b/zjit/src/hir.rs index d4053a34e63d42..85e9bdb00a2fb7 100644 --- a/zjit/src/hir.rs +++ b/zjit/src/hir.rs @@ -607,7 +607,7 @@ pub enum SendFallbackReason { ComplexArgPass, /// Initial fallback reason for every instruction, which should be mutated to /// a more actionable reason when an attempt to specialize the instruction fails. - NotOptimizedInstruction(ruby_vminsn_type), + Uncategorized(ruby_vminsn_type), } /// An instruction in the SSA IR. 
The output of an instruction is referred to by the index of @@ -4971,7 +4971,7 @@ pub fn iseq_to_hir(iseq: *const rb_iseq_t) -> Result { let args = state.stack_pop_n(argc as usize)?; let recv = state.stack_pop()?; - let send = fun.push_insn(block, Insn::SendWithoutBlock { recv, cd, args, state: exit_id, reason: NotOptimizedInstruction(opcode) }); + let send = fun.push_insn(block, Insn::SendWithoutBlock { recv, cd, args, state: exit_id, reason: Uncategorized(opcode) }); state.stack_push(send); } YARVINSN_opt_hash_freeze => { @@ -5071,7 +5071,7 @@ pub fn iseq_to_hir(iseq: *const rb_iseq_t) -> Result { let args = state.stack_pop_n(argc as usize)?; let recv = state.stack_pop()?; - let send = fun.push_insn(block, Insn::SendWithoutBlock { recv, cd, args, state: exit_id, reason: NotOptimizedInstruction(opcode) }); + let send = fun.push_insn(block, Insn::SendWithoutBlock { recv, cd, args, state: exit_id, reason: Uncategorized(opcode) }); state.stack_push(send); } YARVINSN_send => { @@ -5089,7 +5089,7 @@ pub fn iseq_to_hir(iseq: *const rb_iseq_t) -> Result { let args = state.stack_pop_n(argc as usize + usize::from(block_arg))?; let recv = state.stack_pop()?; - let send = fun.push_insn(block, Insn::Send { recv, cd, blockiseq, args, state: exit_id, reason: NotOptimizedInstruction(opcode) }); + let send = fun.push_insn(block, Insn::Send { recv, cd, blockiseq, args, state: exit_id, reason: Uncategorized(opcode) }); state.stack_push(send); if !blockiseq.is_null() { @@ -5119,7 +5119,7 @@ pub fn iseq_to_hir(iseq: *const rb_iseq_t) -> Result { let args = state.stack_pop_n(argc as usize + usize::from(forwarding))?; let recv = state.stack_pop()?; - let send_forward = fun.push_insn(block, Insn::SendForward { recv, cd, blockiseq, args, state: exit_id, reason: NotOptimizedInstruction(opcode) }); + let send_forward = fun.push_insn(block, Insn::SendForward { recv, cd, blockiseq, args, state: exit_id, reason: Uncategorized(opcode) }); state.stack_push(send_forward); if !blockiseq.is_null() { @@ -5146,7 +5146,7 @@ pub fn iseq_to_hir(iseq: *const rb_iseq_t) -> Result { let args = state.stack_pop_n(argc as usize + usize::from(block_arg))?; let recv = state.stack_pop()?; let blockiseq: IseqPtr = get_arg(pc, 1).as_ptr(); - let result = fun.push_insn(block, Insn::InvokeSuper { recv, cd, blockiseq, args, state: exit_id, reason: NotOptimizedInstruction(opcode) }); + let result = fun.push_insn(block, Insn::InvokeSuper { recv, cd, blockiseq, args, state: exit_id, reason: Uncategorized(opcode) }); state.stack_push(result); if !blockiseq.is_null() { @@ -5173,7 +5173,7 @@ pub fn iseq_to_hir(iseq: *const rb_iseq_t) -> Result { let argc = unsafe { vm_ci_argc((*cd).ci) }; let block_arg = (flags & VM_CALL_ARGS_BLOCKARG) != 0; let args = state.stack_pop_n(argc as usize + usize::from(block_arg))?; - let result = fun.push_insn(block, Insn::InvokeBlock { cd, args, state: exit_id, reason: NotOptimizedInstruction(opcode) }); + let result = fun.push_insn(block, Insn::InvokeBlock { cd, args, state: exit_id, reason: Uncategorized(opcode) }); state.stack_push(result); } YARVINSN_getglobal => { diff --git a/zjit/src/stats.rs b/zjit/src/stats.rs index 85e233a43f6756..3b74cc45330bff 100644 --- a/zjit/src/stats.rs +++ b/zjit/src/stats.rs @@ -186,7 +186,7 @@ make_counters! 
{ send_fallback_one_or_more_complex_arg_pass, send_fallback_bmethod_non_iseq_proc, send_fallback_obj_to_string_not_string, - send_fallback_not_optimized_instruction, + send_fallback_uncategorized, } // Optimized send counters that are summed as optimized_send_count @@ -453,7 +453,7 @@ pub fn send_fallback_counter(reason: crate::hir::SendFallbackReason) -> Counter SendNotOptimizedMethodType(_) => send_fallback_send_not_optimized_method_type, CCallWithFrameTooManyArgs => send_fallback_ccall_with_frame_too_many_args, ObjToStringNotString => send_fallback_obj_to_string_not_string, - NotOptimizedInstruction(_) => send_fallback_not_optimized_instruction, + Uncategorized(_) => send_fallback_uncategorized, } } @@ -621,11 +621,11 @@ pub extern "C" fn rb_zjit_stats(_ec: EcPtr, _self: VALUE, target_key: VALUE) -> set_stat_usize!(hash, "optimized_send_count", optimized_send_count); set_stat_usize!(hash, "send_count", dynamic_send_count + optimized_send_count); - // Set send fallback counters for NotOptimizedInstruction + // Set send fallback counters for Uncategorized let send_fallback_counters = ZJITState::get_send_fallback_counters(); for (op_idx, count) in send_fallback_counters.iter().enumerate().take(VM_INSTRUCTION_SIZE as usize) { let op_name = insn_name(op_idx); - let key_string = "not_optimized_yarv_insn_".to_owned() + &op_name; + let key_string = "uncategorized_fallback_yarv_insn_".to_owned() + &op_name; set_stat_usize!(hash, &key_string, *count); } From 6238b6f53e4b10a1833eb90320f002f60be72d42 Mon Sep 17 00:00:00 2001 From: John Hawthorn Date: Fri, 7 Nov 2025 16:15:38 -0800 Subject: [PATCH 06/10] Remove unused subclass methods --- class.c | 37 ------------------------------------- internal/class.h | 6 ------ 2 files changed, 43 deletions(-) diff --git a/class.c b/class.c index 770ace99d148fe..550e5422f5655b 100644 --- a/class.c +++ b/class.c @@ -590,13 +590,6 @@ rb_module_add_to_subclasses_list(VALUE module, VALUE iclass) } } -void -rb_class_remove_subclass_head(VALUE klass) -{ - rb_classext_t *ext = RCLASS_EXT_WRITABLE(klass); - rb_class_classext_free_subclasses(ext, klass, false); -} - static struct rb_subclass_entry * class_get_subclasses_for_ns(struct st_table *tbl, VALUE box_id) { @@ -663,18 +656,6 @@ rb_class_remove_from_super_subclasses(VALUE klass) RCLASSEXT_BOX_SUPER_SUBCLASSES(ext) = 0; } -void -rb_class_remove_from_module_subclasses(VALUE klass) -{ - rb_classext_t *ext = RCLASS_EXT_WRITABLE(klass); - rb_box_subclasses_t *box_subclasses = RCLASSEXT_BOX_MODULE_SUBCLASSES(ext); - - if (!box_subclasses) return; - remove_class_from_subclasses(box_subclasses->tbl, box_subclasses_tbl_key(RCLASSEXT_BOX(ext)), klass); - rb_box_subclasses_ref_dec(box_subclasses); - RCLASSEXT_BOX_MODULE_SUBCLASSES(ext) = 0; -} - void rb_class_classext_free_subclasses(rb_classext_t *ext, VALUE klass, bool replacing) { @@ -730,24 +711,6 @@ class_detach_subclasses(VALUE klass, VALUE arg) rb_class_remove_from_super_subclasses(klass); } -void -rb_class_detach_subclasses(VALUE klass) -{ - rb_class_foreach_subclass(klass, class_detach_subclasses, Qnil); -} - -static void -class_detach_module_subclasses(VALUE klass, VALUE arg) -{ - rb_class_remove_from_module_subclasses(klass); -} - -void -rb_class_detach_module_subclasses(VALUE klass) -{ - rb_class_foreach_subclass(klass, class_detach_module_subclasses, Qnil); -} - static void class_switch_superclass(VALUE super, VALUE klass) { diff --git a/internal/class.h b/internal/class.h index 04d2849656222b..d4306fc84d9f6a 100644 --- a/internal/class.h +++ b/internal/class.h 
@@ -488,15 +488,9 @@ RCLASSEXT_SET_INCLUDER(rb_classext_t *ext, VALUE klass, VALUE includer) typedef void rb_class_classext_foreach_callback_func(rb_classext_t *classext, bool is_prime, VALUE box_value, void *arg); void rb_class_classext_foreach(VALUE klass, rb_class_classext_foreach_callback_func *func, void *arg); void rb_class_subclass_add(VALUE super, VALUE klass); -void rb_class_remove_from_super_subclasses(VALUE); -void rb_class_remove_from_module_subclasses(VALUE); void rb_class_classext_free_subclasses(rb_classext_t *, VALUE, bool); void rb_class_foreach_subclass(VALUE klass, void (*f)(VALUE, VALUE), VALUE); -void rb_class_detach_subclasses(VALUE); -void rb_class_detach_module_subclasses(VALUE); void rb_class_update_superclasses(VALUE); -size_t rb_class_superclasses_memsize(VALUE); -void rb_class_remove_subclass_head(VALUE); int rb_singleton_class_internal_p(VALUE sklass); VALUE rb_class_set_super(VALUE klass, VALUE super); VALUE rb_class_boot(VALUE); From d268a551865977160aba99429c29f61c8d7c0eb0 Mon Sep 17 00:00:00 2001 From: Takashi Kokubun Date: Mon, 10 Nov 2025 16:29:50 -0800 Subject: [PATCH 07/10] ZJIT: Split unhandled_hir_insn and unknown_newarray_send stats (#15127) --- zjit.rb | 3 ++- zjit/src/codegen.rs | 3 ++- zjit/src/hir.rs | 22 +++++++++++----------- zjit/src/hir/tests.rs | 6 +++--- zjit/src/stats.rs | 37 +++++++++++++++++++++++++++++++++---- 5 files changed, 51 insertions(+), 20 deletions(-) diff --git a/zjit.rb b/zjit.rb index 9728551c3fd0f4..396f52d4b44b20 100644 --- a/zjit.rb +++ b/zjit.rb @@ -172,8 +172,9 @@ def stats_string print_counters_with_prefix(prefix: 'complex_arg_pass_', prompt: 'popular complex argument-parameter features not optimized', buf:, stats:, limit: 10) # Show exit counters, ordered by the typical amount of exits for the prefix at the time - print_counters_with_prefix(prefix: 'unhandled_yarv_insn_', prompt: 'unhandled YARV insns', buf:, stats:, limit: 20) print_counters_with_prefix(prefix: 'compile_error_', prompt: 'compile error reasons', buf:, stats:, limit: 20) + print_counters_with_prefix(prefix: 'unhandled_yarv_insn_', prompt: 'unhandled YARV insns', buf:, stats:, limit: 20) + print_counters_with_prefix(prefix: 'unhandled_hir_insn_', prompt: 'unhandled HIR insns', buf:, stats:, limit: 20) print_counters_with_prefix(prefix: 'exit_', prompt: 'side exit reasons', buf:, stats:, limit: 20) # Show no-prefix counters, having the most important stat `ratio_in_zjit` at the end diff --git a/zjit/src/codegen.rs b/zjit/src/codegen.rs index 0fc0b1ce47d367..5047de934da28b 100644 --- a/zjit/src/codegen.rs +++ b/zjit/src/codegen.rs @@ -15,7 +15,7 @@ use crate::invariants::{ use crate::gc::append_gc_offsets; use crate::payload::{get_or_create_iseq_payload, get_or_create_iseq_payload_ptr, IseqCodePtrs, IseqPayload, IseqStatus}; use crate::state::ZJITState; -use crate::stats::{send_fallback_counter, exit_counter_for_compile_error, incr_counter, incr_counter_by, send_fallback_counter_for_method_type, send_without_block_fallback_counter_for_method_type, send_without_block_fallback_counter_for_optimized_method_type, send_fallback_counter_ptr_for_opcode, CompileError}; +use crate::stats::{CompileError, exit_counter_for_compile_error, exit_counter_for_unhandled_hir_insn, incr_counter, incr_counter_by, send_fallback_counter, send_fallback_counter_for_method_type, send_fallback_counter_ptr_for_opcode, send_without_block_fallback_counter_for_method_type, send_without_block_fallback_counter_for_optimized_method_type}; use crate::stats::{counter_ptr, with_time_stat, 
Counter, Counter::{compile_time_ns, exit_compile_error}}; use crate::{asm::CodeBlock, cruby::*, options::debug, virtualmem::CodePtr}; use crate::backend::lir::{self, Assembler, C_ARG_OPNDS, C_RET_OPND, CFP, EC, NATIVE_BASE_PTR, NATIVE_STACK_PTR, Opnd, SP, SideExit, Target, asm_ccall, asm_comment}; @@ -280,6 +280,7 @@ fn gen_function(cb: &mut CodeBlock, iseq: IseqPtr, function: &Function) -> Resul let insn = function.find(insn_id); if let Err(last_snapshot) = gen_insn(cb, &mut jit, &mut asm, function, insn_id, &insn) { debug!("ZJIT: gen_function: Failed to compile insn: {insn_id} {insn}. Generating side-exit."); + gen_incr_counter(&mut asm, exit_counter_for_unhandled_hir_insn(&insn)); gen_side_exit(&mut jit, &mut asm, &SideExitReason::UnhandledHIRInsn(insn_id), &function.frame_state(last_snapshot)); // Don't bother generating code after a side-exit. We won't run it. // TODO(max): Generate ud2 or equivalent. diff --git a/zjit/src/hir.rs b/zjit/src/hir.rs index 85e9bdb00a2fb7..a8eff79765e21a 100644 --- a/zjit/src/hir.rs +++ b/zjit/src/hir.rs @@ -456,8 +456,8 @@ impl PtrPrintMap { #[derive(Debug, Clone, Copy)] pub enum SideExitReason { - UnknownNewarraySend(vm_opt_newarray_send_type), - UnknownDuparraySend(u64), + UnhandledNewarraySend(vm_opt_newarray_send_type), + UnhandledDuparraySend(u64), UnknownSpecialVariable(u64), UnhandledHIRInsn(InsnId), UnhandledYARVInsn(u32), @@ -547,13 +547,13 @@ impl std::fmt::Display for SideExitReason { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { SideExitReason::UnhandledYARVInsn(opcode) => write!(f, "UnhandledYARVInsn({})", insn_name(*opcode as usize)), - SideExitReason::UnknownNewarraySend(VM_OPT_NEWARRAY_SEND_MAX) => write!(f, "UnknownNewarraySend(MAX)"), - SideExitReason::UnknownNewarraySend(VM_OPT_NEWARRAY_SEND_MIN) => write!(f, "UnknownNewarraySend(MIN)"), - SideExitReason::UnknownNewarraySend(VM_OPT_NEWARRAY_SEND_HASH) => write!(f, "UnknownNewarraySend(HASH)"), - SideExitReason::UnknownNewarraySend(VM_OPT_NEWARRAY_SEND_PACK) => write!(f, "UnknownNewarraySend(PACK)"), - SideExitReason::UnknownNewarraySend(VM_OPT_NEWARRAY_SEND_PACK_BUFFER) => write!(f, "UnknownNewarraySend(PACK_BUFFER)"), - SideExitReason::UnknownNewarraySend(VM_OPT_NEWARRAY_SEND_INCLUDE_P) => write!(f, "UnknownNewarraySend(INCLUDE_P)"), - SideExitReason::UnknownDuparraySend(method_id) => write!(f, "UnknownDuparraySend({})", method_id), + SideExitReason::UnhandledNewarraySend(VM_OPT_NEWARRAY_SEND_MAX) => write!(f, "UnhandledNewarraySend(MAX)"), + SideExitReason::UnhandledNewarraySend(VM_OPT_NEWARRAY_SEND_MIN) => write!(f, "UnhandledNewarraySend(MIN)"), + SideExitReason::UnhandledNewarraySend(VM_OPT_NEWARRAY_SEND_HASH) => write!(f, "UnhandledNewarraySend(HASH)"), + SideExitReason::UnhandledNewarraySend(VM_OPT_NEWARRAY_SEND_PACK) => write!(f, "UnhandledNewarraySend(PACK)"), + SideExitReason::UnhandledNewarraySend(VM_OPT_NEWARRAY_SEND_PACK_BUFFER) => write!(f, "UnhandledNewarraySend(PACK_BUFFER)"), + SideExitReason::UnhandledNewarraySend(VM_OPT_NEWARRAY_SEND_INCLUDE_P) => write!(f, "UnhandledNewarraySend(INCLUDE_P)"), + SideExitReason::UnhandledDuparraySend(method_id) => write!(f, "UnhandledDuparraySend({})", method_id), SideExitReason::GuardType(guard_type) => write!(f, "GuardType({guard_type})"), SideExitReason::GuardTypeNot(guard_type) => write!(f, "GuardTypeNot({guard_type})"), SideExitReason::GuardBitEquals(value) => write!(f, "GuardBitEquals({})", value.print(&PtrPrintMap::identity())), @@ -4658,7 +4658,7 @@ pub fn iseq_to_hir(iseq: *const rb_iseq_t) -> 
Result { }, _ => { // Unknown opcode; side-exit into the interpreter - fun.push_insn(block, Insn::SideExit { state: exit_id, reason: SideExitReason::UnknownNewarraySend(method) }); + fun.push_insn(block, Insn::SideExit { state: exit_id, reason: SideExitReason::UnhandledNewarraySend(method) }); break; // End the block }, }; @@ -4686,7 +4686,7 @@ pub fn iseq_to_hir(iseq: *const rb_iseq_t) -> Result { let bop = match method_id { x if x == ID!(include_p).0 => BOP_INCLUDE_P, _ => { - fun.push_insn(block, Insn::SideExit { state: exit_id, reason: SideExitReason::UnknownDuparraySend(method_id) }); + fun.push_insn(block, Insn::SideExit { state: exit_id, reason: SideExitReason::UnhandledDuparraySend(method_id) }); break; }, }; diff --git a/zjit/src/hir/tests.rs b/zjit/src/hir/tests.rs index b3230b71c824f2..4caf0f838fac43 100644 --- a/zjit/src/hir/tests.rs +++ b/zjit/src/hir/tests.rs @@ -1925,7 +1925,7 @@ pub mod hir_build_tests { Jump bb2(v8, v9, v10, v11, v12) bb2(v14:BasicObject, v15:BasicObject, v16:BasicObject, v17:NilClass, v18:NilClass): v25:BasicObject = SendWithoutBlock v15, :+, v16 - SideExit UnknownNewarraySend(MIN) + SideExit UnhandledNewarraySend(MIN) "); } @@ -1957,7 +1957,7 @@ pub mod hir_build_tests { Jump bb2(v8, v9, v10, v11, v12) bb2(v14:BasicObject, v15:BasicObject, v16:BasicObject, v17:NilClass, v18:NilClass): v25:BasicObject = SendWithoutBlock v15, :+, v16 - SideExit UnknownNewarraySend(HASH) + SideExit UnhandledNewarraySend(HASH) "); } @@ -1991,7 +1991,7 @@ pub mod hir_build_tests { v25:BasicObject = SendWithoutBlock v15, :+, v16 v31:StringExact[VALUE(0x1000)] = Const Value(VALUE(0x1000)) v32:StringExact = StringCopy v31 - SideExit UnknownNewarraySend(PACK) + SideExit UnhandledNewarraySend(PACK) "); } diff --git a/zjit/src/stats.rs b/zjit/src/stats.rs index 3b74cc45330bff..ec52662086e642 100644 --- a/zjit/src/stats.rs +++ b/zjit/src/stats.rs @@ -128,8 +128,12 @@ make_counters! { exit { // exit_: Side exits reasons exit_compile_error, - exit_unknown_newarray_send, - exit_unknown_duparray_send, + exit_unhandled_newarray_send_min, + exit_unhandled_newarray_send_hash, + exit_unhandled_newarray_send_pack, + exit_unhandled_newarray_send_pack_buffer, + exit_unhandled_newarray_send_unknown, + exit_unhandled_duparray_send, exit_unhandled_tailcall, exit_unhandled_splat, exit_unhandled_kwarg, @@ -218,6 +222,13 @@ make_counters! { compile_error_validation_type_check_failure, compile_error_validation_misc_validation_error, + // unhandled_hir_insn_: Unhandled HIR instructions + unhandled_hir_insn_array_max, + unhandled_hir_insn_fixnum_div, + unhandled_hir_insn_throw, + unhandled_hir_insn_invokebuiltin, + unhandled_hir_insn_unknown, + // The number of times YARV instructions are executed on JIT code zjit_insn_count, @@ -377,14 +388,32 @@ pub fn exit_counter_for_compile_error(compile_error: &CompileError) -> Counter { } } +pub fn exit_counter_for_unhandled_hir_insn(insn: &crate::hir::Insn) -> Counter { + use crate::hir::Insn::*; + use crate::stats::Counter::*; + match insn { + ArrayMax { .. } => unhandled_hir_insn_array_max, + FixnumDiv { .. } => unhandled_hir_insn_fixnum_div, + Throw { .. } => unhandled_hir_insn_throw, + InvokeBuiltin { .. 
} => unhandled_hir_insn_invokebuiltin, + _ => unhandled_hir_insn_unknown, + } +} + pub fn side_exit_counter(reason: crate::hir::SideExitReason) -> Counter { use crate::hir::SideExitReason::*; use crate::hir::CallType::*; use crate::hir::Invariant; use crate::stats::Counter::*; match reason { - UnknownNewarraySend(_) => exit_unknown_newarray_send, - UnknownDuparraySend(_) => exit_unknown_duparray_send, + UnhandledNewarraySend(send_type) => match send_type { + VM_OPT_NEWARRAY_SEND_MIN => exit_unhandled_newarray_send_min, + VM_OPT_NEWARRAY_SEND_HASH => exit_unhandled_newarray_send_hash, + VM_OPT_NEWARRAY_SEND_PACK => exit_unhandled_newarray_send_pack, + VM_OPT_NEWARRAY_SEND_PACK_BUFFER => exit_unhandled_newarray_send_pack_buffer, + _ => exit_unhandled_newarray_send_unknown, + } + UnhandledDuparraySend(_) => exit_unhandled_duparray_send, UnhandledCallType(Tailcall) => exit_unhandled_tailcall, UnhandledCallType(Splat) => exit_unhandled_splat, UnhandledCallType(Kwarg) => exit_unhandled_kwarg, From 87493e32ed822b78f1294c3c7a11f710ebdb6a90 Mon Sep 17 00:00:00 2001 From: Peter Zhu Date: Mon, 10 Nov 2025 18:08:27 -0500 Subject: [PATCH 08/10] Fix memory leak in ObjectSpace tracing allocation_info_tracer_compact_update_object_table_i deletes entries where the key is no longer in the GC heap but did not free the allocation_info causing the memory to be leaked. --- ext/objspace/object_tracing.c | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ext/objspace/object_tracing.c b/ext/objspace/object_tracing.c index 63eec6739320f9..c06f1f68ddacfc 100644 --- a/ext/objspace/object_tracing.c +++ b/ext/objspace/object_tracing.c @@ -203,6 +203,8 @@ allocation_info_tracer_compact_update_object_table_i(st_data_t key, st_data_t va st_table *table = (st_table *)data; if (!rb_gc_pointer_to_heap_p(key)) { + struct allocation_info *info = (struct allocation_info *)value; + xfree(info); return ST_DELETE; } From 327f070acd081d0b7e1d296f519aa6736bcc7cd3 Mon Sep 17 00:00:00 2001 From: Stan Lo Date: Mon, 10 Nov 2025 22:30:30 +0900 Subject: [PATCH 09/10] ZJIT: Don't need to store class in profiled type resolution enums --- zjit/src/hir.rs | 37 ++++++++++++++----------------------- 1 file changed, 14 insertions(+), 23 deletions(-) diff --git a/zjit/src/hir.rs b/zjit/src/hir.rs index a8eff79765e21a..33567d92426c84 100644 --- a/zjit/src/hir.rs +++ b/zjit/src/hir.rs @@ -570,15 +570,15 @@ pub enum ReceiverTypeResolution { /// No profile information available for the receiver NoProfile, /// The receiver has a monomorphic profile (single type observed, guard needed) - Monomorphic { class: VALUE, profiled_type: ProfiledType }, + Monomorphic { profiled_type: ProfiledType }, /// The receiver is polymorphic (multiple types, none dominant) Polymorphic, /// The receiver has a skewed polymorphic profile (dominant type with some other types, guard needed) - SkewedPolymorphic { class: VALUE, profiled_type: ProfiledType }, + SkewedPolymorphic { profiled_type: ProfiledType }, /// More than N types seen with no clear winner Megamorphic, /// Megamorphic, but with a significant skew towards one type - SkewedMegamorphic { class: VALUE, profiled_type: ProfiledType }, + SkewedMegamorphic { profiled_type: ProfiledType }, /// The receiver's class is statically known at JIT compile-time (no guard needed) StaticallyKnown { class: VALUE }, } @@ -2183,22 +2183,13 @@ impl Function { if self.union_find.borrow().find_const(*entry_insn) == recv { if entry_type_summary.is_monomorphic() { let profiled_type = entry_type_summary.bucket(0); - return 
ReceiverTypeResolution::Monomorphic { - class: profiled_type.class(), - profiled_type, - }; + return ReceiverTypeResolution::Monomorphic { profiled_type }; } else if entry_type_summary.is_skewed_polymorphic() { let profiled_type = entry_type_summary.bucket(0); - return ReceiverTypeResolution::SkewedPolymorphic { - class: profiled_type.class(), - profiled_type, - }; + return ReceiverTypeResolution::SkewedPolymorphic { profiled_type }; } else if entry_type_summary.is_skewed_megamorphic() { let profiled_type = entry_type_summary.bucket(0); - return ReceiverTypeResolution::SkewedMegamorphic { - class: profiled_type.class(), - profiled_type, - }; + return ReceiverTypeResolution::SkewedMegamorphic { profiled_type }; } else if entry_type_summary.is_polymorphic() { return ReceiverTypeResolution::Polymorphic; } else if entry_type_summary.is_megamorphic() { @@ -2356,8 +2347,8 @@ impl Function { let frame_state = self.frame_state(state); let (klass, profiled_type) = match self.resolve_receiver_type(recv, self.type_of(recv), frame_state.insn_idx) { ReceiverTypeResolution::StaticallyKnown { class } => (class, None), - ReceiverTypeResolution::Monomorphic { class, profiled_type } - | ReceiverTypeResolution::SkewedPolymorphic { class, profiled_type } => (class, Some(profiled_type)), + ReceiverTypeResolution::Monomorphic { profiled_type } + | ReceiverTypeResolution::SkewedPolymorphic { profiled_type } => (profiled_type.class(), Some(profiled_type)), ReceiverTypeResolution::SkewedMegamorphic { .. } | ReceiverTypeResolution::Megamorphic => { if get_option!(stats) { @@ -2557,8 +2548,8 @@ impl Function { let frame_state = self.frame_state(state); let klass = match self.resolve_receiver_type(recv, self.type_of(recv), frame_state.insn_idx) { ReceiverTypeResolution::StaticallyKnown { class } => class, - ReceiverTypeResolution::Monomorphic { class, .. } - | ReceiverTypeResolution::SkewedPolymorphic { class, .. } => class, + ReceiverTypeResolution::Monomorphic { profiled_type } + | ReceiverTypeResolution::SkewedPolymorphic { profiled_type } => profiled_type.class(), ReceiverTypeResolution::SkewedMegamorphic { .. } | ReceiverTypeResolution::Megamorphic => { if get_option!(stats) { @@ -2845,8 +2836,8 @@ impl Function { let iseq_insn_idx = fun.frame_state(state).insn_idx; let (recv_class, profiled_type) = match fun.resolve_receiver_type(recv, self_type, iseq_insn_idx) { ReceiverTypeResolution::StaticallyKnown { class } => (class, None), - ReceiverTypeResolution::Monomorphic { class, profiled_type } - | ReceiverTypeResolution::SkewedPolymorphic { class, profiled_type } => (class, Some(profiled_type)), + ReceiverTypeResolution::Monomorphic { profiled_type } + | ReceiverTypeResolution::SkewedPolymorphic { profiled_type} => (profiled_type.class(), Some(profiled_type)), ReceiverTypeResolution::SkewedMegamorphic { .. 
} | ReceiverTypeResolution::Polymorphic | ReceiverTypeResolution::Megamorphic | ReceiverTypeResolution::NoProfile => return Err(()), }; @@ -2950,8 +2941,8 @@ impl Function { let iseq_insn_idx = fun.frame_state(state).insn_idx; let (recv_class, profiled_type) = match fun.resolve_receiver_type(recv, self_type, iseq_insn_idx) { ReceiverTypeResolution::StaticallyKnown { class } => (class, None), - ReceiverTypeResolution::Monomorphic { class, profiled_type } - | ReceiverTypeResolution::SkewedPolymorphic { class, profiled_type } => (class, Some(profiled_type)), + ReceiverTypeResolution::Monomorphic { profiled_type } + | ReceiverTypeResolution::SkewedPolymorphic { profiled_type } => (profiled_type.class(), Some(profiled_type)), ReceiverTypeResolution::SkewedMegamorphic { .. } | ReceiverTypeResolution::Polymorphic | ReceiverTypeResolution::Megamorphic | ReceiverTypeResolution::NoProfile => return Err(()), }; From 222d8990d4b0acfd17db9bf9063af1ccf0fe036a Mon Sep 17 00:00:00 2001 From: Stan Lo Date: Tue, 11 Nov 2025 00:16:41 +0900 Subject: [PATCH 10/10] ZJIT: Reduce duplication between profiled_type_of_at and resolve_receiver_type --- zjit/src/hir.rs | 44 ++++++++++++++++++++++---------------------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/zjit/src/hir.rs b/zjit/src/hir.rs index 33567d92426c84..484063afd582c0 100644 --- a/zjit/src/hir.rs +++ b/zjit/src/hir.rs @@ -2136,41 +2136,41 @@ impl Function { } } - /// Return the interpreter-profiled type of the HIR instruction at the given ISEQ instruction - /// index, if it is known. This historical type record is not a guarantee and must be checked - /// with a GuardType or similar. + /// Return the profiled type of the HIR instruction at the given ISEQ instruction + /// index, if it is known to be monomorphic or skewed polymorphic. This historical type + /// record is not a guarantee and must be checked with a GuardType or similar. fn profiled_type_of_at(&self, insn: InsnId, iseq_insn_idx: usize) -> Option { - let profiles = self.profiles.as_ref()?; - let entries = profiles.types.get(&iseq_insn_idx)?; - let insn = self.chase_insn(insn); - for (entry_insn, entry_type_summary) in entries { - if self.union_find.borrow().find_const(*entry_insn) == insn { - if entry_type_summary.is_monomorphic() || entry_type_summary.is_skewed_polymorphic() { - return Some(entry_type_summary.bucket(0)); - } else { - return None; - } - } + match self.resolve_receiver_type_from_profile(insn, iseq_insn_idx) { + ReceiverTypeResolution::Monomorphic { profiled_type } + | ReceiverTypeResolution::SkewedPolymorphic { profiled_type } => Some(profiled_type), + _ => None, } - None } /// Resolve the receiver type for method dispatch optimization. /// /// Takes the receiver's Type, receiver HIR instruction, and ISEQ instruction index. - /// Performs a single iteration through profile data to determine the receiver type. + /// First checks if the receiver's class is statically known, otherwise consults profile data. 
/// /// Returns: /// - `StaticallyKnown` if the receiver's exact class is known at compile-time + /// - Result of [`Self::resolve_receiver_type_from_profile`] if we need to check profile data + fn resolve_receiver_type(&self, recv: InsnId, recv_type: Type, insn_idx: usize) -> ReceiverTypeResolution { + if let Some(class) = recv_type.runtime_exact_ruby_class() { + return ReceiverTypeResolution::StaticallyKnown { class }; + } + self.resolve_receiver_type_from_profile(recv, insn_idx) + } + + /// Resolve the receiver type for method dispatch optimization from profile data. + /// + /// Returns: /// - `Monomorphic`/`SkewedPolymorphic` if we have usable profile data /// - `Polymorphic` if the receiver has multiple types /// - `Megamorphic`/`SkewedMegamorphic` if the receiver has too many types to optimize /// (SkewedMegamorphic may be optimized in the future, but for now we don't) /// - `NoProfile` if we have no type information - fn resolve_receiver_type(&self, recv: InsnId, recv_type: Type, insn_idx: usize) -> ReceiverTypeResolution { - if let Some(class) = recv_type.runtime_exact_ruby_class() { - return ReceiverTypeResolution::StaticallyKnown { class }; - } + fn resolve_receiver_type_from_profile(&self, recv: InsnId, insn_idx: usize) -> ReceiverTypeResolution { let Some(profiles) = self.profiles.as_ref() else { return ReceiverTypeResolution::NoProfile; }; @@ -2508,7 +2508,7 @@ impl Function { } // Get the profiled type to check if the fields is embedded or heap allocated. let Some(is_embedded) = self.profiled_type_of_at(recv, frame_state.insn_idx).map(|t| t.flags().is_struct_embedded()) else { - // No (monomorphic) profile info + // No (monomorphic/skewed polymorphic) profile info self.push_insn_id(block, insn_id); continue; }; self.push_insn(block, Insn::PatchPoint { invariant: Invariant::MethodRedefined { klass, method: mid, cme }, state }); @@ -2761,7 +2761,7 @@ impl Function { Insn::GetIvar { self_val, id, state } => { let frame_state = self.frame_state(state); let Some(recv_type) = self.profiled_type_of_at(self_val, frame_state.insn_idx) else { - // No (monomorphic) profile info + // No (monomorphic/skewed polymorphic) profile info self.push_insn_id(block, insn_id); continue; }; if recv_type.flags().is_immediate() {