From ab01fcc5123205cdff6e566c2b686e7ab3ed383f Mon Sep 17 00:00:00 2001
From: Takashi Kokubun
Date: Fri, 31 Oct 2025 12:47:55 -0700
Subject: [PATCH 1/7] ZJIT: Let AssemblerPanicHook write into stderr (#15019)

---
 zjit/src/backend/lir.rs | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/zjit/src/backend/lir.rs b/zjit/src/backend/lir.rs
index a90e80bf4187b1..584251de802bf2 100644
--- a/zjit/src/backend/lir.rs
+++ b/zjit/src/backend/lir.rs
@@ -2290,7 +2290,7 @@ pub struct AssemblerPanicHook {
 impl AssemblerPanicHook {
     /// Maximum number of lines [`Self::dump_asm`] is allowed to dump by default.
     /// When --zjit-dump-lir is given, this limit is ignored.
-    const MAX_DUMP_LINES: usize = 40;
+    const MAX_DUMP_LINES: usize = 10;

     /// Install a panic hook to dump Assembler with insn_idx on dev builds.
     /// This returns shared references to the previous hook and insn_idx.
@@ -2340,12 +2340,12 @@ impl AssemblerPanicHook {
             (insn_idx.saturating_sub(Self::MAX_DUMP_LINES / 2), insn_idx.saturating_add(Self::MAX_DUMP_LINES / 2))
         };

-        println!("Failed to compile LIR at insn_idx={insn_idx}:");
+        eprintln!("Failed to compile LIR at insn_idx={insn_idx}:");
         for (idx, line) in lines.iter().enumerate().filter(|(idx, _)| (min_idx..=max_idx).contains(idx)) {
             if idx == insn_idx && line.starts_with(" ") {
-                println!("{BOLD_BEGIN}=>{}{BOLD_END}", &line[" ".len()..]);
+                eprintln!("{BOLD_BEGIN}=>{}{BOLD_END}", &line[" ".len()..]);
             } else {
-                println!("{line}");
+                eprintln!("{line}");
             }
         }
     }

From 7688b05098af501642b1930ac1091dbb6241285e Mon Sep 17 00:00:00 2001
From: Max Bernstein
Date: Fri, 31 Oct 2025 15:48:59 -0400
Subject: [PATCH 2/7] ZJIT: Optimize VM_METHOD_TYPE_ALIAS (#15018)

Just loop until you find a non-alias.
---
 zjit/src/hir.rs           | 20 +++++++--
 zjit/src/hir/opt_tests.rs | 87 +++++++++++++++++++++++++++++++++++++++
 2 files changed, 103 insertions(+), 4 deletions(-)

diff --git a/zjit/src/hir.rs b/zjit/src/hir.rs
index 5a609670f4b9ba..955d33a90621e6 100644
--- a/zjit/src/hir.rs
+++ b/zjit/src/hir.rs
@@ -2276,7 +2276,11 @@ impl Function {
                 // Load an overloaded cme if applicable. See vm_search_cc().
                 // It allows you to use a faster ISEQ if possible.
                 cme = unsafe { rb_check_overloaded_cme(cme, ci) };
-                let def_type = unsafe { get_cme_def_type(cme) };
+                let mut def_type = unsafe { get_cme_def_type(cme) };
+                while def_type == VM_METHOD_TYPE_ALIAS {
+                    cme = unsafe { rb_aliased_callable_method_entry(cme) };
+                    def_type = unsafe { get_cme_def_type(cme) };
+                }
                 if def_type == VM_METHOD_TYPE_ISEQ {
                     // TODO(max): Allow non-iseq; cache cme
                     // Only specialize positional-positional calls
@@ -2453,7 +2457,11 @@ impl Function {
                 // Load an overloaded cme if applicable. See vm_search_cc().
                 // It allows you to use a faster ISEQ if possible.
                 cme = unsafe { rb_check_overloaded_cme(cme, ci) };
-                let def_type = unsafe { get_cme_def_type(cme) };
+                let mut def_type = unsafe { get_cme_def_type(cme) };
+                while def_type == VM_METHOD_TYPE_ALIAS {
+                    cme = unsafe { rb_aliased_callable_method_entry(cme) };
+                    def_type = unsafe { get_cme_def_type(cme) };
+                }
                 self.set_dynamic_send_reason(insn_id, SendNotOptimizedMethodType(MethodType::from(def_type)));
                 self.push_insn_id(block, insn_id); continue;
             }
@@ -2810,13 +2818,17 @@ impl Function {
         };

         // Do method lookup
-        let method: *const rb_callable_method_entry_struct = unsafe { rb_callable_method_entry(recv_class, method_id) };
+        let mut method: *const rb_callable_method_entry_struct = unsafe { rb_callable_method_entry(recv_class, method_id) };
         if method.is_null() {
             return Err(());
         }

         // Filter for C methods
-        let def_type = unsafe { get_cme_def_type(method) };
+        let mut def_type = unsafe { get_cme_def_type(method) };
+        while def_type == VM_METHOD_TYPE_ALIAS {
+            method = unsafe { rb_aliased_callable_method_entry(method) };
+            def_type = unsafe { get_cme_def_type(method) };
+        }
         if def_type != VM_METHOD_TYPE_CFUNC {
             return Err(());
         }
diff --git a/zjit/src/hir/opt_tests.rs b/zjit/src/hir/opt_tests.rs
index 52471220234e8c..8a9acb59bb2527 100644
--- a/zjit/src/hir/opt_tests.rs
+++ b/zjit/src/hir/opt_tests.rs
@@ -638,6 +638,93 @@ mod hir_opt_tests {
         ");
     }

+    #[test]
+    fn test_optimize_send_without_block_to_aliased_iseq() {
+        eval("
+            def foo = 1
+            alias bar foo
+            alias baz bar
+            def test = baz
+            test; test
+        ");
+        assert_snapshot!(hir_string("test"), @r"
+        fn test@:5:
+        bb0():
+          EntryPoint interpreter
+          v1:BasicObject = LoadSelf
+          Jump bb2(v1)
+        bb1(v4:BasicObject):
+          EntryPoint JIT(0)
+          Jump bb2(v4)
+        bb2(v6:BasicObject):
+          PatchPoint MethodRedefined(Object@0x1000, baz@0x1008, cme:0x1010)
+          PatchPoint NoSingletonClass(Object@0x1000)
+          v19:HeapObject[class_exact*:Object@VALUE(0x1000)] = GuardType v6, HeapObject[class_exact*:Object@VALUE(0x1000)]
+          IncrCounter inline_iseq_optimized_send_count
+          v22:Fixnum[1] = Const Value(1)
+          CheckInterrupts
+          Return v22
+        ");
+    }
+
+    #[test]
+    fn test_optimize_send_without_block_to_aliased_cfunc() {
+        eval("
+            alias bar itself
+            alias baz bar
+            def test = baz
+            test; test
+        ");
+        assert_snapshot!(hir_string("test"), @r"
+        fn test@:4:
+        bb0():
+          EntryPoint interpreter
+          v1:BasicObject = LoadSelf
+          Jump bb2(v1)
+        bb1(v4:BasicObject):
+          EntryPoint JIT(0)
+          Jump bb2(v4)
+        bb2(v6:BasicObject):
+          PatchPoint MethodRedefined(Object@0x1000, baz@0x1008, cme:0x1010)
+          PatchPoint NoSingletonClass(Object@0x1000)
+          v20:HeapObject[class_exact*:Object@VALUE(0x1000)] = GuardType v6, HeapObject[class_exact*:Object@VALUE(0x1000)]
+          IncrCounter inline_cfunc_optimized_send_count
+          CheckInterrupts
+          Return v20
+        ");
+    }
+
+    #[test]
+    fn test_optimize_send_to_aliased_cfunc() {
+        eval("
+            class C < Array
+              alias fun_new_map map
+            end
+            def test(o) = o.fun_new_map {|e| e }
+            test C.new; test C.new
+        ");
+        assert_snapshot!(hir_string("test"), @r"
+        fn test@:5:
+        bb0():
+          EntryPoint interpreter
+          v1:BasicObject = LoadSelf
+          v2:BasicObject = GetLocal l0, SP@4
+          Jump bb2(v1, v2)
+        bb1(v5:BasicObject, v6:BasicObject):
+          EntryPoint JIT(0)
+          Jump bb2(v5, v6)
+        bb2(v8:BasicObject, v9:BasicObject):
+          v13:BasicObject = GetLocal l0, EP@3
+          PatchPoint MethodRedefined(C@0x1000, fun_new_map@0x1008, cme:0x1010)
+          PatchPoint NoSingletonClass(C@0x1000)
+          v25:ArraySubclass[class_exact:C] = GuardType v13, ArraySubclass[class_exact:C]
+          v26:BasicObject = CCallWithFrame fun_new_map@0x1038, v25, block=0x1040
+          v16:BasicObject = GetLocal l0, EP@3
+          CheckInterrupts
+          Return v26
+        ");
+    }
+
     #[test]
     fn test_optimize_nonexistent_top_level_call() {
         eval("

From 4fc9ad5264af5f866520511a58ce57ea3af099cb Mon Sep 17 00:00:00 2001
From: John Hawthorn
Date: Fri, 31 Oct 2025 08:51:03 -0700
Subject: [PATCH 3/7] Remove always true conditionals in vm_populate_cc

---
 vm_insnhelper.c | 16 +++-------------
 1 file changed, 3 insertions(+), 13 deletions(-)

diff --git a/vm_insnhelper.c b/vm_insnhelper.c
index cbed6143297b81..1f6eb9222c297b 100644
--- a/vm_insnhelper.c
+++ b/vm_insnhelper.c
@@ -2156,17 +2156,7 @@ vm_populate_cc(VALUE klass, const struct rb_callinfo * const ci, ID mid)

     RB_DEBUG_COUNTER_INC(cc_not_found_in_ccs);

-    const rb_callable_method_entry_t *cme;
-
-    if (ccs) {
-        cme = ccs->cme;
-        cme = UNDEFINED_METHOD_ENTRY_P(cme) ? NULL : cme;
-
-        VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
-    }
-    else {
-        cme = rb_callable_method_entry(klass, mid);
-    }
+    const rb_callable_method_entry_t *cme = rb_callable_method_entry(klass, mid);

     VM_ASSERT(cme == NULL || IMEMO_TYPE_P(cme, imemo_ment));

@@ -2180,9 +2170,9 @@ vm_populate_cc(VALUE klass, const struct rb_callinfo * const ci, ID mid)

     METHOD_ENTRY_CACHED_SET((struct rb_callable_method_entry_struct *)cme);

-    if (ccs == NULL) {
-        VM_ASSERT(cc_tbl);
-
+    VM_ASSERT(cc_tbl);
+
+    {
         VALUE ccs_obj;
         if (UNLIKELY(rb_managed_id_table_lookup(cc_tbl, mid, &ccs_obj))) {
             ccs = (struct rb_class_cc_entries *)ccs_obj;

From b931199d458fe24167be51157867ec9e6fa12eaa Mon Sep 17 00:00:00 2001
From: John Hawthorn
Date: Fri, 31 Oct 2025 08:08:57 -0700
Subject: [PATCH 4/7] Avoid duping cc table when cme == NULL

---
 vm_insnhelper.c | 22 +++++++++++-----------
 1 file changed, 11 insertions(+), 11 deletions(-)

diff --git a/vm_insnhelper.c b/vm_insnhelper.c
index 1f6eb9222c297b..27177d9b13ef75 100644
--- a/vm_insnhelper.c
+++ b/vm_insnhelper.c
@@ -2143,17 +2143,6 @@ vm_populate_cc(VALUE klass, const struct rb_callinfo * const ci, ID mid)
 {
     ASSERT_vm_locking();

-    VALUE cc_tbl = RCLASS_WRITABLE_CC_TBL(klass);
-    const VALUE original_cc_table = cc_tbl;
-    struct rb_class_cc_entries *ccs = NULL;
-
-    if (!cc_tbl) {
-        cc_tbl = rb_vm_cc_table_create(1);
-    }
-    else if (rb_multi_ractor_p()) {
-        cc_tbl = rb_vm_cc_table_dup(cc_tbl);
-    }
-
     RB_DEBUG_COUNTER_INC(cc_not_found_in_ccs);

     const rb_callable_method_entry_t *cme = rb_callable_method_entry(klass, mid);
@@ -2166,12 +2155,23 @@ vm_populate_cc(VALUE klass, const struct rb_callinfo * const ci, ID mid)
         return &vm_empty_cc;
     }

+    VALUE cc_tbl = RCLASS_WRITABLE_CC_TBL(klass);
+    const VALUE original_cc_table = cc_tbl;
+    if (!cc_tbl) {
+        // Is this possible after rb_callable_method_entry ?
+        cc_tbl = rb_vm_cc_table_create(1);
+    }
+    else if (rb_multi_ractor_p()) {
+        cc_tbl = rb_vm_cc_table_dup(cc_tbl);
+    }
+
     VM_ASSERT(cme == rb_callable_method_entry(klass, mid));

     METHOD_ENTRY_CACHED_SET((struct rb_callable_method_entry_struct *)cme);

     VM_ASSERT(cc_tbl);

+    struct rb_class_cc_entries *ccs = NULL;
     {
         VALUE ccs_obj;
         if (UNLIKELY(rb_managed_id_table_lookup(cc_tbl, mid, &ccs_obj))) {
             ccs = (struct rb_class_cc_entries *)ccs_obj;

From cc8cfbcd8592cb003c58558ce63c117461a5ef32 Mon Sep 17 00:00:00 2001
From: Stan Lo
Date: Fri, 31 Oct 2025 21:09:25 +0000
Subject: [PATCH 5/7] ZJIT: Standardize variable name for callable method entry (#15021)

---
 zjit/src/hir.rs | 48 ++++++++++++++++++++++++------------------------
 1 file changed, 24 insertions(+), 24 deletions(-)

diff --git a/zjit/src/hir.rs b/zjit/src/hir.rs
index 955d33a90621e6..55bec186512799 100644
--- a/zjit/src/hir.rs
+++ b/zjit/src/hir.rs
@@ -2677,9 +2677,9 @@ impl Function {
         self.infer_types();
     }

-    fn gen_patch_points_for_optimized_ccall(&mut self, block: BlockId, recv_class: VALUE, method_id: ID, method: *const rb_callable_method_entry_struct, state: InsnId) {
+    fn gen_patch_points_for_optimized_ccall(&mut self, block: BlockId, recv_class: VALUE, method_id: ID, cme: *const rb_callable_method_entry_struct, state: InsnId) {
         self.push_insn(block, Insn::PatchPoint { invariant: Invariant::NoTracePoint, state });
-        self.push_insn(block, Insn::PatchPoint { invariant: Invariant::MethodRedefined { klass: recv_class, method: method_id, cme: method }, state });
+        self.push_insn(block, Insn::PatchPoint { invariant: Invariant::MethodRedefined { klass: recv_class, method: method_id, cme }, state });
     }

     /// Optimize SendWithoutBlock that land in a C method to a direct CCall without
@@ -2715,19 +2715,19 @@ impl Function {
         };

         // Do method lookup
-        let method: *const rb_callable_method_entry_struct = unsafe { rb_callable_method_entry(recv_class, method_id) };
-        if method.is_null() {
+        let cme: *const rb_callable_method_entry_struct = unsafe { rb_callable_method_entry(recv_class, method_id) };
+        if cme.is_null() {
             return Err(());
         }

         // Filter for C methods
-        let def_type = unsafe { get_cme_def_type(method) };
+        let def_type = unsafe { get_cme_def_type(cme) };
         if def_type != VM_METHOD_TYPE_CFUNC {
             return Err(());
         }

         // Find the `argc` (arity) of the C method, which describes the parameters it expects
-        let cfunc = unsafe { get_cme_def_body_cfunc(method) };
+        let cfunc = unsafe { get_cme_def_body_cfunc(cme) };
         let cfunc_argc = unsafe { get_mct_argc(cfunc) };
         match cfunc_argc {
             0.. => {
@@ -2747,7 +2747,7 @@ impl Function {
         }

         // Commit to the replacement. Put PatchPoint.
-        fun.gen_patch_points_for_optimized_ccall(block, recv_class, method_id, method, state);
+        fun.gen_patch_points_for_optimized_ccall(block, recv_class, method_id, cme, state);
         if recv_class.instance_can_have_singleton_class() {
             fun.push_insn(block, Insn::PatchPoint { invariant: Invariant::NoSingletonClass { klass: recv_class }, state });
         }
@@ -2769,7 +2769,7 @@ impl Function {
             cd,
             cfunc,
             args: cfunc_args,
-            cme: method,
+            cme,
             name: method_id,
             state,
             return_type: types::BasicObject,
@@ -2818,23 +2818,23 @@ impl Function {
         };

         // Do method lookup
-        let mut method: *const rb_callable_method_entry_struct = unsafe { rb_callable_method_entry(recv_class, method_id) };
-        if method.is_null() {
+        let mut cme: *const rb_callable_method_entry_struct = unsafe { rb_callable_method_entry(recv_class, method_id) };
+        if cme.is_null() {
             return Err(());
         }

         // Filter for C methods
-        let mut def_type = unsafe { get_cme_def_type(method) };
+        let mut def_type = unsafe { get_cme_def_type(cme) };
         while def_type == VM_METHOD_TYPE_ALIAS {
-            method = unsafe { rb_aliased_callable_method_entry(method) };
-            def_type = unsafe { get_cme_def_type(method) };
+            cme = unsafe { rb_aliased_callable_method_entry(cme) };
+            def_type = unsafe { get_cme_def_type(cme) };
         }
         if def_type != VM_METHOD_TYPE_CFUNC {
             return Err(());
         }

         // Find the `argc` (arity) of the C method, which describes the parameters it expects
-        let cfunc = unsafe { get_cme_def_body_cfunc(method) };
+        let cfunc = unsafe { get_cme_def_body_cfunc(cme) };
         let cfunc_argc = unsafe { get_mct_argc(cfunc) };
         match cfunc_argc {
             0.. => {
@@ -2854,14 +2854,14 @@ impl Function {
         }

         // Commit to the replacement. Put PatchPoint.
-        fun.gen_patch_points_for_optimized_ccall(block, recv_class, method_id, method, state);
+        fun.gen_patch_points_for_optimized_ccall(block, recv_class, method_id, cme, state);
         if recv_class.instance_can_have_singleton_class() {
             fun.push_insn(block, Insn::PatchPoint { invariant: Invariant::NoSingletonClass { klass: recv_class }, state });
         }

-        let props = ZJITState::get_method_annotations().get_cfunc_properties(method);
+        let props = ZJITState::get_method_annotations().get_cfunc_properties(cme);
         if props.is_none() && get_option!(stats) {
-            count_not_annotated_cfunc(fun, block, method);
+            count_not_annotated_cfunc(fun, block, cme);
         }

         let props = props.unwrap_or_default();
@@ -2897,13 +2897,13 @@ impl Function {

             fun.make_equal_to(send_insn_id, ccall);
         } else {
             if get_option!(stats) {
-                count_not_inlined_cfunc(fun, block, method);
+                count_not_inlined_cfunc(fun, block, cme);
             }
             let ccall = fun.push_insn(block, Insn::CCallWithFrame {
                 cd,
                 cfunc,
                 args: cfunc_args,
-                cme: method,
+                cme,
                 name: method_id,
                 state,
@@ -2923,7 +2923,7 @@ impl Function {
         if ci_flags & VM_CALL_ARGS_SIMPLE == 0 {
             fun.count_fancy_call_features(block, ci_flags);
         } else {
-            fun.gen_patch_points_for_optimized_ccall(block, recv_class, method_id, method, state);
+            fun.gen_patch_points_for_optimized_ccall(block, recv_class, method_id, cme, state);
             if recv_class.instance_can_have_singleton_class() {
                 fun.push_insn(block, Insn::PatchPoint { invariant: Invariant::NoSingletonClass { klass: recv_class }, state });
             }
@@ -2935,9 +2935,9 @@ impl Function {
         }

         let cfunc = unsafe { get_mct_func(cfunc) }.cast();
-        let props = ZJITState::get_method_annotations().get_cfunc_properties(method);
+        let props = ZJITState::get_method_annotations().get_cfunc_properties(cme);
         if props.is_none() && get_option!(stats) {
-            count_not_annotated_cfunc(fun, block, method);
+            count_not_annotated_cfunc(fun, block, cme);
         }

         let props = props.unwrap_or_default();
@@ -2956,7 +2956,7 @@ impl Function {

         // No inlining; emit a call
         if get_option!(stats) {
-            count_not_inlined_cfunc(fun, block, method);
+            count_not_inlined_cfunc(fun, block, cme);
         }
         let return_type = props.return_type;
         let elidable = props.elidable;
@@ -2964,7 +2964,7 @@ impl Function {
             cfunc,
             recv,
             args,
-            cme: method,
+            cme,
             name: method_id,
             state,
             return_type,

From 8db30094fce210cd193b571d7c4b7e5702ed1ca9 Mon Sep 17 00:00:00 2001
From: Alan Wu
Date: Fri, 31 Oct 2025 18:58:21 -0400
Subject: [PATCH 6/7] ZJIT: Fix incorrect elision of call to BasicObject#!=

rb_obj_not_equal() uses rb_funcall(), so it's not `no_gc`, `leaf`, nor `elidable`.
---
 zjit/src/cruby_methods.rs |  2 +-
 zjit/src/hir/opt_tests.rs | 46 +++++++++++++++++++++++++++++++++++++++
 2 files changed, 47 insertions(+), 1 deletion(-)

diff --git a/zjit/src/cruby_methods.rs b/zjit/src/cruby_methods.rs
index bd3409ff07ce5c..0af3be1819dac7 100644
--- a/zjit/src/cruby_methods.rs
+++ b/zjit/src/cruby_methods.rs
@@ -221,7 +221,7 @@ pub fn init() -> Annotations {
     annotate!(rb_mKernel, "respond_to?", inline_kernel_respond_to_p);
     annotate!(rb_cBasicObject, "==", inline_basic_object_eq, types::BoolExact, no_gc, leaf, elidable);
     annotate!(rb_cBasicObject, "!", types::BoolExact, no_gc, leaf, elidable);
-    annotate!(rb_cBasicObject, "!=", inline_basic_object_neq, types::BoolExact, no_gc, leaf, elidable);
+    annotate!(rb_cBasicObject, "!=", inline_basic_object_neq, types::BoolExact);
     annotate!(rb_cBasicObject, "initialize", inline_basic_object_initialize);
     annotate!(rb_cInteger, "succ", inline_integer_succ);
     annotate!(rb_cInteger, "^", inline_integer_xor);
diff --git a/zjit/src/hir/opt_tests.rs b/zjit/src/hir/opt_tests.rs
index 8a9acb59bb2527..d0b3203ac186e6 100644
--- a/zjit/src/hir/opt_tests.rs
+++ b/zjit/src/hir/opt_tests.rs
@@ -502,6 +502,52 @@ mod hir_opt_tests {
         ");
     }

+    #[test]
+    fn neq_with_side_effect_not_elided () {
+        let result = eval("
+            class CustomEq
+              attr_reader :count
+
+              def ==(o)
+                @count = @count.to_i + 1
+                self.equal?(o)
+              end
+            end
+
+            def test(object)
+              # intentionally unused, but also can't assign to underscore
+              object != object
+              nil
+            end
+
+            custom = CustomEq.new
+            test(custom)
+            test(custom)
+
+            custom.count
+        ");
+        assert_eq!(VALUE::fixnum_from_usize(2), result);
+        assert_snapshot!(hir_string("test"), @r"
+        fn test@:13:
+        bb0():
+          EntryPoint interpreter
+          v1:BasicObject = LoadSelf
+          v2:BasicObject = GetLocal l0, SP@4
+          Jump bb2(v1, v2)
+        bb1(v5:BasicObject, v6:BasicObject):
+          EntryPoint JIT(0)
+          Jump bb2(v5, v6)
+        bb2(v8:BasicObject, v9:BasicObject):
+          PatchPoint MethodRedefined(CustomEq@0x1000, !=@0x1008, cme:0x1010)
+          PatchPoint NoSingletonClass(CustomEq@0x1000)
+          v28:HeapObject[class_exact:CustomEq] = GuardType v9, HeapObject[class_exact:CustomEq]
+          v29:BoolExact = CCallWithFrame !=@0x1038, v28, v9
+          v19:NilClass = Const Value(nil)
+          CheckInterrupts
+          Return v19
+        ");
+    }
+
     #[test]
     fn test_replace_guard_if_known_fixnum() {
         eval("

From 17a7b4e03170599c942d2a7f1bbac6247aa3bd56 Mon Sep 17 00:00:00 2001
From: Sam Westerman
Date: Sat, 1 Nov 2025 00:18:01 +0000
Subject: [PATCH 7/7] [DOC] Fix typo in `Hash#compare_by_identity` docs

---
 hash.c | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/hash.c b/hash.c
index 603cab76db33d9..97a303c4d5851c 100644
--- a/hash.c
+++ b/hash.c
@@ -4649,7 +4649,7 @@ rb_hash_compact_bang(VALUE hash)
  *  returns +self+:
  *
  *  By default, two keys are considered to be the same key
- *  if and only if they are _equal_ objects (per method #==):
+ *  if and only if they are _equal_ objects (per method #eql?):
  *
  *    h = {}
  *    h['x'] = 0