diff --git a/depend b/depend
index 7372902666b97b..2be15478970953 100644
--- a/depend
+++ b/depend
@@ -16298,16 +16298,28 @@ sprintf.$(OBJEXT): {$(VPATH)}st.h
sprintf.$(OBJEXT): {$(VPATH)}subst.h
sprintf.$(OBJEXT): {$(VPATH)}util.h
sprintf.$(OBJEXT): {$(VPATH)}vsnprintf.c
+st.$(OBJEXT): $(CCAN_DIR)/check_type/check_type.h
+st.$(OBJEXT): $(CCAN_DIR)/container_of/container_of.h
+st.$(OBJEXT): $(CCAN_DIR)/list/list.h
+st.$(OBJEXT): $(CCAN_DIR)/str/str.h
st.$(OBJEXT): $(hdrdir)/ruby/ruby.h
+st.$(OBJEXT): $(top_srcdir)/internal/array.h
+st.$(OBJEXT): $(top_srcdir)/internal/basic_operators.h
st.$(OBJEXT): $(top_srcdir)/internal/bits.h
+st.$(OBJEXT): $(top_srcdir)/internal/box.h
st.$(OBJEXT): $(top_srcdir)/internal/compilers.h
+st.$(OBJEXT): $(top_srcdir)/internal/gc.h
st.$(OBJEXT): $(top_srcdir)/internal/hash.h
+st.$(OBJEXT): $(top_srcdir)/internal/imemo.h
st.$(OBJEXT): $(top_srcdir)/internal/sanitizers.h
+st.$(OBJEXT): $(top_srcdir)/internal/serial.h
st.$(OBJEXT): $(top_srcdir)/internal/set_table.h
st.$(OBJEXT): $(top_srcdir)/internal/st.h
st.$(OBJEXT): $(top_srcdir)/internal/static_assert.h
+st.$(OBJEXT): $(top_srcdir)/internal/vm.h
st.$(OBJEXT): $(top_srcdir)/internal/warnings.h
st.$(OBJEXT): {$(VPATH)}assert.h
+st.$(OBJEXT): {$(VPATH)}atomic.h
st.$(OBJEXT): {$(VPATH)}backward/2/assume.h
st.$(OBJEXT): {$(VPATH)}backward/2/attributes.h
st.$(OBJEXT): {$(VPATH)}backward/2/bool.h
@@ -16319,6 +16331,9 @@ st.$(OBJEXT): {$(VPATH)}backward/2/stdalign.h
st.$(OBJEXT): {$(VPATH)}backward/2/stdarg.h
st.$(OBJEXT): {$(VPATH)}config.h
st.$(OBJEXT): {$(VPATH)}defines.h
+st.$(OBJEXT): {$(VPATH)}encoding.h
+st.$(OBJEXT): {$(VPATH)}id.h
+st.$(OBJEXT): {$(VPATH)}id_table.h
st.$(OBJEXT): {$(VPATH)}intern.h
st.$(OBJEXT): {$(VPATH)}internal.h
st.$(OBJEXT): {$(VPATH)}internal/abi.h
@@ -16392,6 +16407,15 @@ st.$(OBJEXT): {$(VPATH)}internal/core/rtypeddata.h
st.$(OBJEXT): {$(VPATH)}internal/ctype.h
st.$(OBJEXT): {$(VPATH)}internal/dllexport.h
st.$(OBJEXT): {$(VPATH)}internal/dosish.h
+st.$(OBJEXT): {$(VPATH)}internal/encoding/coderange.h
+st.$(OBJEXT): {$(VPATH)}internal/encoding/ctype.h
+st.$(OBJEXT): {$(VPATH)}internal/encoding/encoding.h
+st.$(OBJEXT): {$(VPATH)}internal/encoding/pathname.h
+st.$(OBJEXT): {$(VPATH)}internal/encoding/re.h
+st.$(OBJEXT): {$(VPATH)}internal/encoding/sprintf.h
+st.$(OBJEXT): {$(VPATH)}internal/encoding/string.h
+st.$(OBJEXT): {$(VPATH)}internal/encoding/symbol.h
+st.$(OBJEXT): {$(VPATH)}internal/encoding/transcode.h
st.$(OBJEXT): {$(VPATH)}internal/error.h
st.$(OBJEXT): {$(VPATH)}internal/eval.h
st.$(OBJEXT): {$(VPATH)}internal/event.h
@@ -16463,11 +16487,21 @@ st.$(OBJEXT): {$(VPATH)}internal/value_type.h
st.$(OBJEXT): {$(VPATH)}internal/variable.h
st.$(OBJEXT): {$(VPATH)}internal/warning_push.h
st.$(OBJEXT): {$(VPATH)}internal/xmalloc.h
+st.$(OBJEXT): {$(VPATH)}method.h
st.$(OBJEXT): {$(VPATH)}missing.h
+st.$(OBJEXT): {$(VPATH)}node.h
+st.$(OBJEXT): {$(VPATH)}onigmo.h
+st.$(OBJEXT): {$(VPATH)}oniguruma.h
st.$(OBJEXT): {$(VPATH)}ruby_assert.h
+st.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+st.$(OBJEXT): {$(VPATH)}rubyparser.h
st.$(OBJEXT): {$(VPATH)}st.c
st.$(OBJEXT): {$(VPATH)}st.h
st.$(OBJEXT): {$(VPATH)}subst.h
+st.$(OBJEXT): {$(VPATH)}thread_$(THREAD_MODEL).h
+st.$(OBJEXT): {$(VPATH)}thread_native.h
+st.$(OBJEXT): {$(VPATH)}vm_core.h
+st.$(OBJEXT): {$(VPATH)}vm_opts.h
strftime.$(OBJEXT): $(hdrdir)/ruby/ruby.h
strftime.$(OBJEXT): $(top_srcdir)/internal/compilers.h
strftime.$(OBJEXT): $(top_srcdir)/internal/encoding.h
diff --git a/gc/default/default.c b/gc/default/default.c
index aaf6f56092b360..a6572b6f4d7ceb 100644
--- a/gc/default/default.c
+++ b/gc/default/default.c
@@ -8270,7 +8270,7 @@ rb_gc_impl_free(void *objspace_ptr, void *ptr, size_t old_size)
struct malloc_obj_info *info = (struct malloc_obj_info *)ptr - 1;
#if VERIFY_FREE_SIZE
if (old_size && (old_size + sizeof(struct malloc_obj_info)) != info->size) {
- rb_bug("buffer %p freed with size %lu, but was allocated with size %lu", ptr, old_size, info->size - sizeof(struct malloc_obj_info));
+ rb_bug("buffer %p freed with old_size=%lu, but was allocated with size=%lu", ptr, old_size, info->size - sizeof(struct malloc_obj_info));
}
#endif
ptr = info;
@@ -8379,6 +8379,11 @@ rb_gc_impl_realloc(void *objspace_ptr, void *ptr, size_t new_size, size_t old_si
struct malloc_obj_info *info = (struct malloc_obj_info *)ptr - 1;
new_size += sizeof(struct malloc_obj_info);
ptr = info;
+#if VERIFY_FREE_SIZE
+ if (old_size && (old_size + sizeof(struct malloc_obj_info)) != info->size) {
+            rb_bug("buffer %p realloced with old_size=%lu, but was allocated with size=%lu", (void *)(info + 1), old_size, info->size - sizeof(struct malloc_obj_info));
+ }
+#endif
old_size = info->size;
}
#endif
diff --git a/io.c b/io.c
index 8563fa6536c02f..ca97e321f7179a 100644
--- a/io.c
+++ b/io.c
@@ -14956,7 +14956,7 @@ set_LAST_READ_LINE(VALUE val, ID _x, VALUE *_y)
*
* When no character '-' is given, stream $stdin is ignored
* (exception:
- * see {Specifying $stdin in ARGV}[rdoc-ref:ARGF@Specifying+-24stdin+in+ARGV]):
+ * see {Specifying $stdin in ARGV}[rdoc-ref:ARGF@Specifying+stdin+in+ARGV]):
*
* - Command and output:
*
diff --git a/lib/prism/translation/ripper.rb b/lib/prism/translation/ripper.rb
index 054ad88ce3e8a3..5b2aa37833207e 100644
--- a/lib/prism/translation/ripper.rb
+++ b/lib/prism/translation/ripper.rb
@@ -480,7 +480,17 @@ def self.lex_state_name(state)
# Create a new Translation::Ripper object with the given source.
def initialize(source, filename = "(ripper)", lineno = 1)
- @source = source
+ if source.is_a?(IO)
+ @source = source.read
+ elsif source.respond_to?(:gets)
+ @source = +""
+ while line = source.gets
+ @source << line
+ end
+ else
+ @source = source.to_str
+ end
+
@filename = filename
@lineno = lineno
@column = 0
diff --git a/parse.y b/parse.y
index 03dd1c6f926067..7ca1197b37339a 100644
--- a/parse.y
+++ b/parse.y
@@ -1995,7 +1995,7 @@ parser_memhash(const void *ptr, long len)
#define STRING_TERM_LEN(str) (1)
#define STRING_TERM_FILL(str) (str->ptr[str->len] = '\0')
#define PARSER_STRING_RESIZE_CAPA_TERM(p,str,capacity,termlen) do {\
- SIZED_REALLOC_N(str->ptr, char, (size_t)total + termlen, STRING_SIZE(str)); \
+ REALLOC_N(str->ptr, char, (size_t)total + termlen); \
str->len = total; \
} while (0)
#define STRING_SET_LEN(str, n) do { \
diff --git a/st.c b/st.c
index 8937f7935f6b22..7891947549aa31 100644
--- a/st.c
+++ b/st.c
@@ -107,6 +107,7 @@
#elif defined RUBY_EXPORT
#include "internal.h"
#include "internal/bits.h"
+#include "internal/gc.h"
#include "internal/hash.h"
#include "internal/sanitizers.h"
#include "internal/set_table.h"
@@ -173,7 +174,14 @@ static const struct st_hash_type type_strcasehash = {
#define malloc ruby_xmalloc
#define calloc ruby_xcalloc
#define realloc ruby_xrealloc
+#define sized_realloc ruby_sized_xrealloc
#define free ruby_xfree
+#define sized_free ruby_sized_xfree
+#define free_fixed_ptr(v) ruby_sized_xfree((v), sizeof(*(v)))
+#else
+#define sized_realloc(ptr, new_size, old_size) realloc(ptr, new_size)
+#define sized_free(v, s) free(v)
+#define free_fixed_ptr(v) free(v)
#endif
#define EQUAL(tab,x,y) ((x) == (y) || (*(tab)->type->compare)((x),(y)) == 0)
@@ -551,7 +559,7 @@ st_init_existing_table_with_size(st_table *tab, const struct st_hash_type *type,
tab->bins = (st_index_t *) malloc(bins_size(tab));
#ifndef RUBY
if (tab->bins == NULL) {
- free(tab);
+ free_fixed_ptr(tab);
return NULL;
}
#endif
@@ -585,7 +593,7 @@ st_init_table_with_size(const struct st_hash_type *type, st_index_t size)
st_init_existing_table_with_size(tab, type, size);
#else
if (st_init_existing_table_with_size(tab, type, size) == NULL) {
- free(tab);
+ free_fixed_ptr(tab);
return NULL;
}
#endif
@@ -661,13 +669,36 @@ st_clear(st_table *tab)
tab->rebuilds_num++;
}
+static inline size_t
+st_entries_memsize(const st_table *tab)
+{
+ return get_allocated_entries(tab) * sizeof(st_table_entry);
+}
+
+static inline size_t
+st_bins_memsize(const st_table *tab)
+{
+ return tab->bins == NULL ? 0 : bins_size(tab);
+}
+
+static inline void
+st_free_entries(const st_table *tab)
+{
+ sized_free(tab->entries, st_entries_memsize(tab));
+}
+
+static inline void
+st_free_bins(const st_table *tab)
+{
+ sized_free(tab->bins, st_bins_memsize(tab));
+}
/* Free table TAB space. */
void
st_free_table(st_table *tab)
{
- free(tab->bins);
- free(tab->entries);
- free(tab);
+ st_free_bins(tab);
+ st_free_entries(tab);
+ free_fixed_ptr(tab);
}
/* Return byte size of memory allocated for table TAB. */
@@ -676,8 +707,8 @@ st_memsize(const st_table *tab)
{
RUBY_ASSERT(tab != NULL);
return(sizeof(st_table)
- + (tab->bins == NULL ? 0 : bins_size(tab))
- + get_allocated_entries(tab) * sizeof(st_table_entry));
+ + st_bins_memsize(tab)
+ + st_entries_memsize(tab));
}
static st_index_t
@@ -799,14 +830,15 @@ rebuild_table_with(st_table *const new_tab, st_table *const tab)
static void
rebuild_move_table(st_table *const new_tab, st_table *const tab)
{
+ st_free_bins(tab);
+ st_free_entries(tab);
+
tab->entry_power = new_tab->entry_power;
tab->bin_power = new_tab->bin_power;
tab->size_ind = new_tab->size_ind;
- free(tab->bins);
tab->bins = new_tab->bins;
- free(tab->entries);
tab->entries = new_tab->entries;
- free(new_tab);
+ free_fixed_ptr(new_tab);
}
static void
@@ -2135,16 +2167,17 @@ st_expand_table(st_table *tab, st_index_t siz)
tmp = st_init_table_with_size(tab->type, siz);
n = get_allocated_entries(tab);
MEMCPY(tmp->entries, tab->entries, st_table_entry, n);
- free(tab->entries);
- free(tab->bins);
- free(tmp->bins);
+ st_free_bins(tab);
+ st_free_entries(tab);
+ st_free_bins(tmp);
+
tab->entry_power = tmp->entry_power;
tab->bin_power = tmp->bin_power;
tab->size_ind = tmp->size_ind;
tab->entries = tmp->entries;
tab->bins = NULL;
tab->rebuilds_num++;
- free(tmp);
+ free_fixed_ptr(tmp);
}
/* Rehash using linear search. Return TRUE if we found that the table
@@ -2156,7 +2189,7 @@ st_rehash_linear(st_table *tab)
st_index_t i, j;
st_table_entry *p, *q;
- free(tab->bins);
+ st_free_bins(tab);
tab->bins = NULL;
for (i = tab->entries_start; i < tab->entries_bound; i++) {
@@ -2188,10 +2221,11 @@ st_rehash_indexed(st_table *tab)
{
int eq_p, rebuilt_p;
st_index_t i;
- st_index_t const n = bins_size(tab);
+
+ if (!tab->bins) {
+ tab->bins = malloc(bins_size(tab));
+ }
unsigned int const size_ind = get_size_ind(tab);
- st_index_t *bins = realloc(tab->bins, n);
- tab->bins = bins;
initialize_bins(tab);
for (i = tab->entries_start; i < tab->entries_bound; i++) {
st_table_entry *p = &tab->entries[i];
@@ -2207,10 +2241,10 @@ st_rehash_indexed(st_table *tab)
ind = hash_bin(p->hash, tab);
for (;;) {
- st_index_t bin = get_bin(bins, size_ind, ind);
+ st_index_t bin = get_bin(tab->bins, size_ind, ind);
if (EMPTY_OR_DELETED_BIN_P(bin)) {
/* ok, new room */
- set_bin(bins, size_ind, ind, i + ENTRY_BASE);
+ set_bin(tab->bins, size_ind, ind, i + ENTRY_BASE);
break;
}
else {
@@ -2446,6 +2480,16 @@ set_make_tab_empty(set_table *tab)
set_initialize_bins(tab);
}
+static inline size_t
+set_entries_memsize(set_table *tab)
+{
+ size_t memsize = set_get_allocated_entries(tab) * sizeof(set_table_entry);
+ if (set_has_bins(tab)) {
+ memsize += set_bins_size(tab);
+ }
+ return memsize;
+}
+
static set_table *
set_init_existing_table_with_size(set_table *tab, const struct st_hash_type *type, st_index_t size)
{
@@ -2471,12 +2515,7 @@ set_init_existing_table_with_size(set_table *tab, const struct st_hash_type *typ
tab->bin_power = features[n].bin_power;
tab->size_ind = features[n].size_ind;
- size_t memsize = 0;
- if (set_has_bins(tab)) {
- memsize += set_bins_size(tab);
- }
- memsize += set_get_allocated_entries(tab) * sizeof(set_table_entry);
- tab->entries = (set_table_entry *)malloc(memsize);
+ tab->entries = (set_table_entry *)malloc(set_entries_memsize(tab));
set_make_tab_empty(tab);
tab->rebuilds_num = 0;
return tab;
@@ -2526,8 +2565,8 @@ set_table_clear(set_table *tab)
void
set_free_table(set_table *tab)
{
- free(tab->entries);
- free(tab);
+ sized_free(tab->entries, set_entries_memsize(tab));
+ free_fixed_ptr(tab);
}
/* Return byte size of memory allocated for table TAB. */
@@ -2625,12 +2664,14 @@ set_rebuild_table_with(set_table *const new_tab, set_table *const tab)
static void
set_rebuild_move_table(set_table *const new_tab, set_table *const tab)
{
+ sized_free(tab->entries, set_entries_memsize(tab));
+ tab->entries = new_tab->entries;
+
tab->entry_power = new_tab->entry_power;
tab->bin_power = new_tab->bin_power;
tab->size_ind = new_tab->size_ind;
- free(tab->entries);
- tab->entries = new_tab->entries;
- free(new_tab);
+
+ free_fixed_ptr(new_tab);
}
static void
diff --git a/test/prism/ruby/ripper_test.rb b/test/prism/ruby/ripper_test.rb
index a89a9503b98fd4..52a5ad7ef4e2f9 100644
--- a/test/prism/ruby/ripper_test.rb
+++ b/test/prism/ruby/ripper_test.rb
@@ -145,6 +145,36 @@ def test_tokenize
assert_equal(Ripper.tokenize(source), Translation::Ripper.tokenize(source))
end
+ def test_sexp_coercion
+ string_like = Object.new
+ def string_like.to_str
+ "a"
+ end
+ assert_equal Ripper.sexp(string_like), Translation::Ripper.sexp(string_like)
+
+ File.open(__FILE__) do |file1|
+ File.open(__FILE__) do |file2|
+ assert_equal Ripper.sexp(file1), Translation::Ripper.sexp(file2)
+ end
+ end
+
+ File.open(__FILE__) do |file1|
+ File.open(__FILE__) do |file2|
+ object1_with_gets = Object.new
+ object1_with_gets.define_singleton_method(:gets) do
+ file1.gets
+ end
+
+ object2_with_gets = Object.new
+ object2_with_gets.define_singleton_method(:gets) do
+ file2.gets
+ end
+
+ assert_equal Ripper.sexp(object1_with_gets), Translation::Ripper.sexp(object2_with_gets)
+ end
+ end
+ end
+
# Check that the hardcoded values don't change without us noticing.
def test_internals
actual = Translation::Ripper.constants.select { |name| name.start_with?("EXPR_") }.sort
diff --git a/zjit/src/codegen.rs b/zjit/src/codegen.rs
index 2038be808dc633..a3068ff23dfea4 100644
--- a/zjit/src/codegen.rs
+++ b/zjit/src/codegen.rs
@@ -2330,6 +2330,26 @@ fn gen_guard_type(jit: &mut JITState, asm: &mut Assembler, val: lir::Opnd, guard
let tag = asm.and(flags, Opnd::UImm(RUBY_T_MASK as u64));
asm.cmp(tag, Opnd::UImm(RUBY_T_STRING as u64));
asm.jne(side);
+ } else if guard_type.is_subtype(types::Array) {
+ let side = side_exit(jit, state, GuardType(guard_type));
+
+ // Check special constant
+ asm.test(val, Opnd::UImm(RUBY_IMMEDIATE_MASK as u64));
+ asm.jnz(side.clone());
+
+ // Check false
+ asm.cmp(val, Qfalse.into());
+ asm.je(side.clone());
+
+ let val = match val {
+ Opnd::Reg(_) | Opnd::VReg { .. } => val,
+ _ => asm.load(val),
+ };
+
+ let flags = asm.load(Opnd::mem(VALUE_BITS, val, RUBY_OFFSET_RBASIC_FLAGS));
+ let tag = asm.and(flags, Opnd::UImm(RUBY_T_MASK as u64));
+ asm.cmp(tag, Opnd::UImm(RUBY_T_ARRAY as u64));
+ asm.jne(side);
} else if guard_type.bit_equal(types::HeapBasicObject) {
let side_exit = side_exit(jit, state, GuardType(guard_type));
asm.cmp(val, Opnd::Value(Qfalse));
diff --git a/zjit/src/hir.rs b/zjit/src/hir.rs
index 901beffea02772..9aa70b5d34d12e 100644
--- a/zjit/src/hir.rs
+++ b/zjit/src/hir.rs
@@ -3564,13 +3564,7 @@ impl Function {
assert!(flags & VM_CALL_FCALL != 0);
// Reject calls with complex argument handling.
- let complex_arg_types = VM_CALL_ARGS_SPLAT
- | VM_CALL_KW_SPLAT
- | VM_CALL_KWARG
- | VM_CALL_ARGS_BLOCKARG
- | VM_CALL_FORWARDING;
-
- if (flags & complex_arg_types) != 0 {
+ if unspecializable_c_call_type(flags) {
self.push_insn_id(block, insn_id);
self.set_dynamic_send_reason(insn_id, SuperComplexArgsPass);
continue;
@@ -3608,14 +3602,18 @@ impl Function {
}
// Look up the super method.
- let super_cme = unsafe { rb_callable_method_entry(superclass, mid) };
+ let mut super_cme = unsafe { rb_callable_method_entry(superclass, mid) };
if super_cme.is_null() {
self.push_insn_id(block, insn_id);
self.set_dynamic_send_reason(insn_id, SuperTargetNotFound);
continue;
}
- let def_type = unsafe { get_cme_def_type(super_cme) };
+ let mut def_type = unsafe { get_cme_def_type(super_cme) };
+ while def_type == VM_METHOD_TYPE_ALIAS {
+ super_cme = unsafe { rb_aliased_callable_method_entry(super_cme) };
+ def_type = unsafe { get_cme_def_type(super_cme) };
+ }
if def_type == VM_METHOD_TYPE_ISEQ {
// Check if the super method's parameters support direct send.
@@ -3653,6 +3651,12 @@ impl Function {
let cfunc_argc = unsafe { get_mct_argc(cfunc) };
let cfunc_ptr = unsafe { get_mct_func(cfunc) }.cast();
+ let props = ZJITState::get_method_annotations().get_cfunc_properties(super_cme);
+ if props.is_none() && get_option!(stats) {
+ self.count_not_annotated_cfunc(block, super_cme);
+ }
+ let props = props.unwrap_or_default();
+
match cfunc_argc {
// C function with fixed argument count.
0.. => {
@@ -3665,20 +3669,48 @@ impl Function {
emit_super_call_guards(self, block, super_cme, current_cme, mid, state);
+ // Try inlining the cfunc into HIR
+ let tmp_block = self.new_block(u32::MAX);
+ if let Some(replacement) = (props.inline)(self, tmp_block, recv, &args, state) {
+ // Copy contents of tmp_block to block
+ assert_ne!(block, tmp_block);
+ let insns = std::mem::take(&mut self.blocks[tmp_block.0].insns);
+ self.blocks[block.0].insns.extend(insns);
+ self.push_insn(block, Insn::IncrCounter(Counter::inline_cfunc_optimized_send_count));
+ self.make_equal_to(insn_id, replacement);
+ if self.type_of(replacement).bit_equal(types::Any) {
+ // Not set yet; infer type
+ self.insn_types[replacement.0] = self.infer_type(replacement);
+ }
+ self.remove_block(tmp_block);
+ continue;
+ }
+
// Use CCallWithFrame for the C function.
let name = rust_str_to_id(&qualified_method_name(unsafe { (*super_cme).owner }, unsafe { (*super_cme).called_id }));
- let ccall = self.push_insn(block, Insn::CCallWithFrame {
- cd,
- cfunc: cfunc_ptr,
- recv,
- args: args.clone(),
- cme: super_cme,
- name,
- state,
- return_type: types::BasicObject,
- elidable: false,
- blockiseq: None,
- });
+ let return_type = props.return_type;
+ let elidable = props.elidable;
+ // Filter for a leaf and GC free function
+ let ccall = if props.leaf && props.no_gc {
+ self.push_insn(block, Insn::IncrCounter(Counter::inline_cfunc_optimized_send_count));
+ self.push_insn(block, Insn::CCall { cfunc: cfunc_ptr, recv, args, name, return_type, elidable })
+ } else {
+ if get_option!(stats) {
+ self.count_not_inlined_cfunc(block, super_cme);
+ }
+ self.push_insn(block, Insn::CCallWithFrame {
+ cd,
+ cfunc: cfunc_ptr,
+ recv,
+ args: args.clone(),
+ cme: super_cme,
+ name,
+ state,
+ return_type: types::BasicObject,
+ elidable: false,
+ blockiseq: None,
+ })
+ };
self.make_equal_to(insn_id, ccall);
}
@@ -3686,19 +3718,48 @@ impl Function {
-1 => {
emit_super_call_guards(self, block, super_cme, current_cme, mid, state);
+ // Try inlining the cfunc into HIR
+ let tmp_block = self.new_block(u32::MAX);
+ if let Some(replacement) = (props.inline)(self, tmp_block, recv, &args, state) {
+ // Copy contents of tmp_block to block
+ assert_ne!(block, tmp_block);
+                        // NOTE: guards were already emitted at the top of this arm (see above); do not emit them a second time.
+ let insns = std::mem::take(&mut self.blocks[tmp_block.0].insns);
+ self.blocks[block.0].insns.extend(insns);
+ self.push_insn(block, Insn::IncrCounter(Counter::inline_cfunc_optimized_send_count));
+ self.make_equal_to(insn_id, replacement);
+ if self.type_of(replacement).bit_equal(types::Any) {
+ // Not set yet; infer type
+ self.insn_types[replacement.0] = self.infer_type(replacement);
+ }
+ self.remove_block(tmp_block);
+ continue;
+ }
+
// Use CCallVariadic for the variadic C function.
let name = rust_str_to_id(&qualified_method_name(unsafe { (*super_cme).owner }, unsafe { (*super_cme).called_id }));
- let ccall = self.push_insn(block, Insn::CCallVariadic {
- cfunc: cfunc_ptr,
- recv,
- args: args.clone(),
- cme: super_cme,
- name,
- state,
- return_type: types::BasicObject,
- elidable: false,
- blockiseq: None,
- });
+ let return_type = props.return_type;
+ let elidable = props.elidable;
+ // Filter for a leaf and GC free function
+ let ccall = if props.leaf && props.no_gc {
+ self.push_insn(block, Insn::IncrCounter(Counter::inline_cfunc_optimized_send_count));
+ self.push_insn(block, Insn::CCall { cfunc: cfunc_ptr, recv, args, name, return_type, elidable })
+ } else {
+ if get_option!(stats) {
+ self.count_not_inlined_cfunc(block, super_cme);
+ }
+ self.push_insn(block, Insn::CCallVariadic {
+ cfunc: cfunc_ptr,
+ recv,
+ args: args.clone(),
+ cme: super_cme,
+ name,
+ state,
+ return_type: types::BasicObject,
+ elidable: false,
+ blockiseq: None,
+ })
+ };
self.make_equal_to(insn_id, ccall);
}
@@ -3981,6 +4042,28 @@ impl Function {
self.push_insn(block, Insn::PatchPoint { invariant: Invariant::MethodRedefined { klass: recv_class, method: method_id, cme }, state });
}
+ fn count_not_inlined_cfunc(&mut self, block: BlockId, cme: *const rb_callable_method_entry_t) {
+ let owner = unsafe { (*cme).owner };
+ let called_id = unsafe { (*cme).called_id };
+ let qualified_method_name = qualified_method_name(owner, called_id);
+ let not_inlined_cfunc_counter_pointers = ZJITState::get_not_inlined_cfunc_counter_pointers();
+ let counter_ptr = not_inlined_cfunc_counter_pointers.entry(qualified_method_name.clone()).or_insert_with(|| Box::new(0));
+ let counter_ptr = &mut **counter_ptr as *mut u64;
+
+ self.push_insn(block, Insn::IncrCounterPtr { counter_ptr });
+ }
+
+ fn count_not_annotated_cfunc(&mut self, block: BlockId, cme: *const rb_callable_method_entry_t) {
+ let owner = unsafe { (*cme).owner };
+ let called_id = unsafe { (*cme).called_id };
+ let qualified_method_name = qualified_method_name(owner, called_id);
+ let not_annotated_cfunc_counter_pointers = ZJITState::get_not_annotated_cfunc_counter_pointers();
+ let counter_ptr = not_annotated_cfunc_counter_pointers.entry(qualified_method_name.clone()).or_insert_with(|| Box::new(0));
+ let counter_ptr = &mut **counter_ptr as *mut u64;
+
+ self.push_insn(block, Insn::IncrCounterPtr { counter_ptr });
+ }
+
/// Optimize Send/SendWithoutBlock that land in a C method to a direct CCall without
/// runtime lookup.
fn optimize_c_calls(&mut self) {
@@ -4124,7 +4207,7 @@ impl Function {
}
if get_option!(stats) {
- count_not_inlined_cfunc(fun, block, cme);
+ fun.count_not_inlined_cfunc(block, cme);
}
let ccall = fun.push_insn(block, Insn::CCallVariadic {
@@ -4238,7 +4321,7 @@ impl Function {
let props = ZJITState::get_method_annotations().get_cfunc_properties(cme);
if props.is_none() && get_option!(stats) {
- count_not_annotated_cfunc(fun, block, cme);
+ fun.count_not_annotated_cfunc(block, cme);
}
let props = props.unwrap_or_default();
@@ -4277,7 +4360,7 @@ impl Function {
fun.make_equal_to(send_insn_id, ccall);
} else {
if get_option!(stats) {
- count_not_inlined_cfunc(fun, block, cme);
+ fun.count_not_inlined_cfunc(block, cme);
}
let ccall = fun.push_insn(block, Insn::CCallWithFrame {
cd,
@@ -4326,7 +4409,7 @@ impl Function {
let cfunc = unsafe { get_mct_func(cfunc) }.cast();
let props = ZJITState::get_method_annotations().get_cfunc_properties(cme);
if props.is_none() && get_option!(stats) {
- count_not_annotated_cfunc(fun, block, cme);
+ fun.count_not_annotated_cfunc(block, cme);
}
let props = props.unwrap_or_default();
@@ -4349,7 +4432,7 @@ impl Function {
// No inlining; emit a call
if get_option!(stats) {
- count_not_inlined_cfunc(fun, block, cme);
+ fun.count_not_inlined_cfunc(block, cme);
}
let return_type = props.return_type;
let elidable = props.elidable;
@@ -4383,28 +4466,6 @@ impl Function {
Err(())
}
- fn count_not_inlined_cfunc(fun: &mut Function, block: BlockId, cme: *const rb_callable_method_entry_t) {
- let owner = unsafe { (*cme).owner };
- let called_id = unsafe { (*cme).called_id };
- let qualified_method_name = qualified_method_name(owner, called_id);
- let not_inlined_cfunc_counter_pointers = ZJITState::get_not_inlined_cfunc_counter_pointers();
- let counter_ptr = not_inlined_cfunc_counter_pointers.entry(qualified_method_name.clone()).or_insert_with(|| Box::new(0));
- let counter_ptr = &mut **counter_ptr as *mut u64;
-
- fun.push_insn(block, Insn::IncrCounterPtr { counter_ptr });
- }
-
- fn count_not_annotated_cfunc(fun: &mut Function, block: BlockId, cme: *const rb_callable_method_entry_t) {
- let owner = unsafe { (*cme).owner };
- let called_id = unsafe { (*cme).called_id };
- let qualified_method_name = qualified_method_name(owner, called_id);
- let not_annotated_cfunc_counter_pointers = ZJITState::get_not_annotated_cfunc_counter_pointers();
- let counter_ptr = not_annotated_cfunc_counter_pointers.entry(qualified_method_name.clone()).or_insert_with(|| Box::new(0));
- let counter_ptr = &mut **counter_ptr as *mut u64;
-
- fun.push_insn(block, Insn::IncrCounterPtr { counter_ptr });
- }
-
for block in self.rpo() {
let old_insns = std::mem::take(&mut self.blocks[block.0].insns);
assert!(self.blocks[block.0].insns.is_empty());
diff --git a/zjit/src/hir/opt_tests.rs b/zjit/src/hir/opt_tests.rs
index de4e2ec39db7a7..8dec65fed634e6 100644
--- a/zjit/src/hir/opt_tests.rs
+++ b/zjit/src/hir/opt_tests.rs
@@ -11408,21 +11408,20 @@ mod hir_opt_tests {
#[test]
fn test_invokesuper_to_cfunc_optimizes_to_ccall() {
eval("
- class MyArray < Array
- def length
+ class C < Hash
+ def size
super
end
end
- MyArray.new.length; MyArray.new.length
+ C.new.size
");
- let hir = hir_string_proc("MyArray.new.method(:length)");
+ let hir = hir_string_proc("C.new.method(:size)");
assert!(!hir.contains("InvokeSuper "), "Expected unoptimized InvokeSuper but got:\n{hir}");
- assert!(hir.contains("CCallWithFrame"), "Should optimize to CCallWithFrame for non-variadic cfunc:\n{hir}");
- assert_snapshot!(hir, @"
- fn length@:4:
+ assert_snapshot!(hir, @r"
+ fn size@:4:
bb0():
EntryPoint interpreter
v1:BasicObject = LoadSelf
@@ -11431,12 +11430,46 @@ mod hir_opt_tests {
EntryPoint JIT(0)
Jump bb2(v4)
bb2(v6:BasicObject):
- PatchPoint MethodRedefined(Array@0x1000, length@0x1008, cme:0x1010)
+ PatchPoint MethodRedefined(Hash@0x1000, size@0x1008, cme:0x1010)
+ v17:CPtr = GetLEP
+ GuardSuperMethodEntry v17, 0x1038
+ v19:RubyValue = GetBlockHandler v17
+ v20:FalseClass = GuardBitEquals v19, Value(false)
+ IncrCounter inline_cfunc_optimized_send_count
+ v22:Fixnum = CCall v6, :Hash#size@0x1040
+ CheckInterrupts
+ Return v22
+ ");
+ }
+
+ #[test]
+ fn test_inline_invokesuper_to_basicobject_initialize() {
+ eval("
+ class C
+ def initialize
+ super
+ end
+ end
+
+ C.new
+ ");
+ assert_snapshot!(hir_string_proc("C.instance_method(:initialize)"), @r"
+ fn initialize@:4:
+ bb0():
+ EntryPoint interpreter
+ v1:BasicObject = LoadSelf
+ Jump bb2(v1)
+ bb1(v4:BasicObject):
+ EntryPoint JIT(0)
+ Jump bb2(v4)
+ bb2(v6:BasicObject):
+ PatchPoint MethodRedefined(BasicObject@0x1000, initialize@0x1008, cme:0x1010)
v17:CPtr = GetLEP
GuardSuperMethodEntry v17, 0x1038
v19:RubyValue = GetBlockHandler v17
v20:FalseClass = GuardBitEquals v19, Value(false)
- v21:BasicObject = CCallWithFrame v6, :Array#length@0x1040
+ v21:NilClass = Const Value(nil)
+ IncrCounter inline_cfunc_optimized_send_count
CheckInterrupts
Return v21
");