11#include "ruby/internal/config.h"
15#ifdef HAVE_STDATOMIC_H
16 #include <stdatomic.h>
20#include "debug_counter.h"
22#include "internal/class.h"
23#include "internal/compar.h"
24#include "internal/hash.h"
25#include "internal/numeric.h"
26#include "internal/proc.h"
27#include "internal/random.h"
28#include "internal/variable.h"
29#include "internal/struct.h"
34#include "insns_info.inc"
36extern rb_method_definition_t *rb_method_definition_create(rb_method_type_t
type,
ID mid);
37extern void rb_method_definition_set(
const rb_method_entry_t *me, rb_method_definition_t *def,
void *opts);
38extern int rb_method_definition_eq(
const rb_method_definition_t *d1,
const rb_method_definition_t *d2);
40 int argc,
const VALUE *argv,
int priv);
47static rb_control_frame_t *vm_get_ruby_level_caller_cfp(
const rb_execution_context_t *ec,
const rb_control_frame_t *cfp);
/* Duplicate one of the VM's pre-allocated "special" exceptions so the shared
 * template object is never mutated; the template's instance variables are
 * copied onto the fresh object.
 * NOTE(review): interior lines are elided in this chunk (allocation of `e`,
 * the return) — do not assume this is the whole body. */
50ruby_vm_special_exception_copy(
VALUE exc)
    /* copy ivars from the template exception `exc` onto the new copy `e` */
53 rb_obj_copy_ivar(e, exc);
/* Raise the pre-built SystemStackError stored in the VM's special_exceptions
 * table on this execution context and longjmp out with TAG_RAISE.
 * `setup` appears to select whether a backtrace is attached (see the
 * rb_ec_backtrace_object call below) — confirm against the elided branch. */
57NORETURN(
static void ec_stack_overflow(rb_execution_context_t *ec,
int));
59ec_stack_overflow(rb_execution_context_t *ec,
int setup)
    /* use the pre-allocated sysstack error; allocating here could recurse */
61 VALUE mesg = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_sysstack];
62 ec->raised_flag = RAISED_STACKOVERFLOW;
    /* attach a backtrace to a private copy of the template exception */
64 VALUE at = rb_ec_backtrace_object(ec);
65 mesg = ruby_vm_special_exception_copy(mesg);
    /* never returns: unwind to the nearest tag */
70 EC_JUMP_TAG(ec, TAG_RAISE);
/* VM-stack overflow entry point: delegates to ec_stack_overflow on the
 * current EC with backtrace setup enabled (TRUE). Never returns. */
73NORETURN(
static void vm_stackoverflow(
void));
78 ec_stack_overflow(GET_EC(), TRUE);
/* Public machine-stack overflow handler. Visible branches:
 *  - overflow during GC is unrecoverable -> rb_bug (likely a faulty native
 *    extension corrupted the stack);
 *  - a "critical" path raises the pre-built fatal stack error directly via
 *    errinfo + EC_JUMP_TAG (no Ruby-level allocation);
 *  - otherwise falls through to ec_stack_overflow with/without backtrace.
 * NOTE(review): the conditions guarding these branches (use of `crit`) are
 * elided in this chunk — verify against the full source. */
81NORETURN(
void rb_ec_stack_overflow(rb_execution_context_t *ec,
int crit));
83rb_ec_stack_overflow(rb_execution_context_t *ec,
int crit)
86 rb_bug(
"system stack overflow during GC. Faulty native extension?");
89 ec->raised_flag = RAISED_STACKOVERFLOW;
90 ec->errinfo = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_stackfatal];
91 EC_JUMP_TAG(ec, TAG_RAISE);
94 ec_stack_overflow(ec, TRUE);
96 ec_stack_overflow(ec, FALSE);
100static inline void stack_check(rb_execution_context_t *ec);
104callable_class_p(
VALUE klass)
106#if VM_CHECK_MODE >= 2
107 if (!klass)
return FALSE;
129callable_method_entry_p(
const rb_callable_method_entry_t *cme)
135 VM_ASSERT(IMEMO_TYPE_P((
VALUE)cme, imemo_ment));
137 if (callable_class_p(cme->defined_class)) {
147vm_check_frame_detail(
VALUE type,
int req_block,
int req_me,
int req_cref,
VALUE specval,
VALUE cref_or_me,
int is_cframe,
const rb_iseq_t *iseq)
149 unsigned int magic = (
unsigned int)(
type & VM_FRAME_MAGIC_MASK);
150 enum imemo_type cref_or_me_type = imemo_env;
153 cref_or_me_type = imemo_type(cref_or_me);
155 if (
type & VM_FRAME_FLAG_BMETHOD) {
159 if (req_block && (
type & VM_ENV_FLAG_LOCAL) == 0) {
160 rb_bug(
"vm_push_frame: specval (%p) should be a block_ptr on %x frame", (
void *)specval, magic);
162 if (!req_block && (
type & VM_ENV_FLAG_LOCAL) != 0) {
163 rb_bug(
"vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (
void *)specval, magic);
167 if (cref_or_me_type != imemo_ment) {
168 rb_bug(
"vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
172 if (req_cref && cref_or_me_type != imemo_cref) {
173 rb_bug(
"vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
176 if (cref_or_me !=
Qfalse && cref_or_me_type != imemo_cref) {
177 if (((
type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC) && (cref_or_me_type == imemo_ment)) {
181 rb_bug(
"vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);
187 if (cref_or_me_type == imemo_ment) {
188 const rb_callable_method_entry_t *me = (
const rb_callable_method_entry_t *)cref_or_me;
190 if (!callable_method_entry_p(me)) {
191 rb_bug(
"vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);
195 if ((
type & VM_FRAME_MAGIC_MASK) == VM_FRAME_MAGIC_DUMMY) {
196 VM_ASSERT(iseq == NULL ||
198 RUBY_VM_NORMAL_ISEQ_P(iseq)
202 VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));
210 const rb_iseq_t *iseq)
212 VALUE given_magic =
type & VM_FRAME_MAGIC_MASK;
215#define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
217 vm_check_frame_detail(type, req_block, req_me, req_cref, \
218 specval, cref_or_me, is_cframe, iseq); \
220 switch (given_magic) {
222 CHECK(VM_FRAME_MAGIC_METHOD, TRUE, TRUE, FALSE, FALSE);
223 CHECK(VM_FRAME_MAGIC_CLASS, TRUE, FALSE, TRUE, FALSE);
224 CHECK(VM_FRAME_MAGIC_TOP, TRUE, FALSE, TRUE, FALSE);
225 CHECK(VM_FRAME_MAGIC_CFUNC, TRUE, TRUE, FALSE, TRUE);
226 CHECK(VM_FRAME_MAGIC_BLOCK, FALSE, FALSE, FALSE, FALSE);
227 CHECK(VM_FRAME_MAGIC_IFUNC, FALSE, FALSE, FALSE, TRUE);
228 CHECK(VM_FRAME_MAGIC_EVAL, FALSE, FALSE, FALSE, FALSE);
229 CHECK(VM_FRAME_MAGIC_RESCUE, FALSE, FALSE, FALSE, FALSE);
230 CHECK(VM_FRAME_MAGIC_DUMMY, TRUE, FALSE, FALSE, FALSE);
232 rb_bug(
"vm_push_frame: unknown type (%x)", (
unsigned int)given_magic);
237static VALUE vm_stack_canary;
238static bool vm_stack_canary_was_born =
false;
/* Walk the encoded iseq from position 0, advancing by each instruction's
 * length, to find the index of the instruction immediately preceding `pc`.
 * Used by the stack-canary diagnostics below. rb_bug()s if `pc` does not
 * fall on an instruction boundary reachable from the start.
 * NOTE(review): the return inside the matching branch is elided here. */
243previous_insn_index(
const rb_iseq_t *iseq,
const VALUE *pc)
245 unsigned int pos = 0;
246 while (pos < ISEQ_BODY(iseq)->iseq_size) {
    /* decode the opcode at `pos` (addresses are used in direct-threading) */
247 int opcode = rb_vm_insn_addr2opcode((
void *)ISEQ_BODY(iseq)->iseq_encoded[pos]);
248 unsigned int next_pos = pos + insn_len(opcode);
249 if (ISEQ_BODY(iseq)->iseq_encoded + next_pos == pc) {
254 rb_bug(
"failed to find the previous insn");
/* Debug-mode check (compiled in via the vm_check_canary macro below) that the
 * stack canary written at `sp[0]` has not been clobbered. The guard clauses
 * return early when: the canary was never initialized, the cfp is at the end
 * of the VM stack, there is no current iseq, or the canary is intact. On
 * failure it identifies which instruction overwrote the canary (via
 * previous_insn_index) and aborts with rb_bug.
 * NOTE(review): early-return bodies and the fprintf call site are elided. */
258rb_vm_check_canary(
const rb_execution_context_t *ec,
VALUE *sp)
263 if (! LIKELY(vm_stack_canary_was_born)) {
266 else if ((
VALUE *)reg_cfp == ec->vm_stack + ec->vm_stack_size) {
270 else if (! (iseq = GET_ISEQ())) {
273 else if (LIKELY(sp[0] != vm_stack_canary)) {
    /* canary was killed: gather human-readable context for the bug report */
282 const VALUE *orig = rb_iseq_original_iseq(iseq);
283 const VALUE iseqw = rb_iseqw_new(iseq);
285 const char *stri = rb_str_to_cstr(inspection);
286 const VALUE disasm = rb_iseq_disasm(iseq);
287 const char *strd = rb_str_to_cstr(disasm);
288 const ptrdiff_t pos = previous_insn_index(iseq, GET_PC());
289 const enum ruby_vminsn_type insn = (
enum ruby_vminsn_type)orig[pos];
290 const char *name = insn_name(insn);
296 "We are killing the stack canary set by %s, "
297 "at %s@pc=%"PRIdPTR
"\n"
298 "watch out the C stack trace.\n"
300 name, stri, pos, strd);
301 rb_bug(
"see above.");
303#define vm_check_canary(ec, sp) rb_vm_check_canary(ec, sp)
306#define vm_check_canary(ec, sp)
307#define vm_check_frame(a, b, c, d)
312vm_push_frame_debug_counter_inc(
319 RB_DEBUG_COUNTER_INC(frame_push);
321 if (RUBY_VM_END_CONTROL_FRAME(ec) != prev_cfp) {
322 const bool curr = VM_FRAME_RUBYFRAME_P(reg_cfp);
323 const bool prev = VM_FRAME_RUBYFRAME_P(prev_cfp);
326 RB_DEBUG_COUNTER_INC(frame_R2R);
329 RB_DEBUG_COUNTER_INC(frame_R2C);
334 RB_DEBUG_COUNTER_INC(frame_C2R);
337 RB_DEBUG_COUNTER_INC(frame_C2C);
342 switch (
type & VM_FRAME_MAGIC_MASK) {
343 case VM_FRAME_MAGIC_METHOD: RB_DEBUG_COUNTER_INC(frame_push_method);
return;
344 case VM_FRAME_MAGIC_BLOCK: RB_DEBUG_COUNTER_INC(frame_push_block);
return;
345 case VM_FRAME_MAGIC_CLASS: RB_DEBUG_COUNTER_INC(frame_push_class);
return;
346 case VM_FRAME_MAGIC_TOP: RB_DEBUG_COUNTER_INC(frame_push_top);
return;
347 case VM_FRAME_MAGIC_CFUNC: RB_DEBUG_COUNTER_INC(frame_push_cfunc);
return;
348 case VM_FRAME_MAGIC_IFUNC: RB_DEBUG_COUNTER_INC(frame_push_ifunc);
return;
349 case VM_FRAME_MAGIC_EVAL: RB_DEBUG_COUNTER_INC(frame_push_eval);
return;
350 case VM_FRAME_MAGIC_RESCUE: RB_DEBUG_COUNTER_INC(frame_push_rescue);
return;
351 case VM_FRAME_MAGIC_DUMMY: RB_DEBUG_COUNTER_INC(frame_push_dummy);
return;
354 rb_bug(
"unreachable");
357#define vm_push_frame_debug_counter_inc(ec, cfp, t)
/* Accessor for the file-static canary value so other translation units
 * (e.g. JIT code) can compare against it. */
362rb_vm_stack_canary(
void)
365 return vm_stack_canary;
371STATIC_ASSERT(VM_ENV_DATA_INDEX_ME_CREF, VM_ENV_DATA_INDEX_ME_CREF == -2);
372STATIC_ASSERT(VM_ENV_DATA_INDEX_SPECVAL, VM_ENV_DATA_INDEX_SPECVAL == -1);
373STATIC_ASSERT(VM_ENV_DATA_INDEX_FLAGS, VM_ENV_DATA_INDEX_FLAGS == -0);
376vm_push_frame(rb_execution_context_t *ec,
377 const rb_iseq_t *iseq,
387 rb_control_frame_t *
const cfp = RUBY_VM_NEXT_CONTROL_FRAME(ec->cfp);
389 vm_check_frame(
type, specval, cref_or_me, iseq);
390 VM_ASSERT(local_size >= 0);
393 CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
394 vm_check_canary(ec, sp);
399 for (
int i=0; i < local_size; i++) {
426 #if defined HAVE_DECL_ATOMIC_SIGNAL_FENCE && HAVE_DECL_ATOMIC_SIGNAL_FENCE
427 atomic_signal_fence(memory_order_seq_cst);
435 vm_push_frame_debug_counter_inc(ec, cfp,
type);
/* Pop the current control frame WITHOUT checking pending interrupts —
 * contrast with vm_pop_frame below, which runs RUBY_VM_CHECK_INTS. */
439rb_vm_pop_frame_no_int(rb_execution_context_t *ec)
441 rb_control_frame_t *cfp = ec->cfp;
    /* VMDEBUG==2: dump the stack frame for debugging */
443 if (VMDEBUG == 2) SDR();
445 ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
/* Pop `cfp`, servicing pending interrupts first, and report whether the
 * popped frame carried VM_FRAME_FLAG_FINISH (i.e. the interpreter loop for
 * this invocation should terminate). Flags are read from the env pointer
 * BEFORE the frame is popped. */
450vm_pop_frame(rb_execution_context_t *ec, rb_control_frame_t *cfp,
const VALUE *ep)
452 VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
454 if (VMDEBUG == 2) SDR();
456 RUBY_VM_CHECK_INTS(ec);
457 ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
459 return flags & VM_FRAME_FLAG_FINISH;
/* Public wrapper: pop the EC's current frame (interrupt-checking variant). */
463rb_vm_pop_frame(rb_execution_context_t *ec)
465 vm_pop_frame(ec, ec->cfp, ec->cfp->ep);
470rb_vm_push_frame_fname(rb_execution_context_t *ec,
VALUE fname)
472 rb_iseq_t *rb_iseq_alloc_with_dummy_path(
VALUE fname);
473 rb_iseq_t *dmy_iseq = rb_iseq_alloc_with_dummy_path(fname);
477 VM_FRAME_MAGIC_DUMMY | VM_ENV_FLAG_LOCAL | VM_FRAME_FLAG_FINISH,
479 VM_BLOCK_HANDLER_NONE,
486 return (
VALUE)dmy_iseq;
/* Build (but do not raise) the ArgumentError for a wrong-arity call:
 * "wrong number of arguments (given N, expected M[..MAX])".
 * NOTE(review): the branches choosing between the fixed / range / closing
 * forms of the message, and the exception construction, are elided here. */
491rb_arity_error_new(
int argc,
int min,
int max)
493 VALUE err_mess = rb_sprintf(
"wrong number of arguments (given %d, expected %d", argc, min);
    /* variable arity: append the upper bound as "..max" */
501 rb_str_catf(err_mess,
"..%d", max);
/* Raise the arity ArgumentError built by rb_arity_error_new. Does not
 * return (rb_exc_raise longjmps). */
508rb_error_arity(
int argc,
int min,
int max)
510 rb_exc_raise(rb_arity_error_new(argc, min, max));
/* Slow path for writing a local/env slot when the env still requires a GC
 * write barrier: remember the env object for the write barrier, force the
 * write, then clear VM_ENV_FLAG_WB_REQUIRED so subsequent writes take the
 * fast path in vm_env_write. NOINLINE keeps the fast path small. */
515NOINLINE(
static void vm_env_write_slowpath(
const VALUE *ep,
int index,
VALUE v));
518vm_env_write_slowpath(
const VALUE *ep,
int index,
VALUE v)
521 rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
522 VM_FORCE_WRITE(&ep[index], v);
523 VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
524 RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
/* Write `v` into env slot `index`: plain stack write when no GC write
 * barrier is required (the common case), otherwise defer to the slow path
 * above. */
529vm_env_write(
const VALUE *ep,
int index,
VALUE v)
531 VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
532 if (LIKELY((flags & VM_ENV_FLAG_WB_REQUIRED) == 0)) {
533 VM_STACK_ENV_WRITE(ep, index, v);
536 vm_env_write_slowpath(ep, index, v);
/* Exported wrapper around vm_env_write for use outside this file. */
541rb_vm_env_write(
const VALUE *ep,
int index,
VALUE v)
543 vm_env_write(ep, index, v);
/* Convert a VM block handler into a Proc VALUE:
 *  - no block -> (elided branch; early return)
 *  - iseq / ifunc blocks -> materialize a Proc via rb_vm_make_proc
 *  - symbol -> Symbol#to_proc
 *  - proc -> unwrap as-is */
547rb_vm_bh_to_procval(
const rb_execution_context_t *ec,
VALUE block_handler)
549 if (block_handler == VM_BLOCK_HANDLER_NONE) {
553 switch (vm_block_handler_type(block_handler)) {
554 case block_handler_type_iseq:
555 case block_handler_type_ifunc:
556 return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler),
rb_cProc);
557 case block_handler_type_symbol:
558 return rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
559 case block_handler_type_proc:
560 return VM_BH_TO_PROC(block_handler);
562 VM_UNREACHABLE(rb_vm_bh_to_procval);
/* Assertion helper: validate that `svar` is an imemo of an expected type
 * (the accepting cases are elided here); rb_bug on anything else. */
571vm_svar_valid_p(
VALUE svar)
574 switch (imemo_type(svar)) {
583 rb_bug(
"vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));
/* Fetch the special-variable storage (svar) for a local EP: read it from the
 * env's ME/CREF slot unless `lep` is the EC's root lep, in which case use
 * ec->root_svar. The result is asserted to be Qfalse or a valid svar. */
589lep_svar(
const rb_execution_context_t *ec,
const VALUE *lep)
593 if (lep && (ec == NULL || ec->root_lep != lep)) {
594 svar = lep[VM_ENV_DATA_INDEX_ME_CREF];
597 svar = ec->root_svar;
600 VM_ASSERT(svar ==
Qfalse || vm_svar_valid_p(svar));
/* Store an svar for a local EP: write into the env slot (with write-barrier
 * handling via vm_env_write) for ordinary frames, or into ec->root_svar
 * (with RB_OBJ_WRITE for the GC barrier) for the root lep. */
606lep_svar_write(
const rb_execution_context_t *ec,
const VALUE *lep,
const struct vm_svar *svar)
608 VM_ASSERT(vm_svar_valid_p((
VALUE)svar));
610 if (lep && (ec == NULL || ec->root_lep != lep)) {
611 vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (
VALUE)svar);
614 RB_OBJ_WRITE(rb_ec_thread_ptr(ec)->self, &ec->root_svar, svar);
/* Read a special variable by key: $_ (lastline), $~ (backref), or one of the
 * "extra" svars stored in the `others` array (indexed from
 * VM_SVAR_EXTRA_START). NOTE(review): the surrounding switch header and the
 * nil-svar guard are elided in this chunk. */
619lep_svar_get(
const rb_execution_context_t *ec,
const VALUE *lep, rb_num_t key)
621 const struct vm_svar *svar = lep_svar(ec, lep);
626 case VM_SVAR_LASTLINE:
627 return svar->lastline;
628 case VM_SVAR_BACKREF:
629 return svar->backref;
631 const VALUE ary = svar->others;
637 return rb_ary_entry(ary, key - VM_SVAR_EXTRA_START);
646 struct vm_svar *svar = IMEMO_NEW(
struct vm_svar, imemo_svar, obj);
/* Write a special variable by key, lazily allocating the svar storage on
 * first use (svar_new) and the `others` array for extra keys.
 * NOTE(review): the per-case write statements (RB_OBJ_WRITE into
 * lastline/backref) are elided here. */
655lep_svar_set(
const rb_execution_context_t *ec,
const VALUE *lep, rb_num_t key,
VALUE val)
657 struct vm_svar *svar = lep_svar(ec, lep);
660 lep_svar_write(ec, lep, svar = svar_new((
VALUE)svar));
664 case VM_SVAR_LASTLINE:
667 case VM_SVAR_BACKREF:
671 VALUE ary = svar->others;
676 rb_ary_store(ary, key - VM_SVAR_EXTRA_START, val);
682vm_getspecial(
const rb_execution_context_t *ec,
const VALUE *lep, rb_num_t key, rb_num_t
type)
687 val = lep_svar_get(ec, lep, key);
690 VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
707 rb_bug(
"unexpected back-ref");
718vm_backref_defined(
const rb_execution_context_t *ec,
const VALUE *lep, rb_num_t
type)
720 VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
730 return rb_reg_last_defined(backref);
732 rb_bug(
"unexpected back-ref");
736 nth = (int)(
type >> 1);
741PUREFUNC(
static rb_callable_method_entry_t *check_method_entry(
VALUE obj,
int can_be_svar));
742static rb_callable_method_entry_t *
743check_method_entry(
VALUE obj,
int can_be_svar)
745 if (obj ==
Qfalse)
return NULL;
748 if (!
RB_TYPE_P(obj,
T_IMEMO)) rb_bug(
"check_method_entry: unknown type: %s", rb_obj_info(obj));
751 switch (imemo_type(obj)) {
753 return (rb_callable_method_entry_t *)obj;
762 rb_bug(
"check_method_entry: svar should not be there:");
/* Find the callable method entry for a control frame by walking the env
 * chain outward until the local env; each env's ME/CREF slot is probed via
 * check_method_entry (svar allowed only at the local env, hence TRUE on the
 * final call). May return NULL for frames without a method entry. */
768const rb_callable_method_entry_t *
769rb_vm_frame_method_entry(
const rb_control_frame_t *cfp)
771 const VALUE *ep = cfp->ep;
772 rb_callable_method_entry_t *me;
774 while (!VM_ENV_LOCAL_P(ep)) {
775 if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL)
return me;
776 ep = VM_ENV_PREV_EP(ep);
779 return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
782static const rb_iseq_t *
783method_entry_iseqptr(
const rb_callable_method_entry_t *me)
785 switch (me->def->type) {
786 case VM_METHOD_TYPE_ISEQ:
787 return me->def->body.iseq.
iseqptr;
794method_entry_cref(
const rb_callable_method_entry_t *me)
796 switch (me->def->type) {
797 case VM_METHOD_TYPE_ISEQ:
798 return me->def->body.iseq.
cref;
804#if VM_CHECK_MODE == 0
805PUREFUNC(
static rb_cref_t *check_cref(
VALUE,
int));
808check_cref(
VALUE obj,
int can_be_svar)
810 if (obj ==
Qfalse)
return NULL;
813 if (!
RB_TYPE_P(obj,
T_IMEMO)) rb_bug(
"check_cref: unknown type: %s", rb_obj_info(obj));
816 switch (imemo_type(obj)) {
818 return method_entry_cref((rb_callable_method_entry_t *)obj);
820 return (rb_cref_t *)obj;
827 rb_bug(
"check_method_entry: svar should not be there:");
/* Resolve the cref (lexical constant/visibility scope) for an env pointer by
 * walking the env chain outward; mirrors rb_vm_frame_method_entry above but
 * via check_cref. May return NULL. */
833static inline rb_cref_t *
834vm_env_cref(
const VALUE *ep)
838 while (!VM_ENV_LOCAL_P(ep)) {
839 if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL)
return cref;
840 ep = VM_ENV_PREV_EP(ep);
843 return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
847is_cref(
const VALUE v,
int can_be_svar)
850 switch (imemo_type(v)) {
863vm_env_cref_by_cref(
const VALUE *ep)
865 while (!VM_ENV_LOCAL_P(ep)) {
866 if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE))
return TRUE;
867 ep = VM_ENV_PREV_EP(ep);
869 return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
873cref_replace_with_duplicated_cref_each_frame(
const VALUE *vptr,
int can_be_svar,
VALUE parent)
875 const VALUE v = *vptr;
876 rb_cref_t *cref, *new_cref;
879 switch (imemo_type(v)) {
881 cref = (rb_cref_t *)v;
882 new_cref = vm_cref_dup(cref);
887 VM_FORCE_WRITE(vptr, (
VALUE)new_cref);
889 return (rb_cref_t *)new_cref;
892 return cref_replace_with_duplicated_cref_each_frame(&((
struct vm_svar *)v)->
cref_or_me, FALSE, v);
896 rb_bug(
"cref_replace_with_duplicated_cref_each_frame: unreachable");
905vm_cref_replace_with_duplicated_cref(
const VALUE *ep)
907 if (vm_env_cref_by_cref(ep)) {
911 while (!VM_ENV_LOCAL_P(ep)) {
912 envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) :
Qfalse;
913 if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {
916 ep = VM_ENV_PREV_EP(ep);
918 envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) :
Qfalse;
919 return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);
922 rb_bug(
"vm_cref_dup: unreachable");
/* Like vm_env_cref but a missing cref is a VM bug rather than NULL.
 * NOTE(review): the non-NULL return branch is elided in this chunk. */
927vm_get_cref(
const VALUE *ep)
929 rb_cref_t *cref = vm_env_cref(ep);
935 rb_bug(
"vm_get_cref: unreachable");
/* Exported wrapper around vm_get_cref. */
940rb_vm_get_cref(
const VALUE *ep)
942 return vm_get_cref(ep);
/* Cref of the nearest Ruby-level frame of this execution context.
 * NOTE(review): the NULL-cfp guard between these lines is elided. */
946vm_ec_cref(
const rb_execution_context_t *ec)
948 const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);
953 return vm_get_cref(cfp->ep);
956static const rb_cref_t *
957vm_get_const_key_cref(
const VALUE *ep)
959 const rb_cref_t *cref = vm_get_cref(ep);
960 const rb_cref_t *key_cref = cref;
963 if (RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
964 RCLASS_EXT(CREF_CLASS(cref))->cloned) {
967 cref = CREF_NEXT(cref);
975rb_vm_rewrite_cref(rb_cref_t *cref,
VALUE old_klass,
VALUE new_klass, rb_cref_t **new_cref_ptr)
980 if (CREF_CLASS(cref) == old_klass) {
981 new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
982 *new_cref_ptr = new_cref;
985 new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
986 cref = CREF_NEXT(cref);
987 *new_cref_ptr = new_cref;
988 new_cref_ptr = &new_cref->next;
990 *new_cref_ptr = NULL;
994vm_cref_push(
const rb_execution_context_t *ec,
VALUE klass,
const VALUE *ep,
int pushed_by_eval,
int singleton)
996 rb_cref_t *prev_cref = NULL;
999 prev_cref = vm_env_cref(ep);
1002 rb_control_frame_t *cfp = vm_get_ruby_level_caller_cfp(ec, ec->cfp);
1005 prev_cref = vm_env_cref(cfp->ep);
1009 return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval, singleton);
/* The class/module that lexically encloses `ep` — the target for
 * def/class statements (cbase). */
1013vm_get_cbase(
const VALUE *ep)
1015 const rb_cref_t *cref = vm_get_cref(ep);
1017 return CREF_CLASS_FOR_DEFINITION(cref);
/* Like vm_get_cbase but skips cref entries pushed by eval, so constants
 * defined inside eval land in the surrounding lexical scope.
 * NOTE(review): the loop header enclosing these lines is elided. */
1021vm_get_const_base(
const VALUE *ep)
1023 const rb_cref_t *cref = vm_get_cref(ep);
1026 if (!CREF_PUSHED_BY_EVAL(cref)) {
1027 return CREF_CLASS_FOR_DEFINITION(cref);
1029 cref = CREF_NEXT(cref);
/* Raise TypeError unless `klass` can act as a constant namespace (the
 * accepting check is elided; only the failure path is visible here). */
1036vm_check_if_namespace(
VALUE klass)
1039 rb_raise(
rb_eTypeError,
"%+"PRIsVALUE
" is not a class/module", klass);
/* Warn when a definition would land in a refinement module instead of the
 * outer class/module (the refinement check itself is elided here). */
1044vm_ensure_not_refinement_module(
VALUE self)
1047 rb_warn(
"not defined at the refinement, but at the outer class/module");
1052vm_get_iclass(
const rb_control_frame_t *cfp,
VALUE klass)
1058vm_get_ev_const(rb_execution_context_t *ec,
VALUE orig_klass,
ID id,
bool allow_nil,
int is_defined)
1060 void rb_const_warn_if_deprecated(
const rb_const_entry_t *ce,
VALUE klass,
ID id);
1063 if (
NIL_P(orig_klass) && allow_nil) {
1065 const rb_cref_t *root_cref = vm_get_cref(ec->cfp->ep);
1066 const rb_cref_t *cref;
1069 while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
1070 root_cref = CREF_NEXT(root_cref);
1073 while (cref && CREF_NEXT(cref)) {
1074 if (CREF_PUSHED_BY_EVAL(cref)) {
1078 klass = CREF_CLASS(cref);
1080 cref = CREF_NEXT(cref);
1082 if (!
NIL_P(klass)) {
1084 rb_const_entry_t *ce;
1086 if ((ce = rb_const_lookup(klass,
id))) {
1087 rb_const_warn_if_deprecated(ce, klass,
id);
1090 if (am == klass)
break;
1092 if (is_defined)
return 1;
1093 if (rb_autoloading_value(klass,
id, &av, NULL))
return av;
1095 goto search_continue;
1102 if (UNLIKELY(!rb_ractor_main_p())) {
1104 rb_raise(rb_eRactorIsolationError,
1105 "can not access non-shareable objects in constant %"PRIsVALUE
"::%s by non-main ractor.",
rb_class_path(klass), rb_id2name(
id));
1116 if (root_cref && !
NIL_P(CREF_CLASS(root_cref))) {
1117 klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));
1131 vm_check_if_namespace(orig_klass);
1133 return rb_public_const_defined_from(orig_klass,
id);
1136 return rb_public_const_get_from(orig_klass,
id);
/* Exported wrapper: constant lookup with `allow_nil` passed as a Ruby
 * boolean (compared against Qtrue) and is_defined=0 (fetch, not defined?). */
1142rb_vm_get_ev_const(rb_execution_context_t *ec,
VALUE orig_klass,
ID id,
VALUE allow_nil)
1144 return vm_get_ev_const(ec, orig_klass,
id, allow_nil ==
Qtrue, 0);
1148vm_get_ev_const_chain(rb_execution_context_t *ec,
const ID *segments)
1152 int allow_nil = TRUE;
1153 if (segments[0] == idNULL) {
1158 while (segments[idx]) {
1159 ID id = segments[idx++];
1160 val = vm_get_ev_const(ec, val,
id, allow_nil, 0);
/* Find the class that owns class variables for the given cref: skip cref
 * entries that are nil-class, singleton classes, eval-pushed, or marked
 * singleton, stopping at the last usable entry. `top_level_raise` guards an
 * (elided) error when the search reaches top level. */
1168vm_get_cvar_base(
const rb_cref_t *cref,
const rb_control_frame_t *cfp,
int top_level_raise)
1173 rb_bug(
"vm_get_cvar_base: no cref");
1176 while (CREF_NEXT(cref) &&
1177 (
NIL_P(CREF_CLASS(cref)) || RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
1178 CREF_PUSHED_BY_EVAL(cref) || CREF_SINGLETON(cref))) {
1179 cref = CREF_NEXT(cref);
1181 if (top_level_raise && !CREF_NEXT(cref)) {
    /* map the cref's class through the frame in case it is an iclass */
1185 klass = vm_get_iclass(cfp, CREF_CLASS(cref));
1193ALWAYS_INLINE(
static void fill_ivar_cache(
const rb_iseq_t *iseq, IVC ic,
const struct rb_callcache *cc,
int is_attr, attr_index_t index, shape_id_t shape_id));
1195fill_ivar_cache(
const rb_iseq_t *iseq, IVC ic,
const struct rb_callcache *cc,
int is_attr, attr_index_t index, shape_id_t shape_id)
1198 vm_cc_attr_index_set(cc, index, shape_id);
1201 vm_ic_attr_index_set(iseq, ic, index, shape_id);
1205#define ractor_incidental_shareable_p(cond, val) \
1206 (!(cond) || rb_ractor_shareable_p(val))
1207#define ractor_object_incidental_shareable_p(obj, val) \
1208 ractor_incidental_shareable_p(rb_ractor_shareable_p(obj), val)
1210#define ATTR_INDEX_NOT_SET (attr_index_t)-1
1214vm_getivar(
VALUE obj,
ID id,
const rb_iseq_t *iseq, IVC ic,
const struct rb_callcache *cc,
int is_attr,
VALUE default_value)
1218 shape_id_t shape_id;
1222 return default_value;
1225#if SHAPE_IN_BASIC_FLAGS
1226 shape_id = RBASIC_SHAPE_ID(obj);
1234#if !SHAPE_IN_BASIC_FLAGS
1235 shape_id = ROBJECT_SHAPE_ID(obj);
1241 if (UNLIKELY(!rb_ractor_main_p())) {
1249 if (default_value ==
Qundef) {
1257 ivar_list = RCLASS_IVPTR(obj);
1259#if !SHAPE_IN_BASIC_FLAGS
1260 shape_id = RCLASS_SHAPE_ID(obj);
1268 rb_gen_ivtbl_get(obj,
id, &ivtbl);
1269#if !SHAPE_IN_BASIC_FLAGS
1270 shape_id = ivtbl->shape_id;
1272 ivar_list = ivtbl->as.shape.ivptr;
1275 return default_value;
1279 shape_id_t cached_id;
1283 vm_cc_atomic_shape_and_index(cc, &cached_id, &index);
1286 vm_ic_atomic_shape_and_index(ic, &cached_id, &index);
1289 if (LIKELY(cached_id == shape_id)) {
1290 RUBY_ASSERT(cached_id != OBJ_TOO_COMPLEX_SHAPE_ID);
1292 if (index == ATTR_INDEX_NOT_SET) {
1293 return default_value;
1296 val = ivar_list[index];
1297#if USE_DEBUG_COUNTER
1298 RB_DEBUG_COUNTER_INC(ivar_get_ic_hit);
1301 RB_DEBUG_COUNTER_INC(ivar_get_obj_hit);
1307#if USE_DEBUG_COUNTER
1309 if (cached_id != INVALID_SHAPE_ID) {
1310 RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_set);
1313 RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_unset);
1317 if (cached_id != INVALID_SHAPE_ID) {
1318 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_set);
1321 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_unset);
1324 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
1327 RB_DEBUG_COUNTER_INC(ivar_get_obj_miss);
1331 if (shape_id == OBJ_TOO_COMPLEX_SHAPE_ID) {
1332 st_table *table = NULL;
1336 table = (st_table *)RCLASS_IVPTR(obj);
1340 table = ROBJECT_IV_HASH(obj);
1345 if (rb_gen_ivtbl_get(obj, 0, &ivtbl)) {
1346 table = ivtbl->as.complex.table;
1352 if (!table || !st_lookup(table,
id, &val)) {
1353 val = default_value;
1357 shape_id_t previous_cached_id = cached_id;
1358 if (rb_shape_get_iv_index_with_hint(shape_id,
id, &index, &cached_id)) {
1361 if (cached_id != previous_cached_id) {
1362 fill_ivar_cache(iseq, ic, cc, is_attr, index, cached_id);
1365 if (index == ATTR_INDEX_NOT_SET) {
1366 val = default_value;
1370 val = ivar_list[index];
1376 vm_cc_attr_index_initialize(cc, shape_id);
1379 vm_ic_attr_index_initialize(ic, shape_id);
1382 val = default_value;
1388 if (!UNDEF_P(default_value)) {
1396 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
1399 return rb_attr_get(obj,
id);
1407populate_cache(attr_index_t index, shape_id_t next_shape_id,
ID id,
const rb_iseq_t *iseq, IVC ic,
const struct rb_callcache *cc,
bool is_attr)
1409 RUBY_ASSERT(next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);
1413 vm_cc_attr_index_set(cc, index, next_shape_id);
1416 vm_ic_attr_index_set(iseq, ic, index, next_shape_id);
1420ALWAYS_INLINE(
static VALUE vm_setivar_slowpath(
VALUE obj,
ID id,
VALUE val,
const rb_iseq_t *iseq, IVC ic,
const struct rb_callcache *cc,
int is_attr));
1421NOINLINE(
static VALUE vm_setivar_slowpath_ivar(
VALUE obj,
ID id,
VALUE val,
const rb_iseq_t *iseq, IVC ic));
1425vm_setivar_slowpath(
VALUE obj,
ID id,
VALUE val,
const rb_iseq_t *iseq, IVC ic,
const struct rb_callcache *cc,
int is_attr)
1428 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);
1431 rb_check_frozen(obj);
1433 attr_index_t index = rb_obj_ivar_set(obj,
id, val);
1435 shape_id_t next_shape_id = ROBJECT_SHAPE_ID(obj);
1437 if (next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID) {
1438 populate_cache(index, next_shape_id,
id, iseq, ic, cc, is_attr);
1441 RB_DEBUG_COUNTER_INC(ivar_set_obj_miss);
1449vm_setivar_slowpath_ivar(
VALUE obj,
ID id,
VALUE val,
const rb_iseq_t *iseq, IVC ic)
1451 return vm_setivar_slowpath(obj,
id, val, iseq, ic, NULL,
false);
1457 return vm_setivar_slowpath(obj,
id, val, NULL, NULL, cc,
true);
1460NOINLINE(
static VALUE vm_setivar_default(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index));
1462vm_setivar_default(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index)
1464#if SHAPE_IN_BASIC_FLAGS
1465 shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
1467 shape_id_t shape_id = rb_generic_shape_id(obj);
1473 if (shape_id == dest_shape_id) {
1474 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1476 else if (dest_shape_id != INVALID_SHAPE_ID) {
1477 rb_shape_t *shape = rb_shape_get_shape_by_id(shape_id);
1478 rb_shape_t *dest_shape = rb_shape_get_shape_by_id(dest_shape_id);
1480 if (shape_id == dest_shape->parent_id && dest_shape->edge_name ==
id && shape->capacity == dest_shape->capacity) {
1491 rb_gen_ivtbl_get(obj, 0, &ivtbl);
1493 if (shape_id != dest_shape_id) {
1494#if SHAPE_IN_BASIC_FLAGS
1495 RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
1497 ivtbl->shape_id = dest_shape_id;
1503 RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1509vm_setivar(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index)
1517 shape_id_t shape_id = ROBJECT_SHAPE_ID(obj);
1518 RUBY_ASSERT(dest_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);
1520 if (LIKELY(shape_id == dest_shape_id)) {
1521 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1524 else if (dest_shape_id != INVALID_SHAPE_ID) {
1525 rb_shape_t *shape = rb_shape_get_shape_by_id(shape_id);
1526 rb_shape_t *dest_shape = rb_shape_get_shape_by_id(dest_shape_id);
1527 shape_id_t source_shape_id = dest_shape->parent_id;
1529 if (shape_id == source_shape_id && dest_shape->edge_name ==
id && shape->capacity == dest_shape->capacity) {
1530 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1532 ROBJECT_SET_SHAPE_ID(obj, dest_shape_id);
1534 RUBY_ASSERT(rb_shape_get_next_iv_shape(rb_shape_get_shape_by_id(source_shape_id),
id) == dest_shape);
1550 RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1551 RB_DEBUG_COUNTER_INC(ivar_set_obj_hit);
1557 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
1567update_classvariable_cache(
const rb_iseq_t *iseq,
VALUE klass,
ID id,
const rb_cref_t * cref, ICVARC ic)
1569 VALUE defined_class = 0;
1573 defined_class =
RBASIC(defined_class)->klass;
1576 struct rb_id_table *rb_cvc_tbl = RCLASS_CVC_TBL(defined_class);
1578 rb_bug(
"the cvc table should be set");
1582 if (!rb_id_table_lookup(rb_cvc_tbl,
id, &ent_data)) {
1583 rb_bug(
"should have cvar cache entry");
1588 ent->global_cvar_state = GET_GLOBAL_CVAR_STATE();
/* Read a class variable using the inline cache `ic`: the cache hits when its
 * recorded cref and global cvar state match the current ones AND we are on
 * the main ractor; otherwise resolve the cvar base class and refill the
 * cache via update_classvariable_cache. */
1601vm_getclassvariable(
const rb_iseq_t *iseq,
const rb_control_frame_t *reg_cfp,
ID id, ICVARC ic)
1603 const rb_cref_t *cref;
1604 cref = vm_get_cref(GET_EP());
1606 if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
1607 RB_DEBUG_COUNTER_INC(cvar_read_inline_hit);
    /* fast path: the cvar lives as an ivar on the cached class */
1609 VALUE v = rb_ivar_lookup(ic->entry->class_value,
id,
Qundef);
1615 VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
1617 return update_classvariable_cache(iseq, klass,
id, cref, ic);
/* Exported wrapper around vm_getclassvariable (used e.g. by JIT code). */
1621rb_vm_getclassvariable(
const rb_iseq_t *iseq,
const rb_control_frame_t *cfp,
ID id, ICVARC ic)
1623 return vm_getclassvariable(iseq, cfp,
id, ic);
/* Write a class variable; same inline-cache discipline as
 * vm_getclassvariable above (cref + global cvar state + main ractor). On a
 * miss, resolve the cvar base and refresh the cache. */
1627vm_setclassvariable(
const rb_iseq_t *iseq,
const rb_control_frame_t *reg_cfp,
ID id,
VALUE val, ICVARC ic)
1629 const rb_cref_t *cref;
1630 cref = vm_get_cref(GET_EP());
1632 if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
1633 RB_DEBUG_COUNTER_INC(cvar_write_inline_hit);
1635 rb_class_ivar_set(ic->entry->class_value,
id, val);
1639 VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
1643 update_classvariable_cache(iseq, klass,
id, cref, ic);
/* Exported wrapper around vm_setclassvariable. */
1647rb_vm_setclassvariable(
const rb_iseq_t *iseq,
const rb_control_frame_t *cfp,
ID id,
VALUE val, ICVARC ic)
1649 vm_setclassvariable(iseq, cfp,
id, val, ic);
/* Instance-variable read for the interpreter: delegate to vm_getivar with
 * the iseq inline cache (ic), no call cache, is_attr=FALSE, and Qnil as the
 * value returned for an unset ivar. */
1653vm_getinstancevariable(
const rb_iseq_t *iseq,
VALUE obj,
ID id, IVC ic)
1655 return vm_getivar(obj,
id, iseq, ic, NULL, FALSE,
Qnil);
/* Instance-variable write: try the shape-cache fast path (vm_setivar); when
 * it reports Qundef (cache miss / non-T_OBJECT), fall back to the generic
 * default-storage path and finally the allocating slow path which also
 * refills the cache. */
1659vm_setinstancevariable(
const rb_iseq_t *iseq,
VALUE obj,
ID id,
VALUE val, IVC ic)
1666 shape_id_t dest_shape_id;
    /* read cached (shape, index) atomically — may race with other ractors */
1668 vm_ic_atomic_shape_and_index(ic, &dest_shape_id, &index);
1670 if (UNLIKELY(UNDEF_P(vm_setivar(obj,
id, val, dest_shape_id, index)))) {
1677 if (!UNDEF_P(vm_setivar_default(obj,
id, val, dest_shape_id, index))) {
1681 vm_setivar_slowpath_ivar(obj,
id, val, iseq, ic);
/* Exported wrapper around vm_setinstancevariable. */
1686rb_vm_setinstancevariable(
const rb_iseq_t *iseq,
VALUE obj,
ID id,
VALUE val, IVC ic)
1688 vm_setinstancevariable(iseq, obj,
id, val, ic);
/* Continue propagating a non-local transfer: classify `err` and set the
 * current tag's state accordingly (fatal, throw, embedded throw-data state,
 * or a plain raise). NOTE(review): the conditions selecting the first two
 * branches are elided in this chunk. */
1692vm_throw_continue(
const rb_execution_context_t *ec,
VALUE err)
1697 ec->tag->state = RUBY_TAG_FATAL;
1700 ec->tag->state = TAG_THROW;
1702 else if (THROW_DATA_P(err)) {
1703 ec->tag->state = THROW_DATA_STATE((
struct vm_throw_data *)err);
1706 ec->tag->state = TAG_RAISE;
1712vm_throw_start(
const rb_execution_context_t *ec, rb_control_frame_t *
const reg_cfp,
enum ruby_tag_type state,
1713 const int flag,
const VALUE throwobj)
1715 const rb_control_frame_t *escape_cfp = NULL;
1716 const rb_control_frame_t *
const eocfp = RUBY_VM_END_CONTROL_FRAME(ec);
1721 else if (state == TAG_BREAK) {
1723 const VALUE *ep = GET_EP();
1724 const rb_iseq_t *base_iseq = GET_ISEQ();
1725 escape_cfp = reg_cfp;
1727 while (ISEQ_BODY(base_iseq)->
type != ISEQ_TYPE_BLOCK) {
1728 if (ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
1729 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1730 ep = escape_cfp->ep;
1731 base_iseq = escape_cfp->iseq;
1734 ep = VM_ENV_PREV_EP(ep);
1735 base_iseq = ISEQ_BODY(base_iseq)->parent_iseq;
1736 escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);
1737 VM_ASSERT(escape_cfp->iseq == base_iseq);
1741 if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1747 ep = VM_ENV_PREV_EP(ep);
1749 while (escape_cfp < eocfp) {
1750 if (escape_cfp->ep == ep) {
1751 const rb_iseq_t *
const iseq = escape_cfp->iseq;
1752 const VALUE epc = escape_cfp->pc - ISEQ_BODY(iseq)->iseq_encoded;
1757 for (i=0; i < ct->size; i++) {
1759 UNALIGNED_MEMBER_PTR(ct, entries[i]);
1761 if (entry->type == CATCH_TYPE_BREAK &&
1762 entry->iseq == base_iseq &&
1763 entry->start < epc && entry->end >= epc) {
1764 if (entry->cont == epc) {
1773 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1778 rb_vm_localjump_error(
"break from proc-closure", throwobj, TAG_BREAK);
1781 else if (state == TAG_RETRY) {
1782 const VALUE *ep = VM_ENV_PREV_EP(GET_EP());
1784 escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);
1786 else if (state == TAG_RETURN) {
1787 const VALUE *current_ep = GET_EP();
1788 const VALUE *target_ep = NULL, *target_lep, *ep = current_ep;
1789 int in_class_frame = 0;
1791 escape_cfp = reg_cfp;
1794 while (!VM_ENV_LOCAL_P(ep)) {
1795 if (VM_ENV_FLAGS(ep, VM_FRAME_FLAG_LAMBDA) && target_ep == NULL) {
1798 ep = VM_ENV_PREV_EP(ep);
1802 while (escape_cfp < eocfp) {
1803 const VALUE *lep = VM_CF_LEP(escape_cfp);
1809 if (lep == target_lep &&
1810 VM_FRAME_RUBYFRAME_P(escape_cfp) &&
1811 ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
1816 if (lep == target_lep) {
1817 if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1819 if (in_class_frame) {
1824 const VALUE *tep = current_ep;
1826 while (target_lep != tep) {
1827 if (escape_cfp->ep == tep) {
1829 if (tep == target_ep) {
1833 goto unexpected_return;
1836 tep = VM_ENV_PREV_EP(tep);
1840 else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
1841 switch (ISEQ_BODY(escape_cfp->iseq)->type) {
1843 case ISEQ_TYPE_MAIN:
1845 if (in_class_frame)
goto unexpected_return;
1846 if (target_ep == NULL) {
1850 goto unexpected_return;
1854 case ISEQ_TYPE_EVAL: {
1855 const rb_iseq_t *is = escape_cfp->iseq;
1856 enum rb_iseq_type t = ISEQ_BODY(is)->type;
1857 while (t == ISEQ_TYPE_RESCUE || t == ISEQ_TYPE_ENSURE || t == ISEQ_TYPE_EVAL) {
1858 if (!(is = ISEQ_BODY(is)->parent_iseq))
break;
1859 t = ISEQ_BODY(is)->type;
1861 toplevel = t == ISEQ_TYPE_TOP || t == ISEQ_TYPE_MAIN;
1864 case ISEQ_TYPE_CLASS:
1873 if (escape_cfp->ep == target_lep && ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_METHOD) {
1874 if (target_ep == NULL) {
1878 goto unexpected_return;
1882 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1885 rb_vm_localjump_error(
"unexpected return", throwobj, TAG_RETURN);
1891 rb_bug(
"isns(throw): unsupported throw type");
1894 ec->tag->state = state;
1895 return (
VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);
/* Entry point for the `throw` instruction: the operand packs a tag-state in
 * the low bits and a no-escape flag. A non-zero state starts a new throw
 * (vm_throw_start); zero continues an in-flight one (vm_throw_continue). */
1899vm_throw(
const rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
1900 rb_num_t throw_state,
VALUE throwobj)
1902 const int state = (int)(throw_state & VM_THROW_STATE_MASK);
1903 const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);
1906 return vm_throw_start(ec, reg_cfp, state, flag, throwobj);
1909 return vm_throw_continue(ec, throwobj);
1914rb_vm_throw(
const rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t throw_state,
VALUE throwobj)
1916 return vm_throw(ec, reg_cfp, throw_state, throwobj);
1922 int is_splat = flag & 0x01;
1925 const VALUE obj = ary;
1937 if (num + is_splat == 0) {
1940 else if (flag & 0x02) {
1945 for (i = 0; i < num -
len; i++) {
1950 for (j = 0; i < num; i++, j++) {
1963 *cfp->sp++ = rb_ary_new();
1972 for (; i < num -
len; i++) {
1976 for (rb_num_t j = 0; i < num; i++, j++) {
1977 *cfp->sp++ = ptr[
len - j - 1];
1981 for (rb_num_t j = 0; j < num; j++) {
1982 *cfp->sp++ = ptr[num - j - 1];
1990static VALUE vm_call_general(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling);
1995vm_ccs_create(
VALUE klass,
struct rb_id_table *cc_tbl,
ID mid,
const rb_callable_method_entry_t *cme)
1998#if VM_CHECK_MODE > 0
1999 ccs->debug_sig = ~(
VALUE)ccs;
2004 METHOD_ENTRY_CACHED_SET((rb_callable_method_entry_t *)cme);
2005 ccs->entries = NULL;
2007 rb_id_table_insert(cc_tbl, mid, (
VALUE)ccs);
2015 if (! vm_cc_markable(cc)) {
2019 if (UNLIKELY(ccs->len == ccs->capa)) {
2020 if (ccs->capa == 0) {
2022 ccs->entries =
ALLOC_N(
struct rb_class_cc_entries_entry, ccs->capa);
2026 REALLOC_N(ccs->entries,
struct rb_class_cc_entries_entry, ccs->capa);
2029 VM_ASSERT(ccs->len < ccs->capa);
2031 const int pos = ccs->len++;
2032 ccs->entries[pos].argc = vm_ci_argc(ci);
2033 ccs->entries[pos].flag = vm_ci_flag(ci);
2036 if (RB_DEBUG_COUNTER_SETMAX(ccs_maxlen, ccs->len)) {
2042#if VM_CHECK_MODE > 0
2046 ruby_debug_printf(
"ccs:%p (%d,%d)\n", (
void *)ccs, ccs->len, ccs->capa);
2047 for (
int i=0; i<ccs->len; i++) {
2048 ruby_debug_printf(
"CCS CI ID:flag:%x argc:%u\n",
2049 ccs->entries[i].flag,
2050 ccs->entries[i].argc);
2051 rp(ccs->entries[i].cc);
2058 VM_ASSERT(vm_ccs_p(ccs));
2059 VM_ASSERT(ccs->len <= ccs->capa);
2061 for (
int i=0; i<ccs->len; i++) {
2064 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
2065 VM_ASSERT(vm_cc_class_check(cc, klass));
2066 VM_ASSERT(vm_cc_check_cme(cc, ccs->cme));
2067 VM_ASSERT(!vm_cc_super_p(cc));
2068 VM_ASSERT(!vm_cc_refinement_p(cc));
2074const rb_callable_method_entry_t *rb_check_overloaded_cme(
const rb_callable_method_entry_t *cme,
const struct rb_callinfo *
const ci);
2079 const ID mid = vm_ci_mid(ci);
2080 struct rb_id_table *cc_tbl = RCLASS_CC_TBL(klass);
2087 if (rb_id_table_lookup(cc_tbl, mid, &ccs_data)) {
2089 const int ccs_len = ccs->len;
2091 if (UNLIKELY(METHOD_ENTRY_INVALIDATED(ccs->cme))) {
2092 rb_vm_ccs_free(ccs);
2093 rb_id_table_delete(cc_tbl, mid);
2097 VM_ASSERT(vm_ccs_verify(ccs, mid, klass));
2102 unsigned int argc = vm_ci_argc(ci);
2103 unsigned int flag = vm_ci_flag(ci);
2105 for (
int i=0; i<ccs_len; i++) {
2106 unsigned int ccs_ci_argc = ccs->entries[i].argc;
2107 unsigned int ccs_ci_flag = ccs->entries[i].flag;
2108 const struct rb_callcache *ccs_cc = ccs->entries[i].cc;
2110 VM_ASSERT(IMEMO_TYPE_P(ccs_cc, imemo_callcache));
2112 if (ccs_ci_argc == argc && ccs_ci_flag == flag) {
2113 RB_DEBUG_COUNTER_INC(cc_found_in_ccs);
2115 VM_ASSERT(vm_cc_cme(ccs_cc)->called_id == mid);
2116 VM_ASSERT(ccs_cc->klass == klass);
2117 VM_ASSERT(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(ccs_cc)));
2126 cc_tbl = RCLASS_CC_TBL(klass) = rb_id_table_create(2);
2129 RB_DEBUG_COUNTER_INC(cc_not_found_in_ccs);
2131 const rb_callable_method_entry_t *cme;
2135 cme = UNDEFINED_METHOD_ENTRY_P(cme) ? NULL : cme;
2137 VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
2140 cme = rb_callable_method_entry(klass, mid);
2143 VM_ASSERT(cme == NULL || IMEMO_TYPE_P(cme, imemo_ment));
2147 VM_ASSERT(vm_cc_cme(&vm_empty_cc) == NULL);
2148 return &vm_empty_cc;
2151 VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
2156 VM_ASSERT(cc_tbl != NULL);
2158 if (LIKELY(rb_id_table_lookup(cc_tbl, mid, &ccs_data))) {
2164 ccs = vm_ccs_create(klass, cc_tbl, mid, cme);
2168 cme = rb_check_overloaded_cme(cme, ci);
2170 const struct rb_callcache *cc = vm_cc_new(klass, cme, vm_call_general, cc_type_normal);
2171 vm_ccs_push(klass, ccs, ci, cc);
2173 VM_ASSERT(vm_cc_cme(cc) != NULL);
2174 VM_ASSERT(cme->called_id == mid);
2175 VM_ASSERT(vm_cc_cme(cc)->called_id == mid);
2189 cc = vm_search_cc(klass, ci);
2192 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
2193 VM_ASSERT(cc == vm_cc_empty() || cc->klass == klass);
2194 VM_ASSERT(cc == vm_cc_empty() || callable_method_entry_p(vm_cc_cme(cc)));
2195 VM_ASSERT(cc == vm_cc_empty() || !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)));
2196 VM_ASSERT(cc == vm_cc_empty() || vm_cc_cme(cc)->called_id == vm_ci_mid(ci));
2206#if USE_DEBUG_COUNTER
2210 const struct rb_callcache *cc = rb_vm_search_method_slowpath(cd->ci, klass);
2212#if OPT_INLINE_METHOD_CACHE
2216 if (cd_owner && cc != empty_cc) {
2220#if USE_DEBUG_COUNTER
2221 if (!old_cc || old_cc == empty_cc) {
2223 RB_DEBUG_COUNTER_INC(mc_inline_miss_empty);
2225 else if (old_cc == cc) {
2226 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cc);
2228 else if (vm_cc_cme(old_cc) == vm_cc_cme(cc)) {
2229 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cme);
2231 else if (vm_cc_cme(old_cc) && vm_cc_cme(cc) &&
2232 vm_cc_cme(old_cc)->def == vm_cc_cme(cc)->def) {
2233 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_def);
2236 RB_DEBUG_COUNTER_INC(mc_inline_miss_diff);
2241 VM_ASSERT(vm_cc_cme(cc) == NULL ||
2242 vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
2253#if OPT_INLINE_METHOD_CACHE
2254 if (LIKELY(vm_cc_class_check(cc, klass))) {
2255 if (LIKELY(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)))) {
2256 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
2257 RB_DEBUG_COUNTER_INC(mc_inline_hit);
2258 VM_ASSERT(vm_cc_cme(cc) == NULL ||
2259 (vm_ci_flag(cd->ci) & VM_CALL_SUPER) ||
2260 vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
2264 RB_DEBUG_COUNTER_INC(mc_inline_miss_invalidated);
2267 RB_DEBUG_COUNTER_INC(mc_inline_miss_klass);
2271 return vm_search_method_slowpath0(cd_owner, cd, klass);
2278 VM_ASSERT(klass !=
Qfalse);
2281 return vm_search_method_fastpath(cd_owner, cd, klass);
2284#if __has_attribute(transparent_union)
2297 VALUE (*f10)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2298 VALUE (*f11)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2299 VALUE (*f12)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2300 VALUE (*f13)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2301 VALUE (*f14)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2302 VALUE (*f15)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2305# define make_cfunc_type(f) (cfunc_type){.anyargs = (VALUE (*)(ANYARGS))(f)}
2308# define make_cfunc_type(f) (cfunc_type)(f)
2312check_cfunc(
const rb_callable_method_entry_t *me, cfunc_type func)
2318 VM_ASSERT(IMEMO_TYPE_P(me, imemo_ment));
2319 VM_ASSERT(callable_method_entry_p(me));
2321 if (me->def->type != VM_METHOD_TYPE_CFUNC) {
2325#if __has_attribute(transparent_union)
2326 return me->def->body.cfunc.func == func.anyargs;
2328 return me->def->body.cfunc.func == func;
2335check_method_basic_definition(
const rb_callable_method_entry_t *me)
2337 return me && METHOD_ENTRY_BASIC(me);
2341vm_method_cfunc_is(
const rb_iseq_t *iseq, CALL_DATA cd,
VALUE recv, cfunc_type func)
2343 VM_ASSERT(iseq != NULL);
2345 return check_cfunc(vm_cc_cme(cc), func);
2348#define check_cfunc(me, func) check_cfunc(me, make_cfunc_type(func))
2349#define vm_method_cfunc_is(iseq, cd, recv, func) vm_method_cfunc_is(iseq, cd, recv, make_cfunc_type(func))
2351#define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)
2383opt_equality_specialized(
VALUE recv,
VALUE obj)
2385 if (FIXNUM_2_P(recv, obj) && EQ_UNREDEFINED_P(INTEGER)) {
2386 goto compare_by_identity;
2388 else if (FLONUM_2_P(recv, obj) && EQ_UNREDEFINED_P(FLOAT)) {
2389 goto compare_by_identity;
2392 goto compare_by_identity;
2401#if MSC_VERSION_BEFORE(1300)
2405 else if (isnan(b)) {
2410 return RBOOL(a == b);
2417 return rb_str_eql_internal(obj, recv);
2422 compare_by_identity:
2423 return RBOOL(recv == obj);
2427opt_equality(
const rb_iseq_t *cd_owner,
VALUE recv,
VALUE obj, CALL_DATA cd)
2429 VM_ASSERT(cd_owner != NULL);
2431 VALUE val = opt_equality_specialized(recv, obj);
2432 if (!UNDEF_P(val))
return val;
2434 if (!vm_method_cfunc_is(cd_owner, cd, recv, rb_obj_equal)) {
2438 return RBOOL(recv == obj);
2442#undef EQ_UNREDEFINED_P
2445NOINLINE(
static VALUE opt_equality_by_mid_slowpath(
VALUE recv,
VALUE obj,
ID mid));
2448opt_equality_by_mid_slowpath(
VALUE recv,
VALUE obj,
ID mid)
2450 const struct rb_callcache *cc = gccct_method_search(GET_EC(), recv, mid, &VM_CI_ON_STACK(mid, 0, 1, NULL));
2452 if (cc && check_cfunc(vm_cc_cme(cc), rb_obj_equal)) {
2453 return RBOOL(recv == obj);
2463 VALUE val = opt_equality_specialized(recv, obj);
2464 if (!UNDEF_P(val)) {
2468 return opt_equality_by_mid_slowpath(recv, obj, mid);
2475 return opt_equality_by_mid(obj1, obj2, idEq);
2481 return opt_equality_by_mid(obj1, obj2, idEqlP);
2484extern VALUE rb_vm_call0(rb_execution_context_t *ec,
VALUE,
ID,
int,
const VALUE*,
const rb_callable_method_entry_t *,
int kw_splat);
2485extern VALUE rb_vm_call_with_refinements(rb_execution_context_t *,
VALUE,
ID,
int,
const VALUE *,
int);
2488check_match(rb_execution_context_t *ec,
VALUE pattern,
VALUE target,
enum vm_check_match_type
type)
2491 case VM_CHECKMATCH_TYPE_WHEN:
2493 case VM_CHECKMATCH_TYPE_RESCUE:
2495 rb_raise(
rb_eTypeError,
"class or module required for rescue clause");
2498 case VM_CHECKMATCH_TYPE_CASE: {
2499 return rb_vm_call_with_refinements(ec, pattern, idEqq, 1, &target,
RB_NO_KEYWORDS);
2502 rb_bug(
"check_match: unreachable");
2507#if MSC_VERSION_BEFORE(1300)
2508#define CHECK_CMP_NAN(a, b) if (isnan(a) || isnan(b)) return Qfalse;
2510#define CHECK_CMP_NAN(a, b)
2514double_cmp_lt(
double a,
double b)
2516 CHECK_CMP_NAN(a, b);
2517 return RBOOL(a < b);
2521double_cmp_le(
double a,
double b)
2523 CHECK_CMP_NAN(a, b);
2524 return RBOOL(a <= b);
2528double_cmp_gt(
double a,
double b)
2530 CHECK_CMP_NAN(a, b);
2531 return RBOOL(a > b);
2535double_cmp_ge(
double a,
double b)
2537 CHECK_CMP_NAN(a, b);
2538 return RBOOL(a >= b);
2542static inline VALUE *
2543vm_base_ptr(
const rb_control_frame_t *cfp)
2545 const rb_control_frame_t *prev_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
2547 if (cfp->iseq && VM_FRAME_RUBYFRAME_P(cfp)) {
2548 VALUE *bp = prev_cfp->sp + ISEQ_BODY(cfp->iseq)->local_table_size + VM_ENV_DATA_SIZE;
2550 if (ISEQ_BODY(cfp->iseq)->param.flags.forwardable && VM_ENV_LOCAL_P(cfp->ep)) {
2551 int lts = ISEQ_BODY(cfp->iseq)->local_table_size;
2552 int params = ISEQ_BODY(cfp->iseq)->param.size;
2554 CALL_INFO ci = (CALL_INFO)cfp->ep[-(VM_ENV_DATA_SIZE + (lts - params))];
2555 bp += vm_ci_argc(ci);
2558 if (ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_METHOD || VM_FRAME_BMETHOD_P(cfp)) {
2562#if VM_DEBUG_BP_CHECK
2563 if (bp != cfp->bp_check) {
2564 ruby_debug_printf(
"bp_check: %ld, bp: %ld\n",
2565 (
long)(cfp->bp_check - GET_EC()->vm_stack),
2566 (
long)(bp - GET_EC()->vm_stack));
2567 rb_bug(
"vm_base_ptr: unreachable");
2578rb_vm_base_ptr(
const rb_control_frame_t *cfp)
2580 return vm_base_ptr(cfp);
2587static inline VALUE vm_call_iseq_setup_2(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling,
int opt_pc,
int param_size,
int local_size);
2588ALWAYS_INLINE(
static VALUE vm_call_iseq_setup_normal(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling,
const rb_callable_method_entry_t *me,
int opt_pc,
int param_size,
int local_size));
2589static inline VALUE vm_call_iseq_setup_tailcall(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling,
int opt_pc);
2590static VALUE vm_call_super_method(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling);
2591static VALUE vm_call_method_nome(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling);
2592static VALUE vm_call_method_each_type(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling);
2593static inline VALUE vm_call_method(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling);
2595static vm_call_handler vm_call_iseq_setup_func(
const struct rb_callinfo *ci,
const int param_size,
const int local_size);
2598vm_call_iseq_setup_tailcall_0start(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling)
2600 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_tailcall_0start);
2602 return vm_call_iseq_setup_tailcall(ec, cfp, calling, 0);
2606vm_call_iseq_setup_normal_0start(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling)
2608 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_0start);
2611 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2612 int param = ISEQ_BODY(iseq)->param.size;
2613 int local = ISEQ_BODY(iseq)->local_table_size;
2614 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2618rb_simple_iseq_p(
const rb_iseq_t *iseq)
2620 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2621 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2622 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2623 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2624 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2625 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2626 ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
2627 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2631rb_iseq_only_optparam_p(
const rb_iseq_t *iseq)
2633 return ISEQ_BODY(iseq)->param.flags.has_opt == TRUE &&
2634 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2635 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2636 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2637 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2638 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2639 ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
2640 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2644rb_iseq_only_kwparam_p(
const rb_iseq_t *iseq)
2646 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2647 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2648 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2649 ISEQ_BODY(iseq)->param.flags.has_kw == TRUE &&
2650 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2651 ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
2652 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2655#define ALLOW_HEAP_ARGV (-2)
2656#define ALLOW_HEAP_ARGV_KEEP_KWSPLAT (-3)
2659vm_caller_setup_arg_splat(rb_control_frame_t *cfp,
struct rb_calling_info *calling,
VALUE ary,
int max_args)
2661 vm_check_canary(GET_EC(), cfp->sp);
2667 int argc = calling->argc;
2669 if (UNLIKELY(max_args <= ALLOW_HEAP_ARGV && len + argc > VM_ARGC_STACK_MAX)) {
2673 VALUE *argv = cfp->sp - argc;
2674 VALUE argv_ary = rb_ary_hidden_new(
len + argc + 1);
2675 rb_ary_cat(argv_ary, argv, argc);
2676 rb_ary_cat(argv_ary, ptr,
len);
2677 cfp->sp -= argc - 1;
2678 cfp->sp[-1] = argv_ary;
2680 calling->heap_argv = argv_ary;
2686 if (max_args >= 0 &&
len + argc > max_args) {
2694 calling->argc +=
len - (max_args - argc + 1);
2695 len = max_args - argc + 1;
2704 calling->heap_argv = 0;
2706 CHECK_VM_STACK_OVERFLOW(cfp,
len);
2708 for (i = 0; i <
len; i++) {
2709 *cfp->sp++ = ptr[i];
2721 const VALUE *
const passed_keywords = vm_ci_kwarg(ci)->keywords;
2722 const int kw_len = vm_ci_kwarg(ci)->keyword_len;
2723 const VALUE h = rb_hash_new_with_size(kw_len);
2724 VALUE *sp = cfp->sp;
2727 for (i=0; i<kw_len; i++) {
2728 rb_hash_aset(h, passed_keywords[i], (sp - kw_len)[i]);
2732 cfp->sp -= kw_len - 1;
2733 calling->argc -= kw_len - 1;
2734 calling->kw_splat = 1;
2738vm_caller_setup_keyword_hash(
const struct rb_callinfo *ci,
VALUE keyword_hash)
2741 if (keyword_hash !=
Qnil) {
2743 keyword_hash = rb_hash_dup(rb_to_hash_type(keyword_hash));
2746 else if (!IS_ARGS_KW_SPLAT_MUT(ci) && !
RHASH_EMPTY_P(keyword_hash)) {
2752 keyword_hash = rb_hash_dup(keyword_hash);
2754 return keyword_hash;
2760 const struct rb_callinfo *restrict ci,
int max_args)
2762 if (UNLIKELY(IS_ARGS_SPLAT(ci))) {
2763 if (IS_ARGS_KW_SPLAT(ci)) {
2765 VM_ASSERT(calling->kw_splat == 1);
2769 VALUE ary = cfp->sp[0];
2770 VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[1]);
2773 if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args))
return;
2777 if (UNLIKELY(calling->heap_argv)) {
2778 rb_ary_push(calling->heap_argv, kwh);
2779 ((
struct RHash *)kwh)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
2780 if (max_args != ALLOW_HEAP_ARGV_KEEP_KWSPLAT) {
2781 calling->kw_splat = 0;
2789 VM_ASSERT(calling->kw_splat == 1);
2793 calling->kw_splat = 0;
2798 VM_ASSERT(calling->kw_splat == 0);
2802 VALUE ary = cfp->sp[0];
2804 if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args)) {
2809 VALUE last_hash, argv_ary;
2810 if (UNLIKELY(argv_ary = calling->heap_argv)) {
2811 if (!IS_ARGS_KEYWORD(ci) &&
2814 (((
struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
2816 rb_ary_pop(argv_ary);
2818 rb_ary_push(argv_ary, rb_hash_dup(last_hash));
2819 calling->kw_splat = 1;
2825 if (!IS_ARGS_KEYWORD(ci) &&
2826 calling->argc > 0 &&
2828 (((
struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
2835 cfp->sp[-1] = rb_hash_dup(last_hash);
2836 calling->kw_splat = 1;
2842 else if (UNLIKELY(IS_ARGS_KW_SPLAT(ci))) {
2844 VM_ASSERT(calling->kw_splat == 1);
2845 VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[-1]);
2850 calling->kw_splat = 0;
2856 else if (UNLIKELY(IS_ARGS_KEYWORD(ci))) {
2858 VM_ASSERT(calling->kw_splat == 0);
2864 vm_caller_setup_arg_kw(cfp, calling, ci);
2868#define USE_OPT_HIST 0
2871#define OPT_HIST_MAX 64
2872static int opt_hist[OPT_HIST_MAX+1];
2876opt_hist_show_results_at_exit(
void)
2878 for (
int i=0; i<OPT_HIST_MAX; i++) {
2879 ruby_debug_printf(
"opt_hist\t%d\t%d\n", i, opt_hist[i]);
2885vm_call_iseq_setup_normal_opt_start(rb_execution_context_t *ec, rb_control_frame_t *cfp,
2889 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2890 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2891 const int opt = calling->argc - lead_num;
2892 const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
2893 const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
2894 const int param = ISEQ_BODY(iseq)->param.size;
2895 const int local = ISEQ_BODY(iseq)->local_table_size;
2896 const int delta = opt_num - opt;
2898 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
2901 if (opt_pc < OPT_HIST_MAX) {
2905 opt_hist[OPT_HIST_MAX]++;
2909 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param - delta, local);
2913vm_call_iseq_setup_tailcall_opt_start(rb_execution_context_t *ec, rb_control_frame_t *cfp,
2917 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2918 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2919 const int opt = calling->argc - lead_num;
2920 const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
2922 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
2925 if (opt_pc < OPT_HIST_MAX) {
2929 opt_hist[OPT_HIST_MAX]++;
2933 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
2937args_setup_kw_parameters(rb_execution_context_t *
const ec,
const rb_iseq_t *
const iseq,
2938 VALUE *
const passed_values,
const int passed_keyword_len,
const VALUE *
const passed_keywords,
2939 VALUE *
const locals);
2942vm_call_iseq_forwardable(rb_execution_context_t *ec, rb_control_frame_t *cfp,
2946 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2947 int param_size = ISEQ_BODY(iseq)->param.size;
2948 int local_size = ISEQ_BODY(iseq)->local_table_size;
2951 VM_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);
2953 local_size = local_size + vm_ci_argc(calling->cd->ci);
2954 param_size = param_size + vm_ci_argc(calling->cd->ci);
2956 cfp->sp[0] = (
VALUE)calling->cd->ci;
2958 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param_size, local_size);
2962vm_call_iseq_setup_kwparm_kwarg(rb_execution_context_t *ec, rb_control_frame_t *cfp,
2968 VM_ASSERT(vm_ci_flag(ci) & VM_CALL_KWARG);
2969 RB_DEBUG_COUNTER_INC(ccf_iseq_kw1);
2971 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2972 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
2974 const int ci_kw_len = kw_arg->keyword_len;
2975 const VALUE *
const ci_keywords = kw_arg->keywords;
2976 VALUE *argv = cfp->sp - calling->argc;
2977 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
2978 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2980 MEMCPY(ci_kws, argv + lead_num,
VALUE, ci_kw_len);
2981 args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);
2983 int param = ISEQ_BODY(iseq)->param.size;
2984 int local = ISEQ_BODY(iseq)->local_table_size;
2985 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2989vm_call_iseq_setup_kwparm_nokwarg(rb_execution_context_t *ec, rb_control_frame_t *cfp,
2992 const struct rb_callinfo *MAYBE_UNUSED(ci) = calling->cd->ci;
2995 VM_ASSERT((vm_ci_flag(ci) & VM_CALL_KWARG) == 0);
2996 RB_DEBUG_COUNTER_INC(ccf_iseq_kw2);
2998 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2999 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
3000 VALUE *
const argv = cfp->sp - calling->argc;
3001 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
3004 for (i=0; i<kw_param->num; i++) {
3005 klocals[i] = kw_param->default_values[i];
3012 int param = ISEQ_BODY(iseq)->param.size;
3013 int local = ISEQ_BODY(iseq)->local_table_size;
3014 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
3017static VALUE builtin_invoker0(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr);
3020vm_call_single_noarg_leaf_builtin(rb_execution_context_t *ec, rb_control_frame_t *cfp,
3024 cfp->sp -= (calling->argc + 1);
3025 rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
3026 return builtin_invoker0(ec, calling->recv, NULL, func_ptr);
3032warn_unused_block(
const rb_callable_method_entry_t *cme,
const rb_iseq_t *iseq,
void *pc)
3034 rb_vm_t *vm = GET_VM();
3035 st_table *dup_check_table = vm->unused_block_warning_table;
3045 .v = (
VALUE)cme->def,
3049 if (!strict_unused_block) {
3050 key = (st_data_t)cme->def->original_id;
3052 if (st_lookup(dup_check_table, key, NULL)) {
3062 key |= (st_data_t)(k1.b[i] ^ k2.b[
SIZEOF_VALUE-1-i]) << (8 * i);
3067 fprintf(stderr,
"pc:%p def:%p\n", pc, (
void *)cme->def);
3068 fprintf(stderr,
"key:%p\n", (
void *)key);
3072 if (st_insert(dup_check_table, key, 1)) {
3076 VALUE m_loc = rb_method_entry_location((
const rb_method_entry_t *)cme);
3077 VALUE name = rb_gen_method_name(cme->defined_class, ISEQ_BODY(iseq)->location.base_label);
3079 if (!
NIL_P(m_loc)) {
3080 rb_warn(
"the block passed to '%"PRIsVALUE
"' defined at %"PRIsVALUE
":%"PRIsVALUE
" may be ignored",
3084 rb_warn(
"the block may be ignored because '%"PRIsVALUE
"' does not use a block", name);
3090vm_callee_setup_arg(rb_execution_context_t *ec,
struct rb_calling_info *calling,
3091 const rb_iseq_t *iseq,
VALUE *argv,
int param_size,
int local_size)
3096 VM_ASSERT((vm_ci_argc(ci), 1));
3097 VM_ASSERT(vm_cc_cme(cc) != NULL);
3099 if (UNLIKELY(!ISEQ_BODY(iseq)->param.flags.use_block &&
3100 calling->block_handler != VM_BLOCK_HANDLER_NONE &&
3101 !(vm_ci_flag(calling->cd->ci) & (VM_CALL_OPT_SEND | VM_CALL_SUPER)))) {
3102 warn_unused_block(vm_cc_cme(cc), iseq, (
void *)ec->cfp->pc);
3105 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_KW_SPLAT))) {
3106 if (LIKELY(rb_simple_iseq_p(iseq))) {
3107 rb_control_frame_t *cfp = ec->cfp;
3108 int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3109 CALLER_SETUP_ARG(cfp, calling, ci, lead_num);
3111 if (calling->argc != lead_num) {
3112 argument_arity_error(ec, iseq, calling->argc, lead_num, lead_num);
3116 VM_ASSERT(cc == calling->cc);
3118 if (vm_call_iseq_optimizable_p(ci, cc)) {
3119 if ((iseq->body->builtin_attrs & BUILTIN_ATTR_SINGLE_NOARG_LEAF) &&
3121 VM_ASSERT(iseq->body->builtin_attrs & BUILTIN_ATTR_LEAF);
3122 vm_cc_bf_set(cc, (
void *)iseq->body->iseq_encoded[1]);
3123 CC_SET_FASTPATH(cc, vm_call_single_noarg_leaf_builtin,
true);
3126 CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size),
true);
3131 else if (rb_iseq_only_optparam_p(iseq)) {
3132 rb_control_frame_t *cfp = ec->cfp;
3134 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3135 const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
3137 CALLER_SETUP_ARG(cfp, calling, ci, lead_num + opt_num);
3138 const int argc = calling->argc;
3139 const int opt = argc - lead_num;
3141 if (opt < 0 || opt > opt_num) {
3142 argument_arity_error(ec, iseq, argc, lead_num, lead_num + opt_num);
3145 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
3146 CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,
3147 !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
3148 vm_call_cacheable(ci, cc));
3151 CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,
3152 !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
3153 vm_call_cacheable(ci, cc));
3157 VM_ASSERT((
int)ISEQ_BODY(iseq)->param.size == lead_num + opt_num);
3158 for (
int i=argc; i<lead_num + opt_num; i++) {
3161 return (
int)ISEQ_BODY(iseq)->param.opt_table[opt];
3163 else if (rb_iseq_only_kwparam_p(iseq) && !IS_ARGS_SPLAT(ci)) {
3164 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3165 const int argc = calling->argc;
3166 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
3168 if (vm_ci_flag(ci) & VM_CALL_KWARG) {
3171 if (argc - kw_arg->keyword_len == lead_num) {
3172 const int ci_kw_len = kw_arg->keyword_len;
3173 const VALUE *
const ci_keywords = kw_arg->keywords;
3175 MEMCPY(ci_kws, argv + lead_num,
VALUE, ci_kw_len);
3177 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
3178 args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);
3180 CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,
3181 vm_call_cacheable(ci, cc));
3186 else if (argc == lead_num) {
3188 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
3189 args_setup_kw_parameters(ec, iseq, NULL, 0, NULL, klocals);
3191 if (klocals[kw_param->num] ==
INT2FIX(0)) {
3193 CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,
3194 vm_call_cacheable(ci, cc));
3220 if (ISEQ_BODY(iseq)->param.flags.forwardable) {
3221 bool can_fastpath =
true;
3223 if ((vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
3225 if (vm_ci_argc(ci) != vm_ci_argc(forward_cd->caller_ci)) {
3226 ci = vm_ci_new_runtime(
3232 ci = forward_cd->caller_ci;
3234 can_fastpath =
false;
3238 if (!vm_ci_markable(ci)) {
3239 ci = vm_ci_new_runtime(
3244 can_fastpath =
false;
3246 argv[param_size - 1] = (
VALUE)ci;
3247 CC_SET_FASTPATH(cc, vm_call_iseq_forwardable, can_fastpath);
3251 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_method);
3278 const VALUE * lep = VM_CF_LEP(cfp);
3280 const rb_iseq_t *iseq;
3284 if (VM_ENV_FLAGS(lep, VM_ENV_FLAG_ESCAPED)) {
3289 iseq = rb_vm_search_cf_from_ep(ec, cfp, lep)->iseq;
3293 int local_size = ISEQ_BODY(iseq)->local_table_size + argc;
3295 const VALUE * from = lep - (local_size + VM_ENV_DATA_SIZE - 1);
3296 VALUE * to = cfp->sp - 1;
3300 CHECK_VM_STACK_OVERFLOW0(cfp, to,
RARRAY_LEN(splat));
3305 CHECK_VM_STACK_OVERFLOW0(cfp, to, argc);
3307 cfp->sp = to + argc;
3324vm_call_iseq_setup(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling)
3326 RB_DEBUG_COUNTER_INC(ccf_iseq_setup);
3329 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3330 int param_size = ISEQ_BODY(iseq)->param.size;
3331 int local_size = ISEQ_BODY(iseq)->local_table_size;
3333 RUBY_ASSERT(!ISEQ_BODY(iseq)->param.flags.forwardable);
3335 const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
3336 return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
3340vm_call_iseq_fwd_setup(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling)
3342 RB_DEBUG_COUNTER_INC(ccf_iseq_setup);
3345 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3346 int param_size = ISEQ_BODY(iseq)->param.size;
3347 int local_size = ISEQ_BODY(iseq)->local_table_size;
3349 RUBY_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);
3352 local_size = local_size + vm_ci_argc(calling->cd->ci);
3353 param_size = param_size + vm_ci_argc(calling->cd->ci);
3355 const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
3356 return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
3360vm_call_iseq_setup_2(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling,
3361 int opt_pc,
int param_size,
int local_size)
3366 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
3367 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param_size, local_size);
3370 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
3375vm_call_iseq_setup_normal(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling,
const rb_callable_method_entry_t *me,
3376 int opt_pc,
int param_size,
int local_size)
3378 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
3379 VALUE *argv = cfp->sp - calling->argc;
3380 VALUE *sp = argv + param_size;
3381 cfp->sp = argv - 1 ;
3383 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL, calling->recv,
3384 calling->block_handler, (
VALUE)me,
3385 ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
3386 local_size - param_size,
3387 ISEQ_BODY(iseq)->stack_max);
/* Tailcall setup: pop the current frame and reuse its stack slot for the new
 * call.  The receiver and arguments are copied down to the popped frame's sp,
 * the FINISH flag of the replaced frame is preserved, and a block handler
 * that pointed at the current frame is migrated to the previous frame's
 * captured block so it survives the pop.
 * NOTE(review): extraction dropped some lines (braces, the arg-copy index
 * decl); code left byte-identical. */
3392vm_call_iseq_setup_tailcall(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling,
int opt_pc)
3396 VALUE *argv = cfp->sp - calling->argc;
3397 const rb_callable_method_entry_t *me = vm_cc_cme(cc);
3398 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
3399 VALUE *src_argv = argv;
3400 VALUE *sp_orig, *sp;
 /* Keep the FINISH flag of the frame being replaced. */
3401 VALUE finish_flag = VM_FRAME_FINISHED_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;
 /* If the block handler captures the frame we are about to pop, re-anchor
  * it on the previous control frame's captured block. */
3403 if (VM_BH_FROM_CFP_P(calling->block_handler, cfp)) {
3404 struct rb_captured_block *dst_captured = VM_CFP_TO_CAPTURED_BLOCK(RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
3405 const struct rb_captured_block *src_captured = VM_BH_TO_CAPT_BLOCK(calling->block_handler);
3406 dst_captured->code.val = src_captured->code.val;
3407 if (VM_BH_ISEQ_BLOCK_P(calling->block_handler)) {
3408 calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);
3411 calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);
 /* Discard the current frame, then rebuild receiver+args at its base. */
3415 vm_pop_frame(ec, cfp, cfp->ep);
3418 sp_orig = sp = cfp->sp;
3421 sp[0] = calling->recv;
3425 for (i=0; i < ISEQ_BODY(iseq)->param.size; i++) {
3426 *sp++ = src_argv[i];
3429 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL | finish_flag,
3430 calling->recv, calling->block_handler, (
VALUE)me,
3431 ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
3432 ISEQ_BODY(iseq)->local_table_size - ISEQ_BODY(iseq)->param.size,
3433 ISEQ_BODY(iseq)->stack_max);
/* Guard used by Ractor-unsafe C-method invokers: raise Ractor::UnsafeError
 * unless we are running on the main ractor. */
3441ractor_unsafe_check(
void)
3443 if (!rb_ractor_main_p()) {
3444 rb_raise(rb_eRactorUnsafeError,
"ractor unsafe method called from not main ractor");
3451 ractor_unsafe_check();
3459 ractor_unsafe_check();
3461 return (*f)(argc, argv, recv);
3467 ractor_unsafe_check();
3475 ractor_unsafe_check();
3477 return (*f)(recv, argv[0]);
3483 ractor_unsafe_check();
3485 return (*f)(recv, argv[0], argv[1]);
3491 ractor_unsafe_check();
3493 return (*f)(recv, argv[0], argv[1], argv[2]);
3499 ractor_unsafe_check();
3501 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
3507 ractor_unsafe_check();
3508 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3509 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3515 ractor_unsafe_check();
3516 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3517 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3523 ractor_unsafe_check();
3524 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3525 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3531 ractor_unsafe_check();
3532 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3533 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3539 ractor_unsafe_check();
3540 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3541 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3547 ractor_unsafe_check();
3548 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3549 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3555 ractor_unsafe_check();
3556 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3557 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3563 ractor_unsafe_check();
3564 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3565 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3571 ractor_unsafe_check();
3572 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3573 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3579 ractor_unsafe_check();
3580 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3581 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3587 ractor_unsafe_check();
3588 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3589 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
3603 return (*f)(argc, argv, recv);
3617 return (*f)(recv, argv[0]);
3624 return (*f)(recv, argv[0], argv[1]);
3631 return (*f)(recv, argv[0], argv[1], argv[2]);
3638 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
3644 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3645 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3651 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3652 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3658 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3659 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3665 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3666 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3672 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3673 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3679 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3680 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3686 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3687 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3693 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3694 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3700 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3701 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3707 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3708 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3714 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3715 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
/* After a cfunc returns, check that the control-frame pointer is consistent:
 * the expected state is reg_cfp == ec->cfp + 1 (exactly one frame popped).
 * A pending stack-overflow raise is tolerated: its flag is reset and the
 * check passes.  NOTE(review): the return statements after the raised-flag
 * branch were dropped by the extraction; code left byte-identical. */
3719vm_cfp_consistent_p(rb_execution_context_t *ec,
const rb_control_frame_t *reg_cfp)
3721 const int ov_flags = RAISED_STACKOVERFLOW;
3722 if (LIKELY(reg_cfp == ec->cfp + 1))
return TRUE;
3723 if (rb_ec_raised_p(ec, ov_flags)) {
3724 rb_ec_raised_reset(ec, ov_flags);
/* Abort with rb_bug (reporting both frame pointers) when a cfunc left the
 * control-frame stack in an inconsistent state; no-op on the likely path. */
3730#define CHECK_CFP_CONSISTENCY(func) \
3731 (LIKELY(vm_cfp_consistent_p(ec, reg_cfp)) ? (void)0 : \
3732 rb_bug(func ": cfp consistency error (%p, %p)", (void *)reg_cfp, (void *)(ec->cfp+1)))
/* Return the cfunc descriptor embedded in a method entry's definition.
 * Under VM_DEBUG_VERIFY_METHOD_CACHE, rb_bug on any non-cfunc method type
 * to catch stale method-cache entries.  UNALIGNED_MEMBER_PTR is used because
 * body.cfunc may not be naturally aligned inside the union. */
3735const rb_method_cfunc_t *
3736vm_method_cfunc_entry(
const rb_callable_method_entry_t *me)
3738#if VM_DEBUG_VERIFY_METHOD_CACHE
3739 switch (me->def->type) {
3740 case VM_METHOD_TYPE_CFUNC:
3741 case VM_METHOD_TYPE_NOTIMPLEMENTED:
3743# define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
3745 METHOD_BUG(ATTRSET);
3747 METHOD_BUG(BMETHOD);
3750 METHOD_BUG(OPTIMIZED);
3751 METHOD_BUG(MISSING);
3752 METHOD_BUG(REFINED);
3756 rb_bug(
"wrong method type: %d", me->def->type);
3759 return UNALIGNED_MEMBER_PTR(me->def, body.cfunc);
/* Core C-function call path: push a CFUNC frame, fire C-entry hooks, invoke
 * the function through its arity-specific invoker, verify frame consistency,
 * pop the frame, then fire C_RETURN hooks.  The caller passes argc/argv and
 * stack_bottom (receiver slot); sp is reset to stack_bottom before the call
 * so the cfunc's result replaces receiver+args.
 * NOTE(review): extraction dropped some lines (parameter tail, val decl,
 * return); code left byte-identical. */
3763vm_call_cfunc_with_frame_(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling,
3766 RB_DEBUG_COUNTER_INC(ccf_cfunc_with_frame);
3770 const rb_callable_method_entry_t *me = vm_cc_cme(cc);
3771 const rb_method_cfunc_t *cfunc = vm_method_cfunc_entry(me);
3773 VALUE recv = calling->recv;
3774 VALUE block_handler = calling->block_handler;
3775 VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
 /* Propagate kw-splat status so the callee sees keyword semantics. */
3777 if (UNLIKELY(calling->kw_splat)) {
3778 frame_type |= VM_FRAME_FLAG_CFRAME_KW;
3781 VM_ASSERT(reg_cfp == ec->cfp);
3783 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, me->owner, me->def->original_id);
3786 vm_push_frame(ec, NULL, frame_type, recv,
3787 block_handler, (
VALUE)me,
3788 0, ec->cfp->sp, 0, 0);
3790 int len = cfunc->argc;
3793 reg_cfp->sp = stack_bottom;
3794 val = (*cfunc->invoker)(recv, argc, argv, cfunc->func);
3796 CHECK_CFP_CONSISTENCY(
"vm_call_cfunc");
3798 rb_vm_pop_frame(ec);
3800 VM_ASSERT(ec->cfp->sp == stack_bottom);
3802 EXEC_EVENT_HOOK(ec,
RUBY_EVENT_C_RETURN, recv, me->def->original_id, vm_ci_mid(ci), me->owner, val);
3803 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, me->owner, me->def->original_id);
/* Push a bare CFUNC frame for an already-running cfunc (used so external
 * tooling sees a frame for the method).  The receiver is read back from the
 * VM stack at recv_idx slots below sp; no block handler is attached.
 * The VM_CHECK_MODE store of Qfalse presumably clears a canary slot —
 * TODO confirm against the dropped surrounding lines. */
3811rb_vm_push_cfunc_frame(
const rb_callable_method_entry_t *cme,
int recv_idx)
3813 VM_ASSERT(cme->def->type == VM_METHOD_TYPE_CFUNC);
3814 rb_execution_context_t *ec = GET_EC();
3815 VALUE *sp = ec->cfp->sp;
3816 VALUE recv = *(sp - recv_idx - 1);
3817 VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
3818 VALUE block_handler = VM_BLOCK_HANDLER_NONE;
3819#if VM_CHECK_MODE > 0
3821 *(GET_EC()->cfp->sp) =
Qfalse;
3823 vm_push_frame(ec, NULL, frame_type, recv, block_handler, (
VALUE)cme, 0, ec->cfp->sp, 0, 0);
/* True when the call site uses argument splat or any keyword form —
 * i.e. when the simple fixed-arity fast path cannot be used. */
3828rb_splat_or_kwargs_p(
const struct rb_callinfo *restrict ci)
3830 return IS_ARGS_SPLAT(ci) || IS_ARGS_KW_OR_KW_SPLAT(ci);
/* Simple-call wrapper around vm_call_cfunc_with_frame_: arguments are
 * contiguous on the VM stack with the receiver one slot below argv. */
3834vm_call_cfunc_with_frame(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling)
3836 int argc = calling->argc;
3837 VALUE *stack_bottom = reg_cfp->sp - argc - 1;
3838 VALUE *argv = &stack_bottom[1];
3840 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
/* Slow cfunc path for complex argument shapes.  CALLER_SETUP_ARG may spill
 * arguments into a heap array (calling->heap_argv); in that case the call is
 * made from the heap buffer.  Otherwise the plain with-frame path is used and
 * installed as the fastpath when the site has no splat/kw/forwarding.
 * NOTE(review): the heap_argv branch's argc/argv extraction lines were
 * dropped; code left byte-identical. */
3844vm_call_cfunc_other(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling)
3847 RB_DEBUG_COUNTER_INC(ccf_cfunc_other);
3849 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
3851 if (UNLIKELY(argv_ary = calling->heap_argv)) {
3852 VM_ASSERT(!IS_ARGS_KEYWORD(ci));
3855 VALUE *stack_bottom = reg_cfp->sp - 2;
3857 VM_ASSERT(calling->argc == 1);
3861 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
3864 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_with_frame, !rb_splat_or_kwargs_p(ci) && !calling->kw_splat && !(vm_ci_flag(ci) & VM_CALL_FORWARDING));
3866 return vm_call_cfunc_with_frame(ec, reg_cfp, calling);
/* Expand an argument array sitting on the VM stack in place and call the
 * cfunc with the unpacked elements.  Falls back to vm_call_cfunc_other when
 * the array is larger than VM_ARGC_STACK_MAX (too big to unpack onto the
 * stack).  NOTE(review): argc derivation from the array and the element
 * copy statement were dropped by the extraction; code left byte-identical. */
3871vm_call_cfunc_array_argv(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling,
int stack_offset,
int argc_offset)
3873 VALUE argv_ary = reg_cfp->sp[-1 - stack_offset];
3876 if (UNLIKELY(argc > VM_ARGC_STACK_MAX)) {
3877 return vm_call_cfunc_other(ec, reg_cfp, calling);
 /* Array is consumed as positional args; kw-splat no longer applies. */
3881 calling->kw_splat = 0;
3883 VALUE *stack_bottom = reg_cfp->sp - 2 - stack_offset;
3884 VALUE *sp = stack_bottom;
3885 CHECK_VM_STACK_OVERFLOW(reg_cfp, argc);
3886 for(i = 0; i < argc; i++) {
3891 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, stack_bottom+1, stack_bottom);
/* Fastpath for `f(*ary)`: the splat array is the single stack argument.
 * If the array's last element is a hash flagged RHASH_PASS_AS_KEYWORDS it
 * must go through the generic path (keyword conversion); otherwise the
 * array is unpacked in place via vm_call_cfunc_array_argv.
 * NOTE(review): argc/last_hash extraction lines were dropped. */
3895vm_call_cfunc_only_splat(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling)
3897 RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat);
3898 VALUE argv_ary = reg_cfp->sp[-1];
3902 int argc_offset = 0;
3904 if (UNLIKELY(argc > 0 &&
3906 (((
struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS))) {
3908 return vm_call_cfunc_other(ec, reg_cfp, calling);
3912 return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 0, argc_offset);
/* Fastpath for `f(*ary, **kw)`: the keyword hash is on top of the stack with
 * the splat array below it.  Unpacks the array (skipping one stack slot for
 * the kw hash) when applicable, else falls back to the generic path.
 * NOTE(review): the guard condition on the kw hash was dropped. */
3916vm_call_cfunc_only_splat_kw(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling)
3918 RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat_kw);
3919 VALUE keyword_hash = reg_cfp->sp[-1];
3922 return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 1, 0);
3925 return vm_call_cfunc_other(ec, reg_cfp, calling);
/* Entry point for calling a C-implemented method: pick and install the
 * appropriate specialized handler in the call cache — only-splat,
 * only-splat+kw, or the generic "other" path — then call it.  Forwarding
 * call sites always take the generic path. */
3929vm_call_cfunc(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling)
3932 RB_DEBUG_COUNTER_INC(ccf_cfunc);
3934 if (IS_ARGS_SPLAT(ci) && !(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
3935 if (!IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 1) {
3937 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat, TRUE);
3938 return vm_call_cfunc_only_splat(ec, reg_cfp, calling);
3940 if (IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 2) {
3942 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat_kw, TRUE);
3943 return vm_call_cfunc_only_splat_kw(ec, reg_cfp, calling);
3947 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_other, TRUE);
3948 return vm_call_cfunc_other(ec, reg_cfp, calling);
/* Optimized attribute reader: fetch the instance variable named by the
 * method entry's attr id directly (no frame push).
 * NOTE(review): the sp adjustment and return were dropped by extraction. */
3952vm_call_ivar(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling)
3955 RB_DEBUG_COUNTER_INC(ccf_ivar);
3957 VALUE ivar = vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE,
Qnil);
/* Optimized attribute writer: store the top-of-stack value into the ivar
 * using the cached shape/index, falling back progressively —
 * vm_setivar (cached shape) -> vm_setivar_default -> slowpath.
 * rb_check_frozen raises before any mutation.
 * NOTE(review): the UNDEF_P checks / returns between fallbacks were
 * partially dropped by the extraction; code left byte-identical. */
3962vm_call_attrset_direct(rb_execution_context_t *ec, rb_control_frame_t *cfp,
const struct rb_callcache *cc,
VALUE obj)
3964 RB_DEBUG_COUNTER_INC(ccf_attrset);
3965 VALUE val = *(cfp->sp - 1);
3967 attr_index_t index = vm_cc_attr_index(cc);
3968 shape_id_t dest_shape_id = vm_cc_attr_index_dest_shape_id(cc);
3969 ID id = vm_cc_cme(cc)->def->body.attr.id;
3970 rb_check_frozen(obj);
3971 VALUE res = vm_setivar(obj,
id, val, dest_shape_id, index);
3980 res = vm_setivar_default(obj,
id, val, dest_shape_id, index);
3981 if (!UNDEF_P(res)) {
3986 res = vm_setivar_slowpath_attr(obj,
id, val, cc);
/* Call-cache wrapper: attribute write against the calling receiver. */
3992vm_call_attrset(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling)
3994 return vm_call_attrset_direct(ec, cfp, calling->cc, calling->recv);
/* Invoke a bmethod (method defined from a Proc via define_method) through
 * the generic proc invocation path.  Raises RuntimeError when the defining
 * Proc is unshareable and we are in a different ractor than it was defined
 * in.  NOTE(review): the proc declaration/return lines were dropped. */
3998vm_call_bmethod_body(rb_execution_context_t *ec,
struct rb_calling_info *calling,
const VALUE *argv)
4003 const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
4004 VALUE procv = cme->def->body.bmethod.proc;
4007 cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
4008 rb_raise(
rb_eRuntimeError,
"defined with an un-shareable Proc in a different Ractor");
4012 GetProcPtr(procv, proc);
4013 val = vm_invoke_bmethod(ec, proc, calling->recv, CALLING_ARGC(calling), argv, calling->kw_splat, calling->block_handler, vm_cc_cme(cc));
4018static int vm_callee_setup_block_arg(rb_execution_context_t *ec,
struct rb_calling_info *calling,
const struct rb_callinfo *ci,
const rb_iseq_t *iseq,
VALUE *argv,
const enum arg_setup_type arg_setup_type);
/* Fast bmethod path when the underlying Proc wraps an iseq block: unwrap
 * nested proc blocks to the iseq, set up arguments (simple or complex), and
 * push a BLOCK|BMETHOD|LAMBDA frame executing the block's iseq directly,
 * chained to the captured environment via VM_GUARDED_PREV_EP.
 * Same ractor-shareability guard as vm_call_bmethod_body.
 * NOTE(review): several lines (cc decl, opt_pc decl, frame argument list
 * members) were dropped by the extraction; code left byte-identical. */
4021vm_call_iseq_bmethod(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling)
4023 RB_DEBUG_COUNTER_INC(ccf_iseq_bmethod);
4026 const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
4027 VALUE procv = cme->def->body.bmethod.proc;
4030 cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
4031 rb_raise(
rb_eRuntimeError,
"defined with an un-shareable Proc in a different Ractor");
4035 GetProcPtr(procv, proc);
4036 const struct rb_block *block = &proc->block;
 /* Unwrap proc-in-proc chains down to the concrete block. */
4038 while (vm_block_type(block) == block_type_proc) {
4039 block = vm_proc_block(block->as.proc);
4041 VM_ASSERT(vm_block_type(block) == block_type_iseq);
4044 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
4045 VALUE *
const argv = cfp->sp - calling->argc;
4046 const int arg_size = ISEQ_BODY(iseq)->param.size;
4049 if (vm_ci_flag(calling->cd->ci) & VM_CALL_ARGS_SIMPLE) {
4050 opt_pc = vm_callee_setup_block_arg(ec, calling, calling->cd->ci, iseq, argv, arg_setup_method);
4053 opt_pc = setup_parameters_complex(ec, iseq, calling, calling->cd->ci, argv, arg_setup_method);
4058 vm_push_frame(ec, iseq,
4059 VM_FRAME_MAGIC_BLOCK | VM_FRAME_FLAG_BMETHOD | VM_FRAME_FLAG_LAMBDA,
4061 VM_GUARDED_PREV_EP(captured->ep),
4063 ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
4065 ISEQ_BODY(iseq)->local_table_size - arg_size,
4066 ISEQ_BODY(iseq)->stack_max);
/* Slow bmethod path for non-iseq procs (e.g. ifunc blocks): gather args
 * (possibly into heap_argv), unwind them off the VM stack, and dispatch
 * through vm_call_bmethod_body.
 * NOTE(review): the argc/argv setup lines were dropped by the extraction. */
4072vm_call_noniseq_bmethod(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling)
4074 RB_DEBUG_COUNTER_INC(ccf_noniseq_bmethod);
4078 CALLER_SETUP_ARG(cfp, calling, calling->cd->ci, ALLOW_HEAP_ARGV);
4079 if (UNLIKELY(calling->heap_argv)) {
4084 argc = calling->argc;
 /* Pop receiver + args: the bmethod body consumes them from argv. */
4087 cfp->sp += - argc - 1;
4090 return vm_call_bmethod_body(ec, calling, argv);
/* bmethod dispatcher: inspect the defining Proc's block type once and
 * install the matching specialized handler (iseq vs non-iseq) as the
 * call-cache fastpath before delegating to it. */
4094vm_call_bmethod(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling)
4096 RB_DEBUG_COUNTER_INC(ccf_bmethod);
4099 const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
4100 VALUE procv = cme->def->body.bmethod.proc;
4102 GetProcPtr(procv, proc);
4103 const struct rb_block *block = &proc->block;
4105 while (vm_block_type(block) == block_type_proc) {
4106 block = vm_proc_block(block->as.proc);
4108 if (vm_block_type(block) == block_type_iseq) {
4109 CC_SET_FASTPATH(cc, vm_call_iseq_bmethod, TRUE);
4110 return vm_call_iseq_bmethod(ec, cfp, calling);
4113 CC_SET_FASTPATH(cc, vm_call_noniseq_bmethod, TRUE);
4114 return vm_call_noniseq_bmethod(ec, cfp, calling);
/* Walk the ancestry starting at current_class looking for the entry whose
 * owner matches target_owner; falls back to current_class when not found.
 * NOTE(review): the loop body (owner extraction, superclass step, return)
 * was mostly dropped by the extraction; code left byte-identical. */
4118rb_find_defined_class_by_owner(
VALUE current_class,
VALUE target_owner)
4120 VALUE klass = current_class;
4128 while (
RTEST(klass)) {
4130 if (owner == target_owner) {
4136 return current_class;
/* Resolve the callable method entry behind an alias.  When the original
 * entry has no defined_class (a module method), complement it with the
 * class found via the owner; cache the complemented entry back into the
 * alias definition — directly when this def has a single reference, or via
 * a fresh ALIAS definition otherwise (copy-on-write for shared defs). */
4139static const rb_callable_method_entry_t *
4140aliased_callable_method_entry(
const rb_callable_method_entry_t *me)
4142 const rb_method_entry_t *orig_me = me->def->body.alias.original_me;
4143 const rb_callable_method_entry_t *cme;
4145 if (orig_me->defined_class == 0) {
4146 VALUE defined_class = rb_find_defined_class_by_owner(me->defined_class, orig_me->owner);
4147 VM_ASSERT_TYPE(orig_me->owner,
T_MODULE);
4148 cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class);
 /* Sole owner of this def: safe to overwrite the cached original_me. */
4150 if (me->def->reference_count == 1) {
4151 RB_OBJ_WRITE(me, &me->def->body.alias.original_me, cme);
 /* Shared def: clone a new alias definition rather than mutating it. */
4154 rb_method_definition_t *def =
4155 rb_method_definition_create(VM_METHOD_TYPE_ALIAS, me->def->original_id);
4156 rb_method_definition_set((rb_method_entry_t *)me, def, (
void *)cme);
4160 cme = (
const rb_callable_method_entry_t *)orig_me;
4163 VM_ASSERT(callable_method_entry_p(cme));
/* Public wrapper over the static alias-resolution helper. */
4167const rb_callable_method_entry_t *
4168rb_aliased_callable_method_entry(
const rb_callable_method_entry_t *me)
4170 return aliased_callable_method_entry(me);
/* Call through an alias: swap in a stack-allocated call cache holding the
 * resolved original method entry, then re-dispatch by method type. */
4174vm_call_alias(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling)
4176 calling->cc = &VM_CC_ON_STACK(
Qundef,
4179 aliased_callable_method_entry(vm_cc_cme(calling->cc)));
4181 return vm_call_method_each_type(ec, cfp, calling);
4184static enum method_missing_reason
4187 enum method_missing_reason stat = MISSING_NOENTRY;
4188 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
4189 if (vm_ci_flag(ci) & VM_CALL_FCALL) stat |= MISSING_FCALL;
4190 if (vm_ci_flag(ci) & VM_CALL_SUPER) stat |= MISSING_SUPER;
4194static VALUE vm_call_method_missing(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling);
/* Dispatch a call whose method name arrives as a Symbol (e.g. send/__send__
 * and Symbol#to_proc paths).  Null mid is rewritten to method_missing with
 * the recorded missing reason.  When the receiver has no overridden
 * method_missing, builds and raises a NoMethodError directly (both for the
 * heap-argv and the on-stack argv cases); otherwise builds an on-stack
 * ci/cc pair for the resolved method and dispatches, honoring visibility
 * (private/protected fall through to method_missing with the matching
 * reason).  NOTE(review): this definition is heavily gapped by the
 * extraction (parameter tail, several branches and raise calls are
 * missing); code left byte-identical. */
4197vm_call_symbol(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
4200 ASSUME(calling->argc >= 0);
4202 enum method_missing_reason missing_reason = MISSING_NOENTRY;
4203 int argc = calling->argc;
4204 VALUE recv = calling->recv;
4207 flags |= VM_CALL_OPT_SEND;
4209 if (UNLIKELY(! mid)) {
4210 mid = idMethodMissing;
4211 missing_reason = ci_missing_reason(ci);
4212 ec->method_missing_reason = missing_reason;
4215 if (UNLIKELY(argv_ary = calling->heap_argv)) {
4216 if (rb_method_basic_definition_p(klass, idMethodMissing)) {
4217 rb_ary_unshift(argv_ary, symbol);
4220 int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
4221 VALUE exc = rb_make_no_method_exception(
4243 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
4246 argc = ++calling->argc;
4248 if (rb_method_basic_definition_p(klass, idMethodMissing)) {
4251 int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
4252 const VALUE *argv = STACK_ADDR_FROM_TOP(argc);
4253 VALUE exc = rb_make_no_method_exception(
4266 .ci = &VM_CI_ON_STACK(mid, flags, argc, vm_ci_kwarg(ci)),
4272 if (!(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
4273 calling->cd = &new_fcd.cd;
4277 VM_ASSERT((vm_ci_argc(caller_ci), 1));
4278 new_fcd.caller_ci = caller_ci;
4281 calling->cc = &VM_CC_ON_STACK(klass,
4283 { .method_missing_reason = missing_reason },
4284 rb_callable_method_entry_with_refinements(klass, mid, NULL));
4286 if (flags & VM_CALL_FCALL) {
4287 return vm_call_method(ec, reg_cfp, calling);
4291 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
4293 if (vm_cc_cme(cc) != NULL) {
4294 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
4295 case METHOD_VISI_PUBLIC:
4296 return vm_call_method_each_type(ec, reg_cfp, calling);
4297 case METHOD_VISI_PRIVATE:
4298 vm_cc_method_missing_reason_set(cc, MISSING_PRIVATE);
4300 case METHOD_VISI_PROTECTED:
4301 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
4304 VM_UNREACHABLE(vm_call_method);
4306 return vm_call_method_missing(ec, reg_cfp, calling);
4309 return vm_call_method_nome(ec, reg_cfp, calling);
/* Common send implementation: pop the method-name argument off the stack
 * (raising ArgumentError when no name was given) and re-dispatch through
 * vm_call_symbol.  NOTE(review): the symbol extraction / stack shuffling
 * between these lines was dropped by the extraction. */
4313vm_call_opt_send0(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling,
int flags)
4319 i = calling->argc - 1;
4321 if (calling->argc == 0) {
4322 rb_raise(rb_eArgError,
"no method name given");
4346 return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
/* send() with complex argument shapes (splat/kw-splat/kwargs).  When args
 * were spilled to a heap array, the method name is shifted off the front of
 * that array; a trailing kw hash is tagged RHASH_PASS_AS_KEYWORDS so it is
 * re-interpreted as keywords downstream. */
4350vm_call_opt_send_complex(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling)
4352 RB_DEBUG_COUNTER_INC(ccf_opt_send_complex);
4354 int flags = VM_CALL_FCALL;
4358 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
4359 if (UNLIKELY(argv_ary = calling->heap_argv)) {
4360 sym = rb_ary_shift(argv_ary);
4361 flags |= VM_CALL_ARGS_SPLAT;
4362 if (calling->kw_splat) {
4363 VALUE last_hash = rb_ary_last(0, NULL, argv_ary);
4364 ((
struct RHash *)last_hash)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
4365 calling->kw_splat = 0;
4367 return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
4370 if (calling->kw_splat) flags |= VM_CALL_KW_SPLAT;
4371 return vm_call_opt_send0(ec, reg_cfp, calling, flags);
/* send() fastpath for simple argument shapes: forward the site's flags
 * plus FCALL (send bypasses visibility) into the common send path. */
4375vm_call_opt_send_simple(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling)
4377 RB_DEBUG_COUNTER_INC(ccf_opt_send_simple);
4378 return vm_call_opt_send0(ec, reg_cfp, calling, vm_ci_flag(calling->cd->ci) | VM_CALL_FCALL);
/* send() dispatcher: classify the call site as complex (forwarding, lone
 * splat/kw-splat arg, splat+kw-splat pair, or all-kwargs) or simple, install
 * the matching handler as the call-cache fastpath, and invoke it. */
4382vm_call_opt_send(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling)
4384 RB_DEBUG_COUNTER_INC(ccf_opt_send);
4387 int flags = vm_ci_flag(ci);
4389 if (UNLIKELY((flags & VM_CALL_FORWARDING) || (!(flags & VM_CALL_ARGS_SIMPLE) &&
4390 ((calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
4391 (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
4392 ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc)))))) {
4393 CC_SET_FASTPATH(calling->cc, vm_call_opt_send_complex, TRUE);
4394 return vm_call_opt_send_complex(ec, reg_cfp, calling);
4397 CC_SET_FASTPATH(calling->cc, vm_call_opt_send_simple, TRUE);
4398 return vm_call_opt_send_simple(ec, reg_cfp, calling);
/* Convert a failed dispatch into a method_missing call: shift the stack to
 * insert the original method name (as a Symbol) as the first argument,
 * record the missing reason on the EC, build on-stack ci/cc targeting
 * idMethodMissing (without refinements), and re-enter vm_call_method.
 * NOTE(review): stack-shift statements and the new_fcd declaration were
 * dropped by the extraction; code left byte-identical. */
4402vm_call_method_missing_body(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling,
4403 const struct rb_callinfo *orig_ci,
enum method_missing_reason reason)
4405 RB_DEBUG_COUNTER_INC(ccf_method_missing);
4407 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
4408 unsigned int argc, flag;
4410 flag = VM_CALL_FCALL | VM_CALL_OPT_SEND | vm_ci_flag(orig_ci);
 /* One extra slot: the method-name Symbol becomes argv[0]. */
4411 argc = ++calling->argc;
4414 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
4415 vm_check_canary(ec, reg_cfp->sp);
4419 argv[0] =
ID2SYM(vm_ci_mid(orig_ci));
4422 ec->method_missing_reason = reason;
4426 .ci = &VM_CI_ON_STACK(idMethodMissing, flag, argc, vm_ci_kwarg(orig_ci)),
4432 if (!(flag & VM_CALL_FORWARDING)) {
4433 calling->cd = &new_fcd.cd;
4437 VM_ASSERT((vm_ci_argc(caller_ci), 1));
4438 new_fcd.caller_ci = caller_ci;
4442 calling->cc = &VM_CC_ON_STACK(
Qundef, vm_call_general, {{ 0 }},
4443 rb_callable_method_entry_without_refinements(
CLASS_OF(calling->recv), idMethodMissing, NULL));
4444 return vm_call_method(ec, reg_cfp, calling);
/* Call-cache entry point: forward to the method_missing body using the
 * reason cached on the call cache. */
4448vm_call_method_missing(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling)
4450 return vm_call_method_missing_body(ec, reg_cfp, calling, calling->cd->ci, vm_cc_cmethod_missing_reason(calling->cc));
4453static const rb_callable_method_entry_t *refined_method_callable_without_refinement(
const rb_callable_method_entry_t *me);
/* Zero-arg super dispatch: look up the method in the given (super) class,
 * unwrap refined entries to the unrefined original, and re-dispatch with an
 * on-stack call cache.  Missing method falls through to the no-method path.
 * NOTE(review): the cme-NULL guard around line 4461 was partially dropped. */
4455vm_call_zsuper(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling,
VALUE klass)
4459 const rb_callable_method_entry_t *cme = klass ? rb_callable_method_entry(klass, vm_ci_mid(calling->cd->ci)) : NULL;
4461 return vm_call_method_nome(ec, cfp, calling);
4463 if (cme->def->type == VM_METHOD_TYPE_REFINED &&
4464 cme->def->body.refined.orig_me) {
4465 cme = refined_method_callable_without_refinement(cme);
4468 calling->cc = &VM_CC_ON_STACK(
Qundef, vm_call_general, {{ 0 }}, cme);
4470 return vm_call_method_each_type(ec, cfp, calling);
4474find_refinement(
VALUE refinements,
VALUE klass)
4476 if (
NIL_P(refinements)) {
4479 return rb_hash_lookup(refinements, klass);
4482PUREFUNC(
static rb_control_frame_t * current_method_entry(
const rb_execution_context_t *ec, rb_control_frame_t *cfp));
4483static rb_control_frame_t *
4484current_method_entry(
const rb_execution_context_t *ec, rb_control_frame_t *cfp)
4486 rb_control_frame_t *top_cfp = cfp;
4488 if (cfp->iseq && ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_BLOCK) {
4489 const rb_iseq_t *local_iseq = ISEQ_BODY(cfp->iseq)->local_iseq;
4492 cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
4493 if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(ec, cfp)) {
4497 }
while (cfp->iseq != local_iseq);
/* Strip the refinement wrapper from a refined method entry, yielding the
 * callable original.  Asserts the result is callable; an undefined original
 * is handled in the UNDEFINED_METHOD_ENTRY_P branch (its body was dropped
 * by the extraction, as was the defined_class==0 complement path). */
4502static const rb_callable_method_entry_t *
4503refined_method_callable_without_refinement(
const rb_callable_method_entry_t *me)
4505 const rb_method_entry_t *orig_me = me->def->body.refined.orig_me;
4506 const rb_callable_method_entry_t *cme;
4508 if (orig_me->defined_class == 0) {
4513 cme = (
const rb_callable_method_entry_t *)orig_me;
4516 VM_ASSERT(callable_method_entry_p(cme));
4518 if (UNDEFINED_METHOD_ENTRY_P(cme)) {
/* Resolve which method a refined call should actually run: scan the cref
 * chain's active refinements for the owner class; skip a refinement that
 * would re-enter the same method via super (top-frame def comparison);
 * accept a non-refined replacement from the refinement module; otherwise
 * fall back to the unrefined original (or a fresh lookup).
 * NOTE(review): several branch bodies/returns were dropped by the
 * extraction; code left byte-identical. */
4525static const rb_callable_method_entry_t *
4526search_refined_method(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling)
4528 ID mid = vm_ci_mid(calling->cd->ci);
4529 const rb_cref_t *cref = vm_get_cref(cfp->ep);
4531 const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
4533 for (; cref; cref = CREF_NEXT(cref)) {
4534 const VALUE refinement = find_refinement(CREF_REFINEMENTS(cref), vm_cc_cme(cc)->owner);
4535 if (
NIL_P(refinement))
continue;
4537 const rb_callable_method_entry_t *
const ref_me =
4538 rb_callable_method_entry(refinement, mid);
 /* super from within the refined method must not re-select itself. */
4541 if (vm_cc_call(cc) == vm_call_super_method) {
4542 const rb_control_frame_t *top_cfp = current_method_entry(ec, cfp);
4543 const rb_callable_method_entry_t *top_me = rb_vm_frame_method_entry(top_cfp);
4544 if (top_me && rb_method_definition_eq(ref_me->def, top_me->def)) {
4549 if (cme->def->type != VM_METHOD_TYPE_REFINED ||
4550 cme->def != ref_me->def) {
4553 if (ref_me->def->type != VM_METHOD_TYPE_REFINED) {
 /* No refinement applied: use the original method if one exists. */
4562 if (vm_cc_cme(cc)->def->body.refined.orig_me) {
4563 return refined_method_callable_without_refinement(vm_cc_cme(cc));
4567 const rb_callable_method_entry_t *cme = klass ? rb_callable_method_entry(klass, mid) : NULL;
/* Dispatch a refined call: resolve the concrete target, then re-enter
 * vm_call_method with either a heap-allocated refinement call cache (when
 * the call data has a cc slot to update) or an on-stack one.  A NULL
 * resolution falls through to the no-method path.
 * NOTE(review): the ref_cme NULL guard / braces were dropped. */
4573vm_call_refined(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling)
4575 const rb_callable_method_entry_t *ref_cme = search_refined_method(ec, cfp, calling);
4578 if (calling->cd->cc) {
4579 const struct rb_callcache *cc = calling->cc = vm_cc_new(vm_cc_cme(calling->cc)->defined_class, ref_cme, vm_call_general, cc_type_refinement);
4581 return vm_call_method(ec, cfp, calling);
4584 struct rb_callcache *ref_cc = &VM_CC_ON_STACK(
Qundef, vm_call_general, {{ 0 }}, ref_cme);
4585 calling->cc= ref_cc;
4586 return vm_call_method(ec, cfp, calling);
4590 return vm_call_method_nome(ec, cfp, calling);
4594static inline VALUE vm_invoke_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling,
const struct rb_callinfo *ci,
bool is_lambda,
VALUE block_handler);
4596NOINLINE(
static VALUE
4597 vm_invoke_block_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
4601vm_invoke_block_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
4604 int argc = calling->argc;
4607 if (argc > 0)
MEMMOVE(&TOPN(argc), &TOPN(argc-1),
VALUE, argc);
4610 return vm_invoke_block(ec, reg_cfp, calling, ci,
false, block_handler);
/* Optimized Proc#call: the receiver is the proc itself; wrap it as a block
 * handler and invoke it as a block. */
4614vm_call_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling)
4616 RB_DEBUG_COUNTER_INC(ccf_opt_call);
4619 VALUE procval = calling->recv;
4620 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));
/* Optimized call of the frame's block parameter: when Proc#call is not
 * redefined, invoke the environment's block handler directly; otherwise
 * materialize the proc, redo the method search against it, and dispatch
 * through the general path. */
4624vm_call_opt_block_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling)
4626 RB_DEBUG_COUNTER_INC(ccf_opt_block_call);
4628 VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));
4631 if (BASIC_OP_UNREDEFINED_P(BOP_CALL, PROC_REDEFINED_OP_FLAG)) {
4632 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);
4635 calling->recv = rb_vm_bh_to_procval(ec, block_handler);
4636 calling->cc = rb_vm_search_method_slowpath(ci,
CLASS_OF(calling->recv));
4637 return vm_call_general(ec, reg_cfp, calling);
/* Optimized Struct member reader: fetch the member at the index cached in
 * the optimized method definition, asserting the method type first. */
4642vm_call_opt_struct_aref0(rb_execution_context_t *ec,
struct rb_calling_info *calling)
4644 VALUE recv = calling->recv;
4647 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
4648 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_AREF);
4650 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
4651 return internal_RSTRUCT_GET(recv,
off);
4655vm_call_opt_struct_aref(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling)
4657 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aref);
4659 VALUE ret = vm_call_opt_struct_aref0(ec, calling);
4665vm_call_opt_struct_aset0(rb_execution_context_t *ec,
struct rb_calling_info *calling,
VALUE val)
4667 VALUE recv = calling->recv;
4670 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
4671 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_ASET);
4673 rb_check_frozen(recv);
4675 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
4676 internal_RSTRUCT_SET(recv,
off, val);
4682vm_call_opt_struct_aset(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling)
4684 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aset);
4686 VALUE ret = vm_call_opt_struct_aset0(ec, calling, *(reg_cfp->sp - 1));
4691NOINLINE(
static VALUE vm_call_optimized(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling,
4694#define VM_CALL_METHOD_ATTR(var, func, nohook) \
4695 if (UNLIKELY(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) { \
4696 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
4697 vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \
4699 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
4700 vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \
/* Dispatches a VM_METHOD_TYPE_OPTIMIZED method according to its optimized
 * sub-type (send / call / block_call / struct aref / struct aset), installing
 * the matching fastpath handler in the call cache as it goes.
 * NOTE(review): lossy extraction — braces and some lines are missing. */
4708vm_call_optimized(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling,
4711 switch (vm_cc_cme(cc)->def->body.optimized.type) {
4712 case OPTIMIZED_METHOD_TYPE_SEND:
4713 CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
4714 return vm_call_opt_send(ec, cfp, calling);
4715 case OPTIMIZED_METHOD_TYPE_CALL:
4716 CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
4717 return vm_call_opt_call(ec, cfp, calling);
4718 case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
4719 CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
4720 return vm_call_opt_block_call(ec, cfp, calling);
/* Struct reader/writer paths only take the fastpath for simple argument
 * shapes (VM_CALL_ARGS_SIMPLE), and route through VM_CALL_METHOD_ATTR so
 * c-call/c-return tracing still fires. */
4721 case OPTIMIZED_METHOD_TYPE_STRUCT_AREF: {
4722 CALLER_SETUP_ARG(cfp, calling, ci, 0);
4726 VM_CALL_METHOD_ATTR(v,
4727 vm_call_opt_struct_aref(ec, cfp, calling),
4728 set_vm_cc_ivar(cc); \
4729 CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
4732 case OPTIMIZED_METHOD_TYPE_STRUCT_ASET: {
4733 CALLER_SETUP_ARG(cfp, calling, ci, 1);
4737 VM_CALL_METHOD_ATTR(v,
4738 vm_call_opt_struct_aset(ec, cfp, calling),
4739 set_vm_cc_ivar(cc); \
4740 CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
4744 rb_bug(
"vm_call_method: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
/* Core per-method-type dispatch: inspects the callable method entry's
 * definition type and routes to the matching call implementation, installing
 * a fastpath in the call cache where possible.
 * NOTE(review): lossy extraction — `cc`/`ci` declarations and several lines
 * (e.g. the unmarkable on-stack callcache construction around 4786-4802)
 * are missing; comments describe only visible lines. */
4749vm_call_method_each_type(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling)
4753 const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
4756 VM_ASSERT(! METHOD_ENTRY_INVALIDATED(cme));
4758 switch (cme->def->type) {
/* Ruby-defined (iseq) methods: forwardable params get a dedicated setup. */
4759 case VM_METHOD_TYPE_ISEQ:
4760 if (ISEQ_BODY(def_iseq_ptr(cme->def))->param.flags.forwardable) {
4761 CC_SET_FASTPATH(cc, vm_call_iseq_fwd_setup, TRUE);
4762 return vm_call_iseq_fwd_setup(ec, cfp, calling);
4765 CC_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
4766 return vm_call_iseq_setup(ec, cfp, calling);
4769 case VM_METHOD_TYPE_NOTIMPLEMENTED:
4770 case VM_METHOD_TYPE_CFUNC:
4771 CC_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
4772 return vm_call_cfunc(ec, cfp, calling);
/* attr_writer: fastpath only when none of the masked complex-argument flags
 * are present on the call site. */
4774 case VM_METHOD_TYPE_ATTRSET:
4775 CALLER_SETUP_ARG(cfp, calling, ci, 1);
4779 const unsigned int aset_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_KWARG | VM_CALL_FORWARDING);
4781 if (vm_cc_markable(cc)) {
4782 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
4783 VM_CALL_METHOD_ATTR(v,
4784 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
4785 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
/* Fallback for an unmarkable cc: an on-stack callcache is built here; most
 * of its initializer was lost in extraction. */
4791 VM_CALLCACHE_UNMARKABLE |
4792 VM_CALLCACHE_ON_STACK,
4798 .value = INVALID_SHAPE_ID << SHAPE_FLAG_SHIFT,
4803 VM_CALL_METHOD_ATTR(v,
4804 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
4805 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
/* attr_reader: analogous to ATTRSET with a smaller flag mask. */
4809 case VM_METHOD_TYPE_IVAR:
4810 CALLER_SETUP_ARG(cfp, calling, ci, 0);
4812 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
4813 const unsigned int ivar_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_FORWARDING);
4814 VM_CALL_METHOD_ATTR(v,
4815 vm_call_ivar(ec, cfp, calling),
4816 CC_SET_FASTPATH(cc, vm_call_ivar, !(vm_ci_flag(ci) & ivar_mask)));
4819 case VM_METHOD_TYPE_MISSING:
4820 vm_cc_method_missing_reason_set(cc, 0);
4821 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
4822 return vm_call_method_missing(ec, cfp, calling);
4824 case VM_METHOD_TYPE_BMETHOD:
4825 CC_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
4826 return vm_call_bmethod(ec, cfp, calling);
4828 case VM_METHOD_TYPE_ALIAS:
4829 CC_SET_FASTPATH(cc, vm_call_alias, TRUE);
4830 return vm_call_alias(ec, cfp, calling);
4832 case VM_METHOD_TYPE_OPTIMIZED:
4833 return vm_call_optimized(ec, cfp, calling, ci, cc);
4835 case VM_METHOD_TYPE_UNDEF:
4838 case VM_METHOD_TYPE_ZSUPER:
4839 return vm_call_zsuper(ec, cfp, calling, RCLASS_ORIGIN(vm_cc_cme(cc)->defined_class));
4841 case VM_METHOD_TYPE_REFINED:
4844 return vm_call_refined(ec, cfp, calling);
4847 rb_bug(
"vm_call_method: unsupported method type (%d)", vm_cc_cme(cc)->def->type);
/* NOTE(review): lossy extraction — braces and some lines are missing. */
4850NORETURN(
static void vm_raise_method_missing(rb_execution_context_t *ec,
int argc,
const VALUE *argv,
VALUE obj,
int call_status));
/* "No method entry" path: computes the method_missing reason from the call
 * info; if the missing method IS method_missing itself, raises NoMethodError
 * directly (heap_argv branch is partly lost in extraction), otherwise
 * delegates to the method_missing call body. */
4853vm_call_method_nome(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling)
4857 const int stat = ci_missing_reason(ci);
4859 if (vm_ci_mid(ci) == idMethodMissing) {
4860 if (UNLIKELY(calling->heap_argv)) {
4864 rb_control_frame_t *reg_cfp = cfp;
4865 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
4866 vm_raise_method_missing(ec, calling->argc, argv, calling->recv, stat);
4870 return vm_call_method_missing_body(ec, cfp, calling, ci, stat);
/* For protected-call visibility checks, prefer the refined class over the
 * raw defined_class when a refinement is present. */
4880vm_defined_class_for_protected_call(
const rb_callable_method_entry_t *me)
4882 VALUE defined_class = me->defined_class;
4883 VALUE refined_class = RCLASS_REFINED_CLASS(defined_class);
4884 return NIL_P(refined_class) ? defined_class : refined_class;
/* Top-level method dispatch with visibility enforcement: public methods go
 * straight to per-type dispatch; private methods require an FCALL-flagged
 * (receiverless) site or fall into method_missing; protected methods check
 * the caller's self against the defined class (the obj_is_kind_of check and
 * the on-stack cc setup around 4913-4923 are partly lost in extraction).
 * A missing method entry routes to vm_call_method_nome. */
4888vm_call_method(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling)
4893 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)))
4895 if (vm_cc_cme(cc) != NULL) {
4896 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
4897 case METHOD_VISI_PUBLIC:
4898 return vm_call_method_each_type(ec, cfp, calling);
4900 case METHOD_VISI_PRIVATE:
4901 if (!(vm_ci_flag(ci) & VM_CALL_FCALL)) {
4902 enum method_missing_reason stat = MISSING_PRIVATE;
/* VCALL (bare identifier) refines the missing reason for error text. */
4903 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
4905 vm_cc_method_missing_reason_set(cc, stat);
4906 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
4907 return vm_call_method_missing(ec, cfp, calling);
4909 return vm_call_method_each_type(ec, cfp, calling);
4911 case METHOD_VISI_PROTECTED:
4912 if (!(vm_ci_flag(ci) & (VM_CALL_OPT_SEND | VM_CALL_FCALL))) {
4913 VALUE defined_class = vm_defined_class_for_protected_call(vm_cc_cme(cc));
4915 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
4916 return vm_call_method_missing(ec, cfp, calling);
4920 VM_ASSERT(vm_cc_cme(cc) != NULL);
/* Protected-call success path uses a temporary on-stack call cache so the
 * global cache keeps its missing-path fastpath. */
4923 calling->cc = &cc_on_stack;
4924 return vm_call_method_each_type(ec, cfp, calling);
4927 return vm_call_method_each_type(ec, cfp, calling);
4930 rb_bug(
"unreachable");
4934 return vm_call_method_nome(ec, cfp, calling);
/* General (slow-path) call entry: just counts and dispatches. */
4939vm_call_general(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling)
4941 RB_DEBUG_COUNTER_INC(ccf_general);
4942 return vm_call_method(ec, reg_cfp, calling);
/* Fragment (function header lost in extraction): resets a call cache's
 * handler back to vm_call_general — presumably rb_vm_cc_general; verify
 * against upstream. */
4948 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
4949 VM_ASSERT(cc != vm_cc_empty());
4951 *(vm_call_handler *)&cc->call_ = vm_call_general;
/* NOTE(review): lossy extraction — braces, return types and several
 * statements are missing throughout this span. */

/* Super-call dispatch entry: counted, asserts the cache handler is itself,
 * then performs normal method dispatch. */
4955vm_call_super_method(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling)
4957 RB_DEBUG_COUNTER_INC(ccf_super_method);
4962 if (ec == NULL) rb_bug(
"unreachable");
4965 VM_ASSERT(vm_cc_call(calling->cc) == vm_call_super_method);
4966 return vm_call_method(ec, reg_cfp, calling);
/* Finds the superclass to search for `super`: for singleton-ish cases takes
 * RBASIC klass, then strips origin (include/prepend) wrappers. Parts of the
 * conditional were lost in extraction. */
4972vm_search_normal_superclass(
VALUE klass)
4977 klass =
RBASIC(klass)->klass;
4979 klass = RCLASS_ORIGIN(klass);
4983NORETURN(
static void vm_super_outside(
void));
/* Raises for `super` used outside of a method (body lost in extraction). */
4986vm_super_outside(
void)
4992empty_cc_for_super(
void)
4994 return &vm_empty_cc_for_super;
/* Resolves the target of a `super` call for the current frame: validates the
 * calling context (wrong-self and define_method zsuper errors), rewrites the
 * call info to the original method id, then searches the superclass and
 * fills/refreshes the call cache. Several statements are lost in extraction. */
4998vm_search_super_method(
const rb_control_frame_t *reg_cfp,
struct rb_call_data *cd,
VALUE recv)
5000 VALUE current_defined_class;
5001 const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(reg_cfp);
5007 current_defined_class = vm_defined_class_for_protected_call(me);
5010 reg_cfp->iseq != method_entry_iseqptr(me) &&
5013 RCLASS_INCLUDER(current_defined_class) : current_defined_class;
5017 "self has wrong type to call super in this context: "
5018 "%"PRIsVALUE
" (expected %"PRIsVALUE
")",
/* Implicit-argument super is unsupported from define_method bodies. */
5023 if (me->def->type == VM_METHOD_TYPE_BMETHOD && (vm_ci_flag(cd->ci) & VM_CALL_ZSUPER)) {
5025 "implicit argument passing of super from method defined"
5026 " by define_method() is not supported."
5027 " Specify all arguments explicitly.");
5030 ID mid = me->def->original_id;
/* Unmarkable (on-stack) call info is patched in place; otherwise a new
 * runtime ci is allocated with the original method id. */
5032 if (!vm_ci_markable(cd->ci)) {
5033 VM_FORCE_WRITE((
const VALUE *)&cd->ci->mid, (
VALUE)mid);
5037 cd->ci = vm_ci_new_runtime(mid,
5040 vm_ci_kwarg(cd->ci));
5047 VALUE klass = vm_search_normal_superclass(me->defined_class);
/* No superclass: cache a method_missing handler. */
5051 cc = vm_cc_new(klass, NULL, vm_call_method_missing, cc_type_super);
5055 cc = vm_search_method_fastpath((
VALUE)reg_cfp->iseq, cd, klass);
5056 const rb_callable_method_entry_t *cached_cme = vm_cc_cme(cc);
5059 if (cached_cme == NULL) {
5061 cd->cc = empty_cc_for_super();
/* Cached entry is for a different method id (e.g. alias): re-resolve. */
5063 else if (cached_cme->called_id != mid) {
5064 const rb_callable_method_entry_t *cme = rb_callable_method_entry(klass, mid);
5066 cc = vm_cc_new(klass, cme, vm_call_super_method, cc_type_super);
5070 cd->cc = cc = empty_cc_for_super();
/* Certain method types must keep going through the super handler. */
5074 switch (cached_cme->def->type) {
5076 case VM_METHOD_TYPE_REFINED:
5078 case VM_METHOD_TYPE_ATTRSET:
5079 case VM_METHOD_TYPE_IVAR:
5080 vm_cc_call_set(cc, vm_call_super_method);
5088 VM_ASSERT((vm_cc_cme(cc),
true));
/* NOTE(review): lossy extraction — braces and some statements are missing. */

/* Returns whether `procval` is a lambda (nil handling lost in extraction). */
5096block_proc_is_lambda(
const VALUE procval)
5101 GetProcPtr(procval, proc);
5102 return proc->is_lambda;
/* Invokes an ifunc (C-function) block: pushes an IFUNC frame, calls the
 * stored C function with the arguments and materialized block arg, then pops
 * the frame. The arg-packing branches around 5119-5129 are partly lost. */
5110vm_yield_with_cfunc(rb_execution_context_t *ec,
5112 VALUE self,
int argc,
const VALUE *argv,
int kw_splat,
VALUE block_handler,
5113 const rb_callable_method_entry_t *me)
5115 int is_lambda = FALSE;
5116 VALUE val, arg, blockarg;
5118 const struct vm_ifunc *ifunc = captured->code.ifunc;
5123 else if (argc == 0) {
5130 blockarg = rb_vm_bh_to_procval(ec, block_handler);
/* BMETHOD flag is set when a method entry accompanies the block. */
5132 frame_flag = VM_FRAME_MAGIC_IFUNC | VM_FRAME_FLAG_CFRAME | (me ? VM_FRAME_FLAG_BMETHOD : 0);
5134 frame_flag |= VM_FRAME_FLAG_CFRAME_KW;
5137 vm_push_frame(ec, (
const rb_iseq_t *)captured->code.ifunc,
5140 VM_GUARDED_PREV_EP(captured->ep),
5142 0, ec->cfp->sp, 0, 0);
5143 val = (*ifunc->func)(arg, (
VALUE)ifunc->data, argc, argv, blockarg);
5144 rb_vm_pop_frame(ec);
/* Public wrapper: yields with no keyword splat, no block, no method entry. */
5150rb_vm_yield_with_cfunc(rb_execution_context_t *ec,
const struct rb_captured_block *captured,
int argc,
const VALUE *argv)
5152 return vm_yield_with_cfunc(ec, captured, captured->self, argc, argv, 0, VM_BLOCK_HANDLER_NONE, NULL);
/* Symbol blocks delegate to Symbol#to_proc-style dispatch. */
5156vm_yield_with_symbol(rb_execution_context_t *ec,
VALUE symbol,
int argc,
const VALUE *argv,
int kw_splat,
VALUE block_handler)
5158 return rb_sym_proc_call(
SYM2ID(symbol), argc, argv, kw_splat, rb_vm_bh_to_procval(ec, block_handler));
/* NOTE(review): lossy extraction — braces and some statements are missing. */

/* Splats a single array argument across the iseq's lead parameters, bounded
 * by both the array length and lead_num. */
5162vm_callee_setup_block_arg_arg0_splat(rb_control_frame_t *cfp,
const rb_iseq_t *iseq,
VALUE *argv,
VALUE ary)
5167 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
5169 for (i=0; i<
len && i<ISEQ_BODY(iseq)->param.lead_num; i++) {
/* Checks whether the sole block argument is array-convertible; asserts the
 * conversion did not clobber argv[0]. */
5177vm_callee_setup_block_arg_arg0_check(
VALUE *argv)
5179 VALUE ary, arg0 = argv[0];
5180 ary = rb_check_array_type(arg0);
5184 VM_ASSERT(argv[0] == arg0);
/* Block-argument setup: for simple iseqs applies block semantics — a single
 * array arg may be auto-splatted (unless ambiguous_param0), missing lead args
 * are padded with nil and extras truncated; lambdas/methods instead raise an
 * arity error. Non-simple iseqs go through setup_parameters_complex. */
5190vm_callee_setup_block_arg(rb_execution_context_t *ec,
struct rb_calling_info *calling,
const struct rb_callinfo *ci,
const rb_iseq_t *iseq,
VALUE *argv,
const enum arg_setup_type arg_setup_type)
5192 if (rb_simple_iseq_p(iseq)) {
5193 rb_control_frame_t *cfp = ec->cfp;
5196 CALLER_SETUP_ARG(cfp, calling, ci, ISEQ_BODY(iseq)->param.lead_num);
5198 if (arg_setup_type == arg_setup_block &&
5199 calling->argc == 1 &&
5200 ISEQ_BODY(iseq)->param.flags.has_lead &&
5201 !ISEQ_BODY(iseq)->param.flags.ambiguous_param0 &&
5202 !
NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {
5203 calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);
5206 if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
5207 if (arg_setup_type == arg_setup_block) {
/* Blocks are lenient: pad with nil or drop extras to match lead_num. */
5208 if (calling->argc < ISEQ_BODY(iseq)->param.lead_num) {
5210 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
5211 for (i=calling->argc; i<ISEQ_BODY(iseq)->param.lead_num; i++) argv[i] =
Qnil;
5212 calling->argc = ISEQ_BODY(iseq)->param.lead_num;
5214 else if (calling->argc > ISEQ_BODY(iseq)->param.lead_num) {
5215 calling->argc = ISEQ_BODY(iseq)->param.lead_num;
/* Lambda/method setup is strict: raise ArgumentError on mismatch. */
5219 argument_arity_error(ec, iseq, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);
5226 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_type);
/* Builds an on-stack calling_info + dummy call info and runs block-arg
 * setup; used by external yield entry points. */
5231vm_yield_setup_args(rb_execution_context_t *ec,
const rb_iseq_t *iseq,
const int argc,
VALUE *argv,
int flags,
VALUE block_handler,
enum arg_setup_type arg_setup_type)
5235 calling = &calling_entry;
5236 calling->argc = argc;
5237 calling->block_handler = block_handler;
5238 calling->kw_splat = (flags & VM_CALL_KW_SPLAT) ? 1 : 0;
5240 calling->heap_argv = 0;
5241 struct rb_callinfo dummy_ci = VM_CI_ON_STACK(0, flags, 0, 0);
5243 return vm_callee_setup_block_arg(ec, calling, &dummy_ci, iseq, argv, arg_setup_type);
/* NOTE(review): lossy extraction — braces and some statements are missing. */

/* Invokes an iseq-backed block: sets up block arguments in place on the VM
 * stack, then pushes a BLOCK frame (LAMBDA-flagged for lambdas) starting at
 * the iseq's encoded instructions plus the optional-arg offset. */
5249vm_invoke_iseq_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
5251 bool is_lambda,
VALUE block_handler)
5254 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
5255 const int arg_size = ISEQ_BODY(iseq)->param.size;
5256 VALUE *
const rsp = GET_SP() - calling->argc;
5257 VALUE *
const argv = rsp;
5258 int opt_pc = vm_callee_setup_block_arg(ec, calling, ci, iseq, argv, is_lambda ? arg_setup_method : arg_setup_block);
5262 vm_push_frame(ec, iseq,
5263 VM_FRAME_MAGIC_BLOCK | (is_lambda ? VM_FRAME_FLAG_LAMBDA : 0),
5265 VM_GUARDED_PREV_EP(captured->ep), 0,
5266 ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
5268 ISEQ_BODY(iseq)->local_table_size - arg_size, ISEQ_BODY(iseq)->stack_max);
/* Invokes a symbol block (&:sym): extracts the receiver from the arguments
 * (heap_argv path for complex call sites, stack path otherwise), raising
 * ArgumentError when no receiver is given, then dispatches via
 * vm_call_symbol. */
5274vm_invoke_symbol_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
5276 MAYBE_UNUSED(
bool is_lambda),
VALUE block_handler)
5278 VALUE symbol = VM_BH_TO_SYMBOL(block_handler);
5279 int flags = vm_ci_flag(ci);
5281 if (UNLIKELY(!(flags & VM_CALL_ARGS_SIMPLE) &&
5282 ((calling->argc == 0) ||
5283 (calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
5284 (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
5285 ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc))))) {
5286 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
5288 if (UNLIKELY(calling->heap_argv)) {
5289#if VM_ARGC_STACK_MAX < 0
5291 rb_raise(rb_eArgError,
"no receiver given");
5294 calling->recv = rb_ary_shift(calling->heap_argv);
/* Receiver takes the heap-argv slot on the stack; remaining args stay
 * splatted. */
5297 reg_cfp->sp[-1] = reg_cfp->sp[-2];
5298 reg_cfp->sp[-2] = calling->recv;
5299 flags |= VM_CALL_ARGS_SPLAT;
5302 if (calling->argc < 1) {
5303 rb_raise(rb_eArgError,
"no receiver given");
5305 calling->recv = TOPN(--calling->argc);
5307 if (calling->kw_splat) {
5308 flags |= VM_CALL_KW_SPLAT;
5312 if (calling->argc < 1) {
5313 rb_raise(rb_eArgError,
"no receiver given");
5315 calling->recv = TOPN(--calling->argc);
5318 return vm_call_symbol(ec, reg_cfp, calling, ci, symbol, flags);
/* Invokes an ifunc block: normalizes caller args (keeping kw-splat for heap
 * argv) and calls through vm_yield_with_cfunc. */
5322vm_invoke_ifunc_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
5324 MAYBE_UNUSED(
bool is_lambda),
VALUE block_handler)
5329 CALLER_SETUP_ARG(ec->cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
5330 argc = calling->argc;
5331 val = vm_yield_with_cfunc(ec, captured, captured->self, CALLING_ARGC(calling), calling->heap_argv ?
RARRAY_CONST_PTR(calling->heap_argv) : STACK_ADDR_FROM_TOP(argc), calling->kw_splat, calling->block_handler, NULL);
/* NOTE(review): lossy extraction — braces and some statements are missing. */

/* Converts a Proc object into the block-handler form matching its underlying
 * block representation (iseq / ifunc / symbol / nested proc). */
5337vm_proc_to_block_handler(
VALUE procval)
5339 const struct rb_block *block = vm_proc_block(procval);
5341 switch (vm_block_type(block)) {
5342 case block_type_iseq:
5343 return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
5344 case block_type_ifunc:
5345 return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
5346 case block_type_symbol:
5347 return VM_BH_FROM_SYMBOL(block->as.symbol);
5348 case block_type_proc:
5349 return VM_BH_FROM_PROC(block->as.proc);
5351 VM_UNREACHABLE(vm_yield_with_proc);
/* Unwraps chained proc handlers (a proc wrapping a proc ...) to the concrete
 * handler, tracking lambda-ness from the outermost proc, then invokes. */
5356vm_invoke_proc_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
5358 bool is_lambda,
VALUE block_handler)
5360 while (vm_block_handler_type(block_handler) == block_handler_type_proc) {
5361 VALUE proc = VM_BH_TO_PROC(block_handler);
5362 is_lambda = block_proc_is_lambda(proc);
5363 block_handler = vm_proc_to_block_handler(proc);
5366 return vm_invoke_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);
/* Central block invocation: selects the invoker matching the handler type
 * via a function pointer, then calls it. */
5370vm_invoke_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
5372 bool is_lambda,
VALUE block_handler)
5374 VALUE (*func)(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
5376 bool is_lambda,
VALUE block_handler);
5378 switch (vm_block_handler_type(block_handler)) {
5379 case block_handler_type_iseq: func = vm_invoke_iseq_block;
break;
5380 case block_handler_type_ifunc: func = vm_invoke_ifunc_block;
break;
5381 case block_handler_type_proc: func = vm_invoke_proc_block;
break;
5382 case block_handler_type_symbol: func = vm_invoke_symbol_block;
break;
5383 default: rb_bug(
"vm_invoke_block: unreachable");
5386 return func(ec, reg_cfp, calling, ci, is_lambda, block_handler);
/* NOTE(review): lossy extraction — braces and some statements are missing. */

/* Builds a Proc from `blockiseq` captured against the nearest Ruby-level
 * frame of the current execution context. */
5390vm_make_proc_with_iseq(
const rb_iseq_t *blockiseq)
5392 const rb_execution_context_t *ec = GET_EC();
5393 const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);
5397 rb_bug(
"vm_make_proc_with_iseq: unreachable");
5400 captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);
5401 captured->code.iseq = blockiseq;
5403 return rb_vm_make_proc(ec, captured,
rb_cProc);
/* `once` instruction support: runs the iseq as a proc (call site lost in
 * extraction). */
5407vm_once_exec(
VALUE iseq)
5409 VALUE proc = vm_make_proc_with_iseq((rb_iseq_t *)iseq);
/* Clears the running-thread marker of a `once` region (ensure handler). */
5414vm_once_clear(
VALUE data)
5417 is->once.running_thread = NULL;
/* Fragment (function header lost in extraction): appears to probe
 * respond_to_missing? with args {obj, false} — presumably
 * check_respond_to_missing; verify against upstream. */
5429 args[0] = obj; args[1] =
Qfalse;
5431 if (!UNDEF_P(r) &&
RTEST(r)) {
/* Implements the `defined?` keyword: switches on the operand type (gvar,
 * cvar, const, method, yield, zsuper, backref, ...) and returns truthiness /
 * the appropriate result for each. NOTE(review): lossy extraction — several
 * cases and the surrounding braces are missing; comments cover only visible
 * lines. */
5440vm_defined(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t op_type,
VALUE obj,
VALUE v)
5443 enum defined_type
type = (
enum defined_type)op_type;
5450 return rb_gvar_defined(
SYM2ID(obj));
5452 case DEFINED_CVAR: {
5453 const rb_cref_t *cref = vm_get_cref(GET_EP());
5454 klass = vm_get_cvar_base(cref, GET_CFP(), 0);
5459 case DEFINED_CONST_FROM: {
5460 bool allow_nil =
type == DEFINED_CONST;
5462 return vm_get_ev_const(ec, klass,
SYM2ID(obj), allow_nil,
true);
5467 return rb_ec_obj_respond_to(ec, v,
SYM2ID(obj), TRUE);
/* Method definedness honors refinements and visibility. */
5469 case DEFINED_METHOD:{
5471 const rb_method_entry_t *me = rb_method_entry_with_refinements(klass,
SYM2ID(obj), NULL);
5474 switch (METHOD_ENTRY_VISI(me)) {
5475 case METHOD_VISI_PRIVATE:
5477 case METHOD_VISI_PROTECTED:
5481 case METHOD_VISI_PUBLIC:
5485 rb_bug(
"vm_defined: unreachable: %u", (
unsigned int)METHOD_ENTRY_VISI(me));
5489 return check_respond_to_missing(obj, v);
/* defined?(yield): true iff the frame has a block handler. */
5494 if (GET_BLOCK_HANDLER() != VM_BLOCK_HANDLER_NONE) {
5498 case DEFINED_ZSUPER:
5500 const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(GET_CFP());
5503 VALUE klass = vm_search_normal_superclass(me->defined_class);
5504 if (!klass)
return false;
5506 ID id = me->def->original_id;
5513 return RTEST(vm_backref_defined(ec, GET_LEP(),
FIX2INT(obj)));
5515 rb_bug(
"unimplemented defined? type (VM)");
/* Public wrapper used by instruction handlers. */
5523rb_vm_defined(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t op_type,
VALUE obj,
VALUE v)
5525 return vm_defined(ec, reg_cfp, op_type, obj, v);
/* NOTE(review): lossy extraction — braces and some statements are missing. */

/* Walks `lv` environment-pointer links outward from `reg_ep`. */
5529vm_get_ep(
const VALUE *
const reg_ep, rb_num_t lv)
5532 const VALUE *ep = reg_ep;
5533 for (i = 0; i < lv; i++) {
5534 ep = GET_PREV_EP(ep);
/* putspecialobject: returns the frozen core, cbase, or const base. */
5540vm_get_special_object(
const VALUE *
const reg_ep,
5541 enum vm_special_object_type
type)
5544 case VM_SPECIAL_OBJECT_VMCORE:
5545 return rb_mRubyVMFrozenCore;
5546 case VM_SPECIAL_OBJECT_CBASE:
5547 return vm_get_cbase(reg_ep);
5548 case VM_SPECIAL_OBJECT_CONST_BASE:
5549 return vm_get_const_base(reg_ep);
5551 rb_bug(
"putspecialobject insn: unknown value_type %d",
type);
/* concatarray: duplicates/converts operands as needed, pushing a
 * non-convertible second operand or concatenating a converted one.
 * (Function header lost in extraction — presumably vm_concat_array.) */
5558 const VALUE ary2 = ary2st;
5559 VALUE tmp1 = rb_check_to_array(ary1);
5560 VALUE tmp2 = rb_check_to_array(ary2);
5566 tmp1 = rb_ary_dup(ary1);
5570 return rb_ary_push(tmp1, ary2);
5572 return rb_ary_concat(tmp1, tmp2);
/* In-place variant: mutates ary1 directly. (Header lost in extraction —
 * presumably vm_concat_to_array.) */
5580 const VALUE ary2 = ary2st;
5581 VALUE tmp2 = rb_check_to_array(ary2);
5584 return rb_ary_push(ary1, ary2);
5586 return rb_ary_concat(ary1, tmp2);
5595 return vm_concat_array(ary1, ary2st);
5599rb_vm_concat_to_array(
VALUE ary1,
VALUE ary2st)
5601 return vm_concat_to_array(ary1, ary2st);
/* splatarray: converts to array, duplicating when the flag requests a fresh
 * copy. (Header and some branches lost in extraction.) */
5607 VALUE tmp = rb_check_to_array(ary);
5611 else if (
RTEST(flag)) {
5612 return rb_ary_dup(tmp);
5624 return vm_splat_array(flag, ary);
/* NOTE(review): lossy extraction — braces and some statements are missing. */

/* checkmatch instruction: when VM_CHECKMATCH_ARRAY is set, tests each element
 * of `pattern` against `target`; otherwise a single check_match call. */
5628vm_check_match(rb_execution_context_t *ec,
VALUE target,
VALUE pattern, rb_num_t flag)
5630 enum vm_check_match_type
type = ((int)flag) & VM_CHECKMATCH_TYPE_MASK;
5632 if (flag & VM_CHECKMATCH_ARRAY) {
5636 for (i = 0; i < n; i++) {
5638 VALUE c = check_match(ec, v, target,
type);
5647 return check_match(ec, pattern, target,
type);
5652rb_vm_check_match(rb_execution_context_t *ec,
VALUE target,
VALUE pattern, rb_num_t flag)
5654 return vm_check_match(ec, target, pattern, flag);
/* checkkeyword: reads the kw-bits slot `bits` below ep and tests bit `idx`
 * (fixnum fast path shown; hash path lost in extraction). */
5658vm_check_keyword(lindex_t bits, lindex_t idx,
const VALUE *ep)
5660 const VALUE kw_bits = *(ep - bits);
5663 unsigned int b = (
unsigned int)
FIX2ULONG(kw_bits);
5664 if ((idx < KW_SPECIFIED_BITS_MAX) && (b & (0x01 << idx)))
/* DTrace hook dispatcher fragment (header lost in extraction — presumably
 * vm_dtrace): fires the matching method/cmethod entry/return probes when any
 * are enabled. */
5677 if (RUBY_DTRACE_METHOD_ENTRY_ENABLED() ||
5678 RUBY_DTRACE_METHOD_RETURN_ENABLED() ||
5679 RUBY_DTRACE_CMETHOD_ENTRY_ENABLED() ||
5680 RUBY_DTRACE_CMETHOD_RETURN_ENABLED()) {
5684 RUBY_DTRACE_METHOD_ENTRY_HOOK(ec, 0, 0);
5687 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, 0, 0);
5690 RUBY_DTRACE_METHOD_RETURN_HOOK(ec, 0, 0);
5693 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, 0, 0);
/* NOTE(review): lossy extraction — braces, return types and several
 * statements are missing throughout this class/module definition span. */

/* Looks up constant `id` under `cbase`; scoped definitions use the public
 * constant lookup. */
5700vm_const_get_under(
ID id, rb_num_t flags,
VALUE cbase)
5705 else if (VM_DEFINECLASS_SCOPED_P(flags)) {
5706 return rb_public_const_get_at(cbase,
id);
/* Validates that an existing constant can be reopened as a class with the
 * given superclass; raises on superclass mismatch. */
5714vm_check_if_class(
ID id, rb_num_t flags,
VALUE super,
VALUE klass)
5719 else if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags)) {
5724 "superclass mismatch for class %"PRIsVALUE
"",
/* Validates that an existing constant can be reopened as a module. */
5737vm_check_if_module(
ID id,
VALUE mod)
/* Creates a new class named `id` under `cbase`; superclass defaults to
 * Object when none was given. */
5756vm_declare_class(
ID id, rb_num_t flags,
VALUE cbase,
VALUE super)
5759 VALUE s = VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) ? super : rb_cObject;
5767vm_declare_module(
ID id,
VALUE cbase)
5773NORETURN(
static void unmatched_redefinition(
const char *
type,
VALUE cbase,
ID id,
VALUE old));
/* Raises TypeError-style diagnostics when a constant is redefined as a
 * different kind, appending the previous definition's source location. */
5777 VALUE name = rb_id2str(
id);
5778 VALUE message = rb_sprintf(
"%"PRIsVALUE
" is not a %s",
5780 VALUE location = rb_const_source_location_at(cbase,
id);
5781 if (!
NIL_P(location)) {
5782 rb_str_catf(message,
"\n%"PRIsVALUE
":%"PRIsVALUE
":"
5783 " previous definition of %"PRIsVALUE
" was here",
5784 rb_ary_entry(location, 0), rb_ary_entry(location, 1), name);
/* defineclass(class): validates the superclass type, reuses a matching
 * existing class, or declares a new one. */
5790vm_define_class(
ID id, rb_num_t flags,
VALUE cbase,
VALUE super)
5794 if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) && !
RB_TYPE_P(super,
T_CLASS)) {
5796 "superclass must be an instance of Class (given an instance of %"PRIsVALUE
")",
5800 vm_check_if_namespace(cbase);
5804 if ((klass = vm_const_get_under(
id, flags, cbase)) != 0) {
5805 if (!vm_check_if_class(
id, flags, super, klass))
5806 unmatched_redefinition(
"class", cbase,
id, klass);
5810 return vm_declare_class(
id, flags, cbase, super);
/* defineclass(module): analogous reuse-or-declare for modules. */
5815vm_define_module(
ID id, rb_num_t flags,
VALUE cbase)
5819 vm_check_if_namespace(cbase);
5820 if ((mod = vm_const_get_under(
id, flags, cbase)) != 0) {
5821 if (!vm_check_if_module(
id, mod))
5822 unmatched_redefinition(
"module", cbase,
id, mod);
5826 return vm_declare_module(
id, cbase);
/* defineclass entry: dispatches by defineclass type (class / singleton /
 * module). */
5831vm_find_or_create_class_by_id(
ID id,
5836 rb_vm_defineclass_type_t
type = VM_DEFINECLASS_TYPE(flags);
5839 case VM_DEFINECLASS_TYPE_CLASS:
5841 return vm_define_class(
id, flags, cbase, super);
5843 case VM_DEFINECLASS_TYPE_SINGLETON_CLASS:
5847 case VM_DEFINECLASS_TYPE_MODULE:
5849 return vm_define_module(
id, flags, cbase);
5852 rb_bug(
"unknown defineclass type: %d", (
int)
type);
/* NOTE(review): lossy extraction — braces and some statements are missing. */

/* Current default method visibility from the cref scope; public when no cref
 * is attached to the frame's environment. */
5856static rb_method_visibility_t
5857vm_scope_visibility_get(
const rb_execution_context_t *ec)
5859 const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);
5861 if (!vm_env_cref_by_cref(cfp->ep)) {
5862 return METHOD_VISI_PUBLIC;
5865 return CREF_SCOPE_VISI(vm_ec_cref(ec))->method_visi;
/* Whether the current scope is in module_function mode. */
5870vm_scope_module_func_check(
const rb_execution_context_t *ec)
5872 const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);
5874 if (!vm_env_cref_by_cref(cfp->ep)) {
5878 return CREF_SCOPE_VISI(vm_ec_cref(ec))->module_func;
/* definemethod: adds an iseq-backed method to the cref's class (or the
 * singleton class for is_singleton; that branch is lost in extraction),
 * updates the estimated ivar count, and for module_function also defines a
 * public singleton copy. */
5883vm_define_method(
const rb_execution_context_t *ec,
VALUE obj,
ID id,
VALUE iseqval,
int is_singleton)
5886 rb_method_visibility_t visi;
5887 rb_cref_t *cref = vm_ec_cref(ec);
5891 visi = METHOD_VISI_PUBLIC;
5894 klass = CREF_CLASS_FOR_DEFINITION(cref);
5895 visi = vm_scope_visibility_get(ec);
5902 rb_add_method_iseq(klass,
id, (
const rb_iseq_t *)iseqval, cref, visi);
/* Heuristic used by object shapes to size instance-variable storage. */
5906 RCLASS_EXT(klass)->max_iv_count = rb_estimate_iv_count(klass, (
const rb_iseq_t *)iseqval);
5909 if (!is_singleton && vm_scope_module_func_check(ec)) {
5911 rb_add_method_iseq(klass,
id, (
const rb_iseq_t *)iseqval, cref, METHOD_VISI_PUBLIC);
/* NOTE(review): lossy extraction — braces, declarations and some statements
 * are missing throughout this span. */

/* invokeblock body fragment (header lost in extraction — presumably
 * vm_invokeblock_i): raises LocalJumpError when the frame has no block,
 * otherwise yields to it. */
5921 VALUE block_handler = VM_CF_BLOCK_HANDLER(GET_CFP());
5923 if (block_handler == VM_BLOCK_HANDLER_NONE) {
5924 rb_vm_localjump_error(
"no block given (yield)",
Qnil, 0);
5927 return vm_invoke_block(ec, GET_CFP(), calling, ci,
false, block_handler);
5931enum method_explorer_type {
5933 mexp_search_invokeblock,
/* Shared send/super/invokeblock driver: builds an on-stack rb_calling_info
 * from the call data, resolves the callee per `method_explorer`, and invokes
 * through the call cache handler. */
5942 VALUE block_handler,
5943 enum method_explorer_type method_explorer
5948 int argc = vm_ci_argc(ci);
5949 VALUE recv = TOPN(argc);
5951 .block_handler = block_handler,
5952 .kw_splat = IS_ARGS_KW_SPLAT(ci) > 0,
5958 switch (method_explorer) {
5959 case mexp_search_method:
5960 calling.cc = cc = vm_search_method_fastpath((
VALUE)reg_cfp->iseq, cd,
CLASS_OF(recv));
5961 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
5963 case mexp_search_super:
5964 calling.cc = cc = vm_search_super_method(reg_cfp, cd, recv);
5965 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
5967 case mexp_search_invokeblock:
5968 val = vm_invokeblock_i(ec, GET_CFP(), &calling);
/* send instruction entry: the FORWARDING flag routes through adjusted call
 * data so `...` arguments are set up correctly. */
5975rb_vm_send(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd, ISEQ blockiseq)
5985 if (vm_ci_flag(cd->ci) & VM_CALL_FORWARDING) {
5986 bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq,
false, &adjusted_cd, &adjusted_ci);
5988 val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_method);
5990 if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
5995 bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq,
false);
5996 val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
/* opt_send_without_block entry. */
6004rb_vm_opt_send_without_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd)
6007 VALUE bh = VM_BLOCK_HANDLER_NONE;
6008 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
/* invokesuper entry: same structure as rb_vm_send with super resolution. */
6014rb_vm_invokesuper(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd, ISEQ blockiseq)
6023 if (vm_ci_flag(cd->ci) & VM_CALL_FORWARDING) {
6024 bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq,
true, &adjusted_cd, &adjusted_ci);
6026 val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_super);
6028 if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
6033 bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq,
true);
6034 val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_super);
/* invokeblock entry. */
6042rb_vm_invokeblock(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd)
6045 VALUE bh = VM_BLOCK_HANDLER_NONE;
6046 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_invokeblock);
/* NOTE(review): lossy extraction — braces, type-dispatch lines and some
 * statements are missing throughout these optimized-instruction helpers. */

/* objtostring fast path: when to_s is the basic (unredefined) definition for
 * the receiver's class, calls the known C implementation directly instead of
 * dispatching. */
6063vm_objtostring(
const rb_iseq_t *iseq,
VALUE recv, CALL_DATA cd)
6074 if (check_method_basic_definition(vm_cc_cme(cc))) {
6083 if (check_cfunc(vm_cc_cme(cc), rb_mod_to_s)) {
6089 val = rb_mod_to_s(recv);
6095 if (check_cfunc(vm_cc_cme(cc), rb_nil_to_s)) {
6096 return rb_nil_to_s(recv);
6100 if (check_cfunc(vm_cc_cme(cc), rb_true_to_s)) {
6101 return rb_true_to_s(recv);
6105 if (check_cfunc(vm_cc_cme(cc), rb_false_to_s)) {
6106 return rb_false_to_s(recv);
6110 if (check_cfunc(vm_cc_cme(cc), rb_int_to_s)) {
6111 return rb_fix_to_s(recv);
/* opt_ary_freeze / opt_hash_freeze / opt_str_freeze: fast paths valid only
 * while #freeze is unredefined for the respective class. */
6119vm_opt_ary_freeze(
VALUE ary,
int bop,
ID id)
6121 if (BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
6130vm_opt_hash_freeze(
VALUE hash,
int bop,
ID id)
6132 if (BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
6141vm_opt_str_freeze(
VALUE str,
int bop,
ID id)
6143 if (BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
/* opt_duparray_send include?: skips duplicating the literal array when
 * Array#include? is unredefined; otherwise resurrects the literal and makes
 * a refinement-aware call. */
6155vm_opt_duparray_include_p(rb_execution_context_t *ec,
const VALUE ary,
VALUE target)
6157 if (BASIC_OP_UNREDEFINED_P(BOP_INCLUDE_P, ARRAY_REDEFINED_OP_FLAG)) {
6158 return rb_ary_includes(ary, target);
6161 VALUE args[1] = {target};
6164 RUBY_DTRACE_CREATE_HOOK(ARRAY,
RARRAY_LEN(ary));
6165 VALUE dupary = rb_ary_resurrect(ary);
6167 return rb_vm_call_with_refinements(ec, dupary, idIncludeP, 1, args,
RB_NO_KEYWORDS);
6172rb_vm_opt_duparray_include_p(rb_execution_context_t *ec,
const VALUE ary,
VALUE target)
6174 return vm_opt_duparray_include_p(ec, ary, target);
/* opt_newarray_send max/min: scans the stacked operands with OPTIMIZED_CMP
 * without allocating an array, valid while Array#max/min are unredefined. */
6178vm_opt_newarray_max(rb_execution_context_t *ec, rb_num_t num,
const VALUE *ptr)
6180 if (BASIC_OP_UNREDEFINED_P(BOP_MAX, ARRAY_REDEFINED_OP_FLAG)) {
6185 VALUE result = *ptr;
6186 rb_snum_t i = num - 1;
6188 const VALUE v = *++ptr;
6189 if (OPTIMIZED_CMP(v, result) > 0) {
6202rb_vm_opt_newarray_max(rb_execution_context_t *ec, rb_num_t num,
const VALUE *ptr)
6204 return vm_opt_newarray_max(ec, num, ptr);
6208vm_opt_newarray_min(rb_execution_context_t *ec, rb_num_t num,
const VALUE *ptr)
6210 if (BASIC_OP_UNREDEFINED_P(BOP_MIN, ARRAY_REDEFINED_OP_FLAG)) {
6215 VALUE result = *ptr;
6216 rb_snum_t i = num - 1;
6218 const VALUE v = *++ptr;
6219 if (OPTIMIZED_CMP(v, result) < 0) {
6232rb_vm_opt_newarray_min(rb_execution_context_t *ec, rb_num_t num,
const VALUE *ptr)
6234 return vm_opt_newarray_min(ec, num, ptr);
/* opt_newarray_send hash: hashes the operand values directly. */
6238vm_opt_newarray_hash(rb_execution_context_t *ec, rb_num_t num,
const VALUE *ptr)
6241 if (BASIC_OP_UNREDEFINED_P(BOP_HASH, ARRAY_REDEFINED_OP_FLAG)) {
6242 return rb_ary_hash_values(num, ptr);
6250rb_vm_opt_newarray_hash(rb_execution_context_t *ec, rb_num_t num,
const VALUE *ptr)
6252 return vm_opt_newarray_hash(ec, num, ptr);
/* opt_newarray_send include?: uses a fake (stack-backed) array to avoid
 * allocation on the fast path. */
6259vm_opt_newarray_include_p(rb_execution_context_t *ec, rb_num_t num,
const VALUE *ptr,
VALUE target)
6261 if (BASIC_OP_UNREDEFINED_P(BOP_INCLUDE_P, ARRAY_REDEFINED_OP_FLAG)) {
6263 VALUE ary = rb_setup_fake_ary(&fake_ary, ptr, num);
6264 return rb_ary_includes(
ary, target);
6267 VALUE args[1] = {target};
6273rb_vm_opt_newarray_include_p(rb_execution_context_t *ec, rb_num_t num,
const VALUE *ptr,
VALUE target)
6275 return vm_opt_newarray_include_p(ec, num, ptr, target);
/* opt_newarray_send pack: fast path packs via a fake array; the slow path
 * forwards fmt (and an optional buffer: keyword) through a refinement-aware
 * Array#pack call. */
6279vm_opt_newarray_pack_buffer(rb_execution_context_t *ec, rb_num_t num,
const VALUE *ptr,
VALUE fmt,
VALUE buffer)
6281 if (BASIC_OP_UNREDEFINED_P(BOP_PACK, ARRAY_REDEFINED_OP_FLAG)) {
6283 VALUE ary = rb_setup_fake_ary(&fake_ary, ptr, num);
6284 return rb_ec_pack_ary(ec,
ary, fmt, (UNDEF_P(buffer) ?
Qnil : buffer));
6294 if (!UNDEF_P(buffer)) {
6295 args[1] = rb_hash_new_with_size(1);
6296 rb_hash_aset(args[1],
ID2SYM(idBuffer), buffer);
6301 return rb_vm_call_with_refinements(ec,
rb_ary_new4(num, ptr), idPack, argc, args, kw_splat);
6306rb_vm_opt_newarray_pack_buffer(rb_execution_context_t *ec, rb_num_t num,
const VALUE *ptr,
VALUE fmt,
VALUE buffer)
6308 return vm_opt_newarray_pack_buffer(ec, num, ptr, fmt, buffer);
6312rb_vm_opt_newarray_pack(rb_execution_context_t *ec, rb_num_t num,
const VALUE *ptr,
VALUE fmt)
6314 return vm_opt_newarray_pack_buffer(ec, num, ptr, fmt,
Qundef);
6320vm_track_constant_cache(
ID id,
void *ic)
6322 rb_vm_t *vm = GET_VM();
6323 struct rb_id_table *const_cache = vm->constant_cache;
6324 VALUE lookup_result;
6327 if (rb_id_table_lookup(const_cache,
id, &lookup_result)) {
6328 ics = (st_table *)lookup_result;
6331 ics = st_init_numtable();
6332 rb_id_table_insert(const_cache,
id, (
VALUE)ics);
6347 vm->inserting_constant_cache_id = id;
6349 st_insert(ics, (st_data_t) ic, (st_data_t)
Qtrue);
6351 vm->inserting_constant_cache_id = (
ID)0;
6355vm_ic_track_const_chain(rb_control_frame_t *cfp, IC ic,
const ID *segments)
6359 for (
int i = 0; segments[i]; i++) {
6360 ID id = segments[i];
6361 if (
id == idNULL)
continue;
6362 vm_track_constant_cache(
id, ic);
6370vm_inlined_ic_hit_p(
VALUE flags,
VALUE value,
const rb_cref_t *ic_cref,
const VALUE *reg_ep)
6372 if ((flags & IMEMO_CONST_CACHE_SHAREABLE) || rb_ractor_main_p()) {
6373 VM_ASSERT(ractor_incidental_shareable_p(flags & IMEMO_CONST_CACHE_SHAREABLE, value));
6375 return (ic_cref == NULL ||
6376 ic_cref == vm_get_cref(reg_ep));
6384 VM_ASSERT(IMEMO_TYPE_P(ice, imemo_constcache));
6385 return vm_inlined_ic_hit_p(ice->flags, ice->value, ice->ic_cref, reg_ep);
6390rb_vm_ic_hit_p(IC ic,
const VALUE *reg_ep)
6392 return ic->entry && vm_ic_hit_p(ic->entry, reg_ep);
6396vm_ic_update(
const rb_iseq_t *iseq, IC ic,
VALUE val,
const VALUE *reg_ep,
const VALUE *pc)
6398 if (ruby_vm_const_missing_count > 0) {
6399 ruby_vm_const_missing_count = 0;
6406 ice->ic_cref = vm_get_const_key_cref(reg_ep);
6411 unsigned pos = (unsigned)(pc - ISEQ_BODY(iseq)->iseq_encoded);
6412 rb_yjit_constant_ic_update(iseq, ic, pos);
6413 rb_rjit_constant_ic_update(iseq, ic, pos);
6417rb_vm_opt_getconstant_path(rb_execution_context_t *ec, rb_control_frame_t *
const reg_cfp, IC ic)
6422 if (ice && vm_ic_hit_p(ice, GET_EP())) {
6425 VM_ASSERT(val == vm_get_ev_const_chain(ec, segments));
6428 ruby_vm_constant_cache_misses++;
6429 val = vm_get_ev_const_chain(ec, segments);
6430 vm_ic_track_const_chain(GET_CFP(), ic, segments);
6433 vm_ic_update(GET_ISEQ(), ic, val, GET_EP(), GET_PC() - 2);
6439vm_once_dispatch(rb_execution_context_t *ec, ISEQ iseq, ISE is)
6441 rb_thread_t *th = rb_ec_thread_ptr(ec);
6442 rb_thread_t *
const RUNNING_THREAD_ONCE_DONE = (rb_thread_t *)(0x1);
6445 if (is->once.running_thread == RUNNING_THREAD_ONCE_DONE) {
6446 return is->once.value;
6448 else if (is->once.running_thread == NULL) {
6450 is->once.running_thread = th;
6454 is->once.running_thread = RUNNING_THREAD_ONCE_DONE;
6457 else if (is->once.running_thread == th) {
6459 return vm_once_exec((
VALUE)iseq);
6463 RUBY_VM_CHECK_INTS(ec);
6470vm_case_dispatch(CDHASH hash, OFFSET else_offset,
VALUE key)
6472 switch (OBJ_BUILTIN_TYPE(key)) {
6478 if (BASIC_OP_UNREDEFINED_P(BOP_EQQ,
6479 SYMBOL_REDEFINED_OP_FLAG |
6480 INTEGER_REDEFINED_OP_FLAG |
6481 FLOAT_REDEFINED_OP_FLAG |
6482 NIL_REDEFINED_OP_FLAG |
6483 TRUE_REDEFINED_OP_FLAG |
6484 FALSE_REDEFINED_OP_FLAG |
6485 STRING_REDEFINED_OP_FLAG)) {
6489 if (!isinf(kval) && modf(kval, &kval) == 0.0) {
6493 if (rb_hash_stlike_lookup(hash, key, &val)) {
6505 vm_stack_consistency_error(
const rb_execution_context_t *ec,
6506 const rb_control_frame_t *,
6509vm_stack_consistency_error(
const rb_execution_context_t *ec,
6510 const rb_control_frame_t *cfp,
6513 const ptrdiff_t nsp = VM_SP_CNT(ec, cfp->sp);
6514 const ptrdiff_t nbp = VM_SP_CNT(ec, bp);
6515 static const char stack_consistency_error[] =
6516 "Stack consistency error (sp: %"PRIdPTRDIFF
", bp: %"PRIdPTRDIFF
")";
6517#if defined RUBY_DEVEL
6518 VALUE mesg = rb_sprintf(stack_consistency_error, nsp, nbp);
6523 rb_bug(stack_consistency_error, nsp, nbp);
6530 if (FIXNUM_2_P(recv, obj) &&
6531 BASIC_OP_UNREDEFINED_P(BOP_PLUS, INTEGER_REDEFINED_OP_FLAG)) {
6532 return rb_fix_plus_fix(recv, obj);
6534 else if (FLONUM_2_P(recv, obj) &&
6535 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
6543 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
6548 BASIC_OP_UNREDEFINED_P(BOP_PLUS, STRING_REDEFINED_OP_FLAG)) {
6549 return rb_str_opt_plus(recv, obj);
6553 BASIC_OP_UNREDEFINED_P(BOP_PLUS, ARRAY_REDEFINED_OP_FLAG)) {
6554 return rb_ary_plus(recv, obj);
6564 if (FIXNUM_2_P(recv, obj) &&
6565 BASIC_OP_UNREDEFINED_P(BOP_MINUS, INTEGER_REDEFINED_OP_FLAG)) {
6566 return rb_fix_minus_fix(recv, obj);
6568 else if (FLONUM_2_P(recv, obj) &&
6569 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
6577 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
6588 if (FIXNUM_2_P(recv, obj) &&
6589 BASIC_OP_UNREDEFINED_P(BOP_MULT, INTEGER_REDEFINED_OP_FLAG)) {
6590 return rb_fix_mul_fix(recv, obj);
6592 else if (FLONUM_2_P(recv, obj) &&
6593 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
6601 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
6612 if (FIXNUM_2_P(recv, obj) &&
6613 BASIC_OP_UNREDEFINED_P(BOP_DIV, INTEGER_REDEFINED_OP_FLAG)) {
6614 return (
FIX2LONG(obj) == 0) ?
Qundef : rb_fix_div_fix(recv, obj);
6616 else if (FLONUM_2_P(recv, obj) &&
6617 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
6618 return rb_flo_div_flo(recv, obj);
6625 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
6626 return rb_flo_div_flo(recv, obj);
6636 if (FIXNUM_2_P(recv, obj) &&
6637 BASIC_OP_UNREDEFINED_P(BOP_MOD, INTEGER_REDEFINED_OP_FLAG)) {
6638 return (
FIX2LONG(obj) == 0) ?
Qundef : rb_fix_mod_fix(recv, obj);
6640 else if (FLONUM_2_P(recv, obj) &&
6641 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
6649 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
6658vm_opt_neq(
const rb_iseq_t *iseq, CALL_DATA cd, CALL_DATA cd_eq,
VALUE recv,
VALUE obj)
6660 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not_equal)) {
6661 VALUE val = opt_equality(iseq, recv, obj, cd_eq);
6663 if (!UNDEF_P(val)) {
6664 return RBOOL(!
RTEST(val));
6674 if (FIXNUM_2_P(recv, obj) &&
6675 BASIC_OP_UNREDEFINED_P(BOP_LT, INTEGER_REDEFINED_OP_FLAG)) {
6678 else if (FLONUM_2_P(recv, obj) &&
6679 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
6687 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
6699 if (FIXNUM_2_P(recv, obj) &&
6700 BASIC_OP_UNREDEFINED_P(BOP_LE, INTEGER_REDEFINED_OP_FLAG)) {
6703 else if (FLONUM_2_P(recv, obj) &&
6704 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
6712 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
6724 if (FIXNUM_2_P(recv, obj) &&
6725 BASIC_OP_UNREDEFINED_P(BOP_GT, INTEGER_REDEFINED_OP_FLAG)) {
6728 else if (FLONUM_2_P(recv, obj) &&
6729 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
6737 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
6749 if (FIXNUM_2_P(recv, obj) &&
6750 BASIC_OP_UNREDEFINED_P(BOP_GE, INTEGER_REDEFINED_OP_FLAG)) {
6753 else if (FLONUM_2_P(recv, obj) &&
6754 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
6762 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
6779 BASIC_OP_UNREDEFINED_P(BOP_LTLT, STRING_REDEFINED_OP_FLAG)) {
6788 BASIC_OP_UNREDEFINED_P(BOP_LTLT, ARRAY_REDEFINED_OP_FLAG)) {
6789 return rb_ary_push(recv, obj);
6806 BASIC_OP_UNREDEFINED_P(BOP_AND, INTEGER_REDEFINED_OP_FLAG)) {
6817 if (FIXNUM_2_P(recv, obj) &&
6818 BASIC_OP_UNREDEFINED_P(BOP_OR, INTEGER_REDEFINED_OP_FLAG)) {
6830 if (FIXNUM_2_P(recv, obj) &&
6831 BASIC_OP_UNREDEFINED_P(BOP_AREF, INTEGER_REDEFINED_OP_FLAG)) {
6832 return rb_fix_aref(recv, obj);
6837 BASIC_OP_UNREDEFINED_P(BOP_AREF, ARRAY_REDEFINED_OP_FLAG)) {
6839 return rb_ary_entry_internal(recv,
FIX2LONG(obj));
6842 return rb_ary_aref1(recv, obj);
6846 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG)) {
6847 return rb_hash_aref(recv, obj);
6861 BASIC_OP_UNREDEFINED_P(BOP_ASET, ARRAY_REDEFINED_OP_FLAG) &&
6863 rb_ary_store(recv,
FIX2LONG(obj), set);
6867 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG)) {
6868 rb_hash_aset(recv, obj, set);
6880 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG) &&
6881 rb_hash_compare_by_id_p(recv) ==
Qfalse &&
6882 !
FL_TEST(recv, RHASH_PROC_DEFAULT)) {
6883 return rb_hash_aref(recv, key);
6893 return vm_opt_aref_with(recv, key);
6900 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG) &&
6901 rb_hash_compare_by_id_p(recv) ==
Qfalse) {
6902 return rb_hash_aset(recv, key, val);
6910vm_opt_length(
VALUE recv,
int bop)
6916 BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
6917 if (bop == BOP_EMPTY_P) {
6918 return LONG2NUM(RSTRING_LEN(recv));
6925 BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
6929 BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
6938vm_opt_empty_p(
VALUE recv)
6940 switch (vm_opt_length(recv, BOP_EMPTY_P)) {
6950vm_opt_nil_p(
const rb_iseq_t *iseq, CALL_DATA cd,
VALUE recv)
6953 BASIC_OP_UNREDEFINED_P(BOP_NIL_P, NIL_REDEFINED_OP_FLAG)) {
6956 else if (vm_method_cfunc_is(iseq, cd, recv, rb_false)) {
6972 case RSHIFT(~0UL, 1):
6975 return rb_uint2big(1UL << (SIZEOF_LONG * CHAR_BIT - 2));
6993vm_opt_succ(
VALUE recv)
6996 BASIC_OP_UNREDEFINED_P(BOP_SUCC, INTEGER_REDEFINED_OP_FLAG)) {
6997 return fix_succ(recv);
7003 BASIC_OP_UNREDEFINED_P(BOP_SUCC, STRING_REDEFINED_OP_FLAG)) {
7012vm_opt_not(
const rb_iseq_t *iseq, CALL_DATA cd,
VALUE recv)
7014 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not)) {
7015 return RBOOL(!
RTEST(recv));
7030 BASIC_OP_UNREDEFINED_P(BOP_MATCH, STRING_REDEFINED_OP_FLAG)) {
7034 BASIC_OP_UNREDEFINED_P(BOP_MATCH, REGEXP_REDEFINED_OP_FLAG)) {
7042rb_event_flag_t rb_iseq_event_flags(
const rb_iseq_t *iseq,
size_t pos);
7044NOINLINE(
static void vm_trace(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp));
7047vm_trace_hook(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
const VALUE *pc,
7049 rb_hook_list_t *global_hooks, rb_hook_list_t *
const *local_hooks_ptr,
VALUE val)
7052 VALUE self = GET_SELF();
7054 VM_ASSERT(rb_popcount64((uint64_t)event) == 1);
7056 if (event & global_hooks->events) {
7059 vm_dtrace(event, ec);
7060 rb_exec_event_hook_orig(ec, global_hooks, event, self, 0, 0, 0 , val, 0);
7065 rb_hook_list_t *local_hooks = *local_hooks_ptr;
7066 if (local_hooks != NULL) {
7067 if (event & local_hooks->events) {
7070 rb_exec_event_hook_orig(ec, local_hooks, event, self, 0, 0, 0 , val, 0);
7076#define VM_TRACE_HOOK(target_event, val) do { \
7077 if ((pc_events & (target_event)) & enabled_flags) { \
7078 vm_trace_hook(ec, reg_cfp, pc, pc_events, (target_event), global_hooks, local_hooks_ptr, (val)); \
7083rescue_errinfo(rb_execution_context_t *ec, rb_control_frame_t *cfp)
7085 VM_ASSERT(VM_FRAME_RUBYFRAME_P(cfp));
7086 VM_ASSERT(ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_RESCUE);
7087 return cfp->ep[VM_ENV_INDEX_LAST_LVAR];
7091vm_trace(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp)
7093 const VALUE *pc = reg_cfp->pc;
7094 rb_event_flag_t enabled_flags = ruby_vm_event_flags & ISEQ_TRACE_EVENTS;
7097 if (enabled_flags == 0 && ruby_vm_event_local_num == 0) {
7101 const rb_iseq_t *iseq = reg_cfp->iseq;
7103 size_t pos = pc - ISEQ_BODY(iseq)->iseq_encoded;
7105 rb_hook_list_t *local_hooks = iseq->aux.exec.local_hooks;
7106 rb_hook_list_t *
const *local_hooks_ptr = &iseq->aux.exec.local_hooks;
7107 rb_event_flag_t iseq_local_events = local_hooks != NULL ? local_hooks->events : 0;
7108 rb_hook_list_t *bmethod_local_hooks = NULL;
7109 rb_hook_list_t **bmethod_local_hooks_ptr = NULL;
7111 const bool bmethod_frame = VM_FRAME_BMETHOD_P(reg_cfp);
7112 enabled_flags |= iseq_local_events;
7114 VM_ASSERT((iseq_local_events & ~ISEQ_TRACE_EVENTS) == 0);
7116 if (bmethod_frame) {
7117 const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(reg_cfp);
7118 VM_ASSERT(me->def->type == VM_METHOD_TYPE_BMETHOD);
7119 bmethod_local_hooks = me->def->body.bmethod.hooks;
7120 bmethod_local_hooks_ptr = &me->def->body.bmethod.hooks;
7121 if (bmethod_local_hooks) {
7122 bmethod_local_events = bmethod_local_hooks->events;
7127 if ((pc_events & enabled_flags) == 0 && !bmethod_frame) {
7131 rb_iseq_trace_set(iseq, vm_event_flags & ISEQ_TRACE_EVENTS);
7139 else if (ec->trace_arg != NULL) {
7144 rb_hook_list_t *global_hooks = rb_ec_ractor_hooks(ec);
7147 rb_event_flag_t bmethod_events = global_events | bmethod_local_events;
7150 ruby_debug_printf(
"vm_trace>>%4d (%4x) - %s:%d %s\n",
7153 RSTRING_PTR(rb_iseq_path(iseq)),
7154 (
int)rb_iseq_line_no(iseq, pos),
7155 RSTRING_PTR(rb_iseq_label(iseq)));
7157 VM_ASSERT(reg_cfp->pc == pc);
7158 VM_ASSERT(pc_events != 0);
7168 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_LINE,
Qundef);
7169 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_BRANCH,
Qundef);
7187#if VM_CHECK_MODE > 0
7188NORETURN( NOINLINE( COLDFUNC
7189void rb_vm_canary_is_found_dead(
enum ruby_vminsn_type i,
VALUE c)));
7192Init_vm_stack_canary(
void)
7195 int n = ruby_fill_random_bytes(&vm_stack_canary,
sizeof vm_stack_canary,
false);
7196 vm_stack_canary |= 0x01;
7198 vm_stack_canary_was_born =
true;
7203rb_vm_canary_is_found_dead(
enum ruby_vminsn_type i,
VALUE c)
7207 const char *insn = rb_insns_name(i);
7211 rb_bug(
"dead canary found at %s: %s", insn, str);
7215void Init_vm_stack_canary(
void) { }
7244builtin_invoker0(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr)
7246 typedef VALUE (*rb_invoke_funcptr0_t)(rb_execution_context_t *ec,
VALUE self);
7247 return (*(rb_invoke_funcptr0_t)funcptr)(ec, self);
7251builtin_invoker1(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr)
7253 typedef VALUE (*rb_invoke_funcptr1_t)(rb_execution_context_t *ec,
VALUE self,
VALUE v1);
7254 return (*(rb_invoke_funcptr1_t)funcptr)(ec, self, argv[0]);
7258builtin_invoker2(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr)
7260 typedef VALUE (*rb_invoke_funcptr2_t)(rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2);
7261 return (*(rb_invoke_funcptr2_t)funcptr)(ec, self, argv[0], argv[1]);
7265builtin_invoker3(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr)
7268 return (*(rb_invoke_funcptr3_t)funcptr)(ec, self, argv[0], argv[1], argv[2]);
7272builtin_invoker4(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr)
7275 return (*(rb_invoke_funcptr4_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3]);
7279builtin_invoker5(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr)
7282 return (*(rb_invoke_funcptr5_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4]);
7286builtin_invoker6(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr)
7289 return (*(rb_invoke_funcptr6_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
7293builtin_invoker7(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr)
7296 return (*(rb_invoke_funcptr7_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
7300builtin_invoker8(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr)
7303 return (*(rb_invoke_funcptr8_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
7307builtin_invoker9(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr)
7309 typedef VALUE (*rb_invoke_funcptr9_t)(rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9);
7310 return (*(rb_invoke_funcptr9_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
7314builtin_invoker10(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr)
7316 typedef VALUE (*rb_invoke_funcptr10_t)(rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10);
7317 return (*(rb_invoke_funcptr10_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
7321builtin_invoker11(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr)
7323 typedef VALUE (*rb_invoke_funcptr11_t)(rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11);
7324 return (*(rb_invoke_funcptr11_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
7328builtin_invoker12(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr)
7330 typedef VALUE (*rb_invoke_funcptr12_t)(rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12);
7331 return (*(rb_invoke_funcptr12_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
7335builtin_invoker13(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr)
7337 typedef VALUE (*rb_invoke_funcptr13_t)(rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12,
VALUE v13);
7338 return (*(rb_invoke_funcptr13_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
7342builtin_invoker14(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr)
7344 typedef VALUE (*rb_invoke_funcptr14_t)(rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12,
VALUE v13,
VALUE v14);
7345 return (*(rb_invoke_funcptr14_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
7349builtin_invoker15(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr)
7351 typedef VALUE (*rb_invoke_funcptr15_t)(rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12,
VALUE v13,
VALUE v14,
VALUE v15);
7352 return (*(rb_invoke_funcptr15_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
7355typedef VALUE (*builtin_invoker)(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr);
7357static builtin_invoker
7358lookup_builtin_invoker(
int argc)
7360 static const builtin_invoker invokers[] = {
7379 return invokers[argc];
7383invoke_bf(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
const struct rb_builtin_function* bf,
const VALUE *argv)
7385 const bool canary_p = ISEQ_BODY(reg_cfp->iseq)->builtin_attrs & BUILTIN_ATTR_LEAF;
7386 SETUP_CANARY(canary_p);
7387 rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
7388 VALUE ret = (*lookup_builtin_invoker(bf->argc))(ec, reg_cfp->self, argv, func_ptr);
7389 CHECK_CANARY(canary_p, BIN(invokebuiltin));
7394vm_invoke_builtin(rb_execution_context_t *ec, rb_control_frame_t *cfp,
const struct rb_builtin_function* bf,
const VALUE *argv)
7396 return invoke_bf(ec, cfp, bf, argv);
7400vm_invoke_builtin_delegate(rb_execution_context_t *ec, rb_control_frame_t *cfp,
const struct rb_builtin_function *bf,
unsigned int start_index)
7403 fputs(
"vm_invoke_builtin_delegate: passing -> ", stderr);
7404 for (
int i=0; i<bf->argc; i++) {
7405 ruby_debug_printf(
":%s ", rb_id2name(ISEQ_BODY(cfp->iseq)->local_table[i+start_index]));
7407 ruby_debug_printf(
"\n" "%s %s(%d):%p\n", RUBY_FUNCTION_NAME_STRING, bf->name, bf->argc,
7408 (
void *)(uintptr_t)bf->func_ptr);
7411 if (bf->argc == 0) {
7412 return invoke_bf(ec, cfp, bf, NULL);
7415 const VALUE *argv = cfp->ep - ISEQ_BODY(cfp->iseq)->local_table_size - VM_ENV_DATA_SIZE + 1 + start_index;
7416 return invoke_bf(ec, cfp, bf, argv);
7423rb_vm_lvar_exposed(rb_execution_context_t *ec,
int index)
7425 const rb_control_frame_t *cfp = ec->cfp;
7426 return cfp->ep[index];
#define RUBY_ASSERT(...)
Asserts that the given expression is truthy if and only if RUBY_DEBUG is truthy.
#define RUBY_EVENT_END
Encountered an end of a class clause.
#define RUBY_EVENT_C_CALL
A method, written in C, is called.
#define RUBY_EVENT_B_RETURN
Encountered a next statement.
#define RUBY_EVENT_CLASS
Encountered a new class.
#define RUBY_EVENT_LINE
Encountered a new line.
#define RUBY_EVENT_RETURN
Encountered a return statement.
#define RUBY_EVENT_C_RETURN
Return from a method, written in C.
#define RUBY_EVENT_B_CALL
Encountered a yield statement.
uint32_t rb_event_flag_t
Represents event(s).
#define RUBY_EVENT_CALL
A method, written in Ruby, is called.
#define RUBY_EVENT_RESCUE
Encountered a rescue statement.
VALUE rb_singleton_class(VALUE obj)
Finds or creates the singleton class of the passed object.
VALUE rb_module_new(void)
Creates a new, anonymous module.
VALUE rb_class_inherited(VALUE super, VALUE klass)
Calls Class#inherited.
VALUE rb_define_class_id(ID id, VALUE super)
This is a very badly designed API that creates an anonymous class.
#define TYPE(_)
Old name of rb_type.
#define FL_EXIVAR
Old name of RUBY_FL_EXIVAR.
#define REALLOC_N
Old name of RB_REALLOC_N.
#define ALLOC
Old name of RB_ALLOC.
#define RFLOAT_VALUE
Old name of rb_float_value.
#define T_STRING
Old name of RUBY_T_STRING.
#define Qundef
Old name of RUBY_Qundef.
#define INT2FIX
Old name of RB_INT2FIX.
#define T_NIL
Old name of RUBY_T_NIL.
#define T_FLOAT
Old name of RUBY_T_FLOAT.
#define T_IMEMO
Old name of RUBY_T_IMEMO.
#define ID2SYM
Old name of RB_ID2SYM.
#define T_BIGNUM
Old name of RUBY_T_BIGNUM.
#define SPECIAL_CONST_P
Old name of RB_SPECIAL_CONST_P.
#define T_STRUCT
Old name of RUBY_T_STRUCT.
#define T_FIXNUM
Old name of RUBY_T_FIXNUM.
#define SYM2ID
Old name of RB_SYM2ID.
#define CLASS_OF
Old name of rb_class_of.
#define rb_ary_new4
Old name of rb_ary_new_from_values.
#define FIXABLE
Old name of RB_FIXABLE.
#define LONG2FIX
Old name of RB_INT2FIX.
#define FIX2INT
Old name of RB_FIX2INT.
#define T_MODULE
Old name of RUBY_T_MODULE.
#define STATIC_SYM_P
Old name of RB_STATIC_SYM_P.
#define ASSUME
Old name of RBIMPL_ASSUME.
#define FIX2ULONG
Old name of RB_FIX2ULONG.
#define T_TRUE
Old name of RUBY_T_TRUE.
#define T_ICLASS
Old name of RUBY_T_ICLASS.
#define T_HASH
Old name of RUBY_T_HASH.
#define ALLOC_N
Old name of RB_ALLOC_N.
#define FL_TEST_RAW
Old name of RB_FL_TEST_RAW.
#define rb_ary_new3
Old name of rb_ary_new_from_args.
#define LONG2NUM
Old name of RB_LONG2NUM.
#define rb_exc_new3
Old name of rb_exc_new_str.
#define T_FALSE
Old name of RUBY_T_FALSE.
#define Qtrue
Old name of RUBY_Qtrue.
#define Qnil
Old name of RUBY_Qnil.
#define Qfalse
Old name of RUBY_Qfalse.
#define FIX2LONG
Old name of RB_FIX2LONG.
#define T_ARRAY
Old name of RUBY_T_ARRAY.
#define T_OBJECT
Old name of RUBY_T_OBJECT.
#define NIL_P
Old name of RB_NIL_P.
#define T_SYMBOL
Old name of RUBY_T_SYMBOL.
#define DBL2NUM
Old name of rb_float_new.
#define T_CLASS
Old name of RUBY_T_CLASS.
#define BUILTIN_TYPE
Old name of RB_BUILTIN_TYPE.
#define FL_TEST
Old name of RB_FL_TEST.
#define FIXNUM_P
Old name of RB_FIXNUM_P.
#define FL_USHIFT
Old name of RUBY_FL_USHIFT.
#define FL_SET_RAW
Old name of RB_FL_SET_RAW.
#define SYMBOL_P
Old name of RB_SYMBOL_P.
void rb_notimplement(void)
#define ruby_verbose
This variable controls whether the interpreter is in debug mode.
VALUE rb_eTypeError
TypeError exception.
VALUE rb_eFatal
fatal exception.
VALUE rb_eNoMethodError
NoMethodError exception.
void rb_exc_fatal(VALUE mesg)
Raises a fatal error in the current thread.
VALUE rb_eRuntimeError
RuntimeError exception.
void rb_warn(const char *fmt,...)
Identical to rb_warning(), except it reports unless $VERBOSE is nil.
void rb_error_frozen_object(VALUE frozen_obj)
Identical to rb_error_frozen(), except it takes arbitrary Ruby object instead of C's string.
VALUE rb_exc_new_str(VALUE etype, VALUE str)
Identical to rb_exc_new_cstr(), except it takes a Ruby's string instead of C's.
@ RB_WARN_CATEGORY_STRICT_UNUSED_BLOCK
Warning is for checking unused block strictly.
VALUE rb_cClass
Class class.
VALUE rb_cArray
Array class.
VALUE rb_obj_alloc(VALUE klass)
Allocates an instance of the given class.
VALUE rb_cRegexp
Regexp class.
VALUE rb_obj_frozen_p(VALUE obj)
Just calls RB_OBJ_FROZEN() inside.
VALUE rb_cHash
Hash class.
VALUE rb_obj_class(VALUE obj)
Queries the class of an object.
VALUE rb_inspect(VALUE obj)
Generates a human-readable textual representation of the given object.
VALUE rb_cBasicObject
BasicObject class.
VALUE rb_cModule
Module class.
VALUE rb_class_real(VALUE klass)
Finds a "real" class.
VALUE rb_obj_is_kind_of(VALUE obj, VALUE klass)
Queries if the given object is an instance (of possibly descendants) of the given class.
VALUE rb_cFloat
Float class.
VALUE rb_cProc
Proc class.
VALUE rb_cString
String class.
#define RB_OBJ_WRITTEN(old, oldv, young)
Identical to RB_OBJ_WRITE(), except it doesn't write any values, but only a WB declaration.
#define RB_OBJ_WRITE(old, slot, young)
Declaration of a "back" pointer.
#define UNLIMITED_ARGUMENTS
This macro is used in conjunction with rb_check_arity().
static int rb_check_arity(int argc, int min, int max)
Ensures that the passed integer is in the passed range.
VALUE rb_proc_call_with_block(VALUE recv, int argc, const VALUE *argv, VALUE proc)
Identical to rb_proc_call(), except you can additionally pass another proc object,...
VALUE rb_reg_last_match(VALUE md)
This just returns the argument, stringified.
VALUE rb_reg_match(VALUE re, VALUE str)
This is the match operator.
VALUE rb_reg_nth_match(int n, VALUE md)
Queries the nth captured substring.
VALUE rb_reg_match_post(VALUE md)
The portion of the original string after the given match.
VALUE rb_reg_nth_defined(int n, VALUE md)
Identical to rb_reg_nth_match(), except it just returns Boolean.
VALUE rb_reg_match_pre(VALUE md)
The portion of the original string before the given match.
VALUE rb_reg_match_last(VALUE md)
The portion of the original string that captured at the very last.
VALUE rb_str_append(VALUE dst, VALUE src)
Identical to rb_str_buf_append(), except it converts the right hand side before concatenating.
VALUE rb_str_succ(VALUE orig)
Searches for the "successor" of a string.
VALUE rb_str_buf_append(VALUE dst, VALUE src)
Identical to rb_str_cat_cstr(), except it takes Ruby's string instead of C's.
VALUE rb_str_concat(VALUE dst, VALUE src)
Identical to rb_str_append(), except it also accepts an integer as a codepoint.
#define rb_str_cat_cstr(buf, str)
Identical to rb_str_cat(), except it assumes the passed pointer is a pointer to a C string.
VALUE rb_str_length(VALUE)
Identical to rb_str_strlen(), except it returns the value in rb_cInteger.
VALUE rb_str_intern(VALUE str)
Identical to rb_to_symbol(), except it assumes the receiver being an instance of RString.
void rb_thread_schedule(void)
Tries to switch to another thread.
VALUE rb_const_get(VALUE space, ID name)
Identical to rb_const_defined(), except it returns the actual defined value.
VALUE rb_ivar_set(VALUE obj, ID name, VALUE val)
Identical to rb_iv_set(), except it accepts the name as an ID instead of a C string.
void rb_cvar_set(VALUE klass, ID name, VALUE val)
Assigns a value to a class variable.
VALUE rb_cvar_find(VALUE klass, ID name, VALUE *front)
Identical to rb_cvar_get(), except it takes additional "front" pointer.
VALUE rb_ivar_get(VALUE obj, ID name)
Identical to rb_iv_get(), except it accepts the name as an ID instead of a C string.
void rb_const_set(VALUE space, ID name, VALUE val)
Names a constant.
VALUE rb_autoload_load(VALUE space, ID name)
Kicks the autoload procedure as if it was "touched".
VALUE rb_mod_name(VALUE mod)
Queries the name of a module.
VALUE rb_const_get_at(VALUE space, ID name)
Identical to rb_const_defined_at(), except it returns the actual defined value.
void rb_set_class_path_string(VALUE klass, VALUE space, VALUE name)
Identical to rb_set_class_path(), except it accepts the name as Ruby's string instead of C's.
VALUE rb_ivar_defined(VALUE obj, ID name)
Queries if the instance variable is defined at the object.
int rb_const_defined_at(VALUE space, ID name)
Identical to rb_const_defined(), except it doesn't look for parent classes.
VALUE rb_cvar_defined(VALUE klass, ID name)
Queries if the given class has the given class variable.
VALUE rb_class_path(VALUE mod)
Identical to rb_mod_name(), except it returns #<Class: ...> style inspection for anonymous modules.
int rb_const_defined(VALUE space, ID name)
Queries if the constant is defined at the namespace.
VALUE rb_check_funcall(VALUE recv, ID mid, int argc, const VALUE *argv)
Identical to rb_funcallv(), except it returns RUBY_Qundef instead of raising rb_eNoMethodError.
rb_alloc_func_t rb_get_alloc_func(VALUE klass)
Queries the allocator function of a class.
void rb_define_alloc_func(VALUE klass, rb_alloc_func_t func)
Sets the allocator function of a class.
int rb_method_boundp(VALUE klass, ID id, int ex)
Queries if the klass has this method.
ID rb_check_id(volatile VALUE *namep)
Detects if the given name is already interned or not.
VALUE rb_sym2str(VALUE symbol)
Obtain a frozen string representation of a symbol (not including the leading colon).
int off
Offset inside of ptr.
int len
Length of the buffer.
static bool rb_ractor_shareable_p(VALUE obj)
Queries if multiple Ractors can share the passed object or not.
#define RB_OBJ_SHAREABLE_P(obj)
Queries if the passed object has previously classified as shareable or not.
#define MEMCPY(p1, p2, type, n)
Handy macro to call memcpy.
#define ALLOCA_N(type, n)
#define RB_GC_GUARD(v)
Prevents premature destruction of local objects.
#define MEMMOVE(p1, p2, type, n)
Handy macro to call memmove.
VALUE type(ANYARGS)
ANYARGS-ed function type.
VALUE rb_ensure(type *q, VALUE w, type *e, VALUE r)
An equivalent of ensure clause.
#define RARRAY_LEN
Just another name of rb_array_len.
static int RARRAY_LENINT(VALUE ary)
Identical to rb_array_len(), except it differs for the return type.
static VALUE * RARRAY_PTR(VALUE ary)
Wild use of a C pointer.
#define RARRAY_AREF(a, i)
#define RARRAY_CONST_PTR
Just another name of rb_array_const_ptr.
static VALUE RBASIC_CLASS(VALUE obj)
Queries the class of an object.
#define RBASIC(obj)
Convenient casting macro.
#define RCLASS_SUPER
Just another name of rb_class_get_superclass.
#define RHASH_SIZE(h)
Queries the size of the hash.
#define RHASH_EMPTY_P(h)
Checks if the hash is empty.
static VALUE * ROBJECT_IVPTR(VALUE obj)
Queries the instance variables.
#define StringValueCStr(v)
Identical to StringValuePtr, except it additionally checks for the contents for viability as a C stri...
#define RB_PASS_KEYWORDS
Pass keywords, final argument should be a hash of keywords.
#define RB_NO_KEYWORDS
Do not pass keywords.
static bool RB_SPECIAL_CONST_P(VALUE obj)
Checks if the given object is of enum ruby_special_consts.
#define RTEST
This is an old name of RB_TEST.
#define ANYARGS
Functions declared using this macro take arbitrary arguments, including void.
const VALUE ary[1]
Embedded elements.
const ID * segments
A null-terminated list of ids, used to represent a constant's path idNULL is used to represent the ::...
rb_cref_t * cref
class reference, should be marked
const rb_iseq_t * iseqptr
iseq pointer, should be separated from iseqval
IFUNC (Internal FUNCtion)
const VALUE cref_or_me
class reference or rb_method_entry_t
intptr_t SIGNED_VALUE
A signed integer type that has the same width with VALUE.
uintptr_t ID
Type that represents a Ruby identifier such as a variable name.
#define SIZEOF_VALUE
Identical to sizeof(VALUE), except it is a macro that can also be used inside of preprocessor directi...
uintptr_t VALUE
Type that represents a Ruby object.
static enum ruby_value_type RB_BUILTIN_TYPE(VALUE obj)
Queries the type of the object.
static bool RB_FLOAT_TYPE_P(VALUE obj)
Queries if the object is an instance of rb_cFloat.
static bool RB_TYPE_P(VALUE obj, enum ruby_value_type t)
Queries if the given object is of given type.