Ruby 3.3.0p0 (2023-12-25 revision 5124f9ac7513eb590c37717337c430cb93caa151)
vm_insnhelper.c
1/**********************************************************************
2
3 vm_insnhelper.c - instruction helper functions.
4
5 $Author$
6
7 Copyright (C) 2007 Koichi Sasada
8
9**********************************************************************/
10
11#include "ruby/internal/config.h"
12
13#include <math.h>
14
15#include "constant.h"
16#include "debug_counter.h"
17#include "internal.h"
18#include "internal/class.h"
19#include "internal/compar.h"
20#include "internal/hash.h"
21#include "internal/numeric.h"
22#include "internal/proc.h"
23#include "internal/random.h"
24#include "internal/variable.h"
25#include "internal/struct.h"
26#include "variable.h"
27
28/* finish iseq array */
29#include "insns.inc"
30#include "insns_info.inc"
31
32extern rb_method_definition_t *rb_method_definition_create(rb_method_type_t type, ID mid);
33extern void rb_method_definition_set(const rb_method_entry_t *me, rb_method_definition_t *def, void *opts);
34extern int rb_method_definition_eq(const rb_method_definition_t *d1, const rb_method_definition_t *d2);
35extern VALUE rb_make_no_method_exception(VALUE exc, VALUE format, VALUE obj,
36 int argc, const VALUE *argv, int priv);
37
38static const struct rb_callcache vm_empty_cc;
39static const struct rb_callcache vm_empty_cc_for_super;
40
41/* control stack frame */
42
43static rb_control_frame_t *vm_get_ruby_level_caller_cfp(const rb_execution_context_t *ec, const rb_control_frame_t *cfp);
44
46ruby_vm_special_exception_copy(VALUE exc)
47{
49 rb_obj_copy_ivar(e, exc);
50 return e;
51}
52
NORETURN(static void ec_stack_overflow(rb_execution_context_t *ec, int));
/* Raise the pre-built SystemStackError stored in the VM's special
 * exception table.  When `setup` is true the shared exception is copied
 * and given a backtrace (idBt / idBt_locations ivars); otherwise the
 * shared object is raised as-is.  Never returns: exits via EC_JUMP_TAG. */
static void
ec_stack_overflow(rb_execution_context_t *ec, int setup)
{
    VALUE mesg = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_sysstack];
    ec->raised_flag = RAISED_STACKOVERFLOW;
    if (setup) {
        VALUE at = rb_ec_backtrace_object(ec);
        /* copy first so the shared special exception is never mutated */
        mesg = ruby_vm_special_exception_copy(mesg);
        rb_ivar_set(mesg, idBt, at);
        rb_ivar_set(mesg, idBt_locations, at);
    }
    ec->errinfo = mesg;
    EC_JUMP_TAG(ec, TAG_RAISE);
}
68
NORETURN(static void vm_stackoverflow(void));

/* Convenience wrapper: raise a stack-overflow error (with backtrace
 * setup) on the current execution context. */
static void
vm_stackoverflow(void)
{
    ec_stack_overflow(GET_EC(), TRUE);
}
76
NORETURN(void rb_ec_stack_overflow(rb_execution_context_t *ec, int crit));
/* Entry point for machine-stack overflow handling.
 *
 * - During GC a stack overflow is unrecoverable: abort via rb_bug.
 * - `crit` (critical): raise the pre-built fatal stack error directly,
 *   skipping backtrace construction.
 * - Otherwise delegate to ec_stack_overflow; backtrace setup is only
 *   attempted when an alternate signal stack is in use (USE_SIGALTSTACK),
 *   since this path may run from a SIGSEGV handler. */
void
rb_ec_stack_overflow(rb_execution_context_t *ec, int crit)
{
    if (rb_during_gc()) {
        rb_bug("system stack overflow during GC. Faulty native extension?");
    }
    if (crit) {
        ec->raised_flag = RAISED_STACKOVERFLOW;
        ec->errinfo = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_stackfatal];
        EC_JUMP_TAG(ec, TAG_RAISE);
    }
#ifdef USE_SIGALTSTACK
    ec_stack_overflow(ec, TRUE);
#else
    ec_stack_overflow(ec, FALSE);
#endif
}
95
96static inline void stack_check(rb_execution_context_t *ec);
97
98#if VM_CHECK_MODE > 0
/* Debug-mode check: may `klass` legitimately appear as a callable method
 * entry's defined_class?  Accepts modules, iclasses that wrap modules,
 * and any class whose superclass chain reaches BasicObject.  Under
 * VM_CHECK_MODE < 2 only non-zero-ness is checked. */
static int
callable_class_p(VALUE klass)
{
#if VM_CHECK_MODE >= 2
    if (!klass) return FALSE;
    switch (RB_BUILTIN_TYPE(klass)) {
      default:
        break;
      case T_ICLASS:
        /* only iclasses backed by a module are acceptable */
        if (!RB_TYPE_P(RCLASS_SUPER(klass), T_MODULE)) break;
        /* fall through */
      case T_MODULE:
        return TRUE;
    }
    /* walk the superclass chain; a real class must reach BasicObject */
    while (klass) {
        if (klass == rb_cBasicObject) {
            return TRUE;
        }
        klass = RCLASS_SUPER(klass);
    }
    return FALSE;
#else
    return klass != 0;
#endif
}
123
124static int
125callable_method_entry_p(const rb_callable_method_entry_t *cme)
126{
127 if (cme == NULL) {
128 return TRUE;
129 }
130 else {
131 VM_ASSERT(IMEMO_TYPE_P((VALUE)cme, imemo_ment));
132
133 if (callable_class_p(cme->defined_class)) {
134 return TRUE;
135 }
136 else {
137 return FALSE;
138 }
139 }
140}
141
/* Debug-mode frame validation.  Given a frame's flags (`type`), its
 * specval/cref_or_me slots and iseq, verify the invariants that the
 * frame kind requires:
 *   req_block - frame must (or must not) carry VM_ENV_FLAG_LOCAL
 *   req_me    - cref_or_me must be a method entry (imemo_ment)
 *   req_cref  - cref_or_me must be a cref (imemo_cref)
 *   is_cframe - frame must (or must not) be a C-level frame
 * Any violation aborts via rb_bug. */
static void
vm_check_frame_detail(VALUE type, int req_block, int req_me, int req_cref, VALUE specval, VALUE cref_or_me, int is_cframe, const rb_iseq_t *iseq)
{
    unsigned int magic = (unsigned int)(type & VM_FRAME_MAGIC_MASK);
    enum imemo_type cref_or_me_type = imemo_env; /* impossible value */

    if (RB_TYPE_P(cref_or_me, T_IMEMO)) {
        cref_or_me_type = imemo_type(cref_or_me);
    }
    if (type & VM_FRAME_FLAG_BMETHOD) {
        /* bmethod frames always carry a method entry */
        req_me = TRUE;
    }

    if (req_block && (type & VM_ENV_FLAG_LOCAL) == 0) {
        rb_bug("vm_push_frame: specval (%p) should be a block_ptr on %x frame", (void *)specval, magic);
    }
    if (!req_block && (type & VM_ENV_FLAG_LOCAL) != 0) {
        rb_bug("vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (void *)specval, magic);
    }

    if (req_me) {
        if (cref_or_me_type != imemo_ment) {
            rb_bug("vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
        }
    }
    else {
        if (req_cref && cref_or_me_type != imemo_cref) {
            rb_bug("vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
        }
        else { /* cref or Qfalse */
            if (cref_or_me != Qfalse && cref_or_me_type != imemo_cref) {
                /* lambda/ifunc frames are allowed to carry a method entry */
                if (((type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC) && (cref_or_me_type == imemo_ment)) {
                    /* ignore */
                }
                else {
                    rb_bug("vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);
                }
            }
        }
    }

    if (cref_or_me_type == imemo_ment) {
        const rb_callable_method_entry_t *me = (const rb_callable_method_entry_t *)cref_or_me;

        if (!callable_method_entry_p(me)) {
            rb_bug("vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);
        }
    }

    if ((type & VM_FRAME_MAGIC_MASK) == VM_FRAME_MAGIC_DUMMY) {
        VM_ASSERT(iseq == NULL ||
                  RBASIC_CLASS((VALUE)iseq) == 0 || // dummy frame for loading
                  RUBY_VM_NORMAL_ISEQ_P(iseq) //argument error
            );
    }
    else {
        VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));
    }
}
201
/* Debug-mode dispatcher: look up the per-frame-magic requirement table
 * (block? method entry? cref? C frame?) and run vm_check_frame_detail.
 * Unknown frame magics abort via rb_bug. */
static void
vm_check_frame(VALUE type,
               VALUE specval,
               VALUE cref_or_me,
               const rb_iseq_t *iseq)
{
    VALUE given_magic = type & VM_FRAME_MAGIC_MASK;
    VM_ASSERT(FIXNUM_P(type));

#define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
    case magic: \
      vm_check_frame_detail(type, req_block, req_me, req_cref, \
                            specval, cref_or_me, is_cframe, iseq); \
      break
    switch (given_magic) {
        /*                           BLK    ME     CREF   CFRAME */
        CHECK(VM_FRAME_MAGIC_METHOD, TRUE,  TRUE,  FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_CLASS,  TRUE,  FALSE, TRUE,  FALSE);
        CHECK(VM_FRAME_MAGIC_TOP,    TRUE,  FALSE, TRUE,  FALSE);
        CHECK(VM_FRAME_MAGIC_CFUNC,  TRUE,  TRUE,  FALSE, TRUE);
        CHECK(VM_FRAME_MAGIC_BLOCK,  FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_IFUNC,  FALSE, FALSE, FALSE, TRUE);
        CHECK(VM_FRAME_MAGIC_EVAL,   FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_RESCUE, FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_DUMMY,  TRUE,  FALSE, FALSE, FALSE);
      default:
        rb_bug("vm_push_frame: unknown type (%x)", (unsigned int)given_magic);
    }
#undef CHECK
}
232
/* Sentinel value checked by rb_vm_check_canary() below; initialized
 * elsewhere, after which vm_stack_canary_was_born is set. */
static VALUE vm_stack_canary; /* Initialized later */
static bool vm_stack_canary_was_born = false;
235
/* Debug-mode check that the VM stack canary at `sp` has not been
 * overwritten-in-place (i.e. that sp[0] no longer equals the canary
 * when it should have been consumed).  On detection, prints the
 * offending instruction/iseq via ruby_debug_printf and aborts.
 * No-op in the common cases (canary not yet initialized, no cfp,
 * no iseq, or canary intact). */
void
rb_vm_check_canary(const rb_execution_context_t *ec, VALUE *sp)
{
    const struct rb_control_frame_struct *reg_cfp = ec->cfp;
    const struct rb_iseq_struct *iseq;

    if (! LIKELY(vm_stack_canary_was_born)) {
        return; /* :FIXME: isn't it rather fatal to enter this branch?  */
    }
    else if ((VALUE *)reg_cfp == ec->vm_stack + ec->vm_stack_size) {
        /* This is at the very beginning of a thread.  cfp does not exist. */
        return;
    }
    else if (! (iseq = GET_ISEQ())) {
        return;
    }
    else if (LIKELY(sp[0] != vm_stack_canary)) {
        return;
    }
    else {
        /* we are going to call methods below; squash the canary to
         * prevent infinite loop. */
        sp[0] = Qundef;
    }

    /* Gather diagnostics: failing instruction name, iseq inspection and
     * full disassembly. */
    const VALUE *orig = rb_iseq_original_iseq(iseq);
    const VALUE *encoded = ISEQ_BODY(iseq)->iseq_encoded;
    const ptrdiff_t pos = GET_PC() - encoded;
    const enum ruby_vminsn_type insn = (enum ruby_vminsn_type)orig[pos];
    const char *name = insn_name(insn);
    const VALUE iseqw = rb_iseqw_new(iseq);
    const VALUE inspection = rb_inspect(iseqw);
    const char *stri = rb_str_to_cstr(inspection);
    const VALUE disasm = rb_iseq_disasm(iseq);
    const char *strd = rb_str_to_cstr(disasm);

    /* rb_bug() is not capable of outputting this large contents.  It
       is designed to run form a SIGSEGV handler, which tends to be
       very restricted. */
    ruby_debug_printf(
        "We are killing the stack canary set by %s, "
        "at %s@pc=%"PRIdPTR"\n"
        "watch out the C stack trace.\n"
        "%s",
        name, stri, pos, strd);
    rb_bug("see above.");
}
283#define vm_check_canary(ec, sp) rb_vm_check_canary(ec, sp)
284
285#else
286#define vm_check_canary(ec, sp)
287#define vm_check_frame(a, b, c, d)
288#endif /* VM_CHECK_MODE > 0 */
289
290#if USE_DEBUG_COUNTER
291static void
292vm_push_frame_debug_counter_inc(
293 const struct rb_execution_context_struct *ec,
294 const struct rb_control_frame_struct *reg_cfp,
295 VALUE type)
296{
297 const struct rb_control_frame_struct *prev_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(reg_cfp);
298
299 RB_DEBUG_COUNTER_INC(frame_push);
300
301 if (RUBY_VM_END_CONTROL_FRAME(ec) != prev_cfp) {
302 const bool curr = VM_FRAME_RUBYFRAME_P(reg_cfp);
303 const bool prev = VM_FRAME_RUBYFRAME_P(prev_cfp);
304 if (prev) {
305 if (curr) {
306 RB_DEBUG_COUNTER_INC(frame_R2R);
307 }
308 else {
309 RB_DEBUG_COUNTER_INC(frame_R2C);
310 }
311 }
312 else {
313 if (curr) {
314 RB_DEBUG_COUNTER_INC(frame_C2R);
315 }
316 else {
317 RB_DEBUG_COUNTER_INC(frame_C2C);
318 }
319 }
320 }
321
322 switch (type & VM_FRAME_MAGIC_MASK) {
323 case VM_FRAME_MAGIC_METHOD: RB_DEBUG_COUNTER_INC(frame_push_method); return;
324 case VM_FRAME_MAGIC_BLOCK: RB_DEBUG_COUNTER_INC(frame_push_block); return;
325 case VM_FRAME_MAGIC_CLASS: RB_DEBUG_COUNTER_INC(frame_push_class); return;
326 case VM_FRAME_MAGIC_TOP: RB_DEBUG_COUNTER_INC(frame_push_top); return;
327 case VM_FRAME_MAGIC_CFUNC: RB_DEBUG_COUNTER_INC(frame_push_cfunc); return;
328 case VM_FRAME_MAGIC_IFUNC: RB_DEBUG_COUNTER_INC(frame_push_ifunc); return;
329 case VM_FRAME_MAGIC_EVAL: RB_DEBUG_COUNTER_INC(frame_push_eval); return;
330 case VM_FRAME_MAGIC_RESCUE: RB_DEBUG_COUNTER_INC(frame_push_rescue); return;
331 case VM_FRAME_MAGIC_DUMMY: RB_DEBUG_COUNTER_INC(frame_push_dummy); return;
332 }
333
334 rb_bug("unreachable");
335}
336#else
337#define vm_push_frame_debug_counter_inc(ec, cfp, t) /* void */
338#endif
339
/* vm_push_frame() below writes cref_or_me / specval / flags at
 * ep[-2], ep[-1], ep[0]; these asserts pin that layout. */
STATIC_ASSERT(VM_ENV_DATA_INDEX_ME_CREF, VM_ENV_DATA_INDEX_ME_CREF == -2);
STATIC_ASSERT(VM_ENV_DATA_INDEX_SPECVAL, VM_ENV_DATA_INDEX_SPECVAL == -1);
STATIC_ASSERT(VM_ENV_DATA_INDEX_FLAGS,   VM_ENV_DATA_INDEX_FLAGS   == -0);
343
/* Push a new control frame onto ec's VM stack.
 *
 *   iseq       - instruction sequence for the frame (may be NULL for
 *                C frames / dummy frames; see vm_check_frame)
 *   type       - frame magic | env flags
 *   specval    - block handler or previous-env pointer (ep[-1])
 *   cref_or_me - Qfalse, imemo_cref, or imemo_ment (ep[-2])
 *   sp         - current stack pointer; locals and env data are written
 *                upward from here
 *   local_size - number of local slots to nil-initialize
 *   stack_max  - additional stack room the frame may use (overflow check)
 *
 * The write order matters: locals, then the three env slots, then the
 * cfp itself, and only then ec->cfp is published. */
static void
vm_push_frame(rb_execution_context_t *ec,
              const rb_iseq_t *iseq,
              VALUE type,
              VALUE self,
              VALUE specval,
              VALUE cref_or_me,
              const VALUE *pc,
              VALUE *sp,
              int local_size,
              int stack_max)
{
    rb_control_frame_t *const cfp = RUBY_VM_NEXT_CONTROL_FRAME(ec->cfp);

    vm_check_frame(type, specval, cref_or_me, iseq);
    VM_ASSERT(local_size >= 0);

    /* check stack overflow */
    CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
    vm_check_canary(ec, sp);

    /* setup vm value stack */

    /* initialize local variables */
    for (int i=0; i < local_size; i++) {
        *sp++ = Qnil;
    }

    /* setup ep with managing data */
    *sp++ = cref_or_me; /* ep[-2] / Qnil or T_IMEMO(cref) or T_IMEMO(ment) */
    *sp++ = specval     /* ep[-1] / block handler or prev env ptr */;
    *sp++ = type;       /* ep[-0] / ENV_FLAGS */

    /* setup new frame */
    *cfp = (const struct rb_control_frame_struct) {
        .pc = pc,
        .sp = sp,
        .iseq = iseq,
        .self = self,
        .ep = sp - 1,
        .block_code = NULL,
#if VM_DEBUG_BP_CHECK
        .bp_check = sp,
#endif
        .jit_return = NULL
    };

    /* publish the frame last so ec->cfp never points at a
     * half-initialized frame */
    ec->cfp = cfp;

    if (VMDEBUG == 2) {
        SDR();
    }
    vm_push_frame_debug_counter_inc(ec, cfp, type);
}
398
399void
400rb_vm_pop_frame_no_int(rb_execution_context_t *ec)
401{
402 rb_control_frame_t *cfp = ec->cfp;
403
404 if (VM_CHECK_MODE >= 4) rb_gc_verify_internal_consistency();
405 if (VMDEBUG == 2) SDR();
406
407 ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
408}
409
/* return TRUE if the frame is finished */
static inline int
vm_pop_frame(rb_execution_context_t *ec, rb_control_frame_t *cfp, const VALUE *ep)
{
    /* read the flags BEFORE the pop: ep belongs to the frame being
     * removed */
    VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];

    if (VM_CHECK_MODE >= 4) rb_gc_verify_internal_consistency();
    if (VMDEBUG == 2)       SDR();

    /* deliver pending interrupts, then unlink the frame */
    RUBY_VM_CHECK_INTS(ec);
    ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);

    return flags & VM_FRAME_FLAG_FINISH;
}
424
/* Public wrapper: pop ec's current frame (with interrupt check),
 * discarding the FINISH flag result. */
void
rb_vm_pop_frame(rb_execution_context_t *ec)
{
    vm_pop_frame(ec, ec->cfp, ec->cfp->ep);
}
430
// it pushes pseudo-frame with fname filename.
// Returns a tmpbuf imemo that owns the dummy iseq's memory; the caller
// must keep it alive for as long as the pushed frame exists.
VALUE
rb_vm_push_frame_fname(rb_execution_context_t *ec, VALUE fname)
{
    /* allocate one zeroed buffer holding both the dummy iseq struct and
     * its constant body, tied to a tmpbuf for GC-managed lifetime */
    VALUE tmpbuf = rb_imemo_tmpbuf_auto_free_pointer();
    void *ptr = ruby_xcalloc(sizeof(struct rb_iseq_constant_body) + sizeof(struct rb_iseq_struct), 1);
    rb_imemo_tmpbuf_set_ptr(tmpbuf, ptr);

    struct rb_iseq_struct *dmy_iseq = (struct rb_iseq_struct *)ptr;
    struct rb_iseq_constant_body *dmy_body = (struct rb_iseq_constant_body *)&dmy_iseq[1];
    dmy_iseq->body = dmy_body;
    dmy_body->type = ISEQ_TYPE_TOP;
    dmy_body->location.pathobj = fname;

    vm_push_frame(ec,
                  dmy_iseq, //const rb_iseq_t *iseq,
                  VM_FRAME_MAGIC_DUMMY | VM_ENV_FLAG_LOCAL | VM_FRAME_FLAG_FINISH, // VALUE type,
                  ec->cfp->self, // VALUE self,
                  VM_BLOCK_HANDLER_NONE, // VALUE specval,
                  Qfalse, // VALUE cref_or_me,
                  NULL, // const VALUE *pc,
                  ec->cfp->sp, // VALUE *sp,
                  0, // int local_size,
                  0); // int stack_max

    return tmpbuf;
}
458
459/* method dispatch */
460static inline VALUE
461rb_arity_error_new(int argc, int min, int max)
462{
463 VALUE err_mess = rb_sprintf("wrong number of arguments (given %d, expected %d", argc, min);
464 if (min == max) {
465 /* max is not needed */
466 }
467 else if (max == UNLIMITED_ARGUMENTS) {
468 rb_str_cat_cstr(err_mess, "+");
469 }
470 else {
471 rb_str_catf(err_mess, "..%d", max);
472 }
473 rb_str_cat_cstr(err_mess, ")");
474 return rb_exc_new3(rb_eArgError, err_mess);
475}
476
/* Raise the ArgumentError built by rb_arity_error_new.  Does not return. */
void
rb_error_arity(int argc, int min, int max)
{
    rb_exc_raise(rb_arity_error_new(argc, min, max));
}
482
483/* lvar */
484
NOINLINE(static void vm_env_write_slowpath(const VALUE *ep, int index, VALUE v));

/* Slow path for writes into an escaped (heap) env: re-register the env
 * with the GC write barrier, force the write, then clear WB_REQUIRED so
 * subsequent writes can take the fast path. */
static void
vm_env_write_slowpath(const VALUE *ep, int index, VALUE v)
{
    /* remember env value forcibly */
    rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
    VM_FORCE_WRITE(&ep[index], v);
    VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
    RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
}
496
/* Write `v` into env slot ep[index].  Stack envs (no WB_REQUIRED flag)
 * take a plain store; heap envs go through the write-barrier slow path. */
static inline void
vm_env_write(const VALUE *ep, int index, VALUE v)
{
    VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
    if (LIKELY((flags & VM_ENV_FLAG_WB_REQUIRED) == 0)) {
        VM_STACK_ENV_WRITE(ep, index, v);
    }
    else {
        vm_env_write_slowpath(ep, index, v);
    }
}
508
/* Exported wrapper around vm_env_write for use outside this file. */
void
rb_vm_env_write(const VALUE *ep, int index, VALUE v)
{
    vm_env_write(ep, index, v);
}
514
/* Convert a block handler to a Proc object (or Qnil when no block).
 * iseq/ifunc handlers become new Procs, a Symbol handler goes through
 * Symbol#to_proc, and a proc handler is returned as-is. */
VALUE
rb_vm_bh_to_procval(const rb_execution_context_t *ec, VALUE block_handler)
{
    if (block_handler == VM_BLOCK_HANDLER_NONE) {
        return Qnil;
    }
    else {
        switch (vm_block_handler_type(block_handler)) {
          case block_handler_type_iseq:
          case block_handler_type_ifunc:
            return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler), rb_cProc);
          case block_handler_type_symbol:
            return rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
          case block_handler_type_proc:
            return VM_BH_TO_PROC(block_handler);
          default:
            VM_UNREACHABLE(rb_vm_bh_to_procval);
        }
    }
}
535
536/* svar */
537
538#if VM_CHECK_MODE > 0
539static int
540vm_svar_valid_p(VALUE svar)
541{
542 if (RB_TYPE_P((VALUE)svar, T_IMEMO)) {
543 switch (imemo_type(svar)) {
544 case imemo_svar:
545 case imemo_cref:
546 case imemo_ment:
547 return TRUE;
548 default:
549 break;
550 }
551 }
552 rb_bug("vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));
553 return FALSE;
554}
555#endif
556
/* Fetch the svar slot for a local ep.  For the root lep (or a NULL lep)
 * the per-ec root_svar is used instead of the env slot.
 * NOTE(review): when ec == NULL and lep is non-NULL the env slot is read
 * directly — presumably callers pass ec == NULL only with a valid lep;
 * the else branch would dereference ec. */
static inline struct vm_svar *
lep_svar(const rb_execution_context_t *ec, const VALUE *lep)
{
    VALUE svar;

    if (lep && (ec == NULL || ec->root_lep != lep)) {
        svar = lep[VM_ENV_DATA_INDEX_ME_CREF];
    }
    else {
        svar = ec->root_svar;
    }

    VM_ASSERT(svar == Qfalse || vm_svar_valid_p(svar));

    return (struct vm_svar *)svar;
}
573
/* Store `svar` into the slot read by lep_svar: either the env's
 * ME_CREF slot (with write barrier via vm_env_write) or the per-ec
 * root_svar (with RB_OBJ_WRITE against the owning thread). */
static inline void
lep_svar_write(const rb_execution_context_t *ec, const VALUE *lep, const struct vm_svar *svar)
{
    VM_ASSERT(vm_svar_valid_p((VALUE)svar));

    if (lep && (ec == NULL || ec->root_lep != lep)) {
        vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (VALUE)svar);
    }
    else {
        RB_OBJ_WRITE(rb_ec_thread_ptr(ec)->self, &ec->root_svar, svar);
    }
}
586
587static VALUE
588lep_svar_get(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key)
589{
590 const struct vm_svar *svar = lep_svar(ec, lep);
591
592 if ((VALUE)svar == Qfalse || imemo_type((VALUE)svar) != imemo_svar) return Qnil;
593
594 switch (key) {
595 case VM_SVAR_LASTLINE:
596 return svar->lastline;
597 case VM_SVAR_BACKREF:
598 return svar->backref;
599 default: {
600 const VALUE ary = svar->others;
601
602 if (NIL_P(ary)) {
603 return Qnil;
604 }
605 else {
606 return rb_ary_entry(ary, key - VM_SVAR_EXTRA_START);
607 }
608 }
609 }
610}
611
/* Allocate a fresh svar imemo; `obj` becomes its cref_or_me slot,
 * lastline/backref/others start as Qnil. */
static struct vm_svar *
svar_new(VALUE obj)
{
    return (struct vm_svar *)rb_imemo_new(imemo_svar, Qnil, Qnil, Qnil, obj);
}
617
/* Set one special variable in the svar for `lep`, creating the svar
 * lazily (preserving whatever the slot held as the new svar's
 * cref_or_me).  All stores go through RB_OBJ_WRITE for GC safety. */
static void
lep_svar_set(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key, VALUE val)
{
    struct vm_svar *svar = lep_svar(ec, lep);

    /* no svar yet (slot holds Qfalse or a cref/ment): wrap it in one */
    if ((VALUE)svar == Qfalse || imemo_type((VALUE)svar) != imemo_svar) {
        lep_svar_write(ec, lep, svar = svar_new((VALUE)svar));
    }

    switch (key) {
      case VM_SVAR_LASTLINE:
        RB_OBJ_WRITE(svar, &svar->lastline, val);
        return;
      case VM_SVAR_BACKREF:
        RB_OBJ_WRITE(svar, &svar->backref, val);
        return;
      default: {
        /* extra slots live in a lazily-created array */
        VALUE ary = svar->others;

        if (NIL_P(ary)) {
            RB_OBJ_WRITE(svar, &svar->others, ary = rb_ary_new());
        }
        rb_ary_store(ary, key - VM_SVAR_EXTRA_START, val);
      }
    }
}
644
/* Read a special variable.
 * type == 0: plain svar lookup by `key` (e.g. $_, $~).
 * Otherwise `type` encodes a back-reference against $~:
 *   low bit set  -> named ref, (type >> 1) is the character
 *                   ('&' full match, '`' pre, '\'' post, '+' last group)
 *   low bit clear-> numbered ref, (type >> 1) is the group index ($1..). */
static inline VALUE
vm_getspecial(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key, rb_num_t type)
{
    VALUE val;

    if (type == 0) {
        val = lep_svar_get(ec, lep, key);
    }
    else {
        VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);

        if (type & 0x01) {
            switch (type >> 1) {
              case '&':
                val = rb_reg_last_match(backref);
                break;
              case '`':
                val = rb_reg_match_pre(backref);
                break;
              case '\'':
                val = rb_reg_match_post(backref);
                break;
              case '+':
                val = rb_reg_match_last(backref);
                break;
              default:
                rb_bug("unexpected back-ref");
            }
        }
        else {
            val = rb_reg_nth_match((int)(type >> 1), backref);
        }
    }
    return val;
}
680
/* `defined?` support for back-references, mirroring vm_getspecial's
 * encoding of `type`: named refs ('&', '`', '\'') map to group 0,
 * '+' asks for the last defined group, numbered refs use (type >> 1). */
static inline VALUE
vm_backref_defined(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t type)
{
    VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
    int nth = 0;

    if (type & 0x01) {
        switch (type >> 1) {
          case '&':
          case '`':
          case '\'':
            break;
          case '+':
            return rb_reg_last_defined(backref);
          default:
            rb_bug("unexpected back-ref");
        }
    }
    else {
        nth = (int)(type >> 1);
    }
    return rb_reg_nth_defined(nth, backref);
}
704
705PUREFUNC(static rb_callable_method_entry_t *check_method_entry(VALUE obj, int can_be_svar));
707check_method_entry(VALUE obj, int can_be_svar)
708{
709 if (obj == Qfalse) return NULL;
710
711#if VM_CHECK_MODE > 0
712 if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_method_entry: unknown type: %s", rb_obj_info(obj));
713#endif
714
715 switch (imemo_type(obj)) {
716 case imemo_ment:
717 return (rb_callable_method_entry_t *)obj;
718 case imemo_cref:
719 return NULL;
720 case imemo_svar:
721 if (can_be_svar) {
722 return check_method_entry(((struct vm_svar *)obj)->cref_or_me, FALSE);
723 }
724 default:
725#if VM_CHECK_MODE > 0
726 rb_bug("check_method_entry: svar should not be there:");
727#endif
728 return NULL;
729 }
730}
731
733rb_vm_frame_method_entry(const rb_control_frame_t *cfp)
734{
735 const VALUE *ep = cfp->ep;
737
738 while (!VM_ENV_LOCAL_P(ep)) {
739 if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return me;
740 ep = VM_ENV_PREV_EP(ep);
741 }
742
743 return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
744}
745
746static const rb_iseq_t *
747method_entry_iseqptr(const rb_callable_method_entry_t *me)
748{
749 switch (me->def->type) {
750 case VM_METHOD_TYPE_ISEQ:
751 return me->def->body.iseq.iseqptr;
752 default:
753 return NULL;
754 }
755}
756
757static rb_cref_t *
758method_entry_cref(const rb_callable_method_entry_t *me)
759{
760 switch (me->def->type) {
761 case VM_METHOD_TYPE_ISEQ:
762 return me->def->body.iseq.cref;
763 default:
764 return NULL;
765 }
766}
767
768#if VM_CHECK_MODE == 0
769PUREFUNC(static rb_cref_t *check_cref(VALUE, int));
770#endif
771static rb_cref_t *
772check_cref(VALUE obj, int can_be_svar)
773{
774 if (obj == Qfalse) return NULL;
775
776#if VM_CHECK_MODE > 0
777 if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_cref: unknown type: %s", rb_obj_info(obj));
778#endif
779
780 switch (imemo_type(obj)) {
781 case imemo_ment:
782 return method_entry_cref((rb_callable_method_entry_t *)obj);
783 case imemo_cref:
784 return (rb_cref_t *)obj;
785 case imemo_svar:
786 if (can_be_svar) {
787 return check_cref(((struct vm_svar *)obj)->cref_or_me, FALSE);
788 }
789 default:
790#if VM_CHECK_MODE > 0
791 rb_bug("check_method_entry: svar should not be there:");
792#endif
793 return NULL;
794 }
795}
796
797static inline rb_cref_t *
798vm_env_cref(const VALUE *ep)
799{
800 rb_cref_t *cref;
801
802 while (!VM_ENV_LOCAL_P(ep)) {
803 if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return cref;
804 ep = VM_ENV_PREV_EP(ep);
805 }
806
807 return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
808}
809
810static int
811is_cref(const VALUE v, int can_be_svar)
812{
813 if (RB_TYPE_P(v, T_IMEMO)) {
814 switch (imemo_type(v)) {
815 case imemo_cref:
816 return TRUE;
817 case imemo_svar:
818 if (can_be_svar) return is_cref(((struct vm_svar *)v)->cref_or_me, FALSE);
819 default:
820 break;
821 }
822 }
823 return FALSE;
824}
825
826static int
827vm_env_cref_by_cref(const VALUE *ep)
828{
829 while (!VM_ENV_LOCAL_P(ep)) {
830 if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) return TRUE;
831 ep = VM_ENV_PREV_EP(ep);
832 }
833 return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
834}
835
/* If *vptr holds a cref, replace it in place with a duplicate and
 * return the duplicate; recurse into an svar's cref_or_me when
 * can_be_svar.  `parent` is the GC-visible owner of the slot: when
 * present the store uses RB_OBJ_WRITE (write barrier), otherwise a
 * forced write (stack env slot).  Returns NULL when no cref was found;
 * finding a bare method entry here is a bug. */
static rb_cref_t *
cref_replace_with_duplicated_cref_each_frame(const VALUE *vptr, int can_be_svar, VALUE parent)
{
    const VALUE v = *vptr;
    rb_cref_t *cref, *new_cref;

    if (RB_TYPE_P(v, T_IMEMO)) {
        switch (imemo_type(v)) {
          case imemo_cref:
            cref = (rb_cref_t *)v;
            new_cref = vm_cref_dup(cref);
            if (parent) {
                RB_OBJ_WRITE(parent, vptr, new_cref);
            }
            else {
                VM_FORCE_WRITE(vptr, (VALUE)new_cref);
            }
            return (rb_cref_t *)new_cref;
          case imemo_svar:
            if (can_be_svar) {
                return cref_replace_with_duplicated_cref_each_frame(&((struct vm_svar *)v)->cref_or_me, FALSE, v);
            }
            /* fall through */
          case imemo_ment:
            rb_bug("cref_replace_with_duplicated_cref_each_frame: unreachable");
          default:
            break;
        }
    }
    return NULL;
}
867
/* Walk the env chain from `ep`, find the frame storing the cref, and
 * replace it with a duplicate (see the per-frame helper above).
 * The owning env object is passed as the GC parent only when the env
 * has escaped to the heap.  Precondition: a bare cref must exist on the
 * chain (vm_env_cref_by_cref); otherwise this is a bug. */
static rb_cref_t *
vm_cref_replace_with_duplicated_cref(const VALUE *ep)
{
    if (vm_env_cref_by_cref(ep)) {
        rb_cref_t *cref;
        VALUE envval;

        while (!VM_ENV_LOCAL_P(ep)) {
            envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
            if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {
                return cref;
            }
            ep = VM_ENV_PREV_EP(ep);
        }
        envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
        return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);
    }
    else {
        rb_bug("vm_cref_dup: unreachable");
    }
}
889
890static rb_cref_t *
891vm_get_cref(const VALUE *ep)
892{
893 rb_cref_t *cref = vm_env_cref(ep);
894
895 if (cref != NULL) {
896 return cref;
897 }
898 else {
899 rb_bug("vm_get_cref: unreachable");
900 }
901}
902
/* Exported wrapper around vm_get_cref for use outside this file. */
rb_cref_t *
rb_vm_get_cref(const VALUE *ep)
{
    return vm_get_cref(ep);
}
908
909static rb_cref_t *
910vm_ec_cref(const rb_execution_context_t *ec)
911{
912 const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);
913
914 if (cfp == NULL) {
915 return NULL;
916 }
917 return vm_get_cref(cfp->ep);
918}
919
/* Constant-cache key selection: if any cref on the chain refers to a
 * singleton class or a cloned class, the full cref must be part of the
 * cache key; otherwise NULL (the cache key need not include the cref). */
static const rb_cref_t *
vm_get_const_key_cref(const VALUE *ep)
{
    const rb_cref_t *cref = vm_get_cref(ep);
    const rb_cref_t *key_cref = cref;

    while (cref) {
        if (FL_TEST(CREF_CLASS(cref), FL_SINGLETON) ||
            RCLASS_EXT(CREF_CLASS(cref))->cloned) {
            return key_cref;
        }
        cref = CREF_NEXT(cref);
    }

    /* does not include singleton class */
    return NULL;
}
937
/* Rebuild a cref chain, substituting `new_klass` for the first link
 * whose class is `old_klass`.  Each earlier link is copied (sharing the
 * original's tail via vm_cref_new_use_prev) and appended through
 * *new_cref_ptr; the walk stops at the substitution point, so links
 * after it are shared with the original chain. */
void
rb_vm_rewrite_cref(rb_cref_t *cref, VALUE old_klass, VALUE new_klass, rb_cref_t **new_cref_ptr)
{
    rb_cref_t *new_cref;

    while (cref) {
        if (CREF_CLASS(cref) == old_klass) {
            /* found the target: link in the replacement and stop */
            new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
            *new_cref_ptr = new_cref;
            return;
        }
        new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
        cref = CREF_NEXT(cref);
        *new_cref_ptr = new_cref;
        new_cref_ptr = &new_cref->next;
    }
    *new_cref_ptr = NULL;
}
956
/* Create a new cref for `klass` whose parent is the cref of `ep`
 * (or, when ep is NULL, the cref of the Ruby-level caller frame).
 * Visibility starts as public. */
static rb_cref_t *
vm_cref_push(const rb_execution_context_t *ec, VALUE klass, const VALUE *ep, int pushed_by_eval, int singleton)
{
    rb_cref_t *prev_cref = NULL;

    if (ep) {
        prev_cref = vm_env_cref(ep);
    }
    else {
        rb_control_frame_t *cfp = vm_get_ruby_level_caller_cfp(ec, ec->cfp);

        if (cfp) {
            prev_cref = vm_env_cref(cfp->ep);
        }
    }

    return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval, singleton);
}
975
976static inline VALUE
977vm_get_cbase(const VALUE *ep)
978{
979 const rb_cref_t *cref = vm_get_cref(ep);
980
981 return CREF_CLASS_FOR_DEFINITION(cref);
982}
983
984static inline VALUE
985vm_get_const_base(const VALUE *ep)
986{
987 const rb_cref_t *cref = vm_get_cref(ep);
988
989 while (cref) {
990 if (!CREF_PUSHED_BY_EVAL(cref)) {
991 return CREF_CLASS_FOR_DEFINITION(cref);
992 }
993 cref = CREF_NEXT(cref);
994 }
995
996 return Qundef;
997}
998
999static inline void
1000vm_check_if_namespace(VALUE klass)
1001{
1002 if (!RB_TYPE_P(klass, T_CLASS) && !RB_TYPE_P(klass, T_MODULE)) {
1003 rb_raise(rb_eTypeError, "%+"PRIsVALUE" is not a class/module", klass);
1004 }
1005}
1006
/* Warn (not raise) when a definition is being performed with a
 * refinement module as `self` — the definition lands on the outer
 * class/module instead. */
static inline void
vm_ensure_not_refinement_module(VALUE self)
{
    if (RB_TYPE_P(self, T_MODULE) && FL_TEST(self, RMODULE_IS_REFINEMENT)) {
        rb_warn("not defined at the refinement, but at the outer class/module");
    }
}
1014
/* Currently an identity function: returns `klass` unchanged and ignores
 * `cfp` (kept for interface symmetry with callers that pass a frame). */
static inline VALUE
vm_get_iclass(const rb_control_frame_t *cfp, VALUE klass)
{
    return klass;
}
1020
/* Constant lookup for the VM.
 *
 * When orig_klass is nil and allow_nil is set, search the current
 * lexical scope (the cref chain of the current frame), triggering
 * autoload as needed, then fall back to self's class hierarchy.
 * Otherwise look the constant up in orig_klass (public constants only).
 *
 * is_defined selects `defined?` semantics: return a truthy flag instead
 * of the constant's value and never raise NameError for a missing
 * constant.  Ractor rule: non-main ractors may only read shareable
 * constant values. */
static inline VALUE
vm_get_ev_const(rb_execution_context_t *ec, VALUE orig_klass, ID id, bool allow_nil, int is_defined)
{
    void rb_const_warn_if_deprecated(const rb_const_entry_t *ce, VALUE klass, ID id);
    VALUE val;

    if (NIL_P(orig_klass) && allow_nil) {
        /* in current lexical scope */
        const rb_cref_t *root_cref = vm_get_cref(ec->cfp->ep);
        const rb_cref_t *cref;
        VALUE klass = Qnil;

        /* skip eval-pushed crefs at the root */
        while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
            root_cref = CREF_NEXT(root_cref);
        }
        cref = root_cref;
        while (cref && CREF_NEXT(cref)) {
            if (CREF_PUSHED_BY_EVAL(cref)) {
                klass = Qnil;
            }
            else {
                klass = CREF_CLASS(cref);
            }
            cref = CREF_NEXT(cref);

            if (!NIL_P(klass)) {
                VALUE av, am = 0;
                rb_const_entry_t *ce;
              search_continue:
                if ((ce = rb_const_lookup(klass, id))) {
                    rb_const_warn_if_deprecated(ce, klass, id);
                    val = ce->value;
                    if (UNDEF_P(val)) {
                        /* Qundef marks an autoload entry; `am` guards
                         * against retrying the same class forever */
                        if (am == klass) break;
                        am = klass;
                        if (is_defined) return 1;
                        if (rb_autoloading_value(klass, id, &av, NULL)) return av;
                        rb_autoload_load(klass, id);
                        goto search_continue;
                    }
                    else {
                        if (is_defined) {
                            return 1;
                        }
                        else {
                            if (UNLIKELY(!rb_ractor_main_p())) {
                                if (!rb_ractor_shareable_p(val)) {
                                    rb_raise(rb_eRactorIsolationError,
                                             "can not access non-shareable objects in constant %"PRIsVALUE"::%s by non-main ractor.", rb_class_path(klass), rb_id2name(id));
                                }
                            }
                            return val;
                        }
                    }
                }
            }
        }

        /* search self */
        if (root_cref && !NIL_P(CREF_CLASS(root_cref))) {
            klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));
        }
        else {
            klass = CLASS_OF(ec->cfp->self);
        }

        if (is_defined) {
            return rb_const_defined(klass, id);
        }
        else {
            return rb_const_get(klass, id);
        }
    }
    else {
        vm_check_if_namespace(orig_klass);
        if (is_defined) {
            return rb_public_const_defined_from(orig_klass, id);
        }
        else {
            return rb_public_const_get_from(orig_klass, id);
        }
    }
}
1104
/* Exported wrapper around vm_get_ev_const (value lookup only;
 * is_defined == 0). */
VALUE
rb_vm_get_ev_const(rb_execution_context_t *ec, VALUE orig_klass, ID id, VALUE allow_nil)
{
    return vm_get_ev_const(ec, orig_klass, id, allow_nil == Qtrue, 0);
}
1110
/* Resolve a constant path given as a NUL(idNULL)-terminated segment
 * array, e.g. A::B::C.  A leading idNULL segment anchors the search at
 * Object (::A::B).  Only the first segment may use lexical (nil-klass)
 * lookup. */
static inline VALUE
vm_get_ev_const_chain(rb_execution_context_t *ec, const ID *segments)
{
    VALUE val = Qnil;
    int idx = 0;
    int allow_nil = TRUE;
    if (segments[0] == idNULL) {
        val = rb_cObject;
        idx++;
        allow_nil = FALSE;
    }
    while (segments[idx]) {
        ID id = segments[idx++];
        val = vm_get_ev_const(ec, val, id, allow_nil, 0);
        allow_nil = FALSE;
    }
    return val;
}
1129
1130
/* Find the class used for class-variable access: walk the cref chain
 * past nil/singleton/eval-pushed links.  Raises RuntimeError for
 * toplevel access (when requested) and TypeError when no class is
 * available. */
static inline VALUE
vm_get_cvar_base(const rb_cref_t *cref, const rb_control_frame_t *cfp, int top_level_raise)
{
    VALUE klass;

    if (!cref) {
        rb_bug("vm_get_cvar_base: no cref");
    }

    while (CREF_NEXT(cref) &&
           (NIL_P(CREF_CLASS(cref)) || FL_TEST(CREF_CLASS(cref), FL_SINGLETON) ||
            CREF_PUSHED_BY_EVAL(cref) || CREF_SINGLETON(cref))) {
        cref = CREF_NEXT(cref);
    }
    if (top_level_raise && !CREF_NEXT(cref)) {
        rb_raise(rb_eRuntimeError, "class variable access from toplevel");
    }

    klass = vm_get_iclass(cfp, CREF_CLASS(cref));

    if (NIL_P(klass)) {
        rb_raise(rb_eTypeError, "no class variables available");
    }
    return klass;
}
1156
1157ALWAYS_INLINE(static void fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, attr_index_t index, shape_id_t shape_id));
1158static inline void
1159fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, attr_index_t index, shape_id_t shape_id)
1160{
1161 if (is_attr) {
1162 vm_cc_attr_index_set(cc, index, shape_id);
1163 }
1164 else {
1165 vm_ic_attr_index_set(iseq, ic, index, shape_id);
1166 }
1167}
1168
1169#define ractor_incidental_shareable_p(cond, val) \
1170 (!(cond) || rb_ractor_shareable_p(val))
1171#define ractor_object_incidental_shareable_p(obj, val) \
1172 ractor_incidental_shareable_p(rb_ractor_shareable_p(obj), val)
1173
1174#define ATTR_INDEX_NOT_SET (attr_index_t)-1
1175
ALWAYS_INLINE(static VALUE vm_getivar(VALUE, ID, const rb_iseq_t *, IVC, const struct rb_callcache *, int, VALUE));
/* Shape-cached instance variable read.
 *
 * obj/id:        receiver and ivar name.
 * ic/cc/is_attr: when is_attr is true the lookup is driven by the call
 *                cache (attr_reader); otherwise by the inline cache.
 * default_value: returned when the ivar is not set (Qnil for ordinary
 *                getinstancevariable; Qundef lets callers detect "unset").
 *
 * Fast path: when the cached shape id matches the receiver's current
 * shape, the ivar is read directly by cached index.  On miss the cache
 * is refilled from the shape tree, or the st_table is consulted for
 * too-complex objects.  Non-main ractors take the locked general path
 * for classes/modules. */
static inline VALUE
vm_getivar(VALUE obj, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, VALUE default_value)
{
#if OPT_IC_FOR_IVAR
    VALUE val = Qundef;
    shape_id_t shape_id;
    VALUE * ivar_list;

    if (SPECIAL_CONST_P(obj)) {
        return default_value;
    }

#if SHAPE_IN_BASIC_FLAGS
    shape_id = RBASIC_SHAPE_ID(obj);
#endif

    /* Locate the ivar storage and (when not in basic flags) the shape id
     * for each receiver type. */
    switch (BUILTIN_TYPE(obj)) {
      case T_OBJECT:
        ivar_list = ROBJECT_IVPTR(obj);
        VM_ASSERT(rb_ractor_shareable_p(obj) ? rb_ractor_shareable_p(val) : true);

#if !SHAPE_IN_BASIC_FLAGS
        shape_id = ROBJECT_SHAPE_ID(obj);
#endif
        break;
      case T_CLASS:
      case T_MODULE:
        {
            if (UNLIKELY(!rb_ractor_main_p())) {
                // For two reasons we can only use the fast path on the main
                // ractor.
                // First, only the main ractor is allowed to set ivars on classes
                // and modules. So we can skip locking.
                // Second, other ractors need to check the shareability of the
                // values returned from the class ivars.
                goto general_path;
            }

            ivar_list = RCLASS_IVPTR(obj);

#if !SHAPE_IN_BASIC_FLAGS
            shape_id = RCLASS_SHAPE_ID(obj);
#endif

            break;
        }
      default:
        /* Generic objects keep their ivars in an external table. */
        if (FL_TEST_RAW(obj, FL_EXIVAR)) {
            struct gen_ivtbl *ivtbl;
            rb_gen_ivtbl_get(obj, id, &ivtbl);
#if !SHAPE_IN_BASIC_FLAGS
            shape_id = ivtbl->shape_id;
#endif
            ivar_list = ivtbl->as.shape.ivptr;
        }
        else {
            return default_value;
        }
    }

    shape_id_t cached_id;
    attr_index_t index;

    if (is_attr) {
        vm_cc_atomic_shape_and_index(cc, &cached_id, &index);
    }
    else {
        vm_ic_atomic_shape_and_index(ic, &cached_id, &index);
    }

    if (LIKELY(cached_id == shape_id)) {
        /* Cache hit: the cached index is valid for this shape. */
        RUBY_ASSERT(cached_id != OBJ_TOO_COMPLEX_SHAPE_ID);

        if (index == ATTR_INDEX_NOT_SET) {
            return default_value;
        }

        val = ivar_list[index];
#if USE_DEBUG_COUNTER
        RB_DEBUG_COUNTER_INC(ivar_get_ic_hit);

        if (RB_TYPE_P(obj, T_OBJECT)) {
            RB_DEBUG_COUNTER_INC(ivar_get_obj_hit);
        }
#endif
        RUBY_ASSERT(!UNDEF_P(val));
    }
    else { // cache miss case
#if USE_DEBUG_COUNTER
        if (is_attr) {
            if (cached_id != INVALID_SHAPE_ID) {
                RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_set);
            }
            else {
                RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_unset);
            }
        }
        else {
            if (cached_id != INVALID_SHAPE_ID) {
                RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_set);
            }
            else {
                RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_unset);
            }
        }
        RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);

        if (RB_TYPE_P(obj, T_OBJECT)) {
            RB_DEBUG_COUNTER_INC(ivar_get_obj_miss);
        }
#endif

        if (shape_id == OBJ_TOO_COMPLEX_SHAPE_ID) {
            /* Too-complex objects store ivars in an st_table instead of
             * a flat array; look the name up directly. */
            st_table *table = NULL;
            switch (BUILTIN_TYPE(obj)) {
              case T_CLASS:
              case T_MODULE:
                table = (st_table *)RCLASS_IVPTR(obj);
                break;

              case T_OBJECT:
                table = ROBJECT_IV_HASH(obj);
                break;

              default: {
                  struct gen_ivtbl *ivtbl;
                  if (rb_gen_ivtbl_get(obj, 0, &ivtbl)) {
                      table = ivtbl->as.complex.table;
                  }
                  break;
              }
            }

            if (!table || !st_lookup(table, id, &val)) {
                val = default_value;
            }
        }
        else {
            /* Re-resolve the index from the shape tree and refill the
             * cache for the next access. */
            shape_id_t previous_cached_id = cached_id;
            if (rb_shape_get_iv_index_with_hint(shape_id, id, &index, &cached_id)) {
                // This fills in the cache with the shared cache object.
                // "ent" is the shared cache object
                if (cached_id != previous_cached_id) {
                    fill_ivar_cache(iseq, ic, cc, is_attr, index, cached_id);
                }

                if (index == ATTR_INDEX_NOT_SET) {
                    val = default_value;
                }
                else {
                    // We fetched the ivar list above
                    val = ivar_list[index];
                    RUBY_ASSERT(!UNDEF_P(val));
                }
            }
            else {
                /* Ivar not present on this shape: cache the negative
                 * result so the next read is a fast miss. */
                if (is_attr) {
                    vm_cc_attr_index_initialize(cc, shape_id);
                }
                else {
                    vm_ic_attr_index_initialize(ic, shape_id);
                }

                val = default_value;
            }
        }

    }

    if (default_value != Qundef) {
        RUBY_ASSERT(!UNDEF_P(val));
    }

    return val;

  general_path:
#endif /* OPT_IC_FOR_IVAR */
    RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);

    /* Uncached fallback; rb_attr_get skips the uninitialized-ivar warning. */
    if (is_attr) {
        return rb_attr_get(obj, id);
    }
    else {
        return rb_ivar_get(obj, id);
    }
}
1363
1364static void
1365populate_cache(attr_index_t index, shape_id_t next_shape_id, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, bool is_attr)
1366{
1367 RUBY_ASSERT(next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);
1368
1369 // Cache population code
1370 if (is_attr) {
1371 vm_cc_attr_index_set(cc, index, next_shape_id);
1372 }
1373 else {
1374 vm_ic_attr_index_set(iseq, ic, index, next_shape_id);
1375 }
1376}
1377
1378ALWAYS_INLINE(static VALUE vm_setivar_slowpath(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr));
1379NOINLINE(static VALUE vm_setivar_slowpath_ivar(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic));
1380NOINLINE(static VALUE vm_setivar_slowpath_attr(VALUE obj, ID id, VALUE val, const struct rb_callcache *cc));
1381
1382static VALUE
1383vm_setivar_slowpath(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr)
1384{
1385#if OPT_IC_FOR_IVAR
1386 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);
1387
1388 if (BUILTIN_TYPE(obj) == T_OBJECT) {
1390
1391 attr_index_t index = rb_obj_ivar_set(obj, id, val);
1392
1393 shape_id_t next_shape_id = ROBJECT_SHAPE_ID(obj);
1394
1395 if (next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID) {
1396 populate_cache(index, next_shape_id, id, iseq, ic, cc, is_attr);
1397 }
1398
1399 RB_DEBUG_COUNTER_INC(ivar_set_obj_miss);
1400 return val;
1401 }
1402#endif
1403 return rb_ivar_set(obj, id, val);
1404}
1405
/* Out-of-line slow path for plain ivar writes (inline-cache variant). */
static VALUE
vm_setivar_slowpath_ivar(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic)
{
    return vm_setivar_slowpath(obj, id, val, iseq, ic, NULL, false);
}
1411
/* Out-of-line slow path for attr_writer-style writes (call-cache variant). */
static VALUE
vm_setivar_slowpath_attr(VALUE obj, ID id, VALUE val, const struct rb_callcache *cc)
{
    return vm_setivar_slowpath(obj, id, val, NULL, NULL, cc, true);
}
1417
NOINLINE(static VALUE vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index));
/* Cached ivar write for generic (non-T_OBJECT/class) receivers that keep
 * ivars in a gen_ivtbl.  Accepts the write only when the receiver's shape
 * matches the cached shape, or when the cached destination shape is the
 * direct child transition for `id`; otherwise returns Qundef so the
 * caller falls back to the slow path. */
static VALUE
vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)
{
#if SHAPE_IN_BASIC_FLAGS
    shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
#else
    shape_id_t shape_id = rb_generic_shape_id(obj);
#endif

    struct gen_ivtbl *ivtbl = 0;

    // Cache hit case
    if (shape_id == dest_shape_id) {
        RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
    }
    else if (dest_shape_id != INVALID_SHAPE_ID) {
        /* Possible shape transition: valid only when the cached dest shape
         * is the child of the current shape for exactly this ivar name and
         * no capacity growth is needed. */
        rb_shape_t *shape = rb_shape_get_shape_by_id(shape_id);
        rb_shape_t *dest_shape = rb_shape_get_shape_by_id(dest_shape_id);

        if (shape_id == dest_shape->parent_id && dest_shape->edge_name == id && shape->capacity == dest_shape->capacity) {
            RUBY_ASSERT(index < dest_shape->capacity);
        }
        else {
            return Qundef;
        }
    }
    else {
        return Qundef;
    }

    rb_gen_ivtbl_get(obj, 0, &ivtbl);

    /* Commit the shape transition before storing the value. */
    if (shape_id != dest_shape_id) {
#if SHAPE_IN_BASIC_FLAGS
        RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
#else
        ivtbl->shape_id = dest_shape_id;
#endif
    }

    RB_OBJ_WRITE(obj, &ivtbl->as.shape.ivptr[index], val);

    RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);

    return val;
}
1465
/* Fast-path ivar write for T_OBJECT receivers using the cached
 * (dest_shape_id, index) pair.  Handles two cases: exact shape match
 * (overwrite of an existing ivar) and the cached single-step shape
 * transition for `id`.  Returns Qundef when the cache does not apply
 * (including classes/modules, which always take the locked slow path).
 * NOTE(review): with OPT_IC_FOR_IVAR disabled this function has no
 * return statement — presumably OPT_IC_FOR_IVAR is always nonzero in
 * supported builds; confirm against vm_opts.h. */
static inline VALUE
vm_setivar(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)
{
#if OPT_IC_FOR_IVAR
    switch (BUILTIN_TYPE(obj)) {
      case T_OBJECT:
        {
            VM_ASSERT(!rb_ractor_shareable_p(obj) || rb_obj_frozen_p(obj));

            shape_id_t shape_id = ROBJECT_SHAPE_ID(obj);
            RUBY_ASSERT(dest_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);

            if (LIKELY(shape_id == dest_shape_id)) {
                /* Cache hit: plain overwrite, no shape change. */
                RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
                VM_ASSERT(!rb_ractor_shareable_p(obj));
            }
            else if (dest_shape_id != INVALID_SHAPE_ID) {
                /* Cached transition: only valid when dest is the direct
                 * child of the current shape for this ivar name and the
                 * ivar buffer does not need to grow. */
                rb_shape_t *shape = rb_shape_get_shape_by_id(shape_id);
                rb_shape_t *dest_shape = rb_shape_get_shape_by_id(dest_shape_id);
                shape_id_t source_shape_id = dest_shape->parent_id;

                if (shape_id == source_shape_id && dest_shape->edge_name == id && shape->capacity == dest_shape->capacity) {
                    RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);

                    ROBJECT_SET_SHAPE_ID(obj, dest_shape_id);

                    RUBY_ASSERT(rb_shape_get_next_iv_shape(rb_shape_get_shape_by_id(source_shape_id), id) == dest_shape);
                    RUBY_ASSERT(index < dest_shape->capacity);
                }
                else {
                    break;
                }
            }
            else {
                break;
            }

            VALUE *ptr = ROBJECT_IVPTR(obj);

            RUBY_ASSERT(!rb_shape_obj_too_complex(obj));
            RB_OBJ_WRITE(obj, &ptr[index], val);

            RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
            RB_DEBUG_COUNTER_INC(ivar_set_obj_hit);
            return val;
        }
        break;
      case T_CLASS:
      case T_MODULE:
        RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
      default:
        break;
    }

    /* Cache not applicable: caller must take a slower path. */
    return Qundef;
#endif /* OPT_IC_FOR_IVAR */
}
1523
/* Perform an uncached class-variable read and refresh the per-iseq
 * cvar inline cache with the entry owned by the defining class.
 * Returns the looked-up value; rb_bug on missing cvc table/entry since
 * rb_cvar_find is expected to have created them. */
static VALUE
update_classvariable_cache(const rb_iseq_t *iseq, VALUE klass, ID id, const rb_cref_t * cref, ICVARC ic)
{
    VALUE defined_class = 0;
    VALUE cvar_value = rb_cvar_find(klass, id, &defined_class);

    /* Normalize an iclass (included module) back to the real module. */
    if (RB_TYPE_P(defined_class, T_ICLASS)) {
        defined_class = RBASIC(defined_class)->klass;
    }

    struct rb_id_table *rb_cvc_tbl = RCLASS_CVC_TBL(defined_class);
    if (!rb_cvc_tbl) {
        rb_bug("the cvc table should be set");
    }

    VALUE ent_data;
    if (!rb_id_table_lookup(rb_cvc_tbl, id, &ent_data)) {
        rb_bug("should have cvar cache entry");
    }

    struct rb_cvar_class_tbl_entry *ent = (void *)ent_data;

    /* Stamp the entry with the current global cvar state and the cref it
     * was resolved under, then point the inline cache at it. */
    ent->global_cvar_state = GET_GLOBAL_CVAR_STATE();
    ent->cref = cref;
    ic->entry = ent;

    RUBY_ASSERT(BUILTIN_TYPE((VALUE)cref) == T_IMEMO && IMEMO_TYPE_P(cref, imemo_cref));
    /* Write barriers: the iseq and the class now reference the cref. */
    RB_OBJ_WRITTEN(iseq, Qundef, ent->cref);
    RB_OBJ_WRITTEN(iseq, Qundef, ent->class_value);
    RB_OBJ_WRITTEN(ent->class_value, Qundef, ent->cref);

    return cvar_value;
}
1557
/* getclassvariable instruction body.  Uses the inline cache when the
 * global cvar state and resolving cref are unchanged (main ractor only);
 * otherwise resolves the base class and refills the cache. */
static inline VALUE
vm_getclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *reg_cfp, ID id, ICVARC ic)
{
    const rb_cref_t *cref;
    cref = vm_get_cref(GET_EP());

    if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
        RB_DEBUG_COUNTER_INC(cvar_read_inline_hit);

        /* Cache hit: read straight off the owning class. */
        VALUE v = rb_ivar_lookup(ic->entry->class_value, id, Qundef);
        RUBY_ASSERT(!UNDEF_P(v));

        return v;
    }

    VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);

    return update_classvariable_cache(iseq, klass, id, cref, ic);
}
1577
/* Exported wrapper around vm_getclassvariable (used by JIT/other TUs). */
VALUE
rb_vm_getclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *cfp, ID id, ICVARC ic)
{
    return vm_getclassvariable(iseq, cfp, id, ic);
}
1583
/* setclassvariable instruction body.  On an inline-cache hit (same global
 * cvar state and cref, main ractor) writes directly to the owning class;
 * otherwise performs a full rb_cvar_set and refreshes the cache. */
static inline void
vm_setclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *reg_cfp, ID id, VALUE val, ICVARC ic)
{
    const rb_cref_t *cref;
    cref = vm_get_cref(GET_EP());

    if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
        RB_DEBUG_COUNTER_INC(cvar_write_inline_hit);

        rb_class_ivar_set(ic->entry->class_value, id, val);
        return;
    }

    VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);

    rb_cvar_set(klass, id, val);

    update_classvariable_cache(iseq, klass, id, cref, ic);
}
1603
/* Exported wrapper around vm_setclassvariable (used by JIT/other TUs). */
void
rb_vm_setclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *cfp, ID id, VALUE val, ICVARC ic)
{
    vm_setclassvariable(iseq, cfp, id, val, ic);
}
1609
/* getinstancevariable instruction body: inline-cache-driven ivar read,
 * yielding nil when the ivar is unset. */
static inline VALUE
vm_getinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, IVC ic)
{
    return vm_getivar(obj, id, iseq, ic, NULL, FALSE, Qnil);
}
1615
1616static inline void
1617vm_setinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, VALUE val, IVC ic)
1618{
1619 if (RB_SPECIAL_CONST_P(obj)) {
1621 return;
1622 }
1623
1624 shape_id_t dest_shape_id;
1625 attr_index_t index;
1626 vm_ic_atomic_shape_and_index(ic, &dest_shape_id, &index);
1627
1628 if (UNLIKELY(UNDEF_P(vm_setivar(obj, id, val, dest_shape_id, index)))) {
1629 switch (BUILTIN_TYPE(obj)) {
1630 case T_OBJECT:
1631 case T_CLASS:
1632 case T_MODULE:
1633 break;
1634 default:
1635 if (!UNDEF_P(vm_setivar_default(obj, id, val, dest_shape_id, index))) {
1636 return;
1637 }
1638 }
1639 vm_setivar_slowpath_ivar(obj, id, val, iseq, ic);
1640 }
1641}
1642
/* Exported wrapper around vm_setinstancevariable (used by JIT/other TUs). */
void
rb_vm_setinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, VALUE val, IVC ic)
{
    vm_setinstancevariable(iseq, obj, id, val, ic);
}
1648
1649static VALUE
1650vm_throw_continue(const rb_execution_context_t *ec, VALUE err)
1651{
1652 /* continue throw */
1653
1654 if (FIXNUM_P(err)) {
1655 ec->tag->state = RUBY_TAG_FATAL;
1656 }
1657 else if (SYMBOL_P(err)) {
1658 ec->tag->state = TAG_THROW;
1659 }
1660 else if (THROW_DATA_P(err)) {
1661 ec->tag->state = THROW_DATA_STATE((struct vm_throw_data *)err);
1662 }
1663 else {
1664 ec->tag->state = TAG_RAISE;
1665 }
1666 return err;
1667}
1668
/* Begin a non-local jump (break/retry/return) from the current frame.
 *
 * Locates the control frame (`escape_cfp`) that should catch the jump,
 * raising LocalJumpError when the target has already exited ("orphan"
 * break, "unexpected return").  Returns a THROW_DATA imemo carrying the
 * thrown value, target frame, and final state; also stamps the state on
 * ec->tag. */
static VALUE
vm_throw_start(const rb_execution_context_t *ec, rb_control_frame_t *const reg_cfp, enum ruby_tag_type state,
               const int flag, const VALUE throwobj)
{
    const rb_control_frame_t *escape_cfp = NULL;
    const rb_control_frame_t * const eocfp = RUBY_VM_END_CONTROL_FRAME(ec); /* end of control frame pointer */

    if (flag != 0) {
        /* do nothing */
    }
    else if (state == TAG_BREAK) {
        int is_orphan = 1;
        const VALUE *ep = GET_EP();
        const rb_iseq_t *base_iseq = GET_ISEQ();
        escape_cfp = reg_cfp;

        /* Walk out to the defining block iseq, following either the frame
         * chain (class scopes) or the env chain. */
        while (ISEQ_BODY(base_iseq)->type != ISEQ_TYPE_BLOCK) {
            if (ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
                escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
                ep = escape_cfp->ep;
                base_iseq = escape_cfp->iseq;
            }
            else {
                ep = VM_ENV_PREV_EP(ep);
                base_iseq = ISEQ_BODY(base_iseq)->parent_iseq;
                escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);
                VM_ASSERT(escape_cfp->iseq == base_iseq);
            }
        }

        if (VM_FRAME_LAMBDA_P(escape_cfp)) {
            /* lambda{... break ...} */
            is_orphan = 0;
            state = TAG_RETURN;
        }
        else {
            ep = VM_ENV_PREV_EP(ep);

            /* Search live frames for the method that yielded this block
             * and confirm it still has a matching BREAK catch entry. */
            while (escape_cfp < eocfp) {
                if (escape_cfp->ep == ep) {
                    const rb_iseq_t *const iseq = escape_cfp->iseq;
                    const VALUE epc = escape_cfp->pc - ISEQ_BODY(iseq)->iseq_encoded;
                    const struct iseq_catch_table *const ct = ISEQ_BODY(iseq)->catch_table;
                    unsigned int i;

                    if (!ct) break;
                    for (i=0; i < ct->size; i++) {
                        const struct iseq_catch_table_entry *const entry =
                            UNALIGNED_MEMBER_PTR(ct, entries[i]);

                        if (entry->type == CATCH_TYPE_BREAK &&
                            entry->iseq == base_iseq &&
                            entry->start < epc && entry->end >= epc) {
                            if (entry->cont == epc) { /* found! */
                                is_orphan = 0;
                            }
                            break;
                        }
                    }
                    break;
                }

                escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
            }
        }

        if (is_orphan) {
            rb_vm_localjump_error("break from proc-closure", throwobj, TAG_BREAK);
        }
    }
    else if (state == TAG_RETRY) {
        const VALUE *ep = VM_ENV_PREV_EP(GET_EP());

        escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);
    }
    else if (state == TAG_RETURN) {
        const VALUE *current_ep = GET_EP();
        const VALUE *target_ep = NULL, *target_lep, *ep = current_ep;
        int in_class_frame = 0;
        int toplevel = 1;
        escape_cfp = reg_cfp;

        // find target_lep, target_ep
        while (!VM_ENV_LOCAL_P(ep)) {
            if (VM_ENV_FLAGS(ep, VM_FRAME_FLAG_LAMBDA) && target_ep == NULL) {
                target_ep = ep;
            }
            ep = VM_ENV_PREV_EP(ep);
        }
        target_lep = ep;

        /* Walk outward over live frames to decide whether the `return`
         * has a valid target (enclosing method/lambda/toplevel). */
        while (escape_cfp < eocfp) {
            const VALUE *lep = VM_CF_LEP(escape_cfp);

            if (!target_lep) {
                target_lep = lep;
            }

            if (lep == target_lep &&
                VM_FRAME_RUBYFRAME_P(escape_cfp) &&
                ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
                in_class_frame = 1;
                target_lep = 0;
            }

            if (lep == target_lep) {
                if (VM_FRAME_LAMBDA_P(escape_cfp)) {
                    toplevel = 0;
                    if (in_class_frame) {
                        /* lambda {class A; ... return ...; end} */
                        goto valid_return;
                    }
                    else {
                        const VALUE *tep = current_ep;

                        while (target_lep != tep) {
                            if (escape_cfp->ep == tep) {
                                /* in lambda */
                                if (tep == target_ep) {
                                    goto valid_return;
                                }
                                else {
                                    goto unexpected_return;
                                }
                            }
                            tep = VM_ENV_PREV_EP(tep);
                        }
                    }
                }
                else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
                    switch (ISEQ_BODY(escape_cfp->iseq)->type) {
                      case ISEQ_TYPE_TOP:
                      case ISEQ_TYPE_MAIN:
                        if (toplevel) {
                            if (in_class_frame) goto unexpected_return;
                            if (target_ep == NULL) {
                                goto valid_return;
                            }
                            else {
                                goto unexpected_return;
                            }
                        }
                        break;
                      case ISEQ_TYPE_EVAL: {
                        /* eval frames count as toplevel only when their
                         * lexical parent chain reaches TOP/MAIN. */
                        const rb_iseq_t *is = escape_cfp->iseq;
                        enum rb_iseq_type t = ISEQ_BODY(is)->type;
                        while (t == ISEQ_TYPE_RESCUE || t == ISEQ_TYPE_ENSURE || t == ISEQ_TYPE_EVAL) {
                            if (!(is = ISEQ_BODY(is)->parent_iseq)) break;
                            t = ISEQ_BODY(is)->type;
                        }
                        toplevel = t == ISEQ_TYPE_TOP || t == ISEQ_TYPE_MAIN;
                        break;
                      }
                      case ISEQ_TYPE_CLASS:
                        toplevel = 0;
                        break;
                      default:
                        break;
                    }
                }
            }

            if (escape_cfp->ep == target_lep && ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_METHOD) {
                if (target_ep == NULL) {
                    goto valid_return;
                }
                else {
                    goto unexpected_return;
                }
            }

            escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
        }
      unexpected_return:;
        rb_vm_localjump_error("unexpected return", throwobj, TAG_RETURN);

      valid_return:;
        /* do nothing */
    }
    else {
        rb_bug("isns(throw): unsupported throw type");
    }

    ec->tag->state = state;
    return (VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);
}
1855
1856static VALUE
1857vm_throw(const rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
1858 rb_num_t throw_state, VALUE throwobj)
1859{
1860 const int state = (int)(throw_state & VM_THROW_STATE_MASK);
1861 const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);
1862
1863 if (state != 0) {
1864 return vm_throw_start(ec, reg_cfp, state, flag, throwobj);
1865 }
1866 else {
1867 return vm_throw_continue(ec, throwobj);
1868 }
1869}
1870
/* Exported wrapper around vm_throw (used by JIT/other TUs). */
VALUE
rb_vm_throw(const rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t throw_state, VALUE throwobj)
{
    return vm_throw(ec, reg_cfp, throw_state, throwobj);
}
1876
/* expandarray instruction: push `num` destructured elements of `ary`
 * onto the VM stack, padding with nil when the array is short.
 * flag & 0x01 (splat): also push an array of the leftover elements.
 * flag & 0x02 (post):  push in reverse order for post-args.
 * Non-arrays without to_ary are treated as a one-element array. */
static inline void
vm_expandarray(struct rb_control_frame_struct *cfp, VALUE ary, rb_num_t num, int flag)
{
    int is_splat = flag & 0x01;
    const VALUE *ptr;
    rb_num_t len;
    const VALUE obj = ary;

    if (!RB_TYPE_P(ary, T_ARRAY) && NIL_P(ary = rb_check_array_type(ary))) {
        /* Not coercible to Array: treat the object itself as [obj]. */
        ary = obj;
        ptr = &ary;
        len = 1;
    }
    else {
        ptr = RARRAY_CONST_PTR(ary);
        len = (rb_num_t)RARRAY_LEN(ary);
    }

    if (num + is_splat == 0) {
        /* no space left on stack */
    }
    else if (flag & 0x02) {
        /* post: ..., nil ,ary[-1], ..., ary[0..-num] # top */
        rb_num_t i = 0, j;

        if (len < num) {
            /* Pad with nil for missing trailing elements. */
            for (i = 0; i < num - len; i++) {
                *cfp->sp++ = Qnil;
            }
        }

        /* Push the last `num` elements, last first. */
        for (j = 0; i < num; i++, j++) {
            VALUE v = ptr[len - j - 1];
            *cfp->sp++ = v;
        }

        if (is_splat) {
            /* Rest array gets everything before the post elements. */
            *cfp->sp++ = rb_ary_new4(len - j, ptr);
        }
    }
    else {
        /* normal: ary[num..-1], ary[num-2], ary[num-3], ..., ary[0] # top */
        if (is_splat) {
            if (num > len) {
                *cfp->sp++ = rb_ary_new();
            }
            else {
                *cfp->sp++ = rb_ary_new4(len - num, ptr + num);
            }
        }

        if (num > len) {
            /* nil-pad, then push available elements in reverse. */
            rb_num_t i = 0;
            for (; i < num - len; i++) {
                *cfp->sp++ = Qnil;
            }

            for (rb_num_t j = 0; i < num; i++, j++) {
                *cfp->sp++ = ptr[len - j - 1];
            }
        }
        else {
            for (rb_num_t j = 0; j < num; j++) {
                *cfp->sp++ = ptr[num - j - 1];
            }
        }
    }

    /* Keep `ary` alive across the raw pointer reads above. */
    RB_GC_GUARD(ary);
}
1947
1948static VALUE vm_call_general(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling);
1949
1950static VALUE vm_mtbl_dump(VALUE klass, ID target_mid);
1951
/* Allocate an empty call-cache-entries list for (klass, mid), bound to
 * `cme`, and register it in the class's cc table.  Marks the cme as
 * cached so invalidation can find it. */
static struct rb_class_cc_entries *
vm_ccs_create(VALUE klass, struct rb_id_table *cc_tbl, ID mid, const rb_callable_method_entry_t *cme)
{
    struct rb_class_cc_entries *ccs = ALLOC(struct rb_class_cc_entries);
#if VM_CHECK_MODE > 0
    /* Debug signature used by vm_ccs_p() to validate pointers. */
    ccs->debug_sig = ~(VALUE)ccs;
#endif
    ccs->capa = 0;
    ccs->len = 0;
    ccs->cme = cme;
    METHOD_ENTRY_CACHED_SET((rb_callable_method_entry_t *)cme);
    ccs->entries = NULL;

    rb_id_table_insert(cc_tbl, mid, (VALUE)ccs);
    /* Write barrier: klass now (indirectly) references cme. */
    RB_OBJ_WRITTEN(klass, Qundef, cme);
    return ccs;
}
1969
/* Append a (ci, cc) pair to a class's call-cache-entries list, growing
 * the backing array geometrically.  Unmarkable ci/cc are skipped since
 * they cannot be kept alive by the class. */
static void
vm_ccs_push(VALUE klass, struct rb_class_cc_entries *ccs, const struct rb_callinfo *ci, const struct rb_callcache *cc)
{
    if (! vm_cc_markable(cc)) {
        return;
    }
    else if (! vm_ci_markable(ci)) {
        return;
    }

    /* Grow: 0 -> 1, then double. */
    if (UNLIKELY(ccs->len == ccs->capa)) {
        if (ccs->capa == 0) {
            ccs->capa = 1;
            ccs->entries = ALLOC_N(struct rb_class_cc_entries_entry, ccs->capa);
        }
        else {
            ccs->capa *= 2;
            REALLOC_N(ccs->entries, struct rb_class_cc_entries_entry, ccs->capa);
        }
    }
    VM_ASSERT(ccs->len < ccs->capa);

    const int pos = ccs->len++;
    /* Write barriers: klass keeps ci/cc alive. */
    RB_OBJ_WRITE(klass, &ccs->entries[pos].ci, ci);
    RB_OBJ_WRITE(klass, &ccs->entries[pos].cc, cc);

    if (RB_DEBUG_COUNTER_SETMAX(ccs_maxlen, ccs->len)) {
        // for tuning
        // vm_mtbl_dump(klass, 0);
    }
}
2001
2002#if VM_CHECK_MODE > 0
/* Debug helper (VM_CHECK_MODE builds): print a ccs list and its entries. */
void
rb_vm_ccs_dump(struct rb_class_cc_entries *ccs)
{
    ruby_debug_printf("ccs:%p (%d,%d)\n", (void *)ccs, ccs->len, ccs->capa);
    for (int i=0; i<ccs->len; i++) {
        vm_ci_dump(ccs->entries[i].ci);
        rp(ccs->entries[i].cc);
    }
}
2012
/* Debug invariant check (VM_CHECK_MODE builds): every entry in the ccs
 * must be a valid (ci, cc) pair for this class/mid/cme.  Returns TRUE
 * (asserts fire on violation). */
static int
vm_ccs_verify(struct rb_class_cc_entries *ccs, ID mid, VALUE klass)
{
    VM_ASSERT(vm_ccs_p(ccs));
    VM_ASSERT(ccs->len <= ccs->capa);

    for (int i=0; i<ccs->len; i++) {
        const struct rb_callinfo *ci = ccs->entries[i].ci;
        const struct rb_callcache *cc = ccs->entries[i].cc;

        VM_ASSERT(vm_ci_p(ci));
        VM_ASSERT(vm_ci_mid(ci) == mid);
        VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
        VM_ASSERT(vm_cc_class_check(cc, klass));
        VM_ASSERT(vm_cc_check_cme(cc, ccs->cme));
        VM_ASSERT(!vm_cc_super_p(cc));
        VM_ASSERT(!vm_cc_refinement_p(cc));
    }
    return TRUE;
}
2033#endif
2034
2035static const rb_callable_method_entry_t *check_overloaded_cme(const rb_callable_method_entry_t *cme, const struct rb_callinfo * const ci);
2036
/* Global method search: find or create the call cache for (klass, ci).
 *
 * Looks up the class's ccs list for the method id; drops it when its cme
 * has been invalidated.  On a list hit the cached cc is returned.
 * Otherwise the method entry is resolved, a new cc is created, pushed
 * onto the ccs, and returned.  Undefined methods yield the shared empty
 * cc (never cached).  Caller must hold the VM lock. */
static const struct rb_callcache *
vm_search_cc(const VALUE klass, const struct rb_callinfo * const ci)
{
    const ID mid = vm_ci_mid(ci);
    struct rb_id_table *cc_tbl = RCLASS_CC_TBL(klass);
    struct rb_class_cc_entries *ccs = NULL;
    VALUE ccs_data;

    if (cc_tbl) {
        if (rb_id_table_lookup(cc_tbl, mid, &ccs_data)) {
            ccs = (struct rb_class_cc_entries *)ccs_data;
            const int ccs_len = ccs->len;

            if (UNLIKELY(METHOD_ENTRY_INVALIDATED(ccs->cme))) {
                /* Stale: the method was redefined/removed; discard the list. */
                rb_vm_ccs_free(ccs);
                rb_id_table_delete(cc_tbl, mid);
                ccs = NULL;
            }
            else {
                VM_ASSERT(vm_ccs_verify(ccs, mid, klass));

                /* Scan for a cc created for this exact callinfo. */
                for (int i=0; i<ccs_len; i++) {
                    const struct rb_callinfo *ccs_ci = ccs->entries[i].ci;
                    const struct rb_callcache *ccs_cc = ccs->entries[i].cc;

                    VM_ASSERT(vm_ci_p(ccs_ci));
                    VM_ASSERT(IMEMO_TYPE_P(ccs_cc, imemo_callcache));

                    if (ccs_ci == ci) { // TODO: equality
                        RB_DEBUG_COUNTER_INC(cc_found_in_ccs);

                        VM_ASSERT(vm_cc_cme(ccs_cc)->called_id == mid);
                        VM_ASSERT(ccs_cc->klass == klass);
                        VM_ASSERT(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(ccs_cc)));

                        return ccs_cc;
                    }
                }
            }
        }
    }
    else {
        cc_tbl = RCLASS_CC_TBL(klass) = rb_id_table_create(2);
    }

    RB_DEBUG_COUNTER_INC(cc_not_found_in_ccs);

    const rb_callable_method_entry_t *cme;

    if (ccs) {
        /* The ccs already resolved the cme; normalize "undef" to NULL. */
        cme = ccs->cme;
        cme = UNDEFINED_METHOD_ENTRY_P(cme) ? NULL : cme;

        VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
    }
    else {
        cme = rb_callable_method_entry(klass, mid);
    }

    VM_ASSERT(cme == NULL || IMEMO_TYPE_P(cme, imemo_ment));

    if (cme == NULL) {
        // undef or not found: can't cache the information
        VM_ASSERT(vm_cc_cme(&vm_empty_cc) == NULL);
        return &vm_empty_cc;
    }

    VM_ASSERT(cme == rb_callable_method_entry(klass, mid));

    METHOD_ENTRY_CACHED_SET((struct rb_callable_method_entry_struct *)cme);

    if (ccs == NULL) {
        VM_ASSERT(cc_tbl != NULL);

        if (LIKELY(rb_id_table_lookup(cc_tbl, mid, &ccs_data))) {
            // rb_callable_method_entry() prepares ccs.
            ccs = (struct rb_class_cc_entries *)ccs_data;
        }
        else {
            // TODO: required?
            ccs = vm_ccs_create(klass, cc_tbl, mid, cme);
        }
    }

    /* May substitute a specialized cme for optimized keyword dispatch. */
    cme = check_overloaded_cme(cme, ci);

    const struct rb_callcache *cc = vm_cc_new(klass, cme, vm_call_general, cc_type_normal);
    vm_ccs_push(klass, ccs, ci, cc);

    VM_ASSERT(vm_cc_cme(cc) != NULL);
    VM_ASSERT(cme->called_id == mid);
    VM_ASSERT(vm_cc_cme(cc)->called_id == mid);

    return cc;
}
2132
/* Locked wrapper around vm_search_cc: the exported slow-path method
 * search.  Asserts the returned cc is either the shared empty cc or a
 * valid, non-invalidated cache for (klass, ci). */
const struct rb_callcache *
rb_vm_search_method_slowpath(const struct rb_callinfo *ci, VALUE klass)
{
    const struct rb_callcache *cc;

    VM_ASSERT(RB_TYPE_P(klass, T_CLASS) || RB_TYPE_P(klass, T_ICLASS));

    RB_VM_LOCK_ENTER();
    {
        cc = vm_search_cc(klass, ci);

        VM_ASSERT(cc);
        VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
        VM_ASSERT(cc == vm_cc_empty() || cc->klass == klass);
        VM_ASSERT(cc == vm_cc_empty() || callable_method_entry_p(vm_cc_cme(cc)));
        VM_ASSERT(cc == vm_cc_empty() || !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)));
        VM_ASSERT(cc == vm_cc_empty() || vm_cc_cme(cc)->called_id == vm_ci_mid(ci));
    }
    RB_VM_LOCK_LEAVE();

    return cc;
}
2155
/* Inline-cache miss handler: run the global search and store the result
 * back into the call data's inline cache (with a write barrier to the
 * owning iseq, except for the shared empty cc which is not heap-managed). */
static const struct rb_callcache *
vm_search_method_slowpath0(VALUE cd_owner, struct rb_call_data *cd, VALUE klass)
{
#if USE_DEBUG_COUNTER
    const struct rb_callcache *old_cc = cd->cc;
#endif

    const struct rb_callcache *cc = rb_vm_search_method_slowpath(cd->ci, klass);

#if OPT_INLINE_METHOD_CACHE
    cd->cc = cc;

    const struct rb_callcache *empty_cc = &vm_empty_cc;
    if (cd_owner && cc != empty_cc) {
        RB_OBJ_WRITTEN(cd_owner, Qundef, cc);
    }

#if USE_DEBUG_COUNTER
    /* Classify the miss for tuning statistics. */
    if (old_cc == empty_cc) {
        // empty
        RB_DEBUG_COUNTER_INC(mc_inline_miss_empty);
    }
    else if (old_cc == cc) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cc);
    }
    else if (vm_cc_cme(old_cc) == vm_cc_cme(cc)) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cme);
    }
    else if (vm_cc_cme(old_cc) && vm_cc_cme(cc) &&
             vm_cc_cme(old_cc)->def == vm_cc_cme(cc)->def) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_def);
    }
    else {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_diff);
    }
#endif
#endif // OPT_INLINE_METHOD_CACHE

    VM_ASSERT(vm_cc_cme(cc) == NULL ||
              vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));

    return cc;
}
2199
ALWAYS_INLINE(static const struct rb_callcache *vm_search_method_fastpath(VALUE cd_owner, struct rb_call_data *cd, VALUE klass));
/* Inline-cache method search: the cached cc is valid when it was created
 * for the same class and its method entry has not been invalidated;
 * otherwise fall back to the global search. */
static const struct rb_callcache *
vm_search_method_fastpath(VALUE cd_owner, struct rb_call_data *cd, VALUE klass)
{
    const struct rb_callcache *cc = cd->cc;

#if OPT_INLINE_METHOD_CACHE
    if (LIKELY(vm_cc_class_check(cc, klass))) {
        if (LIKELY(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)))) {
            VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
            RB_DEBUG_COUNTER_INC(mc_inline_hit);
            VM_ASSERT(vm_cc_cme(cc) == NULL ||                        // not found
                      (vm_ci_flag(cd->ci) & VM_CALL_SUPER) ||         // search_super w/ define_method
                      vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci)); // cme->called_id == ci->mid

            return cc;
        }
        RB_DEBUG_COUNTER_INC(mc_inline_miss_invalidated);
    }
    else {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_klass);
    }
#endif

    return vm_search_method_slowpath0(cd_owner, cd, klass);
}
2226
/* Resolve the call cache for dispatching `cd` on `recv`'s class. */
static const struct rb_callcache *
vm_search_method(VALUE cd_owner, struct rb_call_data *cd, VALUE recv)
{
    VALUE klass = CLASS_OF(recv);
    VM_ASSERT(klass != Qfalse);
    VM_ASSERT(RBASIC_CLASS(klass) == 0 || rb_obj_is_kind_of(klass, rb_cClass));

    return vm_search_method_fastpath(cd_owner, cd, klass);
}
2236
2237#if __has_attribute(transparent_union)
2238typedef union {
2239 VALUE (*anyargs)(ANYARGS);
2240 VALUE (*f00)(VALUE);
2241 VALUE (*f01)(VALUE, VALUE);
2242 VALUE (*f02)(VALUE, VALUE, VALUE);
2243 VALUE (*f03)(VALUE, VALUE, VALUE, VALUE);
2244 VALUE (*f04)(VALUE, VALUE, VALUE, VALUE, VALUE);
2245 VALUE (*f05)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
2246 VALUE (*f06)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
2247 VALUE (*f07)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
2256 VALUE (*fm1)(int, union { VALUE *x; const VALUE *y; } __attribute__((__transparent_union__)), VALUE);
2257} __attribute__((__transparent_union__)) cfunc_type;
2258#else
2259typedef VALUE (*cfunc_type)(ANYARGS);
2260#endif
2261
2262static inline int
2263check_cfunc(const rb_callable_method_entry_t *me, cfunc_type func)
2264{
2265 if (! me) {
2266 return false;
2267 }
2268 else {
2269 VM_ASSERT(IMEMO_TYPE_P(me, imemo_ment));
2270 VM_ASSERT(callable_method_entry_p(me));
2271 VM_ASSERT(me->def);
2272 if (me->def->type != VM_METHOD_TYPE_CFUNC) {
2273 return false;
2274 }
2275 else {
2276#if __has_attribute(transparent_union)
2277 return me->def->body.cfunc.func == func.anyargs;
2278#else
2279 return me->def->body.cfunc.func == func;
2280#endif
2281 }
2282 }
2283}
2284
/* Return true iff dispatching `cd` on `recv` resolves to the C function
 * `func` (performs/refreshes the cached method search as a side effect). */
static inline int
vm_method_cfunc_is(const rb_iseq_t *iseq, CALL_DATA cd, VALUE recv, cfunc_type func)
{
    VM_ASSERT(iseq != NULL);
    const struct rb_callcache *cc = vm_search_method((VALUE)iseq, cd, recv);
    return check_cfunc(vm_cc_cme(cc), func);
}
2292
2293#define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)
2294
2295static inline bool
2296FIXNUM_2_P(VALUE a, VALUE b)
2297{
2298 /* FIXNUM_P(a) && FIXNUM_P(b)
2299 * == ((a & 1) && (b & 1))
2300 * == a & b & 1 */
2301 SIGNED_VALUE x = a;
2302 SIGNED_VALUE y = b;
2303 SIGNED_VALUE z = x & y & 1;
2304 return z == 1;
2305}
2306
2307static inline bool
2308FLONUM_2_P(VALUE a, VALUE b)
2309{
2310#if USE_FLONUM
2311 /* FLONUM_P(a) && FLONUM_P(b)
2312 * == ((a & 3) == 2) && ((b & 3) == 2)
2313 * == ! ((a ^ 2) | (b ^ 2) & 3)
2314 */
2315 SIGNED_VALUE x = a;
2316 SIGNED_VALUE y = b;
2317 SIGNED_VALUE z = ((x ^ 2) | (y ^ 2)) & 3;
2318 return !z;
2319#else
2320 return false;
2321#endif
2322}
2323
/* Try to compute `recv == obj` without dispatching #==.  Covers the core
 * types (Integer/Float immediates, static Symbols, Float and String heap
 * objects) when their `==` has not been redefined.  Returns Qtrue/Qfalse
 * on success, or Qundef when no fast path applies and the caller must
 * perform a real method call. */
static VALUE
opt_equality_specialized(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) && EQ_UNREDEFINED_P(INTEGER)) {
        goto compare_by_identity;
    }
    else if (FLONUM_2_P(recv, obj) && EQ_UNREDEFINED_P(FLOAT)) {
        goto compare_by_identity;
    }
    else if (STATIC_SYM_P(recv) && STATIC_SYM_P(obj) && EQ_UNREDEFINED_P(SYMBOL)) {
        goto compare_by_identity;
    }
    else if (SPECIAL_CONST_P(recv)) {
        /* other immediates: no fast path; fall through to Qundef */
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RB_FLOAT_TYPE_P(obj) && EQ_UNREDEFINED_P(FLOAT)) {
        double a = RFLOAT_VALUE(recv);
        double b = RFLOAT_VALUE(obj);

#if MSC_VERSION_BEFORE(1300)
        /* old MSVC compares NaN incorrectly; filter NaNs explicitly */
        if (isnan(a)) {
            return Qfalse;
        }
        else if (isnan(b)) {
            return Qfalse;
        }
        else
#endif
        return RBOOL(a == b);
    }
    else if (RBASIC_CLASS(recv) == rb_cString && EQ_UNREDEFINED_P(STRING)) {
        if (recv == obj) {
            return Qtrue;
        }
        else if (RB_TYPE_P(obj, T_STRING)) {
            return rb_str_eql_internal(obj, recv);
        }
    }
    return Qundef;

  compare_by_identity:
    return RBOOL(recv == obj);
}
2367
2368static VALUE
2369opt_equality(const rb_iseq_t *cd_owner, VALUE recv, VALUE obj, CALL_DATA cd)
2370{
2371 VM_ASSERT(cd_owner != NULL);
2372
2373 VALUE val = opt_equality_specialized(recv, obj);
2374 if (!UNDEF_P(val)) return val;
2375
2376 if (!vm_method_cfunc_is(cd_owner, cd, recv, rb_obj_equal)) {
2377 return Qundef;
2378 }
2379 else {
2380 return RBOOL(recv == obj);
2381 }
2382}
2383
2384#undef EQ_UNREDEFINED_P
2385
2386static inline const struct rb_callcache *gccct_method_search(rb_execution_context_t *ec, VALUE recv, ID mid, int argc); // vm_eval.c
2387NOINLINE(static VALUE opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid));
2388
2389static VALUE
2390opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid)
2391{
2392 const struct rb_callcache *cc = gccct_method_search(GET_EC(), recv, mid, 1);
2393
2394 if (cc && check_cfunc(vm_cc_cme(cc), rb_obj_equal)) {
2395 return RBOOL(recv == obj);
2396 }
2397 else {
2398 return Qundef;
2399 }
2400}
2401
2402static VALUE
2403opt_equality_by_mid(VALUE recv, VALUE obj, ID mid)
2404{
2405 VALUE val = opt_equality_specialized(recv, obj);
2406 if (!UNDEF_P(val)) {
2407 return val;
2408 }
2409 else {
2410 return opt_equality_by_mid_slowpath(recv, obj, mid);
2411 }
2412}
2413
/* Optimized `obj1 == obj2`; returns Qundef when no fast path applies
 * and the caller must fall back to a full rb_equal() dispatch. */
VALUE
rb_equal_opt(VALUE obj1, VALUE obj2)
{
    return opt_equality_by_mid(obj1, obj2, idEq);
}
2419
/* Optimized `obj1.eql?(obj2)`; Qundef means "no shortcut available". */
VALUE
rb_eql_opt(VALUE obj1, VALUE obj2)
{
    return opt_equality_by_mid(obj1, obj2, idEqlP);
}
2425
2426extern VALUE rb_vm_call0(rb_execution_context_t *ec, VALUE, ID, int, const VALUE*, const rb_callable_method_entry_t *, int kw_splat);
2427extern VALUE rb_vm_call_with_refinements(rb_execution_context_t *, VALUE, ID, int, const VALUE *, int);
2428
2429static VALUE
2430check_match(rb_execution_context_t *ec, VALUE pattern, VALUE target, enum vm_check_match_type type)
2431{
2432 switch (type) {
2433 case VM_CHECKMATCH_TYPE_WHEN:
2434 return pattern;
2435 case VM_CHECKMATCH_TYPE_RESCUE:
2436 if (!rb_obj_is_kind_of(pattern, rb_cModule)) {
2437 rb_raise(rb_eTypeError, "class or module required for rescue clause");
2438 }
2439 /* fall through */
2440 case VM_CHECKMATCH_TYPE_CASE: {
2441 return rb_vm_call_with_refinements(ec, pattern, idEqq, 1, &target, RB_NO_KEYWORDS);
2442 }
2443 default:
2444 rb_bug("check_match: unreachable");
2445 }
2446}
2447
2448
/* Pre-VC7 MSVC mishandles comparisons involving NaN, so return Qfalse
 * early for NaN operands there; a no-op on every other compiler. */
#if MSC_VERSION_BEFORE(1300)
#define CHECK_CMP_NAN(a, b) if (isnan(a) || isnan(b)) return Qfalse;
#else
#define CHECK_CMP_NAN(a, b) /* do nothing */
#endif
2454
2455static inline VALUE
2456double_cmp_lt(double a, double b)
2457{
2458 CHECK_CMP_NAN(a, b);
2459 return RBOOL(a < b);
2460}
2461
2462static inline VALUE
2463double_cmp_le(double a, double b)
2464{
2465 CHECK_CMP_NAN(a, b);
2466 return RBOOL(a <= b);
2467}
2468
2469static inline VALUE
2470double_cmp_gt(double a, double b)
2471{
2472 CHECK_CMP_NAN(a, b);
2473 return RBOOL(a > b);
2474}
2475
2476static inline VALUE
2477double_cmp_ge(double a, double b)
2478{
2479 CHECK_CMP_NAN(a, b);
2480 return RBOOL(a >= b);
2481}
2482
2483// Copied by vm_dump.c
/* Compute the base pointer (first local slot) of a Ruby-level frame:
 * previous frame's sp, plus the local table, plus the env bookkeeping
 * slots.  Method/bmethod frames get one extra slot for `self`.
 * Returns NULL for non-Ruby (C) frames. */
static inline VALUE *
vm_base_ptr(const rb_control_frame_t *cfp)
{
    const rb_control_frame_t *prev_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);

    if (cfp->iseq && VM_FRAME_RUBYFRAME_P(cfp)) {
        VALUE *bp = prev_cfp->sp + ISEQ_BODY(cfp->iseq)->local_table_size + VM_ENV_DATA_SIZE;
        if (ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_METHOD || VM_FRAME_BMETHOD_P(cfp)) {
            /* adjust `self' */
            bp += 1;
        }
#if VM_DEBUG_BP_CHECK
        /* cross-check against the bp recorded at frame push time */
        if (bp != cfp->bp_check) {
            ruby_debug_printf("bp_check: %ld, bp: %ld\n",
                    (long)(cfp->bp_check - GET_EC()->vm_stack),
                    (long)(bp - GET_EC()->vm_stack));
            rb_bug("vm_base_ptr: unreachable");
        }
#endif
        return bp;
    }
    else {
        return NULL;
    }
}
2509
/* Exported wrapper around vm_base_ptr for use outside this file. */
VALUE *
rb_vm_base_ptr(const rb_control_frame_t *cfp)
{
    return vm_base_ptr(cfp);
}
2515
2516/* method call processes with call_info */
2517
2518#include "vm_args.c"
2519
2520static inline VALUE vm_call_iseq_setup_2(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, int opt_pc, int param_size, int local_size);
2521ALWAYS_INLINE(static VALUE vm_call_iseq_setup_normal(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const rb_callable_method_entry_t *me, int opt_pc, int param_size, int local_size));
2522static inline VALUE vm_call_iseq_setup_tailcall(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, int opt_pc);
2523static VALUE vm_call_super_method(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling);
2524static VALUE vm_call_method_nome(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling);
2525static VALUE vm_call_method_each_type(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling);
2526static inline VALUE vm_call_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling);
2527
2528static vm_call_handler vm_call_iseq_setup_func(const struct rb_callinfo *ci, const int param_size, const int local_size);
2529
/* Fastpath call handler: tailcall into an iseq that needs no optional
 * parameter adjustment (opt_pc == 0). */
static VALUE
vm_call_iseq_setup_tailcall_0start(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_iseq_setup_tailcall_0start);

    return vm_call_iseq_setup_tailcall(ec, cfp, calling, 0);
}
2537
2538static VALUE
2539vm_call_iseq_setup_normal_0start(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
2540{
2541 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_0start);
2542
2543 const struct rb_callcache *cc = calling->cc;
2544 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2545 int param = ISEQ_BODY(iseq)->param.size;
2546 int local = ISEQ_BODY(iseq)->local_table_size;
2547 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2548}
2549
2550bool
2551rb_simple_iseq_p(const rb_iseq_t *iseq)
2552{
2553 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2554 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2555 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2556 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2557 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2558 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2559 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2560}
2561
2562bool
2563rb_iseq_only_optparam_p(const rb_iseq_t *iseq)
2564{
2565 return ISEQ_BODY(iseq)->param.flags.has_opt == TRUE &&
2566 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2567 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2568 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2569 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2570 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2571 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2572}
2573
2574bool
2575rb_iseq_only_kwparam_p(const rb_iseq_t *iseq)
2576{
2577 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2578 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2579 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2580 ISEQ_BODY(iseq)->param.flags.has_kw == TRUE &&
2581 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2582 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2583}
2584
/* Sentinel max_args values: the callee tolerates arguments spilled to a
 * heap array (to avoid huge splats overflowing the VM stack); the second
 * variant additionally keeps the trailing kw-splat hash flagged. */
#define ALLOW_HEAP_ARGV (-2)
#define ALLOW_HEAP_ARGV_KEEP_KWSPLAT (-3)
2587
/* Expand the splatted array `ary` onto the VM stack (or, for very large
 * splats when the callee allows it, into a hidden heap array stored as a
 * single argument).  Updates calling->argc/heap_argv and cfp->sp.
 * Returns true when arguments were clipped to `max_args` (non-lambda
 * block semantics), false otherwise. */
static inline bool
vm_caller_setup_arg_splat(rb_control_frame_t *cfp, struct rb_calling_info *calling, VALUE ary, int max_args)
{
    vm_check_canary(GET_EC(), cfp->sp);
    bool ret = false;

    if (!NIL_P(ary)) {
        const VALUE *ptr = RARRAY_CONST_PTR(ary);
        long len = RARRAY_LEN(ary);
        int argc = calling->argc;

        if (UNLIKELY(max_args <= ALLOW_HEAP_ARGV && len + argc > VM_ARGC_STACK_MAX)) {
            /* Avoid SystemStackError when splatting large arrays by storing arguments in
             * a temporary array, instead of trying to keeping arguments on the VM stack.
             */
            VALUE *argv = cfp->sp - argc;
            VALUE argv_ary = rb_ary_hidden_new(len + argc + 1);
            rb_ary_cat(argv_ary, argv, argc);
            rb_ary_cat(argv_ary, ptr, len);
            /* collapse the argc stack slots into one slot holding argv_ary */
            cfp->sp -= argc - 1;
            cfp->sp[-1] = argv_ary;
            calling->argc = 1;
            calling->heap_argv = argv_ary;
            RB_GC_GUARD(ary);
        }
        else {
            long i;

            if (max_args >= 0 && len + argc > max_args) {
                /* If only a given max_args is allowed, copy up to max args.
                 * Used by vm_callee_setup_block_arg for non-lambda blocks,
                 * where additional arguments are ignored.
                 *
                 * Also, copy up to one more argument than the maximum,
                 * in case it is an empty keyword hash that will be removed.
                 */
                calling->argc += len - (max_args - argc + 1);
                len = max_args - argc + 1;
                ret = true;
            }
            else {
                /* Unset heap_argv if set originally. Can happen when
                 * forwarding modified arguments, where heap_argv was used
                 * originally, but heap_argv not supported by the forwarded
                 * method in all cases.
                 */
                calling->heap_argv = 0;
            }
            CHECK_VM_STACK_OVERFLOW(cfp, len);

            for (i = 0; i < len; i++) {
                *cfp->sp++ = ptr[i];
            }
            calling->argc += i;
        }
    }

    return ret;
}
2647
/* Collapse the kw_len literal keyword arguments sitting on top of the VM
 * stack into one freshly built Hash, which replaces them as the single
 * last argument (argc shrinks by kw_len - 1) with kw_splat set. */
static inline void
vm_caller_setup_arg_kw(rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_callinfo *ci)
{
    const VALUE *const passed_keywords = vm_ci_kwarg(ci)->keywords;
    const int kw_len = vm_ci_kwarg(ci)->keyword_len;
    const VALUE h = rb_hash_new_with_size(kw_len);
    VALUE *sp = cfp->sp;
    int i;

    /* keyword values occupy the top kw_len stack slots, in ci order */
    for (i=0; i<kw_len; i++) {
        rb_hash_aset(h, passed_keywords[i], (sp - kw_len)[i]);
    }
    (sp-kw_len)[0] = h;

    cfp->sp -= kw_len - 1;
    calling->argc -= kw_len - 1;
    calling->kw_splat = 1;
}
2666
2667static inline VALUE
2668vm_caller_setup_keyword_hash(const struct rb_callinfo *ci, VALUE keyword_hash)
2669{
2670 if (UNLIKELY(!RB_TYPE_P(keyword_hash, T_HASH))) {
2671 /* Convert a non-hash keyword splat to a new hash */
2672 keyword_hash = rb_hash_dup(rb_to_hash_type(keyword_hash));
2673 }
2674 else if (!IS_ARGS_KW_SPLAT_MUT(ci)) {
2675 /* Convert a hash keyword splat to a new hash unless
2676 * a mutable keyword splat was passed.
2677 */
2678 keyword_hash = rb_hash_dup(keyword_hash);
2679 }
2680 return keyword_hash;
2681}
2682
/* Normalize the caller-side argument layout on the VM stack before the
 * callee-specific setup runs: expand `*splat` arrays, materialize `**kw`
 * splats and literal keywords into hashes, and strip/flag the trailing
 * RHASH_PASS_AS_KEYWORDS hash produced by argument forwarding.  Mutates
 * cfp->sp and calling->{argc,kw_splat,heap_argv}. */
static inline void
CALLER_SETUP_ARG(struct rb_control_frame_struct *restrict cfp,
                 struct rb_calling_info *restrict calling,
                 const struct rb_callinfo *restrict ci, int max_args)
{
    if (UNLIKELY(IS_ARGS_SPLAT(ci))) {
        if (IS_ARGS_KW_SPLAT(ci)) {
            // f(*a, **kw)
            VM_ASSERT(calling->kw_splat == 1);

            /* pop the splat array and kw hash operands */
            cfp->sp -= 2;
            calling->argc -= 2;
            VALUE ary = cfp->sp[0];
            VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[1]);

            // splat a
            if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args)) return;

            // put kw
            if (!RHASH_EMPTY_P(kwh)) {
                if (UNLIKELY(calling->heap_argv)) {
                    rb_ary_push(calling->heap_argv, kwh);
                    ((struct RHash *)kwh)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
                    if (max_args != ALLOW_HEAP_ARGV_KEEP_KWSPLAT) {
                        calling->kw_splat = 0;
                    }
                }
                else {
                    /* push the kw hash back as the final positional slot */
                    cfp->sp[0] = kwh;
                    cfp->sp++;
                    calling->argc++;

                    VM_ASSERT(calling->kw_splat == 1);
                }
            }
            else {
                /* empty kw hash contributes nothing */
                calling->kw_splat = 0;
            }
        }
        else {
            // f(*a)
            VM_ASSERT(calling->kw_splat == 0);

            cfp->sp -= 1;
            calling->argc -= 1;
            VALUE ary = cfp->sp[0];

            if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args)) {
                goto check_keyword;
            }

            // check the last argument
            VALUE last_hash, argv_ary;
            if (UNLIKELY(argv_ary = calling->heap_argv)) {
                /* forwarded kw hash rides at the end of the heap argv */
                if (!IS_ARGS_KEYWORD(ci) &&
                    RARRAY_LEN(argv_ary) > 0 &&
                    RB_TYPE_P((last_hash = rb_ary_last(0, NULL, argv_ary)), T_HASH) &&
                    (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {

                    rb_ary_pop(argv_ary);
                    if (!RHASH_EMPTY_P(last_hash)) {
                        rb_ary_push(argv_ary, rb_hash_dup(last_hash));
                        calling->kw_splat = 1;
                    }
                }
            }
            else {
check_keyword:
                /* forwarded kw hash sits in the top stack slot */
                if (!IS_ARGS_KEYWORD(ci) &&
                    calling->argc > 0 &&
                    RB_TYPE_P((last_hash = cfp->sp[-1]), T_HASH) &&
                    (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {

                    if (RHASH_EMPTY_P(last_hash)) {
                        calling->argc--;
                        cfp->sp -= 1;
                    }
                    else {
                        cfp->sp[-1] = rb_hash_dup(last_hash);
                        calling->kw_splat = 1;
                    }
                }
            }
        }
    }
    else if (UNLIKELY(IS_ARGS_KW_SPLAT(ci))) {
        // f(**kw)
        VM_ASSERT(calling->kw_splat == 1);
        VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[-1]);

        if (RHASH_EMPTY_P(kwh)) {
            /* empty splat: drop the argument entirely */
            cfp->sp--;
            calling->argc--;
            calling->kw_splat = 0;
        }
        else {
            cfp->sp[-1] = kwh;
        }
    }
    else if (UNLIKELY(IS_ARGS_KEYWORD(ci))) {
        // f(k1:1, k2:2)
        VM_ASSERT(calling->kw_splat == 0);

        /* This converts VM_CALL_KWARG style to VM_CALL_KW_SPLAT style
         * by creating a keyword hash.
         * So, vm_ci_flag(ci) & VM_CALL_KWARG is now inconsistent.
         */
        vm_caller_setup_arg_kw(cfp, calling, ci);
    }
}
2793
/* Debug instrumentation: when enabled, histogram how optional-parameter
 * calls are resolved, dumped via a destructor at process exit. */
#define USE_OPT_HIST 0

#if USE_OPT_HIST
#define OPT_HIST_MAX 64
static int opt_hist[OPT_HIST_MAX+1];

__attribute__((destructor))
static void
opt_hist_show_results_at_exit(void)
{
    for (int i=0; i<OPT_HIST_MAX; i++) {
        ruby_debug_printf("opt_hist\t%d\t%d\n", i, opt_hist[i]);
    }
}
#endif
2809
/* Fastpath call handler for iseqs whose extra parameters are all
 * optional.  `opt` is how many optionals the caller supplied; the
 * opt_table entry gives the pc to start at, skipping default-value code
 * for optionals that were supplied.  The frame's param size shrinks by
 * the number of unsupplied optionals (`delta`). */
static VALUE
vm_call_iseq_setup_normal_opt_start(rb_execution_context_t *ec, rb_control_frame_t *cfp,
                                    struct rb_calling_info *calling)
{
    const struct rb_callcache *cc = calling->cc;
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    const int opt = calling->argc - lead_num;
    const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
    const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
    const int param = ISEQ_BODY(iseq)->param.size;
    const int local = ISEQ_BODY(iseq)->local_table_size;
    const int delta = opt_num - opt;

    RB_DEBUG_COUNTER_INC(ccf_iseq_opt);

#if USE_OPT_HIST
    /* NOTE(review): guard tests opt_pc but the index is opt — looks
     * inconsistent; confirm opt cannot exceed OPT_HIST_MAX here. */
    if (opt_pc < OPT_HIST_MAX) {
        opt_hist[opt]++;
    }
    else {
        opt_hist[OPT_HIST_MAX]++;
    }
#endif

    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param - delta, local);
}
2837
/* Tailcall variant of the optional-parameter fastpath: compute the
 * starting pc from the opt_table and reuse the tailcall frame setup. */
static VALUE
vm_call_iseq_setup_tailcall_opt_start(rb_execution_context_t *ec, rb_control_frame_t *cfp,
                                      struct rb_calling_info *calling)
{
    const struct rb_callcache *cc = calling->cc;
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    const int opt = calling->argc - lead_num;
    const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];

    RB_DEBUG_COUNTER_INC(ccf_iseq_opt);

#if USE_OPT_HIST
    /* NOTE(review): guard tests opt_pc but the index is opt — looks
     * inconsistent; confirm opt cannot exceed OPT_HIST_MAX here. */
    if (opt_pc < OPT_HIST_MAX) {
        opt_hist[opt]++;
    }
    else {
        opt_hist[OPT_HIST_MAX]++;
    }
#endif

    return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
}
2861
2862static void
2863args_setup_kw_parameters(rb_execution_context_t *const ec, const rb_iseq_t *const iseq,
2864 VALUE *const passed_values, const int passed_keyword_len, const VALUE *const passed_keywords,
2865 VALUE *const locals);
2866
/* Fastpath call handler: callee takes only lead + keyword parameters and
 * the caller passes literal keywords (VM_CALL_KWARG).  Matches the
 * caller's keyword list against the iseq's keyword table directly on the
 * stack, then pushes a normal frame. */
static VALUE
vm_call_iseq_setup_kwparm_kwarg(rb_execution_context_t *ec, rb_control_frame_t *cfp,
                                struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    const struct rb_callcache *cc = calling->cc;

    VM_ASSERT(vm_ci_flag(ci) & VM_CALL_KWARG);
    RB_DEBUG_COUNTER_INC(ccf_iseq_kw1);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
    const struct rb_callinfo_kwarg *kw_arg = vm_ci_kwarg(ci);
    const int ci_kw_len = kw_arg->keyword_len;
    const VALUE * const ci_keywords = kw_arg->keywords;
    VALUE *argv = cfp->sp - calling->argc;
    /* keyword locals start kw_param->num slots before the unspecified-bits slot */
    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    /* copy the passed keyword values aside before they are scattered */
    VALUE * const ci_kws = ALLOCA_N(VALUE, ci_kw_len);
    MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);
    args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);

    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
}
2893
/* Fastpath call handler: callee takes only lead + keyword parameters but
 * the caller passes NO keywords — fill every keyword local from its
 * default value and push a normal frame. */
static VALUE
vm_call_iseq_setup_kwparm_nokwarg(rb_execution_context_t *ec, rb_control_frame_t *cfp,
                                  struct rb_calling_info *calling)
{
    const struct rb_callinfo *MAYBE_UNUSED(ci) = calling->cd->ci;
    const struct rb_callcache *cc = calling->cc;

    VM_ASSERT((vm_ci_flag(ci) & VM_CALL_KWARG) == 0);
    RB_DEBUG_COUNTER_INC(ccf_iseq_kw2);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
    VALUE * const argv = cfp->sp - calling->argc;
    VALUE * const klocals = argv + kw_param->bits_start - kw_param->num;

    int i;
    for (i=0; i<kw_param->num; i++) {
        klocals[i] = kw_param->default_values[i];
    }
    klocals[i] = INT2FIX(0); // kw specify flag
    // NOTE:
    //   nobody check this value, but it should be cleared because it can
    //   points invalid VALUE (T_NONE objects, raw pointer and so on).

    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
}
2922
2923static VALUE builtin_invoker0(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr);
2924
2925static VALUE
2926vm_call_single_noarg_inline_builtin(rb_execution_context_t *ec, rb_control_frame_t *cfp,
2927 struct rb_calling_info *calling)
2928{
2929 const struct rb_builtin_function *bf = calling->cc->aux_.bf;
2930 cfp->sp -= (calling->argc + 1);
2931 return builtin_invoker0(ec, calling->recv, NULL, (rb_insn_func_t)bf->func_ptr);
2932}
2933
/* Arrange the caller's arguments into the callee iseq's parameter layout
 * and pick/install the fastest call handler for future calls through
 * this call cache.  Handles three fast shapes — simple (lead-only),
 * lead+optional, and lead+keyword — and falls back to
 * setup_parameters_complex for everything else.  Returns the opt_pc
 * offset at which execution of the iseq should begin. */
static inline int
vm_callee_setup_arg(rb_execution_context_t *ec, struct rb_calling_info *calling,
                    const rb_iseq_t *iseq, VALUE *argv, int param_size, int local_size)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    const struct rb_callcache *cc = calling->cc;
    bool cacheable_ci = vm_ci_markable(ci);

    if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_KW_SPLAT))) {
        if (LIKELY(rb_simple_iseq_p(iseq))) {
            rb_control_frame_t *cfp = ec->cfp;
            int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            CALLER_SETUP_ARG(cfp, calling, ci, lead_num);

            if (calling->argc != lead_num) {
                argument_arity_error(ec, iseq, calling->argc, lead_num, lead_num);
            }

            /* CALLER_SETUP_ARG must not have replaced ci/cc */
            VM_ASSERT(ci == calling->cd->ci);
            VM_ASSERT(cc == calling->cc);

            if (cacheable_ci && vm_call_iseq_optimizable_p(ci, cc)) {
                if ((iseq->body->builtin_attrs & BUILTIN_ATTR_SINGLE_NOARG_INLINE) &&
                    !(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) {
                    VM_ASSERT(iseq->body->builtin_attrs & BUILTIN_ATTR_LEAF);
                    /* stash the builtin function pointer in the cache */
                    vm_cc_bf_set(cc, (void *)iseq->body->iseq_encoded[1]);
                    CC_SET_FASTPATH(cc, vm_call_single_noarg_inline_builtin, true);
                }
                else {
                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size), true);
                }
            }
            return 0;
        }
        else if (rb_iseq_only_optparam_p(iseq)) {
            rb_control_frame_t *cfp = ec->cfp;

            const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            const int opt_num = ISEQ_BODY(iseq)->param.opt_num;

            CALLER_SETUP_ARG(cfp, calling, ci, lead_num + opt_num);
            const int argc = calling->argc;
            const int opt = argc - lead_num;

            if (opt < 0 || opt > opt_num) {
                argument_arity_error(ec, iseq, argc, lead_num, lead_num + opt_num);
            }

            if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
                CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,
                                !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
                                cacheable_ci && vm_call_cacheable(ci, cc));
            }
            else {
                CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,
                                !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
                                cacheable_ci && vm_call_cacheable(ci, cc));
            }

            /* initialize opt vars for self-references */
            VM_ASSERT((int)ISEQ_BODY(iseq)->param.size == lead_num + opt_num);
            for (int i=argc; i<lead_num + opt_num; i++) {
                argv[i] = Qnil;
            }
            return (int)ISEQ_BODY(iseq)->param.opt_table[opt];
        }
        else if (rb_iseq_only_kwparam_p(iseq) && !IS_ARGS_SPLAT(ci)) {
            const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            const int argc = calling->argc;
            const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;

            if (vm_ci_flag(ci) & VM_CALL_KWARG) {
                const struct rb_callinfo_kwarg *kw_arg = vm_ci_kwarg(ci);

                if (argc - kw_arg->keyword_len == lead_num) {
                    const int ci_kw_len = kw_arg->keyword_len;
                    const VALUE * const ci_keywords = kw_arg->keywords;
                    /* copy the passed keyword values aside before scattering */
                    VALUE * const ci_kws = ALLOCA_N(VALUE, ci_kw_len);
                    MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);

                    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
                    args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);

                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,
                                    cacheable_ci && vm_call_cacheable(ci, cc));

                    return 0;
                }
            }
            else if (argc == lead_num) {
                /* no kwarg */
                VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
                args_setup_kw_parameters(ec, iseq, NULL, 0, NULL, klocals);

                if (klocals[kw_param->num] == INT2FIX(0)) {
                    /* copy from default_values */
                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,
                                    cacheable_ci && vm_call_cacheable(ci, cc));
                }

                return 0;
            }
        }
    }

    return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_method);
}
3041
3042static VALUE
3043vm_call_iseq_setup(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
3044{
3045 RB_DEBUG_COUNTER_INC(ccf_iseq_setup);
3046
3047 const struct rb_callcache *cc = calling->cc;
3048 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3049 const int param_size = ISEQ_BODY(iseq)->param.size;
3050 const int local_size = ISEQ_BODY(iseq)->local_table_size;
3051 const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
3052 return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
3053}
3054
3055static inline VALUE
3056vm_call_iseq_setup_2(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
3057 int opt_pc, int param_size, int local_size)
3058{
3059 const struct rb_callinfo *ci = calling->cd->ci;
3060 const struct rb_callcache *cc = calling->cc;
3061
3062 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
3063 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param_size, local_size);
3064 }
3065 else {
3066 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
3067 }
3068}
3069
/* Push a regular METHOD frame for `me`'s iseq.  The receiver and argv
 * already sit on the VM stack; the new frame's sp starts after the
 * param_size parameter slots, and the caller's sp is rewound to just
 * below the receiver slot.  Execution begins at iseq_encoded + opt_pc. */
static inline VALUE
vm_call_iseq_setup_normal(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const rb_callable_method_entry_t *me,
                          int opt_pc, int param_size, int local_size)
{
    const rb_iseq_t *iseq = def_iseq_ptr(me->def);
    VALUE *argv = cfp->sp - calling->argc;
    VALUE *sp = argv + param_size;
    cfp->sp = argv - 1 /* recv */;

    vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL, calling->recv,
                  calling->block_handler, (VALUE)me,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
                  local_size - param_size,
                  ISEQ_BODY(iseq)->stack_max);
    return Qundef;
}
3086
/* Perform a tailcall: pop the current frame, then rebuild receiver +
 * arguments at its former base and push the callee's frame there, so
 * the caller's frame is replaced rather than stacked on. */
static inline VALUE
vm_call_iseq_setup_tailcall(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, int opt_pc)
{
    const struct rb_callcache *cc = calling->cc;
    unsigned int i;
    VALUE *argv = cfp->sp - calling->argc;
    const rb_callable_method_entry_t *me = vm_cc_cme(cc);
    const rb_iseq_t *iseq = def_iseq_ptr(me->def);
    VALUE *src_argv = argv;
    VALUE *sp_orig, *sp;
    /* preserve the FINISH flag of the frame being replaced */
    VALUE finish_flag = VM_FRAME_FINISHED_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;

    if (VM_BH_FROM_CFP_P(calling->block_handler, cfp)) {
        /* the block handler points into the frame we are about to pop;
         * re-anchor it to the previous (surviving) frame */
        struct rb_captured_block *dst_captured = VM_CFP_TO_CAPTURED_BLOCK(RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
        const struct rb_captured_block *src_captured = VM_BH_TO_CAPT_BLOCK(calling->block_handler);
        dst_captured->code.val = src_captured->code.val;
        if (VM_BH_ISEQ_BLOCK_P(calling->block_handler)) {
            calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);
        }
        else {
            calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);
        }
    }

    vm_pop_frame(ec, cfp, cfp->ep);
    cfp = ec->cfp;

    sp_orig = sp = cfp->sp;

    /* push self */
    sp[0] = calling->recv;
    sp++;

    /* copy arguments */
    for (i=0; i < ISEQ_BODY(iseq)->param.size; i++) {
        *sp++ = src_argv[i];
    }

    vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL | finish_flag,
                  calling->recv, calling->block_handler, (VALUE)me,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
                  ISEQ_BODY(iseq)->local_table_size - ISEQ_BODY(iseq)->param.size,
                  ISEQ_BODY(iseq)->stack_max);

    cfp->sp = sp_orig;

    return Qundef;
}
3135
3136static void
3137ractor_unsafe_check(void)
3138{
3139 if (!rb_ractor_main_p()) {
3140 rb_raise(rb_eRactorUnsafeError, "ractor unsafe method called from not main ractor");
3141 }
3142}
3143
3144static VALUE
3145call_cfunc_m2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3146{
3147 ractor_unsafe_check();
3148 VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
3149 return (*f)(recv, rb_ary_new4(argc, argv));
3150}
3151
3152static VALUE
3153call_cfunc_m1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3154{
3155 ractor_unsafe_check();
3156 VALUE(*f)(int, const VALUE *, VALUE) = (VALUE(*)(int, const VALUE *, VALUE))func;
3157 return (*f)(argc, argv, recv);
3158}
3159
3160static VALUE
3161call_cfunc_0(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3162{
3163 ractor_unsafe_check();
3164 VALUE(*f)(VALUE) = (VALUE(*)(VALUE))func;
3165 return (*f)(recv);
3166}
3167
3168static VALUE
3169call_cfunc_1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3170{
3171 ractor_unsafe_check();
3172 VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
3173 return (*f)(recv, argv[0]);
3174}
3175
3176static VALUE
3177call_cfunc_2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3178{
3179 ractor_unsafe_check();
3180 VALUE(*f)(VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE))func;
3181 return (*f)(recv, argv[0], argv[1]);
3182}
3183
3184static VALUE
3185call_cfunc_3(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3186{
3187 ractor_unsafe_check();
3188 VALUE(*f)(VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE))func;
3189 return (*f)(recv, argv[0], argv[1], argv[2]);
3190}
3191
3192static VALUE
3193call_cfunc_4(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3194{
3195 ractor_unsafe_check();
3196 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE))func;
3197 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
3198}
3199
3200static VALUE
3201call_cfunc_5(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3202{
3203 ractor_unsafe_check();
3204 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3205 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3206}
3207
3208static VALUE
3209call_cfunc_6(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3210{
3211 ractor_unsafe_check();
3213 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3214}
3215
3216static VALUE
3217call_cfunc_7(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3218{
3219 ractor_unsafe_check();
3221 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3222}
3223
3224static VALUE
3225call_cfunc_8(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3226{
3227 ractor_unsafe_check();
3229 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3230}
3231
3232static VALUE
3233call_cfunc_9(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3234{
3235 ractor_unsafe_check();
3237 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3238}
3239
3240static VALUE
3241call_cfunc_10(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3242{
3243 ractor_unsafe_check();
3245 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3246}
3247
3248static VALUE
3249call_cfunc_11(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3250{
3251 ractor_unsafe_check();
3253 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3254}
3255
3256static VALUE
3257call_cfunc_12(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3258{
3259 ractor_unsafe_check();
3261 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3262}
3263
3264static VALUE
3265call_cfunc_13(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3266{
3267 ractor_unsafe_check();
3269 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3270}
3271
3272static VALUE
3273call_cfunc_14(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3274{
3275 ractor_unsafe_check();
3277 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3278}
3279
3280static VALUE
3281call_cfunc_15(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3282{
3283 ractor_unsafe_check();
3285 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
3286}
3287
3288static VALUE
3289ractor_safe_call_cfunc_m2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3290{
3291 VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
3292 return (*f)(recv, rb_ary_new4(argc, argv));
3293}
3294
3295static VALUE
3296ractor_safe_call_cfunc_m1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3297{
3298 VALUE(*f)(int, const VALUE *, VALUE) = (VALUE(*)(int, const VALUE *, VALUE))func;
3299 return (*f)(argc, argv, recv);
3300}
3301
3302static VALUE
3303ractor_safe_call_cfunc_0(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3304{
3305 VALUE(*f)(VALUE) = (VALUE(*)(VALUE))func;
3306 return (*f)(recv);
3307}
3308
3309static VALUE
3310ractor_safe_call_cfunc_1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3311{
3312 VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
3313 return (*f)(recv, argv[0]);
3314}
3315
3316static VALUE
3317ractor_safe_call_cfunc_2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3318{
3319 VALUE(*f)(VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE))func;
3320 return (*f)(recv, argv[0], argv[1]);
3321}
3322
3323static VALUE
3324ractor_safe_call_cfunc_3(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3325{
3326 VALUE(*f)(VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE))func;
3327 return (*f)(recv, argv[0], argv[1], argv[2]);
3328}
3329
3330static VALUE
3331ractor_safe_call_cfunc_4(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3332{
3333 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE))func;
3334 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
3335}
3336
3337static VALUE
3338ractor_safe_call_cfunc_5(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3339{
3340 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3341 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3342}
3343
3344static VALUE
3345ractor_safe_call_cfunc_6(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3346{
3348 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3349}
3350
3351static VALUE
3352ractor_safe_call_cfunc_7(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3353{
3355 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3356}
3357
3358static VALUE
3359ractor_safe_call_cfunc_8(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3360{
3362 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3363}
3364
3365static VALUE
3366ractor_safe_call_cfunc_9(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3367{
3369 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3370}
3371
3372static VALUE
3373ractor_safe_call_cfunc_10(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3374{
3376 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3377}
3378
3379static VALUE
3380ractor_safe_call_cfunc_11(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3381{
3383 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3384}
3385
3386static VALUE
3387ractor_safe_call_cfunc_12(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3388{
3390 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3391}
3392
3393static VALUE
3394ractor_safe_call_cfunc_13(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3395{
3397 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3398}
3399
3400static VALUE
3401ractor_safe_call_cfunc_14(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3402{
3404 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3405}
3406
3407static VALUE
3408ractor_safe_call_cfunc_15(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3409{
3411 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
3412}
3413
3414static inline int
3415vm_cfp_consistent_p(rb_execution_context_t *ec, const rb_control_frame_t *reg_cfp)
3416{
3417 const int ov_flags = RAISED_STACKOVERFLOW;
3418 if (LIKELY(reg_cfp == ec->cfp + 1)) return TRUE;
3419 if (rb_ec_raised_p(ec, ov_flags)) {
3420 rb_ec_raised_reset(ec, ov_flags);
3421 return TRUE;
3422 }
3423 return FALSE;
3424}
3425
/* rb_bug() with the given function name if the control frame pointer no
 * longer matches the execution context (see vm_cfp_consistent_p). */
#define CHECK_CFP_CONSISTENCY(func) \
    (LIKELY(vm_cfp_consistent_p(ec, reg_cfp)) ? (void)0 : \
     rb_bug(func ": cfp consistency error (%p, %p)", (void *)reg_cfp, (void *)(ec->cfp+1)))
3429
/* Return the cfunc body of a callable method entry.  Under
 * VM_DEBUG_VERIFY_METHOD_CACHE, first assert the entry's definition
 * really is CFUNC/NOTIMPLEMENTED before reading the union member. */
static inline
const rb_method_cfunc_t *
vm_method_cfunc_entry(const rb_callable_method_entry_t *me)
{
#if VM_DEBUG_VERIFY_METHOD_CACHE
    switch (me->def->type) {
      case VM_METHOD_TYPE_CFUNC:
      case VM_METHOD_TYPE_NOTIMPLEMENTED:
        break;
# define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
        METHOD_BUG(ISEQ);
        METHOD_BUG(ATTRSET);
        METHOD_BUG(IVAR);
        METHOD_BUG(BMETHOD);
        METHOD_BUG(ZSUPER);
        METHOD_BUG(UNDEF);
        METHOD_BUG(OPTIMIZED);
        METHOD_BUG(MISSING);
        METHOD_BUG(REFINED);
        METHOD_BUG(ALIAS);
# undef METHOD_BUG
      default:
        rb_bug("wrong method type: %d", me->def->type);
    }
#endif
    /* unaligned access helper: body.cfunc may not be naturally aligned */
    return UNALIGNED_MEMBER_PTR(me->def, body.cfunc);
}
3457
/* Invoke a C-implemented method with a full CFUNC control frame.
 *
 * argv points at the argc arguments; stack_bottom is the slot the VM
 * stack is rewound to before the call.  Pushes a VM_FRAME_MAGIC_CFUNC
 * frame, fires C_CALL/C_RETURN events and dtrace hooks around the
 * invocation, then pops the frame and returns the cfunc's result. */
static inline VALUE
vm_call_cfunc_with_frame_(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling,
                          int argc, VALUE *argv, VALUE *stack_bottom)
{
    RB_DEBUG_COUNTER_INC(ccf_cfunc_with_frame);
    const struct rb_callinfo *ci = calling->cd->ci;
    const struct rb_callcache *cc = calling->cc;
    VALUE val;
    const rb_callable_method_entry_t *me = vm_cc_cme(cc);
    const rb_method_cfunc_t *cfunc = vm_method_cfunc_entry(me);

    VALUE recv = calling->recv;
    VALUE block_handler = calling->block_handler;
    VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;

    if (UNLIKELY(calling->kw_splat)) {
        /* mark the frame so keyword-splat semantics survive into the cfunc */
        frame_type |= VM_FRAME_FLAG_CFRAME_KW;
    }

    VM_ASSERT(reg_cfp == ec->cfp);

    RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, me->owner, me->def->original_id);
    EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, recv, me->def->original_id, vm_ci_mid(ci), me->owner, Qundef);

    vm_push_frame(ec, NULL, frame_type, recv,
                  block_handler, (VALUE)me,
                  0, ec->cfp->sp, 0, 0);

    int len = cfunc->argc;
    /* fixed-arity cfunc: reject argument-count mismatches up front */
    if (len >= 0) rb_check_arity(argc, len, len);

    reg_cfp->sp = stack_bottom;
    val = (*cfunc->invoker)(recv, argc, argv, cfunc->func);

    /* the cfunc must return with the frame we gave it; rb_bug otherwise */
    CHECK_CFP_CONSISTENCY("vm_call_cfunc");

    rb_vm_pop_frame(ec);

    VM_ASSERT(ec->cfp->sp == stack_bottom);

    EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, recv, me->def->original_id, vm_ci_mid(ci), me->owner, val);
    RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, me->owner, me->def->original_id);

    return val;
}
3503
3504// If true, cc->call needs to include `CALLER_SETUP_ARG` (i.e. can't be skipped in fastpath)
3505bool
3506rb_splat_or_kwargs_p(const struct rb_callinfo *restrict ci)
3507{
3508 return IS_ARGS_SPLAT(ci) || IS_ARGS_KW_OR_KW_SPLAT(ci);
3509}
3510
3511static VALUE
3512vm_call_cfunc_with_frame(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
3513{
3514 int argc = calling->argc;
3515 VALUE *stack_bottom = reg_cfp->sp - argc - 1;
3516 VALUE *argv = &stack_bottom[1];
3517
3518 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
3519}
3520
/* Generic cfunc path: run caller argument setup (possibly spilling the
 * arguments to a hidden heap array), then dispatch with a frame.
 * Installs vm_call_cfunc_with_frame as the fastpath when the call site
 * has no splat/keyword processing that would need repeating. */
static VALUE
vm_call_cfunc_other(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    RB_DEBUG_COUNTER_INC(ccf_cfunc_other);

    CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
    VALUE argv_ary;
    if (UNLIKELY(argv_ary = calling->heap_argv)) {
        /* arguments were spilled to a hidden array; call straight from it */
        VM_ASSERT(!IS_ARGS_KEYWORD(ci));
        int argc = RARRAY_LENINT(argv_ary);
        VALUE *argv = (VALUE *)RARRAY_CONST_PTR(argv_ary);
        VALUE *stack_bottom = reg_cfp->sp - 2;

        VM_ASSERT(calling->argc == 1);
        VM_ASSERT(RB_TYPE_P(argv_ary, T_ARRAY));
        VM_ASSERT(RBASIC_CLASS(argv_ary) == 0); // hidden ary

        return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
    }
    else {
        CC_SET_FASTPATH(calling->cc, vm_call_cfunc_with_frame, !rb_splat_or_kwargs_p(ci) && !calling->kw_splat);

        return vm_call_cfunc_with_frame(ec, reg_cfp, calling);
    }
}
3547
/* Copy a splat array's elements back onto the VM stack and call the
 * cfunc with them.  stack_offset skips trailing stack slots (e.g. an
 * empty keyword hash); argc_offset drops trailing array elements.
 * Arrays longer than VM_ARGC_STACK_MAX fall back to the generic path. */
static inline VALUE
vm_call_cfunc_array_argv(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, int stack_offset, int argc_offset)
{
    VALUE argv_ary = reg_cfp->sp[-1 - stack_offset];
    int argc = RARRAY_LENINT(argv_ary) - argc_offset;

    if (UNLIKELY(argc > VM_ARGC_STACK_MAX)) {
        return vm_call_cfunc_other(ec, reg_cfp, calling);
    }

    VALUE *argv = (VALUE *)RARRAY_CONST_PTR(argv_ary);
    calling->kw_splat = 0;
    int i;
    /* bottom = receiver slot; arguments are rewritten just above it */
    VALUE *stack_bottom = reg_cfp->sp - 2 - stack_offset;
    VALUE *sp = stack_bottom;
    CHECK_VM_STACK_OVERFLOW(reg_cfp, argc);
    for(i = 0; i < argc; i++) {
        *++sp = argv[i];
    }
    reg_cfp->sp = sp+1;

    return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, stack_bottom+1, stack_bottom);
}
3571
/* Specialized path for `f(*a)` call sites.  If the splat array ends in a
 * hash flagged RHASH_PASS_AS_KEYWORDS, an empty one is simply dropped;
 * a non-empty one needs full keyword processing via the generic path. */
static inline VALUE
vm_call_cfunc_only_splat(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat);
    VALUE argv_ary = reg_cfp->sp[-1];
    int argc = RARRAY_LENINT(argv_ary);
    VALUE *argv = (VALUE *)RARRAY_CONST_PTR(argv_ary);
    VALUE last_hash;
    int argc_offset = 0;

    if (UNLIKELY(argc > 0 &&
                 RB_TYPE_P((last_hash = argv[argc-1]), T_HASH) &&
                 (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS))) {
        if (!RHASH_EMPTY_P(last_hash)) {
            return vm_call_cfunc_other(ec, reg_cfp, calling);
        }
        argc_offset++; /* ignore the trailing empty keywords hash */
    }
    return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 0, argc_offset);
}
3592
3593static inline VALUE
3594vm_call_cfunc_only_splat_kw(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
3595{
3596 RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat_kw);
3597 VALUE keyword_hash = reg_cfp->sp[-1];
3598
3599 if (RB_TYPE_P(keyword_hash, T_HASH) && RHASH_EMPTY_P(keyword_hash)) {
3600 return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 1, 0);
3601 }
3602
3603 return vm_call_cfunc_other(ec, reg_cfp, calling);
3604}
3605
3606static VALUE
3607vm_call_cfunc(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
3608{
3609 const struct rb_callinfo *ci = calling->cd->ci;
3610 RB_DEBUG_COUNTER_INC(ccf_cfunc);
3611
3612 if (IS_ARGS_SPLAT(ci)) {
3613 if (!IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 1) {
3614 // f(*a)
3615 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat, TRUE);
3616 return vm_call_cfunc_only_splat(ec, reg_cfp, calling);
3617 }
3618 if (IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 2) {
3619 // f(*a, **kw)
3620 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat_kw, TRUE);
3621 return vm_call_cfunc_only_splat_kw(ec, reg_cfp, calling);
3622 }
3623 }
3624
3625 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_other, TRUE);
3626 return vm_call_cfunc_other(ec, reg_cfp, calling);
3627}
3628
3629static VALUE
3630vm_call_ivar(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
3631{
3632 const struct rb_callcache *cc = calling->cc;
3633 RB_DEBUG_COUNTER_INC(ccf_ivar);
3634 cfp->sp -= 1;
3635 VALUE ivar = vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE, Qnil);
3636 return ivar;
3637}
3638
3639static VALUE
3640vm_call_attrset_direct(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_callcache *cc, VALUE obj)
3641{
3642 RB_DEBUG_COUNTER_INC(ccf_attrset);
3643 VALUE val = *(cfp->sp - 1);
3644 cfp->sp -= 2;
3645 attr_index_t index = vm_cc_attr_index(cc);
3646 shape_id_t dest_shape_id = vm_cc_attr_index_dest_shape_id(cc);
3647 ID id = vm_cc_cme(cc)->def->body.attr.id;
3649 VALUE res = vm_setivar(obj, id, val, dest_shape_id, index);
3650 if (UNDEF_P(res)) {
3651 switch (BUILTIN_TYPE(obj)) {
3652 case T_OBJECT:
3653 case T_CLASS:
3654 case T_MODULE:
3655 break;
3656 default:
3657 {
3658 res = vm_setivar_default(obj, id, val, dest_shape_id, index);
3659 if (!UNDEF_P(res)) {
3660 return res;
3661 }
3662 }
3663 }
3664 res = vm_setivar_slowpath_attr(obj, id, val, cc);
3665 }
3666 return res;
3667}
3668
3669static VALUE
3670vm_call_attrset(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
3671{
3672 return vm_call_attrset_direct(ec, cfp, calling->cc, calling->recv);
3673}
3674
/* Invoke the Proc behind a bmethod (define_method).  Raises RuntimeError
 * if the Proc is unshareable and was defined in a different Ractor. */
static inline VALUE
vm_call_bmethod_body(rb_execution_context_t *ec, struct rb_calling_info *calling, const VALUE *argv)
{
    rb_proc_t *proc;
    VALUE val;
    const struct rb_callcache *cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
    VALUE procv = cme->def->body.bmethod.proc;

    if (!RB_OBJ_SHAREABLE_P(procv) &&
        cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
        rb_raise(rb_eRuntimeError, "defined with an un-shareable Proc in a different Ractor");
    }

    /* control block frame */
    GetProcPtr(procv, proc);
    val = rb_vm_invoke_bmethod(ec, proc, calling->recv, CALLING_ARGC(calling), argv, calling->kw_splat, calling->block_handler, vm_cc_cme(cc));

    return val;
}
3695
3696static int vm_callee_setup_block_arg(rb_execution_context_t *ec, struct rb_calling_info *calling, const struct rb_callinfo *ci, const rb_iseq_t *iseq, VALUE *argv, const enum arg_setup_type arg_setup_type);
3697static VALUE invoke_bmethod(rb_execution_context_t *ec, const rb_iseq_t *iseq, VALUE self, const struct rb_captured_block *captured, const rb_callable_method_entry_t *me, VALUE type, int opt_pc);
3698
/* Fastpath for a bmethod whose underlying block is an iseq: set up the
 * arguments in place and push a BMETHOD block frame directly, avoiding
 * the generic proc-invocation machinery.  Returns Qundef so the caller
 * executes the pushed frame. */
static VALUE
vm_call_iseq_bmethod(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_iseq_bmethod);

    const struct rb_callcache *cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
    VALUE procv = cme->def->body.bmethod.proc;

    if (!RB_OBJ_SHAREABLE_P(procv) &&
        cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
        rb_raise(rb_eRuntimeError, "defined with an un-shareable Proc in a different Ractor");
    }

    rb_proc_t *proc;
    GetProcPtr(procv, proc);
    const struct rb_block *block = &proc->block;

    /* unwrap nested proc wrappers down to the real block */
    while (vm_block_type(block) == block_type_proc) {
        block = vm_proc_block(block->as.proc);
    }
    VM_ASSERT(vm_block_type(block) == block_type_iseq);

    const struct rb_captured_block *captured = &block->as.captured;
    const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
    VALUE * const argv = cfp->sp - calling->argc;
    const int arg_size = ISEQ_BODY(iseq)->param.size;

    int opt_pc;
    if (vm_ci_flag(calling->cd->ci) & VM_CALL_ARGS_SIMPLE) {
        opt_pc = vm_callee_setup_block_arg(ec, calling, calling->cd->ci, iseq, argv, arg_setup_method);
    }
    else {
        opt_pc = setup_parameters_complex(ec, iseq, calling, calling->cd->ci, argv, arg_setup_method);
    }

    cfp->sp = argv - 1; // -1 for the receiver

    vm_push_frame(ec, iseq,
                  VM_FRAME_MAGIC_BLOCK | VM_FRAME_FLAG_BMETHOD | VM_FRAME_FLAG_LAMBDA,
                  calling->recv,
                  VM_GUARDED_PREV_EP(captured->ep),
                  (VALUE)cme,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
                  argv + arg_size,
                  ISEQ_BODY(iseq)->local_table_size - arg_size,
                  ISEQ_BODY(iseq)->stack_max);

    return Qundef;
}
3749
/* Call a bmethod whose underlying block is not an iseq (e.g. a cfunc or
 * symbol proc).  Arguments are gathered either from the heap-argv array
 * or copied off the VM stack into an alloca buffer. */
static VALUE
vm_call_noniseq_bmethod(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_noniseq_bmethod);

    VALUE *argv;
    int argc;
    CALLER_SETUP_ARG(cfp, calling, calling->cd->ci, ALLOW_HEAP_ARGV);
    if (UNLIKELY(calling->heap_argv)) {
        /* args live in the heap array; drop receiver + array slots */
        argv = RARRAY_PTR(calling->heap_argv);
        cfp->sp -= 2;
    }
    else {
        argc = calling->argc;
        argv = ALLOCA_N(VALUE, argc);
        MEMCPY(argv, cfp->sp - argc, VALUE, argc);
        cfp->sp += - argc - 1; /* pop args and receiver */
    }

    return vm_call_bmethod_body(ec, calling, argv);
}
3771
3772static VALUE
3773vm_call_bmethod(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
3774{
3775 RB_DEBUG_COUNTER_INC(ccf_bmethod);
3776
3777 const struct rb_callcache *cc = calling->cc;
3778 const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
3779 VALUE procv = cme->def->body.bmethod.proc;
3780 rb_proc_t *proc;
3781 GetProcPtr(procv, proc);
3782 const struct rb_block *block = &proc->block;
3783
3784 while (vm_block_type(block) == block_type_proc) {
3785 block = vm_proc_block(block->as.proc);
3786 }
3787 if (vm_block_type(block) == block_type_iseq) {
3788 CC_SET_FASTPATH(cc, vm_call_iseq_bmethod, TRUE);
3789 return vm_call_iseq_bmethod(ec, cfp, calling);
3790 }
3791
3792 CC_SET_FASTPATH(cc, vm_call_noniseq_bmethod, TRUE);
3793 return vm_call_noniseq_bmethod(ec, cfp, calling);
3794}
3795
3796VALUE
3797rb_find_defined_class_by_owner(VALUE current_class, VALUE target_owner)
3798{
3799 VALUE klass = current_class;
3800
3801 /* for prepended Module, then start from cover class */
3802 if (RB_TYPE_P(klass, T_ICLASS) && FL_TEST(klass, RICLASS_IS_ORIGIN) &&
3803 RB_TYPE_P(RBASIC_CLASS(klass), T_CLASS)) {
3804 klass = RBASIC_CLASS(klass);
3805 }
3806
3807 while (RTEST(klass)) {
3808 VALUE owner = RB_TYPE_P(klass, T_ICLASS) ? RBASIC_CLASS(klass) : klass;
3809 if (owner == target_owner) {
3810 return klass;
3811 }
3812 klass = RCLASS_SUPER(klass);
3813 }
3814
3815 return current_class; /* maybe module function */
3816}
3817
3818static const rb_callable_method_entry_t *
3819aliased_callable_method_entry(const rb_callable_method_entry_t *me)
3820{
3821 const rb_method_entry_t *orig_me = me->def->body.alias.original_me;
3822 const rb_callable_method_entry_t *cme;
3823
3824 if (orig_me->defined_class == 0) {
3825 VALUE defined_class = rb_find_defined_class_by_owner(me->defined_class, orig_me->owner);
3826 VM_ASSERT(RB_TYPE_P(orig_me->owner, T_MODULE));
3827 cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class);
3828
3829 if (me->def->reference_count == 1) {
3830 RB_OBJ_WRITE(me, &me->def->body.alias.original_me, cme);
3831 }
3832 else {
3834 rb_method_definition_create(VM_METHOD_TYPE_ALIAS, me->def->original_id);
3835 rb_method_definition_set((rb_method_entry_t *)me, def, (void *)cme);
3836 }
3837 }
3838 else {
3839 cme = (const rb_callable_method_entry_t *)orig_me;
3840 }
3841
3842 VM_ASSERT(callable_method_entry_p(cme));
3843 return cme;
3844}
3845
3847rb_aliased_callable_method_entry(const rb_callable_method_entry_t *me)
3848{
3849 return aliased_callable_method_entry(me);
3850}
3851
/* Call an aliased method: substitute an on-stack call cache whose cme is
 * the resolved original entry, then redispatch by method type. */
static VALUE
vm_call_alias(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    calling->cc = &VM_CC_ON_STACK(Qundef,
                                  vm_call_general,
                                  {{0}},
                                  aliased_callable_method_entry(vm_cc_cme(calling->cc)));

    return vm_call_method_each_type(ec, cfp, calling);
}
3862
3863static enum method_missing_reason
3864ci_missing_reason(const struct rb_callinfo *ci)
3865{
3866 enum method_missing_reason stat = MISSING_NOENTRY;
3867 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
3868 if (vm_ci_flag(ci) & VM_CALL_FCALL) stat |= MISSING_FCALL;
3869 if (vm_ci_flag(ci) & VM_CALL_SUPER) stat |= MISSING_SUPER;
3870 return stat;
3871}
3872
3873static VALUE vm_call_method_missing(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling);
3874
/* Dispatch a call named by a Symbol/String (e.g. via send/__send__).
 * If the name does not intern to an existing ID, redirect to
 * method_missing — inserting the name as the first argument — while
 * avoiding inadvertent Symbol creation ([Feature #5112]).  Otherwise
 * build on-stack call data and apply visibility rules (FCALL bypasses
 * the private/protected checks). */
static VALUE
vm_call_symbol(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
               struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE symbol, int flags)
{
    ASSUME(calling->argc >= 0);

    enum method_missing_reason missing_reason = MISSING_NOENTRY;
    int argc = calling->argc;
    VALUE recv = calling->recv;
    VALUE klass = CLASS_OF(recv);
    ID mid = rb_check_id(&symbol); /* NULL if the name is not interned */
    flags |= VM_CALL_OPT_SEND;

    if (UNLIKELY(! mid)) {
        mid = idMethodMissing;
        missing_reason = ci_missing_reason(ci);
        ec->method_missing_reason = missing_reason;

        VALUE argv_ary;
        if (UNLIKELY(argv_ary = calling->heap_argv)) {
            if (rb_method_basic_definition_p(klass, idMethodMissing)) {
                rb_ary_unshift(argv_ary, symbol);

                /* Inadvertent symbol creation shall be forbidden, see [Feature #5112] */
                int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
                VALUE exc = rb_make_no_method_exception(
                    rb_eNoMethodError, 0, recv, RARRAY_LENINT(argv_ary), RARRAY_CONST_PTR(argv_ary), priv);

                rb_exc_raise(exc);
            }
            rb_ary_unshift(argv_ary, rb_str_intern(symbol));
        }
        else {
            /* E.g. when argc == 2
             *
             *   |      |        |      |  TOPN
             *   |      |        +------+
             *   |      |  +---> | arg1 |    0
             *   +------+  |     +------+
             *   | arg1 | -+ +-> | arg0 |    1
             *   +------+    |   +------+
             *   | arg0 | ---+   | sym  |    2
             *   +------+        +------+
             *   | recv |        | recv |    3
             * --+------+--------+------+------
             */
            /* make room at the bottom of the args and store the name there */
            int i = argc;
            CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
            INC_SP(1);
            MEMMOVE(&TOPN(i - 1), &TOPN(i), VALUE, i);
            argc = ++calling->argc;

            if (rb_method_basic_definition_p(klass, idMethodMissing)) {
                /* Inadvertent symbol creation shall be forbidden, see [Feature #5112] */
                TOPN(i) = symbol;
                int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
                const VALUE *argv = STACK_ADDR_FROM_TOP(argc);
                VALUE exc = rb_make_no_method_exception(
                    rb_eNoMethodError, 0, recv, argc, argv, priv);

                rb_exc_raise(exc);
            }
            else {
                TOPN(i) = rb_str_intern(symbol);
            }
        }
    }

    /* rebuild call data/cache on the stack for the resolved method id */
    calling->cd = &(struct rb_call_data) {
        .ci = &VM_CI_ON_STACK(mid, flags, argc, vm_ci_kwarg(ci)),
        .cc = NULL,
    };
    calling->cc = &VM_CC_ON_STACK(klass,
                                  vm_call_general,
                                  { .method_missing_reason = missing_reason },
                                  rb_callable_method_entry_with_refinements(klass, mid, NULL));

    if (flags & VM_CALL_FCALL) {
        return vm_call_method(ec, reg_cfp, calling);
    }

    const struct rb_callcache *cc = calling->cc;
    VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));

    if (vm_cc_cme(cc) != NULL) {
        switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
          case METHOD_VISI_PUBLIC: /* likely */
            return vm_call_method_each_type(ec, reg_cfp, calling);
          case METHOD_VISI_PRIVATE:
            vm_cc_method_missing_reason_set(cc, MISSING_PRIVATE);
            break;
          case METHOD_VISI_PROTECTED:
            vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
            break;
          default:
            VM_UNREACHABLE(vm_call_method);
        }
        return vm_call_method_missing(ec, reg_cfp, calling);
    }

    return vm_call_method_nome(ec, reg_cfp, calling);
}
3977
/* Core of an optimized `send`: pop the method-name argument off the
 * stack (shifting the remaining arguments down) and redispatch through
 * vm_call_symbol.  Raises ArgumentError when no name was given. */
static VALUE
vm_call_opt_send0(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, int flags)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    int i;
    VALUE sym;

    i = calling->argc - 1;

    if (calling->argc == 0) {
        rb_raise(rb_eArgError, "no method name given");
    }

    sym = TOPN(i);
    /* E.g. when i == 2
     *
     *   |      |        |      |  TOPN
     *   +------+        |      |
     *   | arg1 | ---+   |      |    0
     *   +------+    |   +------+
     *   | arg0 | -+ +-> | arg1 |    1
     *   +------+  |     +------+
     *   | sym  |  +---> | arg0 |    2
     *   +------+        +------+
     *   | recv |        | recv |    3
     * --+------+--------+------+------
     */
    /* shift arguments */
    if (i > 0) {
        MEMMOVE(&TOPN(i), &TOPN(i-1), VALUE, i);
    }
    calling->argc -= 1;
    DEC_SP(1);

    return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
}
4014
/* Optimized `send` for call sites with splat/keyword arguments.  With a
 * heap argv, the method name is shifted off the array and a trailing
 * keyword hash (if any) is re-flagged to pass as keywords; otherwise
 * fall back to the stack-based vm_call_opt_send0. */
static VALUE
vm_call_opt_send_complex(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_send_complex);
    const struct rb_callinfo *ci = calling->cd->ci;
    int flags = VM_CALL_FCALL;
    VALUE sym;

    VALUE argv_ary;
    CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
    if (UNLIKELY(argv_ary = calling->heap_argv)) {
        sym = rb_ary_shift(argv_ary); /* first element is the method name */
        flags |= VM_CALL_ARGS_SPLAT;
        if (calling->kw_splat) {
            /* mark the trailing hash so it is passed as keywords downstream */
            VALUE last_hash = rb_ary_last(0, NULL, argv_ary);
            ((struct RHash *)last_hash)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
            calling->kw_splat = 0;
        }
        return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
    }

    if (calling->kw_splat) flags |= VM_CALL_KW_SPLAT;
    return vm_call_opt_send0(ec, reg_cfp, calling, flags);
}
4039
4040static VALUE
4041vm_call_opt_send_simple(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
4042{
4043 RB_DEBUG_COUNTER_INC(ccf_opt_send_simple);
4044 return vm_call_opt_send0(ec, reg_cfp, calling, vm_ci_flag(calling->cd->ci) | VM_CALL_FCALL);
4045}
4046
/* Entry point for optimized `send`: pick the complex path when the call
 * site needs caller-argument setup (lone splat, splat+kw-splat pair, or
 * keywords covering every argument), otherwise the simple path; install
 * the choice as the fastpath. */
static VALUE
vm_call_opt_send(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_send);

    const struct rb_callinfo *ci = calling->cd->ci;
    int flags = vm_ci_flag(ci);

    if (UNLIKELY(!(flags & VM_CALL_ARGS_SIMPLE) &&
                 ((calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
                  (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
                  ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc))))) {
        CC_SET_FASTPATH(calling->cc, vm_call_opt_send_complex, TRUE);
        return vm_call_opt_send_complex(ec, reg_cfp, calling);
    }

    CC_SET_FASTPATH(calling->cc, vm_call_opt_send_simple, TRUE);
    return vm_call_opt_send_simple(ec, reg_cfp, calling);
}
4066
/* Redirect a failed call to method_missing: insert the original method
 * name (as a Symbol) as the first argument, record the reason on the
 * execution context, and redispatch with fresh on-stack call data. */
static VALUE
vm_call_method_missing_body(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling,
                            const struct rb_callinfo *orig_ci, enum method_missing_reason reason)
{
    RB_DEBUG_COUNTER_INC(ccf_method_missing);

    VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
    unsigned int argc, flag;

    flag = VM_CALL_FCALL | VM_CALL_OPT_SEND | vm_ci_flag(orig_ci);
    argc = ++calling->argc;

    /* shift arguments: m(a, b, c) #=> method_missing(:m, a, b, c) */
    CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
    vm_check_canary(ec, reg_cfp->sp);
    if (argc > 1) {
        MEMMOVE(argv+1, argv, VALUE, argc-1);
    }
    argv[0] = ID2SYM(vm_ci_mid(orig_ci));
    INC_SP(1);

    ec->method_missing_reason = reason;
    calling->cd = &(struct rb_call_data) {
        .ci = &VM_CI_ON_STACK(idMethodMissing, flag, argc, vm_ci_kwarg(orig_ci)),
        .cc = NULL,
    };
    calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }},
                                  rb_callable_method_entry_without_refinements(CLASS_OF(calling->recv), idMethodMissing, NULL));
    return vm_call_method(ec, reg_cfp, calling);
}
4097
4098static VALUE
4099vm_call_method_missing(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
4100{
4101 return vm_call_method_missing_body(ec, reg_cfp, calling, calling->cd->ci, vm_cc_cmethod_missing_reason(calling->cc));
4102}
4103
4104static const rb_callable_method_entry_t *refined_method_callable_without_refinement(const rb_callable_method_entry_t *me);
/* Call a zsuper method: look the method up starting from the superclass
 * of klass, unwrap refined entries, and redispatch with an on-stack
 * call cache; NoMethodError path when nothing is found. */
static VALUE
vm_call_zsuper(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, VALUE klass)
{
    klass = RCLASS_SUPER(klass);

    const rb_callable_method_entry_t *cme = klass ? rb_callable_method_entry(klass, vm_ci_mid(calling->cd->ci)) : NULL;
    if (cme == NULL) {
        return vm_call_method_nome(ec, cfp, calling);
    }
    if (cme->def->type == VM_METHOD_TYPE_REFINED &&
        cme->def->body.refined.orig_me) {
        /* ignore the refinement; call the original method */
        cme = refined_method_callable_without_refinement(cme);
    }

    calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, cme);

    return vm_call_method_each_type(ec, cfp, calling);
}
4123
4124static inline VALUE
4125find_refinement(VALUE refinements, VALUE klass)
4126{
4127 if (NIL_P(refinements)) {
4128 return Qnil;
4129 }
4130 return rb_hash_lookup(refinements, klass);
4131}
4132
/* Find the control frame of the method enclosing cfp: for a block frame,
 * walk up to the frame running the block's local (method-level) iseq.
 * Returns the original frame when the enclosing frame cannot be found
 * (orphan block). */
PUREFUNC(static rb_control_frame_t * current_method_entry(const rb_execution_context_t *ec, rb_control_frame_t *cfp));
static rb_control_frame_t *
current_method_entry(const rb_execution_context_t *ec, rb_control_frame_t *cfp)
{
    rb_control_frame_t *top_cfp = cfp;

    if (cfp->iseq && ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_BLOCK) {
        const rb_iseq_t *local_iseq = ISEQ_BODY(cfp->iseq)->local_iseq;

        do {
            cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
            if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(ec, cfp)) {
                /* TODO: orphan block */
                return top_cfp;
            }
        } while (cfp->iseq != local_iseq);
    }
    return cfp;
}
4152
4153static const rb_callable_method_entry_t *
4154refined_method_callable_without_refinement(const rb_callable_method_entry_t *me)
4155{
4156 const rb_method_entry_t *orig_me = me->def->body.refined.orig_me;
4157 const rb_callable_method_entry_t *cme;
4158
4159 if (orig_me->defined_class == 0) {
4160 cme = NULL;
4162 }
4163 else {
4164 cme = (const rb_callable_method_entry_t *)orig_me;
4165 }
4166
4167 VM_ASSERT(callable_method_entry_p(cme));
4168
4169 if (UNDEFINED_METHOD_ENTRY_P(cme)) {
4170 cme = NULL;
4171 }
4172
4173 return cme;
4174}
4175
/* Resolve which method entry a refined call should use.  Walks the cref
 * chain of lexically active refinements looking for one that refines the
 * owner and defines mid; skips the entry currently executing when called
 * via super (to avoid infinite recursion).  Falls back to the original
 * (pre-refinement) method, or a superclass lookup, when no refinement
 * applies. */
static const rb_callable_method_entry_t *
search_refined_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    ID mid = vm_ci_mid(calling->cd->ci);
    const rb_cref_t *cref = vm_get_cref(cfp->ep);
    const struct rb_callcache * const cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);

    for (; cref; cref = CREF_NEXT(cref)) {
        const VALUE refinement = find_refinement(CREF_REFINEMENTS(cref), vm_cc_cme(cc)->owner);
        if (NIL_P(refinement)) continue;

        const rb_callable_method_entry_t *const ref_me =
            rb_callable_method_entry(refinement, mid);

        if (ref_me) {
            if (vm_cc_call(cc) == vm_call_super_method) {
                /* super: skip the refinement method we are already inside */
                const rb_control_frame_t *top_cfp = current_method_entry(ec, cfp);
                const rb_callable_method_entry_t *top_me = rb_vm_frame_method_entry(top_cfp);
                if (top_me && rb_method_definition_eq(ref_me->def, top_me->def)) {
                    continue;
                }
            }

            if (cme->def->type != VM_METHOD_TYPE_REFINED ||
                cme->def != ref_me->def) {
                cme = ref_me;
            }
            if (ref_me->def->type != VM_METHOD_TYPE_REFINED) {
                return cme;
            }
        }
        else {
            return NULL;
        }
    }

    if (vm_cc_cme(cc)->def->body.refined.orig_me) {
        return refined_method_callable_without_refinement(vm_cc_cme(cc));
    }
    else {
        /* no original method: continue the lookup from the superclass */
        VALUE klass = RCLASS_SUPER(vm_cc_cme(cc)->defined_class);
        const rb_callable_method_entry_t *cme = klass ? rb_callable_method_entry(klass, mid) : NULL;
        return cme;
    }
}
4222
/* Call handler for VM_METHOD_TYPE_REFINED: resolve the refinement target,
 * rewrite the call cache to point at it, and re-dispatch.  Falls back to
 * method_missing handling when no target exists. */
static VALUE
vm_call_refined(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const rb_callable_method_entry_t *ref_cme = search_refined_method(ec, cfp, calling);

    if (ref_cme) {
        if (calling->cd->cc) {
            /* Normal case: install a heap-allocated refinement cache into
             * the call data (with a write barrier against the iseq). */
            const struct rb_callcache *cc = calling->cc = vm_cc_new(vm_cc_cme(calling->cc)->defined_class, ref_cme, vm_call_general, cc_type_refinement);
            RB_OBJ_WRITE(cfp->iseq, &calling->cd->cc, cc);
            return vm_call_method(ec, cfp, calling);
        }
        else {
            /* No cache slot to update: use a throwaway on-stack cache. */
            struct rb_callcache *ref_cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, ref_cme);
            calling->cc= ref_cc;
            return vm_call_method(ec, cfp, calling);
        }
    }
    else {
        return vm_call_method_nome(ec, cfp, calling);
    }
}
4244
4245static inline VALUE vm_invoke_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, const struct rb_callinfo *ci, bool is_lambda, VALUE block_handler);
4246
4247NOINLINE(static VALUE
4248 vm_invoke_block_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
4249 struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE block_handler));
4250
/* Optimized Proc#call path: drop the receiver (the Proc) from the value
 * stack so the remaining values become the block arguments, then yield. */
static VALUE
vm_invoke_block_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                         struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE block_handler)
{
    int argc = calling->argc;

    /* remove self */
    if (argc > 0) MEMMOVE(&TOPN(argc), &TOPN(argc-1), VALUE, argc);
    DEC_SP(1);

    return vm_invoke_block(ec, reg_cfp, calling, ci, false, block_handler);
}
4263
4264static VALUE
4265vm_call_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
4266{
4267 RB_DEBUG_COUNTER_INC(ccf_opt_call);
4268
4269 const struct rb_callinfo *ci = calling->cd->ci;
4270 VALUE procval = calling->recv;
4271 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));
4272}
4273
/* Call handler for OPTIMIZED_METHOD_TYPE_BLOCK_CALL: yield directly to the
 * frame's block when Proc#call has not been redefined; otherwise fall back
 * to a full method search on the materialized Proc. */
static VALUE
vm_call_opt_block_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_block_call);

    VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));
    const struct rb_callinfo *ci = calling->cd->ci;

    if (BASIC_OP_UNREDEFINED_P(BOP_CALL, PROC_REDEFINED_OP_FLAG)) {
        return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);
    }
    else {
        /* Proc#call was redefined: convert the handler into a real Proc
         * and dispatch through the slow method-search path. */
        calling->recv = rb_vm_bh_to_procval(ec, block_handler);
        calling->cc = rb_vm_search_method_slowpath(ci, CLASS_OF(calling->recv));
        return vm_call_general(ec, reg_cfp, calling);
    }
}
4291
4292static VALUE
4293vm_call_opt_struct_aref0(rb_execution_context_t *ec, struct rb_calling_info *calling)
4294{
4295 VALUE recv = calling->recv;
4296
4297 VM_ASSERT(RB_TYPE_P(recv, T_STRUCT));
4298 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
4299 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_AREF);
4300
4301 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
4302 return internal_RSTRUCT_GET(recv, off);
4303}
4304
4305static VALUE
4306vm_call_opt_struct_aref(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
4307{
4308 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aref);
4309
4310 VALUE ret = vm_call_opt_struct_aref0(ec, calling);
4311 reg_cfp->sp -= 1;
4312 return ret;
4313}
4314
4315static VALUE
4316vm_call_opt_struct_aset0(rb_execution_context_t *ec, struct rb_calling_info *calling, VALUE val)
4317{
4318 VALUE recv = calling->recv;
4319
4320 VM_ASSERT(RB_TYPE_P(recv, T_STRUCT));
4321 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
4322 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_ASET);
4323
4324 rb_check_frozen(recv);
4325
4326 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
4327 internal_RSTRUCT_SET(recv, off, val);
4328
4329 return val;
4330}
4331
4332static VALUE
4333vm_call_opt_struct_aset(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
4334{
4335 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aset);
4336
4337 VALUE ret = vm_call_opt_struct_aset0(ec, calling, *(reg_cfp->sp - 1));
4338 reg_cfp->sp -= 2;
4339 return ret;
4340}
4341
4342NOINLINE(static VALUE vm_call_optimized(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
4343 const struct rb_callinfo *ci, const struct rb_callcache *cc));
4344
/* Evaluate `func` (an attr-style accessor call) and store the result in
 * `var`.  When c-call/c-return TracePoint hooks are active, fire the
 * RUBY_EVENT_C_CALL / RUBY_EVENT_C_RETURN events around the call; when
 * they are not, run `nohook` first (typically cache initialization and/or
 * CC_SET_FASTPATH installation) and call without event overhead.
 * Expects `ec`, `calling`, `cc`, and `ci` to be in scope at the use site. */
#define VM_CALL_METHOD_ATTR(var, func, nohook) \
    if (UNLIKELY(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) { \
        EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
                        vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \
        var = func; \
        EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
                        vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \
    } \
    else { \
        nohook; \
        var = func; \
    }
4357
/* Dispatch a call to an "optimized" method entry (send, Proc#call,
 * block call, or Struct accessor), installing the matching fastpath in
 * the call cache as it goes. */
static VALUE
vm_call_optimized(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
                  const struct rb_callinfo *ci, const struct rb_callcache *cc)
{
    switch (vm_cc_cme(cc)->def->body.optimized.type) {
      case OPTIMIZED_METHOD_TYPE_SEND:
        CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
        return vm_call_opt_send(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_CALL:
        CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
        return vm_call_opt_call(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
        CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
        return vm_call_opt_block_call(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_STRUCT_AREF: {
        /* Struct readers take no arguments. */
        CALLER_SETUP_ARG(cfp, calling, ci, 0);
        rb_check_arity(calling->argc, 0, 0);

        VALUE v;
        /* Fastpath only for simple call sites (no splat/kwargs/block). */
        VM_CALL_METHOD_ATTR(v,
                            vm_call_opt_struct_aref(ec, cfp, calling),
                            set_vm_cc_ivar(cc); \
                            CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
        return v;
      }
      case OPTIMIZED_METHOD_TYPE_STRUCT_ASET: {
        /* Struct writers take exactly one argument. */
        CALLER_SETUP_ARG(cfp, calling, ci, 1);
        rb_check_arity(calling->argc, 1, 1);

        VALUE v;
        VM_CALL_METHOD_ATTR(v,
                            vm_call_opt_struct_aset(ec, cfp, calling),
                            set_vm_cc_ivar(cc); \
                            CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
        return v;
      }
      default:
        rb_bug("vm_call_method: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
    }
}
4398
/* Dispatch a call according to the method definition type held in the
 * call cache.  Most branches install a type-specific fastpath via
 * CC_SET_FASTPATH so later calls through the same cache skip this switch. */
static VALUE
vm_call_method_each_type(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    const struct rb_callcache *cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
    VALUE v;

    VM_ASSERT(! METHOD_ENTRY_INVALIDATED(cme));

    switch (cme->def->type) {
      case VM_METHOD_TYPE_ISEQ:
        CC_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
        return vm_call_iseq_setup(ec, cfp, calling);

      case VM_METHOD_TYPE_NOTIMPLEMENTED:
      case VM_METHOD_TYPE_CFUNC:
        CC_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
        return vm_call_cfunc(ec, cfp, calling);

      case VM_METHOD_TYPE_ATTRSET:
        CALLER_SETUP_ARG(cfp, calling, ci, 1);

        rb_check_arity(calling->argc, 1, 1);

        /* The attrset fastpath cannot handle splat/kwarg call sites. */
        const unsigned int aset_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_KWARG);

        if (vm_cc_markable(cc)) {
            vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
            VM_CALL_METHOD_ATTR(v,
                                vm_call_attrset_direct(ec, cfp, cc, calling->recv),
                                CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
        }
        else {
            /* Unmarkable cache: clone it into an on-stack call cache so
             * the attr index can still be initialized and used without
             * mutating the shared cache. */
            cc = &((struct rb_callcache) {
                .flags = T_IMEMO |
                    (imemo_callcache << FL_USHIFT) |
                    VM_CALLCACHE_UNMARKABLE |
                    VM_CALLCACHE_ON_STACK,
                .klass = cc->klass,
                .cme_  = cc->cme_,
                .call_ = cc->call_,
                .aux_  = {
                    .attr = {
                        .value = INVALID_SHAPE_ID << SHAPE_FLAG_SHIFT,
                    }
                },
            });

            VM_CALL_METHOD_ATTR(v,
                                vm_call_attrset_direct(ec, cfp, cc, calling->recv),
                                CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
        }
        return v;

      case VM_METHOD_TYPE_IVAR:
        CALLER_SETUP_ARG(cfp, calling, ci, 0);
        rb_check_arity(calling->argc, 0, 0);
        vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
        const unsigned int ivar_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT);
        VM_CALL_METHOD_ATTR(v,
                            vm_call_ivar(ec, cfp, calling),
                            CC_SET_FASTPATH(cc, vm_call_ivar, !(vm_ci_flag(ci) & ivar_mask)));
        return v;

      case VM_METHOD_TYPE_MISSING:
        vm_cc_method_missing_reason_set(cc, 0);
        CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
        return vm_call_method_missing(ec, cfp, calling);

      case VM_METHOD_TYPE_BMETHOD:
        CC_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
        return vm_call_bmethod(ec, cfp, calling);

      case VM_METHOD_TYPE_ALIAS:
        CC_SET_FASTPATH(cc, vm_call_alias, TRUE);
        return vm_call_alias(ec, cfp, calling);

      case VM_METHOD_TYPE_OPTIMIZED:
        return vm_call_optimized(ec, cfp, calling, ci, cc);

      case VM_METHOD_TYPE_UNDEF:
        break;

      case VM_METHOD_TYPE_ZSUPER:
        return vm_call_zsuper(ec, cfp, calling, RCLASS_ORIGIN(vm_cc_cme(cc)->defined_class));

      case VM_METHOD_TYPE_REFINED:
        // CC_SET_FASTPATH(cc, vm_call_refined, TRUE);
        // should not set FASTPATH since vm_call_refined assumes cc->call is vm_call_super_method on invokesuper.
        return vm_call_refined(ec, cfp, calling);
    }

    rb_bug("vm_call_method: unsupported method type (%d)", vm_cc_cme(cc)->def->type);
}
4494
4495NORETURN(static void vm_raise_method_missing(rb_execution_context_t *ec, int argc, const VALUE *argv, VALUE obj, int call_status));
4496
/* Handle a call for which no method entry exists.  Normally dispatches to
 * method_missing; if method_missing itself is the missing method, raise
 * NoMethodError directly to avoid infinite recursion.
 * (The raising branches do not return: vm_raise_method_missing is
 * NORETURN, so control never falls off the end.) */
static VALUE
vm_call_method_nome(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    /* method missing */
    const struct rb_callinfo *ci = calling->cd->ci;
    const int stat = ci_missing_reason(ci);

    if (vm_ci_mid(ci) == idMethodMissing) {
        if (UNLIKELY(calling->heap_argv)) {
            /* Arguments were spilled to a heap array. */
            vm_raise_method_missing(ec, RARRAY_LENINT(calling->heap_argv), RARRAY_CONST_PTR(calling->heap_argv), calling->recv, stat);
        }
        else {
            rb_control_frame_t *reg_cfp = cfp;
            VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
            vm_raise_method_missing(ec, calling->argc, argv, calling->recv, stat);
        }
    }
    else {
        return vm_call_method_missing_body(ec, cfp, calling, ci, stat);
    }
}
4518
4519/* Protected method calls and super invocations need to check that the receiver
4520 * (self for super) inherits the module on which the method is defined.
4521 * In the case of refinements, it should consider the original class not the
4522 * refinement.
4523 */
4524static VALUE
4525vm_defined_class_for_protected_call(const rb_callable_method_entry_t *me)
4526{
4527 VALUE defined_class = me->defined_class;
4528 VALUE refined_class = RCLASS_REFINED_CLASS(defined_class);
4529 return NIL_P(refined_class) ? defined_class : refined_class;
4530}
4531
/* Main method-call entry: enforce visibility (public/private/protected)
 * for the resolved method entry, then dispatch by definition type.
 * Falls back to method_missing handling when no entry was found. */
static inline VALUE
vm_call_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    const struct rb_callcache *cc = calling->cc;

    VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));

    if (vm_cc_cme(cc) != NULL) {
        switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
          case METHOD_VISI_PUBLIC: /* likely */
            return vm_call_method_each_type(ec, cfp, calling);

          case METHOD_VISI_PRIVATE:
            /* Private methods require a receiver-less (fcall) form. */
            if (!(vm_ci_flag(ci) & VM_CALL_FCALL)) {
                enum method_missing_reason stat = MISSING_PRIVATE;
                if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;

                vm_cc_method_missing_reason_set(cc, stat);
                CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
                return vm_call_method_missing(ec, cfp, calling);
            }
            return vm_call_method_each_type(ec, cfp, calling);

          case METHOD_VISI_PROTECTED:
            if (!(vm_ci_flag(ci) & (VM_CALL_OPT_SEND | VM_CALL_FCALL))) {
                /* Protected: the caller's self must be a kind of the
                 * (refinement-resolved) defining class. */
                VALUE defined_class = vm_defined_class_for_protected_call(vm_cc_cme(cc));
                if (!rb_obj_is_kind_of(cfp->self, defined_class)) {
                    vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
                    return vm_call_method_missing(ec, cfp, calling);
                }
                else {
                    /* caching method info to dummy cc */
                    VM_ASSERT(vm_cc_cme(cc) != NULL);
                    struct rb_callcache cc_on_stack = *cc;
                    FL_SET_RAW((VALUE)&cc_on_stack, VM_CALLCACHE_UNMARKABLE);
                    calling->cc = &cc_on_stack;
                    return vm_call_method_each_type(ec, cfp, calling);
                }
            }
            return vm_call_method_each_type(ec, cfp, calling);

          default:
            rb_bug("unreachable");
        }
    }
    else {
        return vm_call_method_nome(ec, cfp, calling);
    }
}
4582
/* Generic (slow-path) call handler: count it and go through the full
 * visibility + type dispatch in vm_call_method. */
static VALUE
vm_call_general(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_general);
    return vm_call_method(ec, reg_cfp, calling);
}
4589
/* Reset a call cache's handler back to the generic slow path.
 * The cast drops the const qualifier on call_ intentionally; the empty
 * cache is excluded because it must never be mutated. */
void
rb_vm_cc_general(const struct rb_callcache *cc)
{
    VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
    VM_ASSERT(cc != vm_cc_empty());

    *(vm_call_handler *)&cc->call_ = vm_call_general;
}
4598
/* Call handler used for invokesuper call caches; behaviorally identical to
 * vm_call_general but kept as a distinct function (see comments below). */
static VALUE
vm_call_super_method(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_super_method);

    // This line is introduced to make different from `vm_call_general` because some compilers (VC we found)
    // can merge the function and the address of the function becomes same.
    // The address of `vm_call_super_method` is used in `search_refined_method`, so it should be different.
    if (ec == NULL) rb_bug("unreachable");

    /* this check is required to distinguish with other functions. */
    VM_ASSERT(vm_cc_call(calling->cc) == vm_call_super_method);
    return vm_call_method(ec, reg_cfp, calling);
}
4613
4614/* super */
4615
4616static inline VALUE
4617vm_search_normal_superclass(VALUE klass)
4618{
4619 if (BUILTIN_TYPE(klass) == T_ICLASS &&
4620 RB_TYPE_P(RBASIC(klass)->klass, T_MODULE) &&
4621 FL_TEST_RAW(RBASIC(klass)->klass, RMODULE_IS_REFINEMENT)) {
4622 klass = RBASIC(klass)->klass;
4623 }
4624 klass = RCLASS_ORIGIN(klass);
4625 return RCLASS_SUPER(klass);
4626}
4627
NORETURN(static void vm_super_outside(void));

/* Raise NoMethodError for a super call made outside of any method. */
static void
vm_super_outside(void)
{
    rb_raise(rb_eNoMethodError, "super called outside of method");
}
4635
/* Return the shared, immutable "no method" call cache used for super. */
static const struct rb_callcache *
empty_cc_for_super(void)
{
    return &vm_empty_cc_for_super;
}
4641
/* Resolve the target of a super call for the current frame: validate the
 * context, rebuild the call info with the method's original id, and fill
 * (or refresh) the call cache with an entry found in the superclass. */
static const struct rb_callcache *
vm_search_super_method(const rb_control_frame_t *reg_cfp, struct rb_call_data *cd, VALUE recv)
{
    VALUE current_defined_class;
    const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(reg_cfp);

    if (!me) {
        vm_super_outside();
    }

    current_defined_class = vm_defined_class_for_protected_call(me);

    /* self must be a kind of the defining class (modules excepted);
     * otherwise super would dispatch on the wrong ancestry. */
    if (BUILTIN_TYPE(current_defined_class) != T_MODULE &&
        reg_cfp->iseq != method_entry_iseqptr(me) &&
        !rb_obj_is_kind_of(recv, current_defined_class)) {
        VALUE m = RB_TYPE_P(current_defined_class, T_ICLASS) ?
            RCLASS_INCLUDER(current_defined_class) : current_defined_class;

        if (m) { /* not bound UnboundMethod */
            rb_raise(rb_eTypeError,
                     "self has wrong type to call super in this context: "
                     "%"PRIsVALUE" (expected %"PRIsVALUE")",
                     rb_obj_class(recv), m);
        }
    }

    /* Zsuper (argument-less super) cannot see the original arguments of a
     * define_method body, so reject it explicitly. */
    if (me->def->type == VM_METHOD_TYPE_BMETHOD && (vm_ci_flag(cd->ci) & VM_CALL_ZSUPER)) {
        rb_raise(rb_eRuntimeError,
                 "implicit argument passing of super from method defined"
                 " by define_method() is not supported."
                 " Specify all arguments explicitly.");
    }

    /* Super continues the lookup under the method's original name, even
     * when called through an alias. */
    ID mid = me->def->original_id;

    // update iseq. really? (TODO)
    cd->ci = vm_ci_new_runtime(mid,
                               vm_ci_flag(cd->ci),
                               vm_ci_argc(cd->ci),
                               vm_ci_kwarg(cd->ci));

    RB_OBJ_WRITTEN(reg_cfp->iseq, Qundef, cd->ci);

    const struct rb_callcache *cc;

    VALUE klass = vm_search_normal_superclass(me->defined_class);

    if (!klass) {
        /* bound instance method of module */
        cc = vm_cc_new(klass, NULL, vm_call_method_missing, cc_type_super);
        RB_OBJ_WRITE(reg_cfp->iseq, &cd->cc, cc);
    }
    else {
        cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, klass);
        const rb_callable_method_entry_t *cached_cme = vm_cc_cme(cc);

        // define_method can cache for different method id
        if (cached_cme == NULL) {
            // empty_cc_for_super is not markable object
            cd->cc = empty_cc_for_super();
        }
        else if (cached_cme->called_id != mid) {
            /* The cache was populated for a different (aliased) name:
             * look up the original id explicitly. */
            const rb_callable_method_entry_t *cme = rb_callable_method_entry(klass, mid);
            if (cme) {
                cc = vm_cc_new(klass, cme, vm_call_super_method, cc_type_super);
                RB_OBJ_WRITE(reg_cfp->iseq, &cd->cc, cc);
            }
            else {
                cd->cc = cc = empty_cc_for_super();
            }
        }
        else {
            switch (cached_cme->def->type) {
              // vm_call_refined (search_refined_method) assumes cc->call is vm_call_super_method on invokesuper
              case VM_METHOD_TYPE_REFINED:
              // cc->klass is superclass of receiver class. Checking cc->klass is not enough to invalidate IVC for the receiver class.
              case VM_METHOD_TYPE_ATTRSET:
              case VM_METHOD_TYPE_IVAR:
                vm_cc_call_set(cc, vm_call_super_method); // invalidate fastpath
                break;
              default:
                break; // use fastpath
            }
        }
    }

    VM_ASSERT((vm_cc_cme(cc), true));

    return cc;
}
4732
4733/* yield */
4734
4735static inline int
4736block_proc_is_lambda(const VALUE procval)
4737{
4738 rb_proc_t *proc;
4739
4740 if (procval) {
4741 GetProcPtr(procval, proc);
4742 return proc->is_lambda;
4743 }
4744 else {
4745 return 0;
4746 }
4747}
4748
4749static VALUE
4750vm_yield_with_cfunc(rb_execution_context_t *ec,
4751 const struct rb_captured_block *captured,
4752 VALUE self, int argc, const VALUE *argv, int kw_splat, VALUE block_handler,
4754{
4755 int is_lambda = FALSE; /* TODO */
4756 VALUE val, arg, blockarg;
4757 int frame_flag;
4758 const struct vm_ifunc *ifunc = captured->code.ifunc;
4759
4760 if (is_lambda) {
4761 arg = rb_ary_new4(argc, argv);
4762 }
4763 else if (argc == 0) {
4764 arg = Qnil;
4765 }
4766 else {
4767 arg = argv[0];
4768 }
4769
4770 blockarg = rb_vm_bh_to_procval(ec, block_handler);
4771
4772 frame_flag = VM_FRAME_MAGIC_IFUNC | VM_FRAME_FLAG_CFRAME | (me ? VM_FRAME_FLAG_BMETHOD : 0);
4773 if (kw_splat) {
4774 frame_flag |= VM_FRAME_FLAG_CFRAME_KW;
4775 }
4776
4777 vm_push_frame(ec, (const rb_iseq_t *)captured->code.ifunc,
4778 frame_flag,
4779 self,
4780 VM_GUARDED_PREV_EP(captured->ep),
4781 (VALUE)me,
4782 0, ec->cfp->sp, 0, 0);
4783 val = (*ifunc->func)(arg, (VALUE)ifunc->data, argc, argv, blockarg);
4784 rb_vm_pop_frame(ec);
4785
4786 return val;
4787}
4788
/* Public wrapper: yield to an ifunc block with no keyword splat, no block
 * argument, and no method entry (not a bmethod frame). */
VALUE
rb_vm_yield_with_cfunc(rb_execution_context_t *ec, const struct rb_captured_block *captured, int argc, const VALUE *argv)
{
    return vm_yield_with_cfunc(ec, captured, captured->self, argc, argv, 0, VM_BLOCK_HANDLER_NONE, NULL);
}
4794
/* Yield to a Symbol block (&:sym): dispatch through Symbol#to_proc call
 * semantics with the symbol's id. */
static VALUE
vm_yield_with_symbol(rb_execution_context_t *ec, VALUE symbol, int argc, const VALUE *argv, int kw_splat, VALUE block_handler)
{
    return rb_sym_proc_call(SYM2ID(symbol), argc, argv, kw_splat, rb_vm_bh_to_procval(ec, block_handler));
}
4800
4801static inline int
4802vm_callee_setup_block_arg_arg0_splat(rb_control_frame_t *cfp, const rb_iseq_t *iseq, VALUE *argv, VALUE ary)
4803{
4804 int i;
4805 long len = RARRAY_LEN(ary);
4806
4807 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
4808
4809 for (i=0; i<len && i<ISEQ_BODY(iseq)->param.lead_num; i++) {
4810 argv[i] = RARRAY_AREF(ary, i);
4811 }
4812
4813 return i;
4814}
4815
/* If the single block argument responds to #to_ary, return the converted
 * array (for auto-splatting); otherwise return nil.  The conversion must
 * not replace argv[0] itself (asserted below). */
static inline VALUE
vm_callee_setup_block_arg_arg0_check(VALUE *argv)
{
    VALUE ary, arg0 = argv[0];
    ary = rb_check_array_type(arg0);
#if 0
    argv[0] = arg0;
#else
    VM_ASSERT(argv[0] == arg0);
#endif
    return ary;
}
4828
/* Arrange the arguments on the stack for entering a block (or a lambda,
 * which uses strict method-style arity).  Returns the opt_pc offset into
 * the iseq (0 for the simple path). */
static int
vm_callee_setup_block_arg(rb_execution_context_t *ec, struct rb_calling_info *calling, const struct rb_callinfo *ci, const rb_iseq_t *iseq, VALUE *argv, const enum arg_setup_type arg_setup_type)
{
    if (rb_simple_iseq_p(iseq)) {
        rb_control_frame_t *cfp = ec->cfp;
        VALUE arg0;

        CALLER_SETUP_ARG(cfp, calling, ci, ISEQ_BODY(iseq)->param.lead_num);

        /* Auto-splat: a single array argument is spread across multiple
         * lead parameters (blocks only, and only when unambiguous). */
        if (arg_setup_type == arg_setup_block &&
            calling->argc == 1 &&
            ISEQ_BODY(iseq)->param.flags.has_lead &&
            !ISEQ_BODY(iseq)->param.flags.ambiguous_param0 &&
            !NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {
            calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);
        }

        if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
            if (arg_setup_type == arg_setup_block) {
                /* Blocks are lenient: pad missing args with nil, drop
                 * extras.  Lambdas/methods raise instead. */
                if (calling->argc < ISEQ_BODY(iseq)->param.lead_num) {
                    int i;
                    CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
                    for (i=calling->argc; i<ISEQ_BODY(iseq)->param.lead_num; i++) argv[i] = Qnil;
                    calling->argc = ISEQ_BODY(iseq)->param.lead_num; /* fill rest parameters */
                }
                else if (calling->argc > ISEQ_BODY(iseq)->param.lead_num) {
                    calling->argc = ISEQ_BODY(iseq)->param.lead_num; /* simply truncate arguments */
                }
            }
            else {
                argument_arity_error(ec, iseq, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);
            }
        }

        return 0;
    }
    else {
        /* Optional/rest/keyword/post parameters: full setup path. */
        return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_type);
    }
}
4869
/* Build a temporary calling_info/callinfo pair on the stack and set up
 * block arguments for a yield initiated from C.  Only the fields read by
 * vm_callee_setup_block_arg are initialized. */
static int
vm_yield_setup_args(rb_execution_context_t *ec, const rb_iseq_t *iseq, const int argc, VALUE *argv, int flags, VALUE block_handler, enum arg_setup_type arg_setup_type)
{
    struct rb_calling_info calling_entry, *calling;

    calling = &calling_entry;
    calling->argc = argc;
    calling->block_handler = block_handler;
    calling->kw_splat = (flags & VM_CALL_KW_SPLAT) ? 1 : 0;
    calling->recv = Qundef;
    calling->heap_argv = 0;
    struct rb_callinfo dummy_ci = VM_CI_ON_STACK(0, flags, 0, 0);

    return vm_callee_setup_block_arg(ec, calling, &dummy_ci, iseq, argv, arg_setup_type);
}
4885
4886/* ruby iseq -> ruby block */
4887
/* Invoke an iseq-backed block: lay out the arguments at the base of the
 * callee's locals, push a BLOCK frame, and return Qundef so the caller
 * resumes execution inside the new frame. */
static VALUE
vm_invoke_iseq_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                     struct rb_calling_info *calling, const struct rb_callinfo *ci,
                     bool is_lambda, VALUE block_handler)
{
    const struct rb_captured_block *captured = VM_BH_TO_ISEQ_BLOCK(block_handler);
    const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
    const int arg_size = ISEQ_BODY(iseq)->param.size;
    /* Arguments currently sit on top of the stack; rsp is their base. */
    VALUE * const rsp = GET_SP() - calling->argc;
    VALUE * const argv = rsp;
    int opt_pc = vm_callee_setup_block_arg(ec, calling, ci, iseq, argv, is_lambda ? arg_setup_method : arg_setup_block);

    SET_SP(rsp);

    vm_push_frame(ec, iseq,
                  VM_FRAME_MAGIC_BLOCK | (is_lambda ? VM_FRAME_FLAG_LAMBDA : 0),
                  captured->self,
                  VM_GUARDED_PREV_EP(captured->ep), 0,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
                  rsp + arg_size,
                  ISEQ_BODY(iseq)->local_table_size - arg_size, ISEQ_BODY(iseq)->stack_max);

    return Qundef;
}
4912
/* Invoke a Symbol block (&:sym): the first argument becomes the receiver
 * of the named method, the rest are passed through.  Complex call sites
 * (splat/kwsplat/kwargs) are normalized first. */
static VALUE
vm_invoke_symbol_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                       struct rb_calling_info *calling, const struct rb_callinfo *ci,
                       MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
{
    VALUE symbol = VM_BH_TO_SYMBOL(block_handler);
    int flags = vm_ci_flag(ci);

    /* Non-simple call sites where the receiver may be hidden inside a
     * splat/kwsplat need full argument setup before the receiver can be
     * extracted. */
    if (UNLIKELY(!(flags & VM_CALL_ARGS_SIMPLE) &&
                 ((calling->argc == 0) ||
                  (calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
                  (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
                  ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc))))) {
        CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
        flags = 0;
        if (UNLIKELY(calling->heap_argv)) {
#if VM_ARGC_STACK_MAX < 0
            if (RARRAY_LEN(calling->heap_argv) < 1) {
                rb_raise(rb_eArgError, "no receiver given");
            }
#endif
            calling->recv = rb_ary_shift(calling->heap_argv);
            // Modify stack to avoid cfp consistency error
            reg_cfp->sp++;
            reg_cfp->sp[-1] = reg_cfp->sp[-2];
            reg_cfp->sp[-2] = calling->recv;
            flags |= VM_CALL_ARGS_SPLAT;
        }
        else {
            if (calling->argc < 1) {
                rb_raise(rb_eArgError, "no receiver given");
            }
            calling->recv = TOPN(--calling->argc);
        }
        if (calling->kw_splat) {
            flags |= VM_CALL_KW_SPLAT;
        }
    }
    else {
        /* Simple case: the receiver is the first argument on the stack. */
        if (calling->argc < 1) {
            rb_raise(rb_eArgError, "no receiver given");
        }
        calling->recv = TOPN(--calling->argc);
    }

    return vm_call_symbol(ec, reg_cfp, calling, ci, symbol, flags);
}
4960
/* Invoke a C-implemented (ifunc) block: normalize arguments (possibly to a
 * heap array), call through vm_yield_with_cfunc, then pop the consumed
 * stack slots. */
static VALUE
vm_invoke_ifunc_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                      struct rb_calling_info *calling, const struct rb_callinfo *ci,
                      MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
{
    VALUE val;
    int argc;
    const struct rb_captured_block *captured = VM_BH_TO_IFUNC_BLOCK(block_handler);
    CALLER_SETUP_ARG(ec->cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
    argc = calling->argc;
    val = vm_yield_with_cfunc(ec, captured, captured->self, CALLING_ARGC(calling), calling->heap_argv ? RARRAY_CONST_PTR(calling->heap_argv) : STACK_ADDR_FROM_TOP(argc), calling->kw_splat, calling->block_handler, NULL);
    POPN(argc); /* TODO: should put before C/yield? */
    return val;
}
4975
4976static VALUE
4977vm_proc_to_block_handler(VALUE procval)
4978{
4979 const struct rb_block *block = vm_proc_block(procval);
4980
4981 switch (vm_block_type(block)) {
4982 case block_type_iseq:
4983 return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
4984 case block_type_ifunc:
4985 return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
4986 case block_type_symbol:
4987 return VM_BH_FROM_SYMBOL(block->as.symbol);
4988 case block_type_proc:
4989 return VM_BH_FROM_PROC(block->as.proc);
4990 }
4991 VM_UNREACHABLE(vm_yield_with_proc);
4992 return Qundef;
4993}
4994
4995static VALUE
4996vm_invoke_proc_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
4997 struct rb_calling_info *calling, const struct rb_callinfo *ci,
4998 bool is_lambda, VALUE block_handler)
4999{
5000 while (vm_block_handler_type(block_handler) == block_handler_type_proc) {
5001 VALUE proc = VM_BH_TO_PROC(block_handler);
5002 is_lambda = block_proc_is_lambda(proc);
5003 block_handler = vm_proc_to_block_handler(proc);
5004 }
5005
5006 return vm_invoke_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);
5007}
5008
5009static inline VALUE
5010vm_invoke_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
5011 struct rb_calling_info *calling, const struct rb_callinfo *ci,
5012 bool is_lambda, VALUE block_handler)
5013{
5014 VALUE (*func)(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
5015 struct rb_calling_info *calling, const struct rb_callinfo *ci,
5016 bool is_lambda, VALUE block_handler);
5017
5018 switch (vm_block_handler_type(block_handler)) {
5019 case block_handler_type_iseq: func = vm_invoke_iseq_block; break;
5020 case block_handler_type_ifunc: func = vm_invoke_ifunc_block; break;
5021 case block_handler_type_proc: func = vm_invoke_proc_block; break;
5022 case block_handler_type_symbol: func = vm_invoke_symbol_block; break;
5023 default: rb_bug("vm_invoke_block: unreachable");
5024 }
5025
5026 return func(ec, reg_cfp, calling, ci, is_lambda, block_handler);
5027}
5028
/* Build a Proc for the given block iseq, captured against the nearest
 * Ruby-level frame of the current execution context. */
static VALUE
vm_make_proc_with_iseq(const rb_iseq_t *blockiseq)
{
    const rb_execution_context_t *ec = GET_EC();
    const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);
    struct rb_captured_block *captured;

    if (cfp == 0) {
        rb_bug("vm_make_proc_with_iseq: unreachable");
    }

    captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);
    captured->code.iseq = blockiseq;

    return rb_vm_make_proc(ec, captured, rb_cProc);
}
5045
/* Execute a `once` body: wrap the iseq in a Proc and call it with no
 * arguments and no block. */
static VALUE
vm_once_exec(VALUE iseq)
{
    VALUE proc = vm_make_proc_with_iseq((rb_iseq_t *)iseq);
    return rb_proc_call_with_block(proc, 0, 0, Qnil);
}
5052
5053static VALUE
5054vm_once_clear(VALUE data)
5055{
5056 union iseq_inline_storage_entry *is = (union iseq_inline_storage_entry *)data;
5057 is->once.running_thread = NULL;
5058 return Qnil;
5059}
5060
5061/* defined insn */
5062
5063static bool
5064check_respond_to_missing(VALUE obj, VALUE v)
5065{
5066 VALUE args[2];
5067 VALUE r;
5068
5069 args[0] = obj; args[1] = Qfalse;
5070 r = rb_check_funcall(v, idRespond_to_missing, 2, args);
5071 if (!UNDEF_P(r) && RTEST(r)) {
5072 return true;
5073 }
5074 else {
5075 return false;
5076 }
5077}
5078
/* Implement the `defined?` expression: answer whether the given entity
 * (ivar, gvar, cvar, constant, method, yield, super, backref, ...) is
 * defined in the current context. */
static bool
vm_defined(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t op_type, VALUE obj, VALUE v)
{
    VALUE klass;
    enum defined_type type = (enum defined_type)op_type;

    switch (type) {
      case DEFINED_IVAR:
        return rb_ivar_defined(GET_SELF(), SYM2ID(obj));
        break;
      case DEFINED_GVAR:
        return rb_gvar_defined(SYM2ID(obj));
        break;
      case DEFINED_CVAR: {
        const rb_cref_t *cref = vm_get_cref(GET_EP());
        klass = vm_get_cvar_base(cref, GET_CFP(), 0);
        return rb_cvar_defined(klass, SYM2ID(obj));
        break;
      }
      case DEFINED_CONST:
      case DEFINED_CONST_FROM: {
        bool allow_nil = type == DEFINED_CONST;
        klass = v;
        return vm_get_ev_const(ec, klass, SYM2ID(obj), allow_nil, true);
        break;
      }
      case DEFINED_FUNC:
        klass = CLASS_OF(v);
        return rb_ec_obj_respond_to(ec, v, SYM2ID(obj), TRUE);
        break;
      case DEFINED_METHOD:{
        VALUE klass = CLASS_OF(v);
        const rb_method_entry_t *me = rb_method_entry_with_refinements(klass, SYM2ID(obj), NULL);

        if (me) {
            switch (METHOD_ENTRY_VISI(me)) {
              case METHOD_VISI_PRIVATE:
                break;
              case METHOD_VISI_PROTECTED:
                /* Protected counts as defined only when self could
                 * legally call it; otherwise fall through to PUBLIC
                 * is skipped via break. */
                if (!rb_obj_is_kind_of(GET_SELF(), rb_class_real(me->defined_class))) {
                    break;
                }
                /* fallthrough: accessible protected behaves like public */
              case METHOD_VISI_PUBLIC:
                return true;
                break;
              default:
                rb_bug("vm_defined: unreachable: %u", (unsigned int)METHOD_ENTRY_VISI(me));
            }
        }
        else {
            /* No entry: defer to respond_to_missing?. */
            return check_respond_to_missing(obj, v);
        }
        break;
      }
      case DEFINED_YIELD:
        if (GET_BLOCK_HANDLER() != VM_BLOCK_HANDLER_NONE) {
            return true;
        }
        break;
      case DEFINED_ZSUPER:
        {
            const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(GET_CFP());

            if (me) {
                VALUE klass = vm_search_normal_superclass(me->defined_class);
                if (!klass) return false;

                ID id = me->def->original_id;

                return rb_method_boundp(klass, id, 0);
            }
        }
        break;
      case DEFINED_REF:
        return RTEST(vm_backref_defined(ec, GET_LEP(), FIX2INT(obj)));
      default:
        rb_bug("unimplemented defined? type (VM)");
        break;
    }

    return false;
}
5161
/* Exported wrapper around the static vm_defined() helper so JITs and
 * other translation units can evaluate `defined?(...)` expressions. */
bool
rb_vm_defined(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t op_type, VALUE obj, VALUE v)
{
    return vm_defined(ec, reg_cfp, op_type, obj, v);
}
5167
5168static const VALUE *
5169vm_get_ep(const VALUE *const reg_ep, rb_num_t lv)
5170{
5171 rb_num_t i;
5172 const VALUE *ep = reg_ep;
5173 for (i = 0; i < lv; i++) {
5174 ep = GET_PREV_EP(ep);
5175 }
5176 return ep;
5177}
5178
/* Resolve a putspecialobject instruction operand: the frozen VM core
 * object, the current cref class/module (cbase), or the base used for
 * constant definitions. Aborts the process on an unknown operand. */
static VALUE
vm_get_special_object(const VALUE *const reg_ep,
                      enum vm_special_object_type type)
{
    switch (type) {
      case VM_SPECIAL_OBJECT_VMCORE:
        return rb_mRubyVMFrozenCore;
      case VM_SPECIAL_OBJECT_CBASE:
        return vm_get_cbase(reg_ep);
      case VM_SPECIAL_OBJECT_CONST_BASE:
        return vm_get_const_base(reg_ep);
      default:
        rb_bug("putspecialobject insn: unknown value_type %d", type);
    }
}
5194
5195static VALUE
5196vm_concat_array(VALUE ary1, VALUE ary2st)
5197{
5198 const VALUE ary2 = ary2st;
5199 VALUE tmp1 = rb_check_to_array(ary1);
5200 VALUE tmp2 = rb_check_to_array(ary2);
5201
5202 if (NIL_P(tmp1)) {
5203 tmp1 = rb_ary_new3(1, ary1);
5204 }
5205
5206 if (NIL_P(tmp2)) {
5207 tmp2 = rb_ary_new3(1, ary2);
5208 }
5209
5210 if (tmp1 == ary1) {
5211 tmp1 = rb_ary_dup(ary1);
5212 }
5213 return rb_ary_concat(tmp1, tmp2);
5214}
5215
// YJIT implementation is using the C function
// and needs to call a non-static function
// (exported shim over vm_concat_array; see that helper for semantics).
VALUE
rb_vm_concat_array(VALUE ary1, VALUE ary2st)
{
    return vm_concat_array(ary1, ary2st);
}
5223
5224static VALUE
5225vm_splat_array(VALUE flag, VALUE ary)
5226{
5227 VALUE tmp = rb_check_to_array(ary);
5228 if (NIL_P(tmp)) {
5229 return rb_ary_new3(1, ary);
5230 }
5231 else if (RTEST(flag)) {
5232 return rb_ary_dup(tmp);
5233 }
5234 else {
5235 return tmp;
5236 }
5237}
5238
// YJIT implementation is using the C function
// and needs to call a non-static function
// (exported shim over vm_splat_array; see that helper for semantics).
VALUE
rb_vm_splat_array(VALUE flag, VALUE ary)
{
    return vm_splat_array(flag, ary);
}
5246
/* Implements the checkmatch instruction (case/when and rescue matching).
 * With VM_CHECKMATCH_ARRAY set, `pattern` is an array and each element
 * is matched against `target` in order, returning the first truthy
 * match result (or Qfalse). Otherwise a single match is performed.
 * The low bits of `flag` select the match kind (===, rescue, case). */
static VALUE
vm_check_match(rb_execution_context_t *ec, VALUE target, VALUE pattern, rb_num_t flag)
{
    enum vm_check_match_type type = ((int)flag) & VM_CHECKMATCH_TYPE_MASK;

    if (flag & VM_CHECKMATCH_ARRAY) {
        long i;
        const long n = RARRAY_LEN(pattern);

        for (i = 0; i < n; i++) {
            VALUE v = RARRAY_AREF(pattern, i);
            VALUE c = check_match(ec, v, target, type);

            if (RTEST(c)) {
                return c;
            }
        }
        return Qfalse;
    }
    else {
        return check_match(ec, pattern, target, type);
    }
}
5270
/* Exported wrapper around vm_check_match() for JIT-compiled code. */
VALUE
rb_vm_check_match(rb_execution_context_t *ec, VALUE target, VALUE pattern, rb_num_t flag)
{
    return vm_check_match(ec, target, pattern, flag);
}
5276
/* Implements the checkkeyword instruction: return Qtrue when the
 * keyword argument at `idx` was NOT supplied by the caller (so its
 * default expression must be evaluated). The kw bits live on the
 * frame at *(ep - bits): a Fixnum bitmap for small keyword counts,
 * otherwise a Hash keyed by index. */
static VALUE
vm_check_keyword(lindex_t bits, lindex_t idx, const VALUE *ep)
{
    const VALUE kw_bits = *(ep - bits);

    if (FIXNUM_P(kw_bits)) {
        unsigned int b = (unsigned int)FIX2ULONG(kw_bits);
        if ((idx < KW_SPECIFIED_BITS_MAX) && (b & (0x01 << idx)))
            return Qfalse;
    }
    else {
        VM_ASSERT(RB_TYPE_P(kw_bits, T_HASH));
        if (rb_hash_has_key(kw_bits, INT2FIX(idx))) return Qfalse;
    }
    return Qtrue;
}
5293
5294static void
5295vm_dtrace(rb_event_flag_t flag, rb_execution_context_t *ec)
5296{
5297 if (RUBY_DTRACE_METHOD_ENTRY_ENABLED() ||
5298 RUBY_DTRACE_METHOD_RETURN_ENABLED() ||
5299 RUBY_DTRACE_CMETHOD_ENTRY_ENABLED() ||
5300 RUBY_DTRACE_CMETHOD_RETURN_ENABLED()) {
5301
5302 switch (flag) {
5303 case RUBY_EVENT_CALL:
5304 RUBY_DTRACE_METHOD_ENTRY_HOOK(ec, 0, 0);
5305 return;
5306 case RUBY_EVENT_C_CALL:
5307 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, 0, 0);
5308 return;
5309 case RUBY_EVENT_RETURN:
5310 RUBY_DTRACE_METHOD_RETURN_HOOK(ec, 0, 0);
5311 return;
5313 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, 0, 0);
5314 return;
5315 }
5316 }
5317}
5318
5319static VALUE
5320vm_const_get_under(ID id, rb_num_t flags, VALUE cbase)
5321{
5322 if (!rb_const_defined_at(cbase, id)) {
5323 return 0;
5324 }
5325 else if (VM_DEFINECLASS_SCOPED_P(flags)) {
5326 return rb_public_const_get_at(cbase, id);
5327 }
5328 else {
5329 return rb_const_get_at(cbase, id);
5330 }
5331}
5332
/* Validate that an existing constant can be reopened as a class.
 * Returns `klass` when it is a Class (and, if a superclass was given,
 * matches the existing real superclass); returns 0 when `klass` is not
 * a Class at all; raises TypeError on a superclass mismatch. */
static VALUE
vm_check_if_class(ID id, rb_num_t flags, VALUE super, VALUE klass)
{
    if (!RB_TYPE_P(klass, T_CLASS)) {
        return 0;
    }
    else if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags)) {
        /* compare against the real (non-singleton, non-iclass) superclass */
        VALUE tmp = rb_class_real(RCLASS_SUPER(klass));

        if (tmp != super) {
            rb_raise(rb_eTypeError,
                     "superclass mismatch for class %"PRIsVALUE"",
                     rb_id2str(id));
        }
        else {
            return klass;
        }
    }
    else {
        return klass;
    }
    /* rb_raise above does not return, so all paths are covered */
}
5355
5356static VALUE
5357vm_check_if_module(ID id, VALUE mod)
5358{
5359 if (!RB_TYPE_P(mod, T_MODULE)) {
5360 return 0;
5361 }
5362 else {
5363 return mod;
5364 }
5365}
5366
/* Bind a freshly created class/module `c` to constant `id` under
 * `cbase`, setting its class path first so #name is correct. */
static VALUE
declare_under(ID id, VALUE cbase, VALUE c)
{
    rb_set_class_path_string(c, cbase, rb_id2str(id));
    rb_const_set(cbase, id, c);
    return c;
}
5374
5375static VALUE
5376vm_declare_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
5377{
5378 /* new class declaration */
5379 VALUE s = VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) ? super : rb_cObject;
5380 VALUE c = declare_under(id, cbase, rb_define_class_id(id, s));
5382 rb_class_inherited(s, c);
5383 return c;
5384}
5385
/* Create a brand-new module named `id` under `cbase`. */
static VALUE
vm_declare_module(ID id, VALUE cbase)
{
    /* new module declaration */
    return declare_under(id, cbase, rb_module_new());
}
5392
/* Raise TypeError because constant `id` under `cbase` already holds
 * something that is not a `type` ("class"/"module"). Appends the
 * previous definition's source location to the message when known. */
NORETURN(static void unmatched_redefinition(const char *type, VALUE cbase, ID id, VALUE old));
static void
unmatched_redefinition(const char *type, VALUE cbase, ID id, VALUE old)
{
    VALUE name = rb_id2str(id);
    VALUE message = rb_sprintf("%"PRIsVALUE" is not a %s",
                               name, type);
    VALUE location = rb_const_source_location_at(cbase, id);
    if (!NIL_P(location)) {
        rb_str_catf(message, "\n%"PRIsVALUE":%"PRIsVALUE":"
                    " previous definition of %"PRIsVALUE" was here",
                    rb_ary_entry(location, 0), rb_ary_entry(location, 1), name);
    }
    rb_exc_raise(rb_exc_new_str(rb_eTypeError, message));
}
5408
/* Implements `class Foo [< Super]`: reopen the existing class if the
 * constant already resolves to a compatible Class, otherwise declare a
 * new one. Raises TypeError when the given superclass is not a Class
 * or when the existing constant is not a matching class. */
static VALUE
vm_define_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
{
    VALUE klass;

    if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) && !RB_TYPE_P(super, T_CLASS)) {
        rb_raise(rb_eTypeError,
                 "superclass must be an instance of Class (given an instance of %"PRIsVALUE")",
                 rb_obj_class(super));
    }

    vm_check_if_namespace(cbase);

    /* find klass */
    rb_autoload_load(cbase, id);
    if ((klass = vm_const_get_under(id, flags, cbase)) != 0) {
        /* reopening an existing class; verify it really is one */
        if (!vm_check_if_class(id, flags, super, klass))
            unmatched_redefinition("class", cbase, id, klass);
        return klass;
    }
    else {
        return vm_declare_class(id, flags, cbase, super);
    }
}
5433
/* Implements `module Foo`: reopen the existing module if the constant
 * already resolves to one, otherwise declare a new module. Raises
 * TypeError when the constant holds a non-module. */
static VALUE
vm_define_module(ID id, rb_num_t flags, VALUE cbase)
{
    VALUE mod;

    vm_check_if_namespace(cbase);
    if ((mod = vm_const_get_under(id, flags, cbase)) != 0) {
        if (!vm_check_if_module(id, mod))
            unmatched_redefinition("module", cbase, id, mod);
        return mod;
    }
    else {
        return vm_declare_module(id, cbase);
    }
}
5449
/* Dispatcher for the defineclass instruction: class definition,
 * singleton-class reopening (`class << obj`), or module definition,
 * selected by the type bits in `flags`. */
static VALUE
vm_find_or_create_class_by_id(ID id,
                              rb_num_t flags,
                              VALUE cbase,
                              VALUE super)
{
    rb_vm_defineclass_type_t type = VM_DEFINECLASS_TYPE(flags);

    switch (type) {
      case VM_DEFINECLASS_TYPE_CLASS:
        /* classdef returns class scope value */
        return vm_define_class(id, flags, cbase, super);

      case VM_DEFINECLASS_TYPE_SINGLETON_CLASS:
        /* classdef returns class scope value */
        return rb_singleton_class(cbase);

      case VM_DEFINECLASS_TYPE_MODULE:
        /* classdef returns class scope value */
        return vm_define_module(id, flags, cbase);

      default:
        rb_bug("unknown defineclass type: %d", (int)type);
    }
}
5475
/* Current default method visibility (public/private/protected) for the
 * nearest Ruby-level frame; public when no cref-carrying env exists. */
static rb_method_visibility_t
vm_scope_visibility_get(const rb_execution_context_t *ec)
{
    const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);

    if (!vm_env_cref_by_cref(cfp->ep)) {
        return METHOD_VISI_PUBLIC;
    }
    else {
        return CREF_SCOPE_VISI(vm_ec_cref(ec))->method_visi;
    }
}
5488
/* Whether the current scope is under `module_function`, meaning new
 * defs also get a public singleton copy; FALSE when no cref exists. */
static int
vm_scope_module_func_check(const rb_execution_context_t *ec)
{
    const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);

    if (!vm_env_cref_by_cref(cfp->ep)) {
        return FALSE;
    }
    else {
        return CREF_SCOPE_VISI(vm_ec_cref(ec))->module_func;
    }
}
5501
/* Implements the definemethod/definesmethod instructions: add the
 * method `id` backed by `iseqval` to the appropriate class. Singleton
 * defs go on obj's singleton class and are always public; regular defs
 * use the cref class and the scope's current visibility. Under
 * `module_function`, a public singleton copy is added as well. */
static void
vm_define_method(const rb_execution_context_t *ec, VALUE obj, ID id, VALUE iseqval, int is_singleton)
{
    VALUE klass;
    rb_method_visibility_t visi;
    rb_cref_t *cref = vm_ec_cref(ec);

    if (is_singleton) {
        klass = rb_singleton_class(obj); /* class and frozen checked in this API */
        visi = METHOD_VISI_PUBLIC;
    }
    else {
        klass = CREF_CLASS_FOR_DEFINITION(cref);
        visi = vm_scope_visibility_get(ec);
    }

    if (NIL_P(klass)) {
        rb_raise(rb_eTypeError, "no class/module to add method");
    }

    rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, visi);
    // Set max_iv_count on klasses based on number of ivar sets that are in the initialize method
    if (id == idInitialize && klass != rb_cObject && RB_TYPE_P(klass, T_CLASS) && (rb_get_alloc_func(klass) == rb_class_allocate_instance)) {

        RCLASS_EXT(klass)->max_iv_count = rb_estimate_iv_count(klass, (const rb_iseq_t *)iseqval);
    }

    if (!is_singleton && vm_scope_module_func_check(ec)) {
        /* module_function: also expose a public copy on the singleton class */
        klass = rb_singleton_class(klass);
        rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, METHOD_VISI_PUBLIC);
    }
}
5534
/* Execute `yield` for the current frame's block handler; raises
 * LocalJumpError when no block was given. (rb_vm_localjump_error does
 * not return, so the missing return after it is unreachable.) */
static VALUE
vm_invokeblock_i(struct rb_execution_context_struct *ec,
                 struct rb_control_frame_struct *reg_cfp,
                 struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    VALUE block_handler = VM_CF_BLOCK_HANDLER(GET_CFP());

    if (block_handler == VM_BLOCK_HANDLER_NONE) {
        rb_vm_localjump_error("no block given (yield)", Qnil, 0);
    }
    else {
        return vm_invoke_block(ec, GET_CFP(), calling, ci, false, block_handler);
    }
}
5550
/* Selects how vm_sendish() resolves its callee: ordinary method
 * lookup, block invocation (yield), or super dispatch. */
enum method_explorer_type {
    mexp_search_method,
    mexp_search_invokeblock,
    mexp_search_super,
};
5556
/* Common core of the send/opt_send/invokesuper/invokeblock
 * instructions: build the rb_calling_info for the receiver on the
 * stack, resolve the callee per `method_explorer`, and invoke it.
 * Returns the call result, or Qundef when the callee pushed a new
 * Ruby frame that the interpreter loop must continue executing. */
static inline VALUE
vm_sendish(
    struct rb_execution_context_struct *ec,
    struct rb_control_frame_struct *reg_cfp,
    struct rb_call_data *cd,
    VALUE block_handler,
    enum method_explorer_type method_explorer
) {
    VALUE val = Qundef;
    const struct rb_callinfo *ci = cd->ci;
    const struct rb_callcache *cc;
    int argc = vm_ci_argc(ci);
    VALUE recv = TOPN(argc); /* receiver sits below the arguments */
    struct rb_calling_info calling = {
        .block_handler = block_handler,
        .kw_splat = IS_ARGS_KW_SPLAT(ci) > 0,
        .recv = recv,
        .argc = argc,
        .cd = cd,
    };

    switch (method_explorer) {
      case mexp_search_method:
        calling.cc = cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, CLASS_OF(recv));
        val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
        break;
      case mexp_search_super:
        calling.cc = cc = vm_search_super_method(reg_cfp, cd, recv);
        val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
        break;
      case mexp_search_invokeblock:
        val = vm_invokeblock_i(ec, GET_CFP(), &calling);
        break;
    }
    return val;
}
5593
/* JIT entry point for the send instruction: set up the block argument,
 * dispatch via vm_sendish, and run any pushed frame to completion. */
VALUE
rb_vm_send(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd, ISEQ blockiseq)
{
    stack_check(ec);
    VALUE bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq, false);
    VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
    VM_EXEC(ec, val);
    return val;
}
5603
/* JIT entry point for opt_send_without_block: like rb_vm_send but with
 * no block handler. */
VALUE
rb_vm_opt_send_without_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd)
{
    stack_check(ec);
    VALUE bh = VM_BLOCK_HANDLER_NONE;
    VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
    VM_EXEC(ec, val);
    return val;
}
5613
/* JIT entry point for invokesuper: super dispatch with block setup. */
VALUE
rb_vm_invokesuper(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd, ISEQ blockiseq)
{
    stack_check(ec);
    VALUE bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq, true);
    VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_super);
    VM_EXEC(ec, val);
    return val;
}
5623
/* JIT entry point for invokeblock (`yield`). */
VALUE
rb_vm_invokeblock(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd)
{
    stack_check(ec);
    VALUE bh = VM_BLOCK_HANDLER_NONE;
    VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_invokeblock);
    VM_EXEC(ec, val);
    return val;
}
5633
5634/* object.c */
5635VALUE rb_nil_to_s(VALUE);
5636VALUE rb_true_to_s(VALUE);
5637VALUE rb_false_to_s(VALUE);
5638/* numeric.c */
5639VALUE rb_int_to_s(int argc, VALUE *argv, VALUE x);
5640VALUE rb_fix_to_s(VALUE);
5641/* variable.c */
5642VALUE rb_mod_to_s(VALUE);
5644
/* Fast path for the objtostring instruction (string interpolation):
 * when the receiver's #to_s is the known default C implementation,
 * produce the string without a full method call. Returns Qundef to
 * fall back to a normal dispatch. */
static VALUE
vm_objtostring(const rb_iseq_t *iseq, VALUE recv, CALL_DATA cd)
{
    int type = TYPE(recv);
    if (type == T_STRING) {
        return recv;
    }

    const struct rb_callcache *cc = vm_search_method((VALUE)iseq, cd, recv);

    switch (type) {
      case T_SYMBOL:
        if (check_cfunc(vm_cc_cme(cc), rb_sym_to_s)) {
            // rb_sym_to_s() allocates a mutable string, but since we are only
            // going to use this string for interpolation, it's fine to use the
            // frozen string.
            return rb_sym2str(recv);
        }
        break;
      case T_MODULE:
      case T_CLASS:
        if (check_cfunc(vm_cc_cme(cc), rb_mod_to_s)) {
            // rb_mod_to_s() allocates a mutable string, but since we are only
            // going to use this string for interpolation, it's fine to use the
            // frozen string.
            VALUE val = rb_mod_name(recv);
            if (NIL_P(val)) {
                /* anonymous module/class: fall back to the full #to_s text */
                val = rb_mod_to_s(recv);
            }
            return val;
        }
        break;
      case T_NIL:
        if (check_cfunc(vm_cc_cme(cc), rb_nil_to_s)) {
            return rb_nil_to_s(recv);
        }
        break;
      case T_TRUE:
        if (check_cfunc(vm_cc_cme(cc), rb_true_to_s)) {
            return rb_true_to_s(recv);
        }
        break;
      case T_FALSE:
        if (check_cfunc(vm_cc_cme(cc), rb_false_to_s)) {
            return rb_false_to_s(recv);
        }
        break;
      case T_FIXNUM:
        if (check_cfunc(vm_cc_cme(cc), rb_int_to_s)) {
            return rb_fix_to_s(recv);
        }
        break;
    }
    return Qundef;
}
5700
5701static VALUE
5702vm_opt_str_freeze(VALUE str, int bop, ID id)
5703{
5704 if (BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
5705 return str;
5706 }
5707 else {
5708 return Qundef;
5709 }
5710}
5711
5712/* this macro is mandatory to use OPTIMIZED_CMP. What a design! */
5713#define id_cmp idCmp
5714
/* opt_newarray_send(max): compute [ptr[0..num-1]].max without
 * materializing the array when Array#max is unredefined; otherwise
 * build the array and dispatch #max normally (with refinements). */
static VALUE
vm_opt_newarray_max(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_MAX, ARRAY_REDEFINED_OP_FLAG)) {
        if (num == 0) {
            return Qnil;
        }
        else {
            VALUE result = *ptr;
            rb_snum_t i = num - 1;
            while (i-- > 0) {
                const VALUE v = *++ptr;
                if (OPTIMIZED_CMP(v, result) > 0) {
                    result = v;
                }
            }
            return result;
        }
    }
    else {
        return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idMax, 0, NULL, RB_NO_KEYWORDS);
    }
}
5738
/* Exported wrapper around vm_opt_newarray_max() for JIT-compiled code. */
VALUE
rb_vm_opt_newarray_max(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    return vm_opt_newarray_max(ec, num, ptr);
}
5744
/* opt_newarray_send(min): mirror of vm_opt_newarray_max() with the
 * comparison reversed. */
static VALUE
vm_opt_newarray_min(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_MIN, ARRAY_REDEFINED_OP_FLAG)) {
        if (num == 0) {
            return Qnil;
        }
        else {
            VALUE result = *ptr;
            rb_snum_t i = num - 1;
            while (i-- > 0) {
                const VALUE v = *++ptr;
                if (OPTIMIZED_CMP(v, result) < 0) {
                    result = v;
                }
            }
            return result;
        }
    }
    else {
        return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idMin, 0, NULL, RB_NO_KEYWORDS);
    }
}
5768
/* Exported wrapper around vm_opt_newarray_min() for JIT-compiled code. */
VALUE
rb_vm_opt_newarray_min(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    return vm_opt_newarray_min(ec, num, ptr);
}
5774
/* opt_newarray_send(hash): hash the would-be array's elements directly
 * when Array#hash is unredefined; otherwise build it and call #hash. */
static VALUE
vm_opt_newarray_hash(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    // If Array#hash is _not_ monkeypatched, use the optimized call
    if (BASIC_OP_UNREDEFINED_P(BOP_HASH, ARRAY_REDEFINED_OP_FLAG)) {
        return rb_ary_hash_values(num, ptr);
    }
    else {
        return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idHash, 0, NULL, RB_NO_KEYWORDS);
    }
}
5786
/* Exported wrapper around vm_opt_newarray_hash() for JIT-compiled code. */
VALUE
rb_vm_opt_newarray_hash(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    return vm_opt_newarray_hash(ec, num, ptr);
}
5792
5793#undef id_cmp
5794
5795#define IMEMO_CONST_CACHE_SHAREABLE IMEMO_FL_USER0
5796
/* Register inline cache `ic` as depending on constant name `id`, so
 * the cache can be invalidated when that constant changes. The VM
 * keeps an id -> set-of-ICs table; the per-id set is created lazily.
 * Caller must hold the VM lock (see vm_ic_track_const_chain). */
static void
vm_track_constant_cache(ID id, void *ic)
{
    struct rb_id_table *const_cache = GET_VM()->constant_cache;
    VALUE lookup_result;
    st_table *ics;

    if (rb_id_table_lookup(const_cache, id, &lookup_result)) {
        ics = (st_table *)lookup_result;
    }
    else {
        ics = st_init_numtable();
        rb_id_table_insert(const_cache, id, (VALUE)ics);
    }

    st_insert(ics, (st_data_t) ic, (st_data_t) Qtrue);
}
5814
/* Register `ic` against every name segment of a constant path
 * (e.g. A::B::C tracks A, B and C). idNULL segments (relative-root
 * markers) are skipped. Takes the VM lock around the table updates. */
static void
vm_ic_track_const_chain(rb_control_frame_t *cfp, IC ic, const ID *segments)
{
    RB_VM_LOCK_ENTER();

    for (int i = 0; segments[i]; i++) {
        ID id = segments[i];
        if (id == idNULL) continue;
        vm_track_constant_cache(id, ic);
    }

    RB_VM_LOCK_LEAVE();
}
5828
// For RJIT inlining
/* True when a cached constant value may be used: the value must be
 * usable from this ractor (shareable, or we are on the main ractor)
 * and the cache's cref (if any) must match the current lexical scope. */
static inline bool
vm_inlined_ic_hit_p(VALUE flags, VALUE value, const rb_cref_t *ic_cref, const VALUE *reg_ep)
{
    if ((flags & IMEMO_CONST_CACHE_SHAREABLE) || rb_ractor_main_p()) {
        VM_ASSERT(ractor_incidental_shareable_p(flags & IMEMO_CONST_CACHE_SHAREABLE, value));

        return (ic_cref == NULL || // no need to check CREF
                ic_cref == vm_get_cref(reg_ep));
    }
    return false;
}
5841
/* Check whether an inline constant cache entry is valid for the
 * current frame (see vm_inlined_ic_hit_p for the criteria). */
static bool
vm_ic_hit_p(const struct iseq_inline_constant_cache_entry *ice, const VALUE *reg_ep)
{
    VM_ASSERT(IMEMO_TYPE_P(ice, imemo_constcache));
    return vm_inlined_ic_hit_p(ice->flags, ice->value, ice->ic_cref, reg_ep);
}
5848
// YJIT needs this function to never allocate and never raise
bool
rb_vm_ic_hit_p(IC ic, const VALUE *reg_ep)
{
    /* an IC with no entry has never been filled and cannot hit */
    return ic->entry && vm_ic_hit_p(ic->entry, reg_ep);
}
5855
/* Fill inline cache `ic` with the freshly resolved constant `val`,
 * recording the cref needed to revalidate it and whether the value is
 * ractor-shareable, then notify the JITs so compiled code can be
 * updated. Caches are not populated while const_missing is in flight. */
static void
vm_ic_update(const rb_iseq_t *iseq, IC ic, VALUE val, const VALUE *reg_ep, const VALUE *pc)
{
    if (ruby_vm_const_missing_count > 0) {
        ruby_vm_const_missing_count = 0;
        ic->entry = NULL;
        return;
    }

    struct iseq_inline_constant_cache_entry *ice = (struct iseq_inline_constant_cache_entry *)rb_imemo_new(imemo_constcache, 0, 0, 0, 0);
    RB_OBJ_WRITE(ice, &ice->value, val);
    ice->ic_cref = vm_get_const_key_cref(reg_ep);
    if (rb_ractor_shareable_p(val)) ice->flags |= IMEMO_CONST_CACHE_SHAREABLE;
    RB_OBJ_WRITE(iseq, &ic->entry, ice);

    RUBY_ASSERT(pc >= ISEQ_BODY(iseq)->iseq_encoded);
    unsigned pos = (unsigned)(pc - ISEQ_BODY(iseq)->iseq_encoded);
    rb_yjit_constant_ic_update(iseq, ic, pos);
    rb_rjit_constant_ic_update(iseq, ic, pos);
}
5876
/* Implements the opt_getconstant_path instruction: return the cached
 * constant value on a cache hit, otherwise resolve the constant path,
 * register the cache for invalidation, and refill it. */
VALUE
rb_vm_opt_getconstant_path(rb_execution_context_t *ec, rb_control_frame_t *const reg_cfp, IC ic)
{
    VALUE val;
    const ID *segments = ic->segments;
    struct iseq_inline_constant_cache_entry *ice = ic->entry;
    if (ice && vm_ic_hit_p(ice, GET_EP())) {
        val = ice->value;

        VM_ASSERT(val == vm_get_ev_const_chain(ec, segments));
    } else {
        ruby_vm_constant_cache_misses++;
        val = vm_get_ev_const_chain(ec, segments);
        vm_ic_track_const_chain(GET_CFP(), ic, segments);
        // Undo the PC increment to get the address to this instruction
        // INSN_ATTR(width) == 2
        vm_ic_update(GET_ISEQ(), ic, val, GET_EP(), GET_PC() - 2);
    }
    return val;
}
5897
5898static VALUE
5899vm_once_dispatch(rb_execution_context_t *ec, ISEQ iseq, ISE is)
5900{
5901 rb_thread_t *th = rb_ec_thread_ptr(ec);
5902 rb_thread_t *const RUNNING_THREAD_ONCE_DONE = (rb_thread_t *)(0x1);
5903
5904 again:
5905 if (is->once.running_thread == RUNNING_THREAD_ONCE_DONE) {
5906 return is->once.value;
5907 }
5908 else if (is->once.running_thread == NULL) {
5909 VALUE val;
5910 is->once.running_thread = th;
5911 val = rb_ensure(vm_once_exec, (VALUE)iseq, vm_once_clear, (VALUE)is);
5912 RB_OBJ_WRITE(ec->cfp->iseq, &is->once.value, val);
5913 /* is->once.running_thread is cleared by vm_once_clear() */
5914 is->once.running_thread = RUNNING_THREAD_ONCE_DONE; /* success */
5915 return val;
5916 }
5917 else if (is->once.running_thread == th) {
5918 /* recursive once */
5919 return vm_once_exec((VALUE)iseq);
5920 }
5921 else {
5922 /* waiting for finish */
5923 RUBY_VM_CHECK_INTS(ec);
5925 goto again;
5926 }
5927}
5928
/* Implements the opt_case_dispatch instruction: when the key is a
 * type whose #=== is known and unredefined, jump directly via the
 * precompiled CDHASH (integral Floats are normalized to Integer keys
 * first, matching Hash lookup of literal `when` values). Returns the
 * branch offset, `else_offset` on no match, or 0 to fall back to
 * sequential `when` tests. */
static OFFSET
vm_case_dispatch(CDHASH hash, OFFSET else_offset, VALUE key)
{
    switch (OBJ_BUILTIN_TYPE(key)) {
      case -1: /* special consts: nil, true, false, Fixnum, Symbol, ... */
      case T_FLOAT:
      case T_SYMBOL:
      case T_BIGNUM:
      case T_STRING:
        if (BASIC_OP_UNREDEFINED_P(BOP_EQQ,
                                   SYMBOL_REDEFINED_OP_FLAG |
                                   INTEGER_REDEFINED_OP_FLAG |
                                   FLOAT_REDEFINED_OP_FLAG |
                                   NIL_REDEFINED_OP_FLAG |
                                   TRUE_REDEFINED_OP_FLAG |
                                   FALSE_REDEFINED_OP_FLAG |
                                   STRING_REDEFINED_OP_FLAG)) {
            st_data_t val;
            if (RB_FLOAT_TYPE_P(key)) {
                double kval = RFLOAT_VALUE(key);
                if (!isinf(kval) && modf(kval, &kval) == 0.0) {
                    key = FIXABLE(kval) ? LONG2FIX((long)kval) : rb_dbl2big(kval);
                }
            }
            if (rb_hash_stlike_lookup(hash, key, &val)) {
                return FIX2LONG((VALUE)val);
            }
            else {
                return else_offset;
            }
        }
    }
    return 0;
}
5963
5964NORETURN(static void
5965 vm_stack_consistency_error(const rb_execution_context_t *ec,
5966 const rb_control_frame_t *,
5967 const VALUE *));
5968static void
5969vm_stack_consistency_error(const rb_execution_context_t *ec,
5970 const rb_control_frame_t *cfp,
5971 const VALUE *bp)
5972{
5973 const ptrdiff_t nsp = VM_SP_CNT(ec, cfp->sp);
5974 const ptrdiff_t nbp = VM_SP_CNT(ec, bp);
5975 static const char stack_consistency_error[] =
5976 "Stack consistency error (sp: %"PRIdPTRDIFF", bp: %"PRIdPTRDIFF")";
5977#if defined RUBY_DEVEL
5978 VALUE mesg = rb_sprintf(stack_consistency_error, nsp, nbp);
5979 rb_str_cat_cstr(mesg, "\n");
5980 rb_str_append(mesg, rb_iseq_disasm(cfp->iseq));
5982#else
5983 rb_bug(stack_consistency_error, nsp, nbp);
5984#endif
5985}
5986
/* Fast path for opt_plus: handle Fixnum/Float/flonum/String/Array +
 * inline when the corresponding + method is unredefined; Qundef means
 * fall back to a normal method call. */
static VALUE
vm_opt_plus(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_PLUS, INTEGER_REDEFINED_OP_FLAG)) {
        return rb_fix_plus_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) + RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        /* other immediates have no optimized + */
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) + RFLOAT_VALUE(obj));
    }
    else if (RBASIC_CLASS(recv) == rb_cString &&
             RBASIC_CLASS(obj) == rb_cString &&
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, STRING_REDEFINED_OP_FLAG)) {
        return rb_str_opt_plus(recv, obj);
    }
    else if (RBASIC_CLASS(recv) == rb_cArray &&
             RBASIC_CLASS(obj) == rb_cArray &&
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, ARRAY_REDEFINED_OP_FLAG)) {
        return rb_ary_plus(recv, obj);
    }
    else {
        return Qundef;
    }
}
6020
/* Fast path for opt_minus (Fixnum/flonum/Float); Qundef falls back. */
static VALUE
vm_opt_minus(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_MINUS, INTEGER_REDEFINED_OP_FLAG)) {
        return rb_fix_minus_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) - RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) - RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}
6044
/* Fast path for opt_mult (Fixnum/flonum/Float); Qundef falls back. */
static VALUE
vm_opt_mult(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_MULT, INTEGER_REDEFINED_OP_FLAG)) {
        return rb_fix_mul_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) * RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) * RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}
6068
/* Fast path for opt_div; integer division by zero defers to the slow
 * path so the proper ZeroDivisionError is raised. Qundef falls back. */
static VALUE
vm_opt_div(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_DIV, INTEGER_REDEFINED_OP_FLAG)) {
        return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_div_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
        return rb_flo_div_flo(recv, obj);
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
        return rb_flo_div_flo(recv, obj);
    }
    else {
        return Qundef;
    }
}
6092
/* Fast path for opt_mod; integer modulo by zero defers to the slow
 * path for the proper exception. Qundef falls back. */
static VALUE
vm_opt_mod(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_MOD, INTEGER_REDEFINED_OP_FLAG)) {
        return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_mod_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(ruby_float_mod(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj)));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(ruby_float_mod(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj)));
    }
    else {
        return Qundef;
    }
}
6116
/* Fast path for opt_neq: when #!= is the default (negated #==),
 * evaluate the equality fast path and invert it. Qundef falls back. */
static VALUE
vm_opt_neq(const rb_iseq_t *iseq, CALL_DATA cd, CALL_DATA cd_eq, VALUE recv, VALUE obj)
{
    if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not_equal)) {
        VALUE val = opt_equality(iseq, recv, obj, cd_eq);

        if (!UNDEF_P(val)) {
            return RBOOL(!RTEST(val));
        }
    }

    return Qundef;
}
6130
/* Fast path for opt_lt (Fixnum compare via tagged-value comparison,
 * flonum/Float compare with NaN check). Qundef falls back. */
static VALUE
vm_opt_lt(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_LT, INTEGER_REDEFINED_OP_FLAG)) {
        return RBOOL((SIGNED_VALUE)recv < (SIGNED_VALUE)obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) < RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
        CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
        return RBOOL(RFLOAT_VALUE(recv) < RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}
6155
/* Fast path for opt_le; mirrors vm_opt_lt with <=. Qundef falls back. */
static VALUE
vm_opt_le(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_LE, INTEGER_REDEFINED_OP_FLAG)) {
        return RBOOL((SIGNED_VALUE)recv <= (SIGNED_VALUE)obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) <= RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
        CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
        return RBOOL(RFLOAT_VALUE(recv) <= RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}
6180
/* Fast path for opt_gt; mirrors vm_opt_lt with >. Qundef falls back. */
static VALUE
vm_opt_gt(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_GT, INTEGER_REDEFINED_OP_FLAG)) {
        return RBOOL((SIGNED_VALUE)recv > (SIGNED_VALUE)obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) > RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
        CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
        return RBOOL(RFLOAT_VALUE(recv) > RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}
6205
/* Fast path for opt_ge; mirrors vm_opt_lt with >=. Qundef falls back. */
static VALUE
vm_opt_ge(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_GE, INTEGER_REDEFINED_OP_FLAG)) {
        return RBOOL((SIGNED_VALUE)recv >= (SIGNED_VALUE)obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) >= RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
        CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
        return RBOOL(RFLOAT_VALUE(recv) >= RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}
6230
6231
/* Fast path for opt_ltlt: String#<< (byte-append fast case for string
 * arguments) and Array#<<. Qundef falls back. */
static VALUE
vm_opt_ltlt(VALUE recv, VALUE obj)
{
    if (SPECIAL_CONST_P(recv)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cString &&
             BASIC_OP_UNREDEFINED_P(BOP_LTLT, STRING_REDEFINED_OP_FLAG)) {
        if (LIKELY(RB_TYPE_P(obj, T_STRING))) {
            return rb_str_buf_append(recv, obj);
        }
        else {
            /* non-string argument: rb_str_concat handles codepoints etc. */
            return rb_str_concat(recv, obj);
        }
    }
    else if (RBASIC_CLASS(recv) == rb_cArray &&
             BASIC_OP_UNREDEFINED_P(BOP_LTLT, ARRAY_REDEFINED_OP_FLAG)) {
        return rb_ary_push(recv, obj);
    }
    else {
        return Qundef;
    }
}
6255
/* Fast path for opt_and on two Fixnums; Qundef falls back. */
static VALUE
vm_opt_and(VALUE recv, VALUE obj)
{
    // If recv and obj are both fixnums, then the bottom tag bit
    // will be 1 on both. 1 & 1 == 1, so the result value will also
    // be a fixnum. If either side is *not* a fixnum, then the tag bit
    // will be 0, and we return Qundef.
    VALUE ret = ((SIGNED_VALUE) recv) & ((SIGNED_VALUE) obj);

    if (FIXNUM_P(ret) &&
        BASIC_OP_UNREDEFINED_P(BOP_AND, INTEGER_REDEFINED_OP_FLAG)) {
        return ret;
    }
    else {
        return Qundef;
    }
}
6273
6274static VALUE
6275vm_opt_or(VALUE recv, VALUE obj)
6276{
6277 if (FIXNUM_2_P(recv, obj) &&
6278 BASIC_OP_UNREDEFINED_P(BOP_OR, INTEGER_REDEFINED_OP_FLAG)) {
6279 return recv | obj;
6280 }
6281 else {
6282 return Qundef;
6283 }
6284}
6285
6286static VALUE
6287vm_opt_aref(VALUE recv, VALUE obj)
6288{
6289 if (SPECIAL_CONST_P(recv)) {
6290 if (FIXNUM_2_P(recv, obj) &&
6291 BASIC_OP_UNREDEFINED_P(BOP_AREF, INTEGER_REDEFINED_OP_FLAG)) {
6292 return rb_fix_aref(recv, obj);
6293 }
6294 return Qundef;
6295 }
6296 else if (RBASIC_CLASS(recv) == rb_cArray &&
6297 BASIC_OP_UNREDEFINED_P(BOP_AREF, ARRAY_REDEFINED_OP_FLAG)) {
6298 if (FIXNUM_P(obj)) {
6299 return rb_ary_entry_internal(recv, FIX2LONG(obj));
6300 }
6301 else {
6302 return rb_ary_aref1(recv, obj);
6303 }
6304 }
6305 else if (RBASIC_CLASS(recv) == rb_cHash &&
6306 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG)) {
6307 return rb_hash_aref(recv, obj);
6308 }
6309 else {
6310 return Qundef;
6311 }
6312}
6313
6314static VALUE
6315vm_opt_aset(VALUE recv, VALUE obj, VALUE set)
6316{
6317 if (SPECIAL_CONST_P(recv)) {
6318 return Qundef;
6319 }
6320 else if (RBASIC_CLASS(recv) == rb_cArray &&
6321 BASIC_OP_UNREDEFINED_P(BOP_ASET, ARRAY_REDEFINED_OP_FLAG) &&
6322 FIXNUM_P(obj)) {
6323 rb_ary_store(recv, FIX2LONG(obj), set);
6324 return set;
6325 }
6326 else if (RBASIC_CLASS(recv) == rb_cHash &&
6327 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG)) {
6328 rb_hash_aset(recv, obj, set);
6329 return set;
6330 }
6331 else {
6332 return Qundef;
6333 }
6334}
6335
6336static VALUE
6337vm_opt_aref_with(VALUE recv, VALUE key)
6338{
6339 if (!SPECIAL_CONST_P(recv) && RBASIC_CLASS(recv) == rb_cHash &&
6340 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG) &&
6341 rb_hash_compare_by_id_p(recv) == Qfalse &&
6342 !FL_TEST(recv, RHASH_PROC_DEFAULT)) {
6343 return rb_hash_aref(recv, key);
6344 }
6345 else {
6346 return Qundef;
6347 }
6348}
6349
/* Exported (non-static) entry to the vm_opt_aref_with() fast path;
 * returns Qundef when the fast path does not apply. */
VALUE
rb_vm_opt_aref_with(VALUE recv, VALUE key)
{
    return vm_opt_aref_with(recv, key);
}
6355
6356static VALUE
6357vm_opt_aset_with(VALUE recv, VALUE key, VALUE val)
6358{
6359 if (!SPECIAL_CONST_P(recv) && RBASIC_CLASS(recv) == rb_cHash &&
6360 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG) &&
6361 rb_hash_compare_by_id_p(recv) == Qfalse) {
6362 return rb_hash_aset(recv, key, val);
6363 }
6364 else {
6365 return Qundef;
6366 }
6367}
6368
6369static VALUE
6370vm_opt_length(VALUE recv, int bop)
6371{
6372 if (SPECIAL_CONST_P(recv)) {
6373 return Qundef;
6374 }
6375 else if (RBASIC_CLASS(recv) == rb_cString &&
6376 BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
6377 if (bop == BOP_EMPTY_P) {
6378 return LONG2NUM(RSTRING_LEN(recv));
6379 }
6380 else {
6381 return rb_str_length(recv);
6382 }
6383 }
6384 else if (RBASIC_CLASS(recv) == rb_cArray &&
6385 BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
6386 return LONG2NUM(RARRAY_LEN(recv));
6387 }
6388 else if (RBASIC_CLASS(recv) == rb_cHash &&
6389 BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
6390 return INT2FIX(RHASH_SIZE(recv));
6391 }
6392 else {
6393 return Qundef;
6394 }
6395}
6396
6397static VALUE
6398vm_opt_empty_p(VALUE recv)
6399{
6400 switch (vm_opt_length(recv, BOP_EMPTY_P)) {
6401 case Qundef: return Qundef;
6402 case INT2FIX(0): return Qtrue;
6403 default: return Qfalse;
6404 }
6405}
6406
6407VALUE rb_false(VALUE obj);
6408
6409static VALUE
6410vm_opt_nil_p(const rb_iseq_t *iseq, CALL_DATA cd, VALUE recv)
6411{
6412 if (NIL_P(recv) &&
6413 BASIC_OP_UNREDEFINED_P(BOP_NIL_P, NIL_REDEFINED_OP_FLAG)) {
6414 return Qtrue;
6415 }
6416 else if (vm_method_cfunc_is(iseq, cd, recv, rb_false)) {
6417 return Qfalse;
6418 }
6419 else {
6420 return Qundef;
6421 }
6422}
6423
6424static VALUE
6425fix_succ(VALUE x)
6426{
6427 switch (x) {
6428 case ~0UL:
6429 /* 0xFFFF_FFFF == INT2FIX(-1)
6430 * `-1.succ` is of course 0. */
6431 return INT2FIX(0);
6432 case RSHIFT(~0UL, 1):
6433 /* 0x7FFF_FFFF == LONG2FIX(0x3FFF_FFFF)
6434 * 0x3FFF_FFFF + 1 == 0x4000_0000, which is a Bignum. */
6435 return rb_uint2big(1UL << (SIZEOF_LONG * CHAR_BIT - 2));
6436 default:
6437 /* LONG2FIX(FIX2LONG(x)+FIX2LONG(y))
6438 * == ((lx*2+1)/2 + (ly*2+1)/2)*2+1
6439 * == lx*2 + ly*2 + 1
6440 * == (lx*2+1) + (ly*2+1) - 1
6441 * == x + y - 1
6442 *
6443 * Here, if we put y := INT2FIX(1):
6444 *
6445 * == x + INT2FIX(1) - 1
6446 * == x + 2 .
6447 */
6448 return x + 2;
6449 }
6450}
6451
6452static VALUE
6453vm_opt_succ(VALUE recv)
6454{
6455 if (FIXNUM_P(recv) &&
6456 BASIC_OP_UNREDEFINED_P(BOP_SUCC, INTEGER_REDEFINED_OP_FLAG)) {
6457 return fix_succ(recv);
6458 }
6459 else if (SPECIAL_CONST_P(recv)) {
6460 return Qundef;
6461 }
6462 else if (RBASIC_CLASS(recv) == rb_cString &&
6463 BASIC_OP_UNREDEFINED_P(BOP_SUCC, STRING_REDEFINED_OP_FLAG)) {
6464 return rb_str_succ(recv);
6465 }
6466 else {
6467 return Qundef;
6468 }
6469}
6470
6471static VALUE
6472vm_opt_not(const rb_iseq_t *iseq, CALL_DATA cd, VALUE recv)
6473{
6474 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not)) {
6475 return RBOOL(!RTEST(recv));
6476 }
6477 else {
6478 return Qundef;
6479 }
6480}
6481
6482static VALUE
6483vm_opt_regexpmatch2(VALUE recv, VALUE obj)
6484{
6485 if (SPECIAL_CONST_P(recv)) {
6486 return Qundef;
6487 }
6488 else if (RBASIC_CLASS(recv) == rb_cString &&
6489 CLASS_OF(obj) == rb_cRegexp &&
6490 BASIC_OP_UNREDEFINED_P(BOP_MATCH, STRING_REDEFINED_OP_FLAG)) {
6491 return rb_reg_match(obj, recv);
6492 }
6493 else if (RBASIC_CLASS(recv) == rb_cRegexp &&
6494 BASIC_OP_UNREDEFINED_P(BOP_MATCH, REGEXP_REDEFINED_OP_FLAG)) {
6495 return rb_reg_match(recv, obj);
6496 }
6497 else {
6498 return Qundef;
6499 }
6500}
6501
6502rb_event_flag_t rb_iseq_event_flags(const rb_iseq_t *iseq, size_t pos);
6503
6504NOINLINE(static void vm_trace(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp));
6505
/* Fire one trace event (`target_event`, already known to overlap
 * `pc_events`) for the instruction at `pc`: first the global hooks from
 * rb_ec_ractor_hooks(), then the iseq-/bmethod-local hooks reachable
 * through `local_hooks_ptr`.  `val` is the event argument (e.g. TOPN(0)
 * for return-style events, Qundef when there is none). */
static inline void
vm_trace_hook(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, const VALUE *pc,
              rb_event_flag_t pc_events, rb_event_flag_t target_event,
              rb_hook_list_t *global_hooks, rb_hook_list_t *const *local_hooks_ptr, VALUE val)
{
    rb_event_flag_t event = pc_events & target_event;
    VALUE self = GET_SELF();

    /* callers must request exactly one event bit per invocation */
    VM_ASSERT(rb_popcount64((uint64_t)event) == 1);

    if (event & global_hooks->events) {
        /* increment PC because source line is calculated with PC-1 */
        reg_cfp->pc++;
        vm_dtrace(event, ec);
        rb_exec_event_hook_orig(ec, global_hooks, event, self, 0, 0, 0 , val, 0);
        reg_cfp->pc--;
    }

    // Load here since global hook above can add and free local hooks
    rb_hook_list_t *local_hooks = *local_hooks_ptr;
    if (local_hooks != NULL) {
        if (event & local_hooks->events) {
            /* increment PC because source line is calculated with PC-1 */
            reg_cfp->pc++;
            rb_exec_event_hook_orig(ec, local_hooks, event, self, 0, 0, 0 , val, 0);
            reg_cfp->pc--;
        }
    }
}
6535
6536// Return true if given cc has cfunc which is NOT handled by opt_send_without_block.
6537bool
6538rb_vm_opt_cfunc_p(CALL_CACHE cc, int insn)
6539{
6540 switch (insn) {
6541 case BIN(opt_eq):
6542 return check_cfunc(vm_cc_cme(cc), rb_obj_equal);
6543 case BIN(opt_nil_p):
6544 return check_cfunc(vm_cc_cme(cc), rb_false);
6545 case BIN(opt_not):
6546 return check_cfunc(vm_cc_cme(cc), rb_obj_not);
6547 default:
6548 return false;
6549 }
6550}
6551
6552#define VM_TRACE_HOOK(target_event, val) do { \
6553 if ((pc_events & (target_event)) & enabled_flags) { \
6554 vm_trace_hook(ec, reg_cfp, pc, pc_events, (target_event), global_hooks, local_hooks_ptr, (val)); \
6555 } \
6556} while (0)
6557
/* Fetch the exception object for a rescue frame, which is stored in the
 * last local-variable slot of the rescue iseq's environment; used as the
 * event argument for RUBY_EVENT_RESCUE in vm_trace(). */
static VALUE
rescue_errinfo(rb_execution_context_t *ec, rb_control_frame_t *cfp)
{
    VM_ASSERT(VM_FRAME_RUBYFRAME_P(cfp));
    VM_ASSERT(ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_RESCUE);
    return cfp->ep[VM_ENV_INDEX_LAST_LVAR];
}
6565
6566static void
6567vm_trace(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp)
6568{
6569 const VALUE *pc = reg_cfp->pc;
6570 rb_event_flag_t enabled_flags = ruby_vm_event_flags & ISEQ_TRACE_EVENTS;
6571 rb_event_flag_t global_events = enabled_flags;
6572
6573 if (enabled_flags == 0 && ruby_vm_event_local_num == 0) {
6574 return;
6575 }
6576 else {
6577 const rb_iseq_t *iseq = reg_cfp->iseq;
6578 VALUE iseq_val = (VALUE)iseq;
6579 size_t pos = pc - ISEQ_BODY(iseq)->iseq_encoded;
6580 rb_event_flag_t pc_events = rb_iseq_event_flags(iseq, pos);
6581 rb_hook_list_t *local_hooks = iseq->aux.exec.local_hooks;
6582 rb_hook_list_t *const *local_hooks_ptr = &iseq->aux.exec.local_hooks;
6583 rb_event_flag_t iseq_local_events = local_hooks != NULL ? local_hooks->events : 0;
6584 rb_hook_list_t *bmethod_local_hooks = NULL;
6585 rb_hook_list_t **bmethod_local_hooks_ptr = NULL;
6586 rb_event_flag_t bmethod_local_events = 0;
6587 const bool bmethod_frame = VM_FRAME_BMETHOD_P(reg_cfp);
6588 enabled_flags |= iseq_local_events;
6589
6590 VM_ASSERT((iseq_local_events & ~ISEQ_TRACE_EVENTS) == 0);
6591
6592 if (bmethod_frame) {
6593 const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(reg_cfp);
6594 VM_ASSERT(me->def->type == VM_METHOD_TYPE_BMETHOD);
6595 bmethod_local_hooks = me->def->body.bmethod.hooks;
6596 bmethod_local_hooks_ptr = &me->def->body.bmethod.hooks;
6597 if (bmethod_local_hooks) {
6598 bmethod_local_events = bmethod_local_hooks->events;
6599 }
6600 }
6601
6602
6603 if ((pc_events & enabled_flags) == 0 && !bmethod_frame) {
6604#if 0
6605 /* disable trace */
6606 /* TODO: incomplete */
6607 rb_iseq_trace_set(iseq, vm_event_flags & ISEQ_TRACE_EVENTS);
6608#else
6609 /* do not disable trace because of performance problem
6610 * (re-enable overhead)
6611 */
6612#endif
6613 return;
6614 }
6615 else if (ec->trace_arg != NULL) {
6616 /* already tracing */
6617 return;
6618 }
6619 else {
6620 rb_hook_list_t *global_hooks = rb_ec_ractor_hooks(ec);
6621 /* Note, not considering iseq local events here since the same
6622 * iseq could be used in multiple bmethods. */
6623 rb_event_flag_t bmethod_events = global_events | bmethod_local_events;
6624
6625 if (0) {
6626 ruby_debug_printf("vm_trace>>%4d (%4x) - %s:%d %s\n",
6627 (int)pos,
6628 (int)pc_events,
6629 RSTRING_PTR(rb_iseq_path(iseq)),
6630 (int)rb_iseq_line_no(iseq, pos),
6631 RSTRING_PTR(rb_iseq_label(iseq)));
6632 }
6633 VM_ASSERT(reg_cfp->pc == pc);
6634 VM_ASSERT(pc_events != 0);
6635
6636 /* check traces */
6637 if ((pc_events & RUBY_EVENT_B_CALL) && bmethod_frame && (bmethod_events & RUBY_EVENT_CALL)) {
6638 /* b_call instruction running as a method. Fire call event. */
6639 vm_trace_hook(ec, reg_cfp, pc, RUBY_EVENT_CALL, RUBY_EVENT_CALL, global_hooks, bmethod_local_hooks_ptr, Qundef);
6640 }
6642 VM_TRACE_HOOK(RUBY_EVENT_RESCUE, rescue_errinfo(ec, reg_cfp));
6643 VM_TRACE_HOOK(RUBY_EVENT_LINE, Qundef);
6644 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_LINE, Qundef);
6645 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_BRANCH, Qundef);
6646 VM_TRACE_HOOK(RUBY_EVENT_END | RUBY_EVENT_RETURN | RUBY_EVENT_B_RETURN, TOPN(0));
6647 if ((pc_events & RUBY_EVENT_B_RETURN) && bmethod_frame && (bmethod_events & RUBY_EVENT_RETURN)) {
6648 /* b_return instruction running as a method. Fire return event. */
6649 vm_trace_hook(ec, reg_cfp, pc, RUBY_EVENT_RETURN, RUBY_EVENT_RETURN, global_hooks, bmethod_local_hooks_ptr, TOPN(0));
6650 }
6651
6652 // Pin the iseq since `local_hooks_ptr` points inside the iseq's slot on the GC heap.
6653 // We need the pointer to stay valid in case compaction happens in a trace hook.
6654 //
6655 // Similar treatment is unnecessary for `bmethod_local_hooks_ptr` since
6656 // storage for `rb_method_definition_t` is not on the GC heap.
6657 RB_GC_GUARD(iseq_val);
6658 }
6659 }
6660}
6661#undef VM_TRACE_HOOK
6662
6663#if VM_CHECK_MODE > 0
6664NORETURN( NOINLINE( COLDFUNC
6665void rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)));
6666
/* Initialize the random stack canary used by SETUP_CANARY/CHECK_CANARY
 * (compiled only when VM_CHECK_MODE > 0). */
void
Init_vm_stack_canary(void)
{
    /* This has to be called _after_ our PRNG is properly set up. */
    int n = ruby_fill_random_bytes(&vm_stack_canary, sizeof vm_stack_canary, false);
    /* force the tag bit so the canary reads as a Fixnum VALUE */
    vm_stack_canary |= 0x01; // valid VALUE (Fixnum)

    vm_stack_canary_was_born = true;
    VM_ASSERT(n == 0); /* presumably 0 indicates success — verify against ruby_fill_random_bytes */
}
6677
/* Abort via rb_bug() because the stack canary was found overwritten
 * after executing instruction `i`; `c` is the value found in the
 * clobbered slot (inspected for the bug report). */
void
rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)
{
    /* Because a method has already been called, why not call
     * another one. */
    const char *insn = rb_insns_name(i);
    VALUE inspection = rb_inspect(c);
    const char *str = StringValueCStr(inspection);

    rb_bug("dead canary found at %s: %s", insn, str);
}
6689
6690#else
6691void Init_vm_stack_canary(void) { /* nothing to do */ }
6692#endif
6693
6694
6695/* a part of the following code is generated by this ruby script:
6696
669716.times{|i|
6698 typedef_args = (0...i).map{|j| "VALUE v#{j+1}"}.join(", ")
6699 typedef_args.prepend(", ") if i != 0
6700 call_args = (0...i).map{|j| "argv[#{j}]"}.join(", ")
6701 call_args.prepend(", ") if i != 0
6702 puts %Q{
6703static VALUE
6704builtin_invoker#{i}(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
6705{
6706 typedef VALUE (*rb_invoke_funcptr#{i}_t)(rb_execution_context_t *ec, VALUE self#{typedef_args});
6707 return (*(rb_invoke_funcptr#{i}_t)funcptr)(ec, self#{call_args});
6708}}
6709}
6710
6711puts
6712puts "static VALUE (* const cfunc_invokers[])(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr) = {"
671316.times{|i|
6714 puts " builtin_invoker#{i},"
6715}
6716puts "};"
6717*/
6718
/* The sixteen builtin_invoker{0..15} trampolines below are generated by
 * the Ruby script in the comment above.  Each one casts `funcptr` to the
 * N-ary signature and forwards argv[0..N-1]; `argv` is unused for the
 * 0-ary case.  Do not edit by hand — regenerate with the script. */
static VALUE
builtin_invoker0(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr0_t)(rb_execution_context_t *ec, VALUE self);
    return (*(rb_invoke_funcptr0_t)funcptr)(ec, self);
}

static VALUE
builtin_invoker1(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr1_t)(rb_execution_context_t *ec, VALUE self, VALUE v1);
    return (*(rb_invoke_funcptr1_t)funcptr)(ec, self, argv[0]);
}

static VALUE
builtin_invoker2(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr2_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2);
    return (*(rb_invoke_funcptr2_t)funcptr)(ec, self, argv[0], argv[1]);
}

static VALUE
builtin_invoker3(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr3_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3);
    return (*(rb_invoke_funcptr3_t)funcptr)(ec, self, argv[0], argv[1], argv[2]);
}

static VALUE
builtin_invoker4(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr4_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4);
    return (*(rb_invoke_funcptr4_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3]);
}

static VALUE
builtin_invoker5(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr5_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5);
    return (*(rb_invoke_funcptr5_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4]);
}

static VALUE
builtin_invoker6(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr6_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6);
    return (*(rb_invoke_funcptr6_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
}

static VALUE
builtin_invoker7(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr7_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7);
    return (*(rb_invoke_funcptr7_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
}

static VALUE
builtin_invoker8(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr8_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8);
    return (*(rb_invoke_funcptr8_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
}

static VALUE
builtin_invoker9(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr9_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9);
    return (*(rb_invoke_funcptr9_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
}

static VALUE
builtin_invoker10(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr10_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10);
    return (*(rb_invoke_funcptr10_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
}

static VALUE
builtin_invoker11(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr11_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11);
    return (*(rb_invoke_funcptr11_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
}

static VALUE
builtin_invoker12(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr12_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12);
    return (*(rb_invoke_funcptr12_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
}

static VALUE
builtin_invoker13(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr13_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13);
    return (*(rb_invoke_funcptr13_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
}

static VALUE
builtin_invoker14(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr14_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14);
    return (*(rb_invoke_funcptr14_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
}

static VALUE
builtin_invoker15(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr15_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14, VALUE v15);
    return (*(rb_invoke_funcptr15_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
}
6830
6831typedef VALUE (*builtin_invoker)(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr);
6832
/* Return the invoker trampoline matching a builtin function's arity.
 * NOTE(review): `argc` indexes the table unchecked — assumes
 * 0 <= argc <= 15 (the generated maximum); confirm callers enforce it. */
static builtin_invoker
lookup_builtin_invoker(int argc)
{
    static const builtin_invoker invokers[] = {
        builtin_invoker0,
        builtin_invoker1,
        builtin_invoker2,
        builtin_invoker3,
        builtin_invoker4,
        builtin_invoker5,
        builtin_invoker6,
        builtin_invoker7,
        builtin_invoker8,
        builtin_invoker9,
        builtin_invoker10,
        builtin_invoker11,
        builtin_invoker12,
        builtin_invoker13,
        builtin_invoker14,
        builtin_invoker15,
    };

    return invokers[argc];
}
6857
/* Call builtin function `bf` with `argv` on behalf of the current frame,
 * using the arity-matched trampoline.  When the iseq is flagged leaf
 * (`Primitive.attr! :leaf`), the canary machinery (SETUP_CANARY /
 * CHECK_CANARY) validates that leaf assumption around the call. */
static inline VALUE
invoke_bf(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, const struct rb_builtin_function* bf, const VALUE *argv)
{
    const bool canary_p = ISEQ_BODY(reg_cfp->iseq)->builtin_attrs & BUILTIN_ATTR_LEAF; // Verify an assumption of `Primitive.attr! :leaf`
    SETUP_CANARY(canary_p);
    VALUE ret = (*lookup_builtin_invoker(bf->argc))(ec, reg_cfp->self, argv, (rb_insn_func_t)bf->func_ptr);
    CHECK_CANARY(canary_p, BIN(invokebuiltin));
    return ret;
}
6867
/* Entry point for the invokebuiltin instruction; simply forwards to
 * invoke_bf(). */
static VALUE
vm_invoke_builtin(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_builtin_function* bf, const VALUE *argv)
{
    return invoke_bf(ec, cfp, bf, argv);
}
6873
/* Invoke builtin `bf`, passing the current frame's local variables
 * (beginning at local index `start_index`) directly as the argument
 * vector — no copying onto the VM stack. */
static VALUE
vm_invoke_builtin_delegate(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_builtin_function *bf, unsigned int start_index)
{
    if (0) { // debug print
        fputs("vm_invoke_builtin_delegate: passing -> ", stderr);
        for (int i=0; i<bf->argc; i++) {
            ruby_debug_printf(":%s ", rb_id2name(ISEQ_BODY(cfp->iseq)->local_table[i+start_index]));
        }
        ruby_debug_printf("\n" "%s %s(%d):%p\n", RUBY_FUNCTION_NAME_STRING, bf->name, bf->argc, bf->func_ptr);
    }

    if (bf->argc == 0) {
        return invoke_bf(ec, cfp, bf, NULL);
    }
    else {
        /* Locals live below the EP: the first local sits at
         * ep - local_table_size - VM_ENV_DATA_SIZE + 1; `start_index`
         * skips leading locals that are not delegated arguments. */
        const VALUE *argv = cfp->ep - ISEQ_BODY(cfp->iseq)->local_table_size - VM_ENV_DATA_SIZE + 1 + start_index;
        return invoke_bf(ec, cfp, bf, argv);
    }
}
6893
6894// for __builtin_inline!()
6895
6896VALUE
6897rb_vm_lvar_exposed(rb_execution_context_t *ec, int index)
6898{
6899 const rb_control_frame_t *cfp = ec->cfp;
6900 return cfp->ep[index];
6901}
#define RUBY_ASSERT(expr)
Asserts that the given expression is truthy if and only if RUBY_DEBUG is truthy.
Definition assert.h:177
#define RUBY_EVENT_END
Encountered an end of a class clause.
Definition event.h:40
#define RUBY_EVENT_C_CALL
A method, written in C, is called.
Definition event.h:43
#define RUBY_EVENT_B_RETURN
Encountered a next statement.
Definition event.h:56
#define RUBY_EVENT_CLASS
Encountered a new class.
Definition event.h:39
#define RUBY_EVENT_LINE
Encountered a new line.
Definition event.h:38
#define RUBY_EVENT_RETURN
Encountered a return statement.
Definition event.h:42
#define RUBY_EVENT_C_RETURN
Return from a method, written in C.
Definition event.h:44
#define RUBY_EVENT_B_CALL
Encountered an yield statement.
Definition event.h:55
uint32_t rb_event_flag_t
Represents event(s).
Definition event.h:108
#define RUBY_EVENT_CALL
A method, written in Ruby, is called.
Definition event.h:41
#define RUBY_EVENT_RESCUE
Encountered a rescue statement.
Definition event.h:61
VALUE rb_singleton_class(VALUE obj)
Finds or creates the singleton class of the passed object.
Definition class.c:2283
VALUE rb_module_new(void)
Creates a new, anonymous module.
Definition class.c:1061
VALUE rb_class_inherited(VALUE super, VALUE klass)
Calls Class::inherited.
Definition class.c:961
VALUE rb_define_class_id(ID id, VALUE super)
This is a very badly designed API that creates an anonymous class.
Definition class.c:940
#define TYPE(_)
Old name of rb_type.
Definition value_type.h:107
#define FL_SINGLETON
Old name of RUBY_FL_SINGLETON.
Definition fl_type.h:58
#define FL_EXIVAR
Old name of RUBY_FL_EXIVAR.
Definition fl_type.h:66
#define REALLOC_N
Old name of RB_REALLOC_N.
Definition memory.h:397
#define ALLOC
Old name of RB_ALLOC.
Definition memory.h:394
#define RFLOAT_VALUE
Old name of rb_float_value.
Definition double.h:28
#define T_STRING
Old name of RUBY_T_STRING.
Definition value_type.h:78
#define Qundef
Old name of RUBY_Qundef.
#define INT2FIX
Old name of RB_INT2FIX.
Definition long.h:48
#define T_NIL
Old name of RUBY_T_NIL.
Definition value_type.h:72
#define T_FLOAT
Old name of RUBY_T_FLOAT.
Definition value_type.h:64
#define T_IMEMO
Old name of RUBY_T_IMEMO.
Definition value_type.h:67
#define ID2SYM
Old name of RB_ID2SYM.
Definition symbol.h:44
#define T_BIGNUM
Old name of RUBY_T_BIGNUM.
Definition value_type.h:57
#define SPECIAL_CONST_P
Old name of RB_SPECIAL_CONST_P.
#define T_STRUCT
Old name of RUBY_T_STRUCT.
Definition value_type.h:79
#define T_FIXNUM
Old name of RUBY_T_FIXNUM.
Definition value_type.h:63
#define SYM2ID
Old name of RB_SYM2ID.
Definition symbol.h:45
#define CLASS_OF
Old name of rb_class_of.
Definition globals.h:203
#define rb_ary_new4
Old name of rb_ary_new_from_values.
Definition array.h:653
#define FIXABLE
Old name of RB_FIXABLE.
Definition fixnum.h:25
#define LONG2FIX
Old name of RB_INT2FIX.
Definition long.h:49
#define FIX2INT
Old name of RB_FIX2INT.
Definition int.h:41
#define T_MODULE
Old name of RUBY_T_MODULE.
Definition value_type.h:70
#define STATIC_SYM_P
Old name of RB_STATIC_SYM_P.
#define ASSUME
Old name of RBIMPL_ASSUME.
Definition assume.h:27
#define FIX2ULONG
Old name of RB_FIX2ULONG.
Definition long.h:47
#define T_TRUE
Old name of RUBY_T_TRUE.
Definition value_type.h:81
#define T_ICLASS
Old name of RUBY_T_ICLASS.
Definition value_type.h:66
#define T_HASH
Old name of RUBY_T_HASH.
Definition value_type.h:65
#define ALLOC_N
Old name of RB_ALLOC_N.
Definition memory.h:393
#define FL_TEST_RAW
Old name of RB_FL_TEST_RAW.
Definition fl_type.h:132
#define rb_ary_new3
Old name of rb_ary_new_from_args.
Definition array.h:652
#define LONG2NUM
Old name of RB_LONG2NUM.
Definition long.h:50
#define rb_exc_new3
Old name of rb_exc_new_str.
Definition error.h:38
#define T_FALSE
Old name of RUBY_T_FALSE.
Definition value_type.h:61
#define Qtrue
Old name of RUBY_Qtrue.
#define Qnil
Old name of RUBY_Qnil.
#define Qfalse
Old name of RUBY_Qfalse.
#define FIX2LONG
Old name of RB_FIX2LONG.
Definition long.h:46
#define T_ARRAY
Old name of RUBY_T_ARRAY.
Definition value_type.h:56
#define T_OBJECT
Old name of RUBY_T_OBJECT.
Definition value_type.h:75
#define NIL_P
Old name of RB_NIL_P.
#define T_SYMBOL
Old name of RUBY_T_SYMBOL.
Definition value_type.h:80
#define DBL2NUM
Old name of rb_float_new.
Definition double.h:29
#define T_CLASS
Old name of RUBY_T_CLASS.
Definition value_type.h:58
#define BUILTIN_TYPE
Old name of RB_BUILTIN_TYPE.
Definition value_type.h:85
#define FL_TEST
Old name of RB_FL_TEST.
Definition fl_type.h:131
#define FIXNUM_P
Old name of RB_FIXNUM_P.
#define FL_USHIFT
Old name of RUBY_FL_USHIFT.
Definition fl_type.h:69
#define FL_SET_RAW
Old name of RB_FL_SET_RAW.
Definition fl_type.h:130
#define SYMBOL_P
Old name of RB_SYMBOL_P.
Definition value_type.h:88
void rb_notimplement(void)
Definition error.c:3498
VALUE rb_eTypeError
TypeError exception.
Definition error.c:1344
VALUE rb_eFatal
fatal exception.
Definition error.c:1340
VALUE rb_eNoMethodError
NoMethodError exception.
Definition error.c:1352
void rb_exc_fatal(VALUE mesg)
Raises a fatal error in the current thread.
Definition eval.c:699
VALUE rb_eRuntimeError
RuntimeError exception.
Definition error.c:1342
void rb_warn(const char *fmt,...)
Identical to rb_warning(), except it reports unless $VERBOSE is nil.
Definition error.c:423
void rb_error_frozen_object(VALUE frozen_obj)
Identical to rb_error_frozen(), except it takes arbitrary Ruby object instead of C's string.
Definition error.c:3805
VALUE rb_exc_new_str(VALUE etype, VALUE str)
Identical to rb_exc_new_cstr(), except it takes a Ruby's string instead of C's.
Definition error.c:1395
VALUE rb_cClass
Class class.
Definition object.c:66
VALUE rb_cArray
Array class.
Definition array.c:39
VALUE rb_obj_alloc(VALUE klass)
Allocates an instance of the given class.
Definition object.c:2049
VALUE rb_cRegexp
Regexp class.
Definition re.c:2580
VALUE rb_obj_frozen_p(VALUE obj)
Just calls RB_OBJ_FROZEN() inside.
Definition object.c:1228
VALUE rb_cHash
Hash class.
Definition hash.c:110
VALUE rb_obj_class(VALUE obj)
Queries the class of an object.
Definition object.c:215
VALUE rb_inspect(VALUE obj)
Generates a human-readable textual representation of the given object.
Definition object.c:636
VALUE rb_cBasicObject
BasicObject class.
Definition object.c:62
VALUE rb_cModule
Module class.
Definition object.c:65
VALUE rb_class_real(VALUE klass)
Finds a "real" class.
Definition object.c:205
VALUE rb_obj_is_kind_of(VALUE obj, VALUE klass)
Queries if the given object is an instance (of possibly descendants) of the given class.
Definition object.c:821
VALUE rb_cFloat
Float class.
Definition numeric.c:197
VALUE rb_cProc
Proc class.
Definition proc.c:43
VALUE rb_cString
String class.
Definition string.c:78
#define RB_OBJ_WRITTEN(old, oldv, young)
Identical to RB_OBJ_WRITE(), except it doesn't write any values, but only a WB declaration.
Definition gc.h:631
#define RB_OBJ_WRITE(old, slot, young)
Declaration of a "back" pointer.
Definition gc.h:619
#define UNLIMITED_ARGUMENTS
This macro is used in conjunction with rb_check_arity().
Definition error.h:35
#define rb_check_frozen
Just another name of rb_check_frozen.
Definition error.h:264
static int rb_check_arity(int argc, int min, int max)
Ensures that the passed integer is in the passed range.
Definition error.h:280
#define rb_check_frozen_internal(obj)
Definition error.h:247
VALUE rb_proc_call_with_block(VALUE recv, int argc, const VALUE *argv, VALUE proc)
Identical to rb_proc_call(), except you can additionally pass another proc object,...
Definition proc.c:1010
VALUE rb_reg_last_match(VALUE md)
This just returns the argument, stringified.
Definition re.c:1870
VALUE rb_reg_match(VALUE re, VALUE str)
This is the match operator.
Definition re.c:3635
VALUE rb_reg_nth_match(int n, VALUE md)
Queries the nth captured substring.
Definition re.c:1845
VALUE rb_reg_match_post(VALUE md)
The portion of the original string after the given match.
Definition re.c:1927
VALUE rb_reg_nth_defined(int n, VALUE md)
Identical to rb_reg_nth_match(), except it just returns Boolean.
Definition re.c:1828
VALUE rb_reg_match_pre(VALUE md)
The portion of the original string before the given match.
Definition re.c:1894
VALUE rb_reg_match_last(VALUE md)
The portion of the original string that captured at the very last.
Definition re.c:1960
VALUE rb_str_append(VALUE dst, VALUE src)
Identical to rb_str_buf_append(), except it converts the right hand side before concatenating.
Definition string.c:3382
VALUE rb_sym_to_s(VALUE sym)
This is an rb_sym2str() + rb_str_dup() combo.
Definition string.c:11659
VALUE rb_str_succ(VALUE orig)
Searches for the "successor" of a string.
Definition string.c:4826
VALUE rb_str_concat(VALUE dst, VALUE src)
Identical to rb_str_append(), except it also accepts an integer as a codepoint.
Definition string.c:3473
#define rb_str_cat_cstr(buf, str)
Identical to rb_str_cat(), except it assumes the passed pointer is a pointer to a C string.
Definition string.h:1656
VALUE rb_str_length(VALUE)
Identical to rb_str_strlen(), except it returns the value in rb_cInteger.
Definition string.c:2177
void rb_thread_schedule(void)
Tries to switch to another thread.
Definition thread.c:1448
VALUE rb_const_get(VALUE space, ID name)
Identical to rb_const_defined(), except it returns the actual defined value.
Definition variable.c:3141
VALUE rb_ivar_set(VALUE obj, ID name, VALUE val)
Identical to rb_iv_set(), except it accepts the name as an ID instead of a C string.
Definition variable.c:1854
void rb_cvar_set(VALUE klass, ID name, VALUE val)
Assigns a value to a class variable.
Definition variable.c:3917
VALUE rb_cvar_find(VALUE klass, ID name, VALUE *front)
Identical to rb_cvar_get(), except it takes additional "front" pointer.
Definition variable.c:3972
VALUE rb_ivar_get(VALUE obj, ID name)
Identical to rb_iv_get(), except it accepts the name as an ID instead of a C string.
Definition variable.c:1340
void rb_const_set(VALUE space, ID name, VALUE val)
Names a constant.
Definition variable.c:3596
VALUE rb_autoload_load(VALUE space, ID name)
Kicks the autoload procedure as if it was "touched".
Definition variable.c:2976
VALUE rb_mod_name(VALUE mod)
Queries the name of a module.
Definition variable.c:122
VALUE rb_const_get_at(VALUE space, ID name)
Identical to rb_const_defined_at(), except it returns the actual defined value.
Definition variable.c:3147
void rb_set_class_path_string(VALUE klass, VALUE space, VALUE name)
Identical to rb_set_class_path(), except it accepts the name as Ruby's string instead of C's.
Definition variable.c:326
VALUE rb_ivar_defined(VALUE obj, ID name)
Queries if the instance variable is defined at the object.
Definition variable.c:1871
int rb_const_defined_at(VALUE space, ID name)
Identical to rb_const_defined(), except it doesn't look for parent classes.
Definition variable.c:3455
VALUE rb_cvar_defined(VALUE klass, ID name)
Queries if the given class has the given class variable.
Definition variable.c:3994
VALUE rb_class_path(VALUE mod)
Identical to rb_mod_name(), except it returns #<Class: ...> style inspection for anonymous modules.
Definition variable.c:283
int rb_const_defined(VALUE space, ID name)
Queries if the constant is defined at the namespace.
Definition variable.c:3449
VALUE rb_check_funcall(VALUE recv, ID mid, int argc, const VALUE *argv)
Identical to rb_funcallv(), except it returns RUBY_Qundef instead of raising rb_eNoMethodError.
Definition vm_eval.c:687
rb_alloc_func_t rb_get_alloc_func(VALUE klass)
Queries the allocator function of a class.
Definition vm_method.c:1165
void rb_define_alloc_func(VALUE klass, rb_alloc_func_t func)
Sets the allocator function of a class.
int rb_method_boundp(VALUE klass, ID id, int ex)
Queries if the klass has this method.
Definition vm_method.c:1698
ID rb_check_id(volatile VALUE *namep)
Detects if the given name is already interned or not.
Definition symbol.c:1092
VALUE rb_sym2str(VALUE id)
Identical to rb_id2str(), except it takes an instance of rb_cSymbol rather than an ID.
Definition symbol.c:950
int off
Offset inside of ptr.
Definition io.h:5
int len
Length of the buffer.
Definition io.h:8
static bool rb_ractor_shareable_p(VALUE obj)
Queries if multiple Ractors can share the passed object or not.
Definition ractor.h:249
#define RB_OBJ_SHAREABLE_P(obj)
Queries if the passed object has previously classified as shareable or not.
Definition ractor.h:235
#define MEMCPY(p1, p2, type, n)
Handy macro to call memcpy.
Definition memory.h:366
#define ALLOCA_N(type, n)
Definition memory.h:286
#define RB_GC_GUARD(v)
Prevents premature destruction of local objects.
Definition memory.h:161
#define MEMMOVE(p1, p2, type, n)
Handy macro to call memmove.
Definition memory.h:378
VALUE type(ANYARGS)
ANYARGS-ed function type.
VALUE rb_ensure(type *q, VALUE w, type *e, VALUE r)
An equivalent of ensure clause.
#define RARRAY_LEN
Just another name of rb_array_len.
Definition rarray.h:51
static int RARRAY_LENINT(VALUE ary)
Identical to rb_array_len(), except it differs for the return type.
Definition rarray.h:281
static VALUE * RARRAY_PTR(VALUE ary)
Wild use of a C pointer.
Definition rarray.h:366
#define RARRAY_AREF(a, i)
Definition rarray.h:403
#define RARRAY_CONST_PTR
Just another name of rb_array_const_ptr.
Definition rarray.h:52
static VALUE RBASIC_CLASS(VALUE obj)
Queries the class of an object.
Definition rbasic.h:152
#define RBASIC(obj)
Convenient casting macro.
Definition rbasic.h:40
#define RCLASS_SUPER
Just another name of rb_class_get_superclass.
Definition rclass.h:44
#define RHASH_SIZE(h)
Queries the size of the hash.
Definition rhash.h:69
#define RHASH_EMPTY_P(h)
Checks if the hash is empty.
Definition rhash.h:79
static VALUE * ROBJECT_IVPTR(VALUE obj)
Queries the instance variables.
Definition robject.h:136
#define StringValueCStr(v)
Identical to StringValuePtr, except it additionally checks the contents for viability as a C string.
Definition rstring.h:89
#define RB_NO_KEYWORDS
Do not pass keywords.
Definition scan_args.h:69
static bool RB_SPECIAL_CONST_P(VALUE obj)
Checks if the given object is of enum ruby_special_consts.
#define RTEST
This is an old name of RB_TEST.
#define ANYARGS
Functions declared using this macro take arbitrary arguments, including void.
Definition stdarg.h:64
Definition hash.h:53
Definition iseq.h:267
Definition vm_core.h:233
const ID * segments
A null-terminated list of ids, used to represent a constant's path. idNULL is used to represent the :: prefix.
Definition vm_core.h:259
Definition vm_core.h:267
Definition vm_core.h:262
Definition method.h:62
Definition constant.h:33
CREF (Class REFerence)
Definition method.h:44
Definition class.h:36
Definition method.h:54
rb_cref_t * cref
class reference, should be marked
Definition method.h:136
const rb_iseq_t * iseqptr
iseq pointer, should be separated from iseqval
Definition method.h:135
Definition st.h:79
IFUNC (Internal FUNCtion)
Definition imemo.h:83
SVAR (Special VARiable)
Definition imemo.h:52
const VALUE cref_or_me
class reference or rb_method_entry_t
Definition imemo.h:54
THROW_DATA.
Definition imemo.h:61
Definition vm_core.h:271
intptr_t SIGNED_VALUE
A signed integer type that has the same width with VALUE.
Definition value.h:63
uintptr_t ID
Type that represents a Ruby identifier such as a variable name.
Definition value.h:52
uintptr_t VALUE
Type that represents a Ruby object.
Definition value.h:40
static enum ruby_value_type RB_BUILTIN_TYPE(VALUE obj)
Queries the type of the object.
Definition value_type.h:181
static bool RB_FLOAT_TYPE_P(VALUE obj)
Queries if the object is an instance of rb_cFloat.
Definition value_type.h:263