Ruby 3.2.4p170 (2024-04-23 revision af471c0e0127eea0cafa6f308c0425bbfab0acf5)
vm_insnhelper.c
1/**********************************************************************
2
3 vm_insnhelper.c - instruction helper functions.
4
5 $Author$
6
7 Copyright (C) 2007 Koichi Sasada
8
9**********************************************************************/
10
11#include "ruby/internal/config.h"
12
13#include <math.h>
14
15#include "constant.h"
16#include "debug_counter.h"
17#include "internal.h"
18#include "internal/class.h"
19#include "internal/compar.h"
20#include "internal/hash.h"
21#include "internal/numeric.h"
22#include "internal/proc.h"
23#include "internal/random.h"
24#include "internal/variable.h"
25#include "internal/struct.h"
26#include "variable.h"
27
28/* finish iseq array */
29#include "insns.inc"
30#ifndef MJIT_HEADER
31#include "insns_info.inc"
32#endif
33
34extern rb_method_definition_t *rb_method_definition_create(rb_method_type_t type, ID mid);
35extern void rb_method_definition_set(const rb_method_entry_t *me, rb_method_definition_t *def, void *opts);
36extern int rb_method_definition_eq(const rb_method_definition_t *d1, const rb_method_definition_t *d2);
37extern VALUE rb_make_no_method_exception(VALUE exc, VALUE format, VALUE obj,
38 int argc, const VALUE *argv, int priv);
39
40#ifndef MJIT_HEADER
41static const struct rb_callcache vm_empty_cc;
42static const struct rb_callcache vm_empty_cc_for_super;
43#endif
44
45/* control stack frame */
46
47static rb_control_frame_t *vm_get_ruby_level_caller_cfp(const rb_execution_context_t *ec, const rb_control_frame_t *cfp);
48
49MJIT_STATIC VALUE
50ruby_vm_special_exception_copy(VALUE exc)
51{
53 rb_obj_copy_ivar(e, exc);
54 return e;
55}
56
NORETURN(static void ec_stack_overflow(rb_execution_context_t *ec, int));
/* Raise the VM's preallocated SystemStackError on `ec`.
 * When `setup` is true, copy the exception and attach the current
 * backtrace (both idBt and idBt_locations); when false, raise the
 * bare template (cheaper, used when we may be on an alternate stack).
 * Never returns: longjmps out via EC_JUMP_TAG. */
static void
ec_stack_overflow(rb_execution_context_t *ec, int setup)
{
    VALUE mesg = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_sysstack];
    ec->raised_flag = RAISED_STACKOVERFLOW;
    if (setup) {
        VALUE at = rb_ec_backtrace_object(ec);
        mesg = ruby_vm_special_exception_copy(mesg);
        rb_ivar_set(mesg, idBt, at);
        rb_ivar_set(mesg, idBt_locations, at);
    }
    ec->errinfo = mesg;
    EC_JUMP_TAG(ec, TAG_RAISE);
}
72
NORETURN(static void vm_stackoverflow(void));
#ifdef MJIT_HEADER
NOINLINE(static COLDFUNC void vm_stackoverflow(void));
#endif

/* Convenience wrapper: raise SystemStackError (with backtrace setup)
 * on the current execution context. Does not return. */
static void
vm_stackoverflow(void)
{
    ec_stack_overflow(GET_EC(), TRUE);
}
83
NORETURN(MJIT_STATIC void rb_ec_stack_overflow(rb_execution_context_t *ec, int crit));
/* Report a *machine* (C) stack overflow on `ec`.
 * - During GC this is unrecoverable: abort with rb_bug.
 * - `crit` (critical) raises the fatal stack-error template directly,
 *   without building a backtrace.
 * - Otherwise raise SystemStackError; backtrace construction is only
 *   attempted when a sigaltstack is available, since it needs stack room. */
MJIT_STATIC void
rb_ec_stack_overflow(rb_execution_context_t *ec, int crit)
{
    if (rb_during_gc()) {
        rb_bug("system stack overflow during GC. Faulty native extension?");
    }
    if (crit) {
        ec->raised_flag = RAISED_STACKOVERFLOW;
        ec->errinfo = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_stackfatal];
        EC_JUMP_TAG(ec, TAG_RAISE);
    }
#ifdef USE_SIGALTSTACK
    ec_stack_overflow(ec, TRUE);
#else
    ec_stack_overflow(ec, FALSE);
#endif
}
102
103
104#if VM_CHECK_MODE > 0
/* Debug-mode predicate: is `klass` a legal defined_class for a callable
 * method entry? Under VM_CHECK_MODE >= 2 it must be a module, an iclass
 * wrapping a module, or a class reaching rb_cBasicObject via super. */
static int
callable_class_p(VALUE klass)
{
#if VM_CHECK_MODE >= 2
    if (!klass) return FALSE;
    switch (RB_BUILTIN_TYPE(klass)) {
      default:
        break;
      case T_ICLASS:
        if (!RB_TYPE_P(RCLASS_SUPER(klass), T_MODULE)) break;
        /* fall through: an iclass of a module counts as a module */
      case T_MODULE:
        return TRUE;
    }
    /* Walk the superclass chain; only real classes reach BasicObject. */
    while (klass) {
        if (klass == rb_cBasicObject) {
            return TRUE;
        }
        klass = RCLASS_SUPER(klass);
    }
    return FALSE;
#else
    return klass != 0;
#endif
}
129
130static int
131callable_method_entry_p(const rb_callable_method_entry_t *cme)
132{
133 if (cme == NULL) {
134 return TRUE;
135 }
136 else {
137 VM_ASSERT(IMEMO_TYPE_P((VALUE)cme, imemo_ment));
138
139 if (callable_class_p(cme->defined_class)) {
140 return TRUE;
141 }
142 else {
143 return FALSE;
144 }
145 }
146}
147
/* Debug-mode validation of one frame about to be pushed.
 * req_block/req_me/req_cref describe what this frame magic requires in
 * the ep slots; is_cframe says whether a C frame (no normal iseq) is
 * expected. Any violation is a VM bug, reported via rb_bug. */
static void
vm_check_frame_detail(VALUE type, int req_block, int req_me, int req_cref, VALUE specval, VALUE cref_or_me, int is_cframe, const rb_iseq_t *iseq)
{
    unsigned int magic = (unsigned int)(type & VM_FRAME_MAGIC_MASK);
    enum imemo_type cref_or_me_type = imemo_env; /* impossible value */

    if (RB_TYPE_P(cref_or_me, T_IMEMO)) {
        cref_or_me_type = imemo_type(cref_or_me);
    }
    /* bmethod frames always carry their method entry. */
    if (type & VM_FRAME_FLAG_BMETHOD) {
        req_me = TRUE;
    }

    /* A "local" env means ep[-1] holds a block handler, not a prev-ep link. */
    if (req_block && (type & VM_ENV_FLAG_LOCAL) == 0) {
        rb_bug("vm_push_frame: specval (%p) should be a block_ptr on %x frame", (void *)specval, magic);
    }
    if (!req_block && (type & VM_ENV_FLAG_LOCAL) != 0) {
        rb_bug("vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (void *)specval, magic);
    }

    if (req_me) {
        if (cref_or_me_type != imemo_ment) {
            rb_bug("vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
        }
    }
    else {
        if (req_cref && cref_or_me_type != imemo_cref) {
            rb_bug("vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
        }
        else { /* cref or Qfalse */
            if (cref_or_me != Qfalse && cref_or_me_type != imemo_cref) {
                /* lambda/ifunc frames are allowed to carry a ment instead. */
                if (((type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC) && (cref_or_me_type == imemo_ment)) {
                    /* ignore */
                }
                else {
                    rb_bug("vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);
                }
            }
        }
    }

    if (cref_or_me_type == imemo_ment) {
        const rb_callable_method_entry_t *me = (const rb_callable_method_entry_t *)cref_or_me;

        if (!callable_method_entry_p(me)) {
            rb_bug("vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);
        }
    }

    if ((type & VM_FRAME_MAGIC_MASK) == VM_FRAME_MAGIC_DUMMY) {
        VM_ASSERT(iseq == NULL ||
                  RBASIC_CLASS((VALUE)iseq) == 0 || // dummy frame for loading
                  RUBY_VM_NORMAL_ISEQ_P(iseq) //argument error
                  );
    }
    else {
        VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));
    }
}
207
/* Dispatch table mapping each frame magic to its invariant set
 * (see vm_check_frame_detail). Unknown magics are fatal. */
static void
vm_check_frame(VALUE type,
               VALUE specval,
               VALUE cref_or_me,
               const rb_iseq_t *iseq)
{
    VALUE given_magic = type & VM_FRAME_MAGIC_MASK;
    VM_ASSERT(FIXNUM_P(type));

#define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
    case magic: \
      vm_check_frame_detail(type, req_block, req_me, req_cref, \
                            specval, cref_or_me, is_cframe, iseq); \
      break
    switch (given_magic) {
        /*                           BLK    ME     CREF   CFRAME */
        CHECK(VM_FRAME_MAGIC_METHOD, TRUE,  TRUE,  FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_CLASS,  TRUE,  FALSE, TRUE,  FALSE);
        CHECK(VM_FRAME_MAGIC_TOP,    TRUE,  FALSE, TRUE,  FALSE);
        CHECK(VM_FRAME_MAGIC_CFUNC,  TRUE,  TRUE,  FALSE, TRUE);
        CHECK(VM_FRAME_MAGIC_BLOCK,  FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_IFUNC,  FALSE, FALSE, FALSE, TRUE);
        CHECK(VM_FRAME_MAGIC_EVAL,   FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_RESCUE, FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_DUMMY,  TRUE,  FALSE, FALSE, FALSE);
      default:
        rb_bug("vm_push_frame: unknown type (%x)", (unsigned int)given_magic);
    }
#undef CHECK
}
238
/* Random sentinel written to unused VM stack slots in debug builds;
 * rb_vm_check_canary() detects when an instruction clobbers it. */
static VALUE vm_stack_canary; /* Initialized later */
static bool vm_stack_canary_was_born = false;
241
242#ifndef MJIT_HEADER
/* Debug-mode check that `sp` does not still hold the stack canary.
 * If it does, dump the offending instruction, the iseq inspection and
 * a full disassembly via ruby_debug_printf (rb_bug output is too small
 * for this), then abort. */
MJIT_FUNC_EXPORTED void
rb_vm_check_canary(const rb_execution_context_t *ec, VALUE *sp)
{
    const struct rb_control_frame_struct *reg_cfp = ec->cfp;
    const struct rb_iseq_struct *iseq;

    if (! LIKELY(vm_stack_canary_was_born)) {
        return; /* :FIXME: isn't it rather fatal to enter this branch?  */
    }
    else if ((VALUE *)reg_cfp == ec->vm_stack + ec->vm_stack_size) {
        /* This is at the very beginning of a thread. cfp does not exist. */
        return;
    }
    else if (! (iseq = GET_ISEQ())) {
        return;
    }
    else if (LIKELY(sp[0] != vm_stack_canary)) {
        return;
    }
    else {
        /* we are going to call methods below; squash the canary to
         * prevent infinite loop. */
        sp[0] = Qundef;
    }

    /* Identify the instruction at the current PC from the original
     * (non-encoded) iseq so we can name it in the report. */
    const VALUE *orig = rb_iseq_original_iseq(iseq);
    const VALUE *encoded = ISEQ_BODY(iseq)->iseq_encoded;
    const ptrdiff_t pos = GET_PC() - encoded;
    const enum ruby_vminsn_type insn = (enum ruby_vminsn_type)orig[pos];
    const char *name = insn_name(insn);
    const VALUE iseqw = rb_iseqw_new(iseq);
    const VALUE inspection = rb_inspect(iseqw);
    const char *stri = rb_str_to_cstr(inspection);
    const VALUE disasm = rb_iseq_disasm(iseq);
    const char *strd = rb_str_to_cstr(disasm);

    /* rb_bug() is not capable of outputting this large contents.  It
       is designed to run form a SIGSEGV handler, which tends to be
       very restricted. */
    ruby_debug_printf(
        "We are killing the stack canary set by %s, "
        "at %s@pc=%"PRIdPTR"\n"
        "watch out the C stack trace.\n"
        "%s",
        name, stri, pos, strd);
    rb_bug("see above.");
}
290#endif
291#define vm_check_canary(ec, sp) rb_vm_check_canary(ec, sp)
292
293#else
294#define vm_check_canary(ec, sp)
295#define vm_check_frame(a, b, c, d)
296#endif /* VM_CHECK_MODE > 0 */
297
298#if USE_DEBUG_COUNTER
/* Bump debug counters for a frame push: total pushes, the Ruby/C
 * transition kind (R2R/R2C/C2R/C2C) relative to the previous frame,
 * and a per-magic counter. Compiled only under USE_DEBUG_COUNTER. */
static void
vm_push_frame_debug_counter_inc(
    const struct rb_execution_context_struct *ec,
    const struct rb_control_frame_struct *reg_cfp,
    VALUE type)
{
    const struct rb_control_frame_struct *prev_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(reg_cfp);

    RB_DEBUG_COUNTER_INC(frame_push);

    if (RUBY_VM_END_CONTROL_FRAME(ec) != prev_cfp) {
        const bool curr = VM_FRAME_RUBYFRAME_P(reg_cfp);
        const bool prev = VM_FRAME_RUBYFRAME_P(prev_cfp);
        if (prev) {
            if (curr) {
                RB_DEBUG_COUNTER_INC(frame_R2R);
            }
            else {
                RB_DEBUG_COUNTER_INC(frame_R2C);
            }
        }
        else {
            if (curr) {
                RB_DEBUG_COUNTER_INC(frame_C2R);
            }
            else {
                RB_DEBUG_COUNTER_INC(frame_C2C);
            }
        }
    }

    switch (type & VM_FRAME_MAGIC_MASK) {
      case VM_FRAME_MAGIC_METHOD: RB_DEBUG_COUNTER_INC(frame_push_method); return;
      case VM_FRAME_MAGIC_BLOCK:  RB_DEBUG_COUNTER_INC(frame_push_block);  return;
      case VM_FRAME_MAGIC_CLASS:  RB_DEBUG_COUNTER_INC(frame_push_class);  return;
      case VM_FRAME_MAGIC_TOP:    RB_DEBUG_COUNTER_INC(frame_push_top);    return;
      case VM_FRAME_MAGIC_CFUNC:  RB_DEBUG_COUNTER_INC(frame_push_cfunc);  return;
      case VM_FRAME_MAGIC_IFUNC:  RB_DEBUG_COUNTER_INC(frame_push_ifunc);  return;
      case VM_FRAME_MAGIC_EVAL:   RB_DEBUG_COUNTER_INC(frame_push_eval);   return;
      case VM_FRAME_MAGIC_RESCUE: RB_DEBUG_COUNTER_INC(frame_push_rescue); return;
      case VM_FRAME_MAGIC_DUMMY:  RB_DEBUG_COUNTER_INC(frame_push_dummy);  return;
    }

    rb_bug("unreachable");
}
344#else
345#define vm_push_frame_debug_counter_inc(ec, cfp, t) /* void */
346#endif
347
348STATIC_ASSERT(VM_ENV_DATA_INDEX_ME_CREF, VM_ENV_DATA_INDEX_ME_CREF == -2);
349STATIC_ASSERT(VM_ENV_DATA_INDEX_SPECVAL, VM_ENV_DATA_INDEX_SPECVAL == -1);
350STATIC_ASSERT(VM_ENV_DATA_INDEX_FLAGS, VM_ENV_DATA_INDEX_FLAGS == -0);
351
/* Push a new control frame onto ec's VM stack.
 * Lays out the value stack as: [locals (Qnil x local_size)]
 * [ep[-2]=cref_or_me][ep[-1]=specval][ep[0]=type flags], then fills in
 * the control frame and publishes it via ec->cfp. Statement order
 * matters: the stack slots must be initialized before cfp is exposed. */
static void
vm_push_frame(rb_execution_context_t *ec,
              const rb_iseq_t *iseq,
              VALUE type,
              VALUE self,
              VALUE specval,
              VALUE cref_or_me,
              const VALUE *pc,
              VALUE *sp,
              int local_size,
              int stack_max)
{
    rb_control_frame_t *const cfp = RUBY_VM_NEXT_CONTROL_FRAME(ec->cfp);

    vm_check_frame(type, specval, cref_or_me, iseq);
    VM_ASSERT(local_size >= 0);

    /* check stack overflow */
    CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
    vm_check_canary(ec, sp);

    /* setup vm value stack */

    /* initialize local variables */
    for (int i=0; i < local_size; i++) {
        *sp++ = Qnil;
    }

    /* setup ep with managing data */
    *sp++ = cref_or_me; /* ep[-2] / Qnil or T_IMEMO(cref) or T_IMEMO(ment) */
    *sp++ = specval     /* ep[-1] / block handler or prev env ptr */;
    *sp++ = type;       /* ep[-0] / ENV_FLAGS */

    /* setup new frame */
    *cfp = (const struct rb_control_frame_struct) {
        .pc = pc,
        .sp = sp,
        .iseq = iseq,
        .self = self,
        .ep = sp - 1,
        .block_code = NULL,
        .__bp__ = sp, /* Store initial value of ep as bp to skip calculation cost of bp on JIT cancellation. */
#if VM_DEBUG_BP_CHECK
        .bp_check = sp,
#endif
        .jit_return = NULL
    };

    ec->cfp = cfp;

    if (VMDEBUG == 2) {
        SDR();
    }
    vm_push_frame_debug_counter_inc(ec, cfp, type);
}
407
/* Pop the current frame WITHOUT checking pending interrupts
 * (contrast vm_pop_frame, which runs RUBY_VM_CHECK_INTS). */
void
rb_vm_pop_frame_no_int(rb_execution_context_t *ec)
{
    rb_control_frame_t *cfp = ec->cfp;

    if (VM_CHECK_MODE >= 4) rb_gc_verify_internal_consistency();
    if (VMDEBUG == 2)       SDR();

    ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
}
418
/* Pop `cfp`, servicing pending interrupts first.
 * Returns non-zero iff the popped frame had VM_FRAME_FLAG_FINISH set
 * (i.e. the interpreter loop for this invocation is finished). */
static inline int
vm_pop_frame(rb_execution_context_t *ec, rb_control_frame_t *cfp, const VALUE *ep)
{
    /* Read the flags before cfp is retired. */
    VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];

    if (VM_CHECK_MODE >= 4) rb_gc_verify_internal_consistency();
    if (VMDEBUG == 2)       SDR();

    RUBY_VM_CHECK_INTS(ec);
    ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);

    return flags & VM_FRAME_FLAG_FINISH;
}
433
/* Public wrapper: pop ec's current frame (interrupts are checked). */
MJIT_STATIC void
rb_vm_pop_frame(rb_execution_context_t *ec)
{
    vm_pop_frame(ec, ec->cfp, ec->cfp->ep);
}
439
// Push a pseudo (dummy) frame whose iseq only carries `fname` as its
// path, so backtraces show that filename during file loading. The
// dummy iseq+body live in a tmpbuf imemo; the returned tmpbuf keeps
// them alive — the caller must retain it until the frame is popped.
VALUE
rb_vm_push_frame_fname(rb_execution_context_t *ec, VALUE fname)
{
    VALUE tmpbuf = rb_imemo_tmpbuf_auto_free_pointer();
    void *ptr = ruby_xcalloc(sizeof(struct rb_iseq_constant_body) + sizeof(struct rb_iseq_struct), 1);
    rb_imemo_tmpbuf_set_ptr(tmpbuf, ptr);

    /* Carve the fake iseq and its body out of the single allocation. */
    struct rb_iseq_struct *dmy_iseq = (struct rb_iseq_struct *)ptr;
    struct rb_iseq_constant_body *dmy_body = (struct rb_iseq_constant_body *)&dmy_iseq[1];
    dmy_iseq->body = dmy_body;
    dmy_body->type = ISEQ_TYPE_TOP;
    dmy_body->location.pathobj = fname;

    vm_push_frame(ec,
                  dmy_iseq, //const rb_iseq_t *iseq,
                  VM_FRAME_MAGIC_DUMMY | VM_ENV_FLAG_LOCAL | VM_FRAME_FLAG_FINISH, // VALUE type,
                  ec->cfp->self, // VALUE self,
                  VM_BLOCK_HANDLER_NONE, // VALUE specval,
                  Qfalse, // VALUE cref_or_me,
                  NULL, // const VALUE *pc,
                  ec->cfp->sp, // VALUE *sp,
                  0, // int local_size,
                  0); // int stack_max

    return tmpbuf;
}
467
468/* method dispatch */
469static inline VALUE
470rb_arity_error_new(int argc, int min, int max)
471{
472 VALUE err_mess = rb_sprintf("wrong number of arguments (given %d, expected %d", argc, min);
473 if (min == max) {
474 /* max is not needed */
475 }
476 else if (max == UNLIMITED_ARGUMENTS) {
477 rb_str_cat_cstr(err_mess, "+");
478 }
479 else {
480 rb_str_catf(err_mess, "..%d", max);
481 }
482 rb_str_cat_cstr(err_mess, ")");
483 return rb_exc_new3(rb_eArgError, err_mess);
484}
485
/* Raise the ArgumentError built by rb_arity_error_new. Does not return. */
MJIT_STATIC void
rb_error_arity(int argc, int min, int max)
{
    rb_exc_raise(rb_arity_error_new(argc, min, max));
}
491
492/* lvar */
493
NOINLINE(static void vm_env_write_slowpath(const VALUE *ep, int index, VALUE v));

/* Slow path for env writes when the env has escaped to the heap:
 * forcibly remember the env object for the GC write barrier, then
 * perform the raw write and clear the WB_REQUIRED flag. */
static void
vm_env_write_slowpath(const VALUE *ep, int index, VALUE v)
{
    /* remember env value forcibly so a generational GC barrier is kept */
    rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
    VM_FORCE_WRITE(&ep[index], v);
    VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
    RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
}
505
506static inline void
507vm_env_write(const VALUE *ep, int index, VALUE v)
508{
509 VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
510 if (LIKELY((flags & VM_ENV_FLAG_WB_REQUIRED) == 0)) {
511 VM_STACK_ENV_WRITE(ep, index, v);
512 }
513 else {
514 vm_env_write_slowpath(ep, index, v);
515 }
516}
517
/* Convert a block handler into a Proc VALUE (Qnil when no block).
 * iseq/ifunc handlers are materialized via rb_vm_make_proc; symbols
 * via Symbol#to_proc; proc handlers are returned as-is. */
MJIT_STATIC VALUE
rb_vm_bh_to_procval(const rb_execution_context_t *ec, VALUE block_handler)
{
    if (block_handler == VM_BLOCK_HANDLER_NONE) {
        return Qnil;
    }
    else {
        switch (vm_block_handler_type(block_handler)) {
          case block_handler_type_iseq:
          case block_handler_type_ifunc:
            return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler), rb_cProc);
          case block_handler_type_symbol:
            return rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
          case block_handler_type_proc:
            return VM_BH_TO_PROC(block_handler);
          default:
            VM_UNREACHABLE(rb_vm_bh_to_procval);
        }
    }
}
538
539/* svar */
540
541#if VM_CHECK_MODE > 0
542static int
543vm_svar_valid_p(VALUE svar)
544{
545 if (RB_TYPE_P((VALUE)svar, T_IMEMO)) {
546 switch (imemo_type(svar)) {
547 case imemo_svar:
548 case imemo_cref:
549 case imemo_ment:
550 return TRUE;
551 default:
552 break;
553 }
554 }
555 rb_bug("vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));
556 return FALSE;
557}
558#endif
559
/* Fetch the svar slot for local ep `lep`: from the frame's ME_CREF
 * slot normally, or from ec->root_svar for the root frame.
 * May return Qfalse cast to a pointer when no svar exists yet. */
static inline struct vm_svar *
lep_svar(const rb_execution_context_t *ec, const VALUE *lep)
{
    VALUE svar;

    if (lep && (ec == NULL || ec->root_lep != lep)) {
        svar = lep[VM_ENV_DATA_INDEX_ME_CREF];
    }
    else {
        svar = ec->root_svar;
    }

    VM_ASSERT(svar == Qfalse || vm_svar_valid_p(svar));

    return (struct vm_svar *)svar;
}
576
/* Store `svar` back into the slot lep_svar reads from, using the
 * write-barrier-aware writer appropriate for each location. */
static inline void
lep_svar_write(const rb_execution_context_t *ec, const VALUE *lep, const struct vm_svar *svar)
{
    VM_ASSERT(vm_svar_valid_p((VALUE)svar));

    if (lep && (ec == NULL || ec->root_lep != lep)) {
        vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (VALUE)svar);
    }
    else {
        RB_OBJ_WRITE(rb_ec_thread_ptr(ec)->self, &ec->root_svar, svar);
    }
}
589
/* Read one special variable ($_, $~, or an extra slot) for `lep`.
 * Returns Qnil when no svar imemo is installed or the slot is unset. */
static VALUE
lep_svar_get(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key)
{
    const struct vm_svar *svar = lep_svar(ec, lep);

    /* The slot may hold Qfalse or a cref/ment instead of an svar. */
    if ((VALUE)svar == Qfalse || imemo_type((VALUE)svar) != imemo_svar) return Qnil;

    switch (key) {
      case VM_SVAR_LASTLINE:
        return svar->lastline;
      case VM_SVAR_BACKREF:
        return svar->backref;
      default: {
        const VALUE ary = svar->others;

        if (NIL_P(ary)) {
            return Qnil;
        }
        else {
            return rb_ary_entry(ary, key - VM_SVAR_EXTRA_START);
        }
      }
    }
}
614
/* Allocate a fresh svar imemo; `obj` (the previous cref_or_me slot
 * content) is preserved in the imemo's last field. */
static struct vm_svar *
svar_new(VALUE obj)
{
    return (struct vm_svar *)rb_imemo_new(imemo_svar, Qnil, Qnil, Qnil, obj);
}
620
/* Write one special variable for `lep`, creating and installing the
 * svar imemo on first use (wrapping whatever occupied the slot). */
static void
lep_svar_set(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key, VALUE val)
{
    struct vm_svar *svar = lep_svar(ec, lep);

    if ((VALUE)svar == Qfalse || imemo_type((VALUE)svar) != imemo_svar) {
        lep_svar_write(ec, lep, svar = svar_new((VALUE)svar));
    }

    switch (key) {
      case VM_SVAR_LASTLINE:
        RB_OBJ_WRITE(svar, &svar->lastline, val);
        return;
      case VM_SVAR_BACKREF:
        RB_OBJ_WRITE(svar, &svar->backref, val);
        return;
      default: {
        /* Extra slots live in a lazily created array. */
        VALUE ary = svar->others;

        if (NIL_P(ary)) {
            RB_OBJ_WRITE(svar, &svar->others, ary = rb_ary_new());
        }
        rb_ary_store(ary, key - VM_SVAR_EXTRA_START, val);
      }
    }
}
647
/* Read a special variable for the getspecial instruction.
 * type == 0: plain svar lookup by `key`.
 * odd type:  named backref ($&, $`, $', $+) selected by (type >> 1).
 * even type: numbered backref $N where N = type >> 1. */
static inline VALUE
vm_getspecial(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key, rb_num_t type)
{
    VALUE val;

    if (type == 0) {
        val = lep_svar_get(ec, lep, key);
    }
    else {
        VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);

        if (type & 0x01) {
            switch (type >> 1) {
              case '&':
                val = rb_reg_last_match(backref);
                break;
              case '`':
                val = rb_reg_match_pre(backref);
                break;
              case '\'':
                val = rb_reg_match_post(backref);
                break;
              case '+':
                val = rb_reg_match_last(backref);
                break;
              default:
                rb_bug("unexpected back-ref");
            }
        }
        else {
            val = rb_reg_nth_match((int)(type >> 1), backref);
        }
    }
    return val;
}
683
684PUREFUNC(static rb_callable_method_entry_t *check_method_entry(VALUE obj, int can_be_svar));
686check_method_entry(VALUE obj, int can_be_svar)
687{
688 if (obj == Qfalse) return NULL;
689
690#if VM_CHECK_MODE > 0
691 if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_method_entry: unknown type: %s", rb_obj_info(obj));
692#endif
693
694 switch (imemo_type(obj)) {
695 case imemo_ment:
696 return (rb_callable_method_entry_t *)obj;
697 case imemo_cref:
698 return NULL;
699 case imemo_svar:
700 if (can_be_svar) {
701 return check_method_entry(((struct vm_svar *)obj)->cref_or_me, FALSE);
702 }
703 default:
704#if VM_CHECK_MODE > 0
705 rb_bug("check_method_entry: svar should not be there:");
706#endif
707 return NULL;
708 }
709}
710
711MJIT_STATIC const rb_callable_method_entry_t *
712rb_vm_frame_method_entry(const rb_control_frame_t *cfp)
713{
714 const VALUE *ep = cfp->ep;
716
717 while (!VM_ENV_LOCAL_P(ep)) {
718 if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return me;
719 ep = VM_ENV_PREV_EP(ep);
720 }
721
722 return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
723}
724
725static const rb_iseq_t *
726method_entry_iseqptr(const rb_callable_method_entry_t *me)
727{
728 switch (me->def->type) {
729 case VM_METHOD_TYPE_ISEQ:
730 return me->def->body.iseq.iseqptr;
731 default:
732 return NULL;
733 }
734}
735
736static rb_cref_t *
737method_entry_cref(const rb_callable_method_entry_t *me)
738{
739 switch (me->def->type) {
740 case VM_METHOD_TYPE_ISEQ:
741 return me->def->body.iseq.cref;
742 default:
743 return NULL;
744 }
745}
746
747#if VM_CHECK_MODE == 0
748PUREFUNC(static rb_cref_t *check_cref(VALUE, int));
749#endif
750static rb_cref_t *
751check_cref(VALUE obj, int can_be_svar)
752{
753 if (obj == Qfalse) return NULL;
754
755#if VM_CHECK_MODE > 0
756 if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_cref: unknown type: %s", rb_obj_info(obj));
757#endif
758
759 switch (imemo_type(obj)) {
760 case imemo_ment:
761 return method_entry_cref((rb_callable_method_entry_t *)obj);
762 case imemo_cref:
763 return (rb_cref_t *)obj;
764 case imemo_svar:
765 if (can_be_svar) {
766 return check_cref(((struct vm_svar *)obj)->cref_or_me, FALSE);
767 }
768 default:
769#if VM_CHECK_MODE > 0
770 rb_bug("check_method_entry: svar should not be there:");
771#endif
772 return NULL;
773 }
774}
775
776static inline rb_cref_t *
777vm_env_cref(const VALUE *ep)
778{
779 rb_cref_t *cref;
780
781 while (!VM_ENV_LOCAL_P(ep)) {
782 if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return cref;
783 ep = VM_ENV_PREV_EP(ep);
784 }
785
786 return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
787}
788
789static int
790is_cref(const VALUE v, int can_be_svar)
791{
792 if (RB_TYPE_P(v, T_IMEMO)) {
793 switch (imemo_type(v)) {
794 case imemo_cref:
795 return TRUE;
796 case imemo_svar:
797 if (can_be_svar) return is_cref(((struct vm_svar *)v)->cref_or_me, FALSE);
798 default:
799 break;
800 }
801 }
802 return FALSE;
803}
804
/* TRUE iff the cref reachable from `ep` is stored as a cref imemo
 * (rather than borrowed from a method entry) — i.e. it is safe to
 * duplicate in place (see vm_cref_replace_with_duplicated_cref). */
static int
vm_env_cref_by_cref(const VALUE *ep)
{
    while (!VM_ENV_LOCAL_P(ep)) {
        if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) return TRUE;
        ep = VM_ENV_PREV_EP(ep);
    }
    return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
}
814
/* If *vptr holds a cref, replace it in place with a duplicate and
 * return the duplicate; recurse one level into an svar wrapper when
 * can_be_svar allows. `parent` is the owning heap object (or 0) so
 * the write can go through the GC write barrier. Returns NULL when
 * the slot held no cref. */
static rb_cref_t *
cref_replace_with_duplicated_cref_each_frame(const VALUE *vptr, int can_be_svar, VALUE parent)
{
    const VALUE v = *vptr;
    rb_cref_t *cref, *new_cref;

    if (RB_TYPE_P(v, T_IMEMO)) {
        switch (imemo_type(v)) {
          case imemo_cref:
            cref = (rb_cref_t *)v;
            new_cref = vm_cref_dup(cref);
            if (parent) {
                /* barriered write: parent is a heap object */
                RB_OBJ_WRITE(parent, vptr, new_cref);
            }
            else {
                /* stack slot: no barrier needed */
                VM_FORCE_WRITE(vptr, (VALUE)new_cref);
            }
            return (rb_cref_t *)new_cref;
          case imemo_svar:
            if (can_be_svar) {
                return cref_replace_with_duplicated_cref_each_frame(&((struct vm_svar *)v)->cref_or_me, FALSE, v);
            }
            /* fall through */
          case imemo_ment:
            rb_bug("cref_replace_with_duplicated_cref_each_frame: unreachable");
          default:
            break;
        }
    }
    return NULL;
}
846
/* Duplicate the cref governing `ep` in place (so it can be mutated
 * without affecting other scopes) and return the duplicate. Requires
 * that the cref is stored directly (vm_env_cref_by_cref). */
static rb_cref_t *
vm_cref_replace_with_duplicated_cref(const VALUE *ep)
{
    if (vm_env_cref_by_cref(ep)) {
        rb_cref_t *cref;
        VALUE envval;

        while (!VM_ENV_LOCAL_P(ep)) {
            /* Only escaped envs are heap objects that need a barrier. */
            envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
            if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {
                return cref;
            }
            ep = VM_ENV_PREV_EP(ep);
        }
        envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
        return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);
    }
    else {
        rb_bug("vm_cref_dup: unreachable");
    }
}
868
869static rb_cref_t *
870vm_get_cref(const VALUE *ep)
871{
872 rb_cref_t *cref = vm_env_cref(ep);
873
874 if (cref != NULL) {
875 return cref;
876 }
877 else {
878 rb_bug("vm_get_cref: unreachable");
879 }
880}
881
/* Exported wrapper around vm_get_cref for use outside this file. */
rb_cref_t *
rb_vm_get_cref(const VALUE *ep)
{
    return vm_get_cref(ep);
}
887
/* Cref of the nearest Ruby-level frame of `ec`, or NULL when there is
 * no such frame (e.g. called from a pure C stack). */
static rb_cref_t *
vm_ec_cref(const rb_execution_context_t *ec)
{
    const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);

    if (cfp == NULL) {
        return NULL;
    }
    return vm_get_cref(cfp->ep);
}
898
/* Return the cref usable as a constant-cache key for `ep`, or NULL.
 * A non-NULL key is only needed when the lexical scope involves a
 * singleton or cloned class, whose constants can't be cached globally. */
static const rb_cref_t *
vm_get_const_key_cref(const VALUE *ep)
{
    const rb_cref_t *cref = vm_get_cref(ep);
    const rb_cref_t *key_cref = cref;

    while (cref) {
        if (FL_TEST(CREF_CLASS(cref), FL_SINGLETON) ||
            FL_TEST(CREF_CLASS(cref), RCLASS_CLONED)) {
            return key_cref;
        }
        cref = CREF_NEXT(cref);
    }

    /* does not include singleton class */
    return NULL;
}
916
/* Rebuild the cref chain, substituting `new_klass` for the first link
 * whose class is `old_klass`; links before the match are copied, the
 * chain after the match is shared. The rebuilt head is stored through
 * *new_cref_ptr (NULL when old_klass never appears). */
void
rb_vm_rewrite_cref(rb_cref_t *cref, VALUE old_klass, VALUE new_klass, rb_cref_t **new_cref_ptr)
{
    rb_cref_t *new_cref;

    while (cref) {
        if (CREF_CLASS(cref) == old_klass) {
            new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
            *new_cref_ptr = new_cref;
            return;
        }
        new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
        cref = CREF_NEXT(cref);
        /* Link the copied node in, then aim at its next pointer. */
        *new_cref_ptr = new_cref;
        new_cref_ptr = &new_cref->next;
    }
    *new_cref_ptr = NULL;
}
935
/* Create a new cref for `klass` chained onto the cref of `ep` (or,
 * when ep is NULL, of the caller's Ruby-level frame, if any). */
static rb_cref_t *
vm_cref_push(const rb_execution_context_t *ec, VALUE klass, const VALUE *ep, int pushed_by_eval, int singleton)
{
    rb_cref_t *prev_cref = NULL;

    if (ep) {
        prev_cref = vm_env_cref(ep);
    }
    else {
        rb_control_frame_t *cfp = vm_get_ruby_level_caller_cfp(ec, ec->cfp);

        if (cfp) {
            prev_cref = vm_env_cref(cfp->ep);
        }
    }

    return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval, singleton);
}
954
/* Innermost lexical class/module for definitions at `ep`. */
static inline VALUE
vm_get_cbase(const VALUE *ep)
{
    const rb_cref_t *cref = vm_get_cref(ep);

    return CREF_CLASS_FOR_DEFINITION(cref);
}
962
963static inline VALUE
964vm_get_const_base(const VALUE *ep)
965{
966 const rb_cref_t *cref = vm_get_cref(ep);
967
968 while (cref) {
969 if (!CREF_PUSHED_BY_EVAL(cref)) {
970 return CREF_CLASS_FOR_DEFINITION(cref);
971 }
972 cref = CREF_NEXT(cref);
973 }
974
975 return Qundef;
976}
977
978static inline void
979vm_check_if_namespace(VALUE klass)
980{
981 if (!RB_TYPE_P(klass, T_CLASS) && !RB_TYPE_P(klass, T_MODULE)) {
982 rb_raise(rb_eTypeError, "%+"PRIsVALUE" is not a class/module", klass);
983 }
984}
985
/* Warn when a definition targets a refinement module: the definition
 * actually lands on the outer class/module, not the refinement. */
static inline void
vm_ensure_not_refinement_module(VALUE self)
{
    if (RB_TYPE_P(self, T_MODULE) && FL_TEST(self, RMODULE_IS_REFINEMENT)) {
        rb_warn("not defined at the refinement, but at the outer class/module");
    }
}
993
/* Identity mapping: returns `klass` unchanged; `cfp` is unused.
 * NOTE(review): presumably kept so call sites read as "resolve the
 * iclass for this frame" — confirm before removing. */
static inline VALUE
vm_get_iclass(const rb_control_frame_t *cfp, VALUE klass)
{
    return klass;
}
999
/* Constant lookup for the VM.
 * orig_klass == Qnil with allow_nil: lexical lookup through the cref
 * chain (triggering autoload as needed), then through self's class
 * hierarchy. Otherwise: public constant lookup under orig_klass.
 * When is_defined is set, return a boolean-ish "defined?" result
 * instead of the value. Non-main ractors may only read shareable
 * values. */
static inline VALUE
vm_get_ev_const(rb_execution_context_t *ec, VALUE orig_klass, ID id, bool allow_nil, int is_defined)
{
    void rb_const_warn_if_deprecated(const rb_const_entry_t *ce, VALUE klass, ID id);
    VALUE val;

    if (NIL_P(orig_klass) && allow_nil) {
        /* in current lexical scope */
        const rb_cref_t *root_cref = vm_get_cref(ec->cfp->ep);
        const rb_cref_t *cref;
        VALUE klass = Qnil;

        /* Skip leading eval-pushed scopes to find the real root. */
        while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
            root_cref = CREF_NEXT(root_cref);
        }
        cref = root_cref;
        while (cref && CREF_NEXT(cref)) {
            if (CREF_PUSHED_BY_EVAL(cref)) {
                klass = Qnil;
            }
            else {
                klass = CREF_CLASS(cref);
            }
            cref = CREF_NEXT(cref);

            if (!NIL_P(klass)) {
                VALUE av, am = 0;
                rb_const_entry_t *ce;
              search_continue:
                if ((ce = rb_const_lookup(klass, id))) {
                    rb_const_warn_if_deprecated(ce, klass, id);
                    val = ce->value;
                    if (UNDEF_P(val)) {
                        /* Undef value marks an autoload entry; `am` guards
                         * against retrying the same class forever. */
                        if (am == klass) break;
                        am = klass;
                        if (is_defined) return 1;
                        if (rb_autoloading_value(klass, id, &av, NULL)) return av;
                        rb_autoload_load(klass, id);
                        goto search_continue;
                    }
                    else {
                        if (is_defined) {
                            return 1;
                        }
                        else {
                            if (UNLIKELY(!rb_ractor_main_p())) {
                                if (!rb_ractor_shareable_p(val)) {
                                    rb_raise(rb_eRactorIsolationError,
                                             "can not access non-shareable objects in constant %"PRIsVALUE"::%s by non-main ractor.", rb_class_path(klass), rb_id2name(id));
                                }
                            }
                            return val;
                        }
                    }
                }
            }
        }

        /* search self */
        if (root_cref && !NIL_P(CREF_CLASS(root_cref))) {
            klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));
        }
        else {
            klass = CLASS_OF(ec->cfp->self);
        }

        if (is_defined) {
            return rb_const_defined(klass, id);
        }
        else {
            return rb_const_get(klass, id);
        }
    }
    else {
        vm_check_if_namespace(orig_klass);
        if (is_defined) {
            return rb_public_const_defined_from(orig_klass, id);
        }
        else {
            return rb_public_const_get_from(orig_klass, id);
        }
    }
}
1083
/* Exported wrapper: value-lookup variant (is_defined == 0). */
VALUE
rb_vm_get_ev_const(rb_execution_context_t *ec, VALUE orig_klass, ID id, VALUE allow_nil)
{
    return vm_get_ev_const(ec, orig_klass, id, allow_nil == Qtrue, 0);
}
1089
/* Resolve a pre-split constant path (e.g. A::B::C) given as a
 * zero-terminated array of IDs. A leading idNULL means the path is
 * rooted at ::Object. Only the first segment may use nil-based
 * (lexical) lookup. */
static inline VALUE
vm_get_ev_const_chain(rb_execution_context_t *ec, const ID *segments)
{
    VALUE val = Qnil;
    int idx = 0;
    int allow_nil = TRUE;
    if (segments[0] == idNULL) {
        val = rb_cObject;
        idx++;
        allow_nil = FALSE;
    }
    while (segments[idx]) {
        ID id = segments[idx++];
        val = vm_get_ev_const(ec, val, id, allow_nil, 0);
        allow_nil = FALSE;
    }
    return val;
}
1108
1109
/* Find the class/module that owns class variables for this scope:
 * walk the cref past nil, singleton, eval-pushed and singleton-marked
 * scopes. Raises RuntimeError at toplevel (when requested) and
 * TypeError when no class is available. */
static inline VALUE
vm_get_cvar_base(const rb_cref_t *cref, const rb_control_frame_t *cfp, int top_level_raise)
{
    VALUE klass;

    if (!cref) {
        rb_bug("vm_get_cvar_base: no cref");
    }

    while (CREF_NEXT(cref) &&
           (NIL_P(CREF_CLASS(cref)) || FL_TEST(CREF_CLASS(cref), FL_SINGLETON) ||
            CREF_PUSHED_BY_EVAL(cref) || CREF_SINGLETON(cref))) {
        cref = CREF_NEXT(cref);
    }
    if (top_level_raise && !CREF_NEXT(cref)) {
        rb_raise(rb_eRuntimeError, "class variable access from toplevel");
    }

    klass = vm_get_iclass(cfp, CREF_CLASS(cref));

    if (NIL_P(klass)) {
        rb_raise(rb_eTypeError, "no class variables available");
    }
    return klass;
}
1135
ALWAYS_INLINE(static void fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, attr_index_t index, shape_id_t shape_id));
/* Record a resolved ivar (shape, index) pair in the appropriate
 * inline cache: the call cache for attr_reader/attr_writer sites,
 * or the instruction's ivar cache otherwise. */
static inline void
fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, attr_index_t index, shape_id_t shape_id)
{
    if (is_attr) {
        vm_cc_attr_index_set(cc, index, shape_id);
    }
    else {
        vm_ic_attr_index_set(iseq, ic, index, shape_id);
    }
}
1147
1148#define ractor_incidental_shareable_p(cond, val) \
1149 (!(cond) || rb_ractor_shareable_p(val))
1150#define ractor_object_incidental_shareable_p(obj, val) \
1151 ractor_incidental_shareable_p(rb_ractor_shareable_p(obj), val)
1152
1153#define ATTR_INDEX_NOT_SET (attr_index_t)-1
1154
ALWAYS_INLINE(static VALUE vm_getivar(VALUE, ID, const rb_iseq_t *, IVC, const struct rb_callcache *, int));
/* Read instance variable `id` from `obj`, using the shape-based inline
 * cache (`cc` when is_attr, i.e. attr_reader-style access; `ic` otherwise).
 * On a cache hit the value is loaded directly from the object's ivar list;
 * on a miss the shape tree is consulted and the cache is refilled.
 * Returns Qnil when the ivar is not set. */
static inline VALUE
vm_getivar(VALUE obj, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr)
{
#if OPT_IC_FOR_IVAR
    VALUE val = Qundef;
    shape_id_t shape_id;
    VALUE * ivar_list;

    /* Special constants (Fixnum, nil, ...) can never carry ivars. */
    if (SPECIAL_CONST_P(obj)) {
        return Qnil;
    }

#if SHAPE_IN_BASIC_FLAGS
    shape_id = RBASIC_SHAPE_ID(obj);
#endif

    /* Locate the ivar storage and (when not in basic flags) the shape id,
     * which differ per object type. */
    switch (BUILTIN_TYPE(obj)) {
      case T_OBJECT:
        ivar_list = ROBJECT_IVPTR(obj);
        VM_ASSERT(rb_ractor_shareable_p(obj) ? rb_ractor_shareable_p(val) : true);

#if !SHAPE_IN_BASIC_FLAGS
        shape_id = ROBJECT_SHAPE_ID(obj);
#endif
        break;
      case T_CLASS:
      case T_MODULE:
        {
            if (UNLIKELY(!rb_ractor_main_p())) {
                // For two reasons we can only use the fast path on the main
                // ractor.
                // First, only the main ractor is allowed to set ivars on classes
                // and modules. So we can skip locking.
                // Second, other ractors need to check the shareability of the
                // values returned from the class ivars.
                goto general_path;
            }

            ivar_list = RCLASS_IVPTR(obj);

#if !SHAPE_IN_BASIC_FLAGS
            shape_id = RCLASS_SHAPE_ID(obj);
#endif

            break;
        }
      default:
        /* Other object types keep their ivars in the generic ivtbl,
         * flagged by FL_EXIVAR. */
        if (FL_TEST_RAW(obj, FL_EXIVAR)) {
            struct gen_ivtbl *ivtbl;
            rb_gen_ivtbl_get(obj, id, &ivtbl);
#if !SHAPE_IN_BASIC_FLAGS
            shape_id = ivtbl->shape_id;
#endif
            ivar_list = ivtbl->ivptr;
        }
        else {
            return Qnil;
        }
    }

    shape_id_t cached_id;
    attr_index_t index;

    if (is_attr) {
        vm_cc_atomic_shape_and_index(cc, &cached_id, &index);
    }
    else {
        vm_ic_atomic_shape_and_index(ic, &cached_id, &index);
    }

    /* Cache hit: the object's shape matches the cached shape, so the
     * cached index is valid for the ivar list we fetched above. */
    if (LIKELY(cached_id == shape_id)) {
        RUBY_ASSERT(cached_id != OBJ_TOO_COMPLEX_SHAPE_ID);

        if (index == ATTR_INDEX_NOT_SET) {
            return Qnil;
        }

        val = ivar_list[index];
        RUBY_ASSERT(!UNDEF_P(val));
    }
    else { // cache miss case
#if RUBY_DEBUG
        if (is_attr) {
            if (cached_id != INVALID_SHAPE_ID) {
                RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_set);
            }
            else {
                RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_unset);
            }
        }
        else {
            if (cached_id != INVALID_SHAPE_ID) {
                RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_set);
            }
            else {
                RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_unset);
            }
        }
#endif

        rb_shape_t *shape = rb_shape_get_shape_by_id(shape_id);

        /* Too-complex objects store ivars in an st hash instead of a
         * shape-indexed array; no caching in that case. */
        if (shape_id == OBJ_TOO_COMPLEX_SHAPE_ID) {
            if (!st_lookup(ROBJECT_IV_HASH(obj), id, &val)) {
                val = Qnil;
            }
        }
        else {
            if (rb_shape_get_iv_index(shape, id, &index)) {
                // This fills in the cache with the shared cache object.
                // "ent" is the shared cache object
                fill_ivar_cache(iseq, ic, cc, is_attr, index, shape_id);

                // We fetched the ivar list above
                val = ivar_list[index];
                RUBY_ASSERT(!UNDEF_P(val));
            }
            else {
                /* Ivar unset for this shape: cache that fact so the next
                 * read is a fast Qnil. */
                if (is_attr) {
                    vm_cc_attr_index_initialize(cc, shape_id);
                }
                else {
                    vm_ic_attr_index_initialize(ic, shape_id);
                }

                val = Qnil;
            }
        }

    }

    RUBY_ASSERT(!UNDEF_P(val));

    return val;

general_path:
#endif /* OPT_IC_FOR_IVAR */
    RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);

    /* Uncached fallback (also used by non-main ractors for classes). */
    if (is_attr) {
        return rb_attr_get(obj, id);
    }
    else {
        return rb_ivar_get(obj, id);
    }
}
1302
1303static void
1304populate_cache(attr_index_t index, shape_id_t next_shape_id, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, bool is_attr)
1305{
1306 RUBY_ASSERT(next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);
1307
1308 // Cache population code
1309 if (is_attr) {
1310 vm_cc_attr_index_set(cc, index, next_shape_id);
1311 }
1312 else {
1313 vm_ic_attr_index_set(iseq, ic, index, next_shape_id);
1314 }
1315}
1316
ALWAYS_INLINE(static VALUE vm_setivar_slowpath(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr));
NOINLINE(static VALUE vm_setivar_slowpath_ivar(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic));
NOINLINE(static VALUE vm_setivar_slowpath_attr(VALUE obj, ID id, VALUE val, const struct rb_callcache *cc));

/* Slow-path instance variable write: performs the actual set via the
 * object/generic ivar APIs, then repopulates the inline cache with the
 * resulting shape transition. Classes/modules fall through to the plain
 * rb_ivar_set at the bottom (no caching). Returns `val`. */
static VALUE
vm_setivar_slowpath(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr)
{
#if OPT_IC_FOR_IVAR
    switch (BUILTIN_TYPE(obj)) {
      case T_OBJECT:
        {

            attr_index_t index = rb_obj_ivar_set(obj, id, val);

            shape_id_t next_shape_id = ROBJECT_SHAPE_ID(obj);

            /* Only cache when the object has not degraded to the
             * too-complex (hash-backed) representation. */
            if (next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID) {
                populate_cache(index, next_shape_id, id, iseq, ic, cc, is_attr);
            }

            RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_iv_hit);
            return val;
        }
      case T_CLASS:
      case T_MODULE:
        /* fall through to the uncached write below */
        break;
      default:
        {
            /* Generic ivar storage (FL_EXIVAR objects). */
            rb_ivar_set(obj, id, val);
            shape_id_t next_shape_id = rb_shape_get_shape_id(obj);
            rb_shape_t *next_shape = rb_shape_get_shape_by_id(next_shape_id);
            attr_index_t index;

            if (rb_shape_get_iv_index(next_shape, id, &index)) { // based off the hash stored in the transition tree
                if (index >= MAX_IVARS) {
                    rb_raise(rb_eArgError, "too many instance variables");
                }

                populate_cache(index, next_shape_id, id, iseq, ic, cc, is_attr);
            }
            else {
                /* The ivar we just set must be present in the new shape. */
                rb_bug("didn't find the id\n");
            }

            return val;
        }
    }
#endif
    RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);
    return rb_ivar_set(obj, id, val);
}
1369
/* Slow-path ivar write for the setinstancevariable instruction (fills
 * the iseq's ivar cache, not a call cache). */
static VALUE
vm_setivar_slowpath_ivar(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic)
{
    return vm_setivar_slowpath(obj, id, val, iseq, ic, NULL, false);
}
1375
/* Slow-path ivar write for attr_writer-style calls (fills the call
 * cache, not an iseq ivar cache). */
static VALUE
vm_setivar_slowpath_attr(VALUE obj, ID id, VALUE val, const struct rb_callcache *cc)
{
    return vm_setivar_slowpath(obj, id, val, NULL, NULL, cc, true);
}
1381
NOINLINE(static VALUE vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index));
/* Cached ivar write for objects that keep ivars in the generic ivtbl
 * (neither T_OBJECT nor class/module). Handles two cached cases: the
 * object already has the destination shape, or it has the destination's
 * parent shape and transitions by setting `id`. Returns Qundef when the
 * cache does not apply (caller falls back to the slow path). */
static VALUE
vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)
{
#if SHAPE_IN_BASIC_FLAGS
    shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
#else
    shape_id_t shape_id = rb_generic_shape_id(obj);
#endif

    struct gen_ivtbl *ivtbl = 0;

    // Cache hit case
    if (shape_id == dest_shape_id) {
        RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);

        // Just get the IV table
        rb_gen_ivtbl_get(obj, 0, &ivtbl);
    }
    else if (dest_shape_id != INVALID_SHAPE_ID) {
        /* Cached transition: applies only if the object currently has the
         * destination shape's parent and the edge matches this ivar. */
        rb_shape_t * dest_shape = rb_shape_get_shape_by_id(dest_shape_id);
        shape_id_t source_shape_id = dest_shape->parent_id;

        if (shape_id == source_shape_id && dest_shape->edge_name == id && dest_shape->type == SHAPE_IVAR) {
            ivtbl = rb_ensure_generic_iv_list_size(obj, dest_shape, index + 1);
#if SHAPE_IN_BASIC_FLAGS
            RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
#else
            RUBY_ASSERT(ivtbl->shape_id == dest_shape_id);
#endif
        }
        else {
            return Qundef;
        }
    }
    else {
        return Qundef;
    }

    VALUE *ptr = ivtbl->ivptr;

    /* Write barrier: val may be a young object referenced by an old obj. */
    RB_OBJ_WRITE(obj, &ptr[index], val);

    RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);

    return val;
}
1429
1430static inline VALUE
1431vm_setivar(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)
1432{
1433#if OPT_IC_FOR_IVAR
1434 switch (BUILTIN_TYPE(obj)) {
1435 case T_OBJECT:
1436 {
1437 VM_ASSERT(!rb_ractor_shareable_p(obj) || rb_obj_frozen_p(obj));
1438
1439 shape_id_t shape_id = ROBJECT_SHAPE_ID(obj);
1440 RUBY_ASSERT(dest_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);
1441
1442 if (LIKELY(shape_id == dest_shape_id)) {
1443 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1444 VM_ASSERT(!rb_ractor_shareable_p(obj));
1445 }
1446 else if (dest_shape_id != INVALID_SHAPE_ID) {
1447 rb_shape_t *dest_shape = rb_shape_get_shape_by_id(dest_shape_id);
1448 shape_id_t source_shape_id = dest_shape->parent_id;
1449
1450 if (shape_id == source_shape_id && dest_shape->edge_name == id) {
1451 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1452
1453 ROBJECT_SET_SHAPE_ID(obj, dest_shape_id);
1454
1455 RUBY_ASSERT(rb_shape_get_next_iv_shape(rb_shape_get_shape_by_id(source_shape_id), id) == dest_shape);
1456 RUBY_ASSERT(index < dest_shape->capacity);
1457 }
1458 else {
1459 break;
1460 }
1461 }
1462 else {
1463 break;
1464 }
1465
1466 VALUE *ptr = ROBJECT_IVPTR(obj);
1467
1468 RUBY_ASSERT(!rb_shape_obj_too_complex(obj));
1469 RB_OBJ_WRITE(obj, &ptr[index], val);
1470
1471 RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1472 return val;
1473 }
1474 break;
1475 case T_CLASS:
1476 case T_MODULE:
1477 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
1478 default:
1479 break;
1480 }
1481
1482 return Qundef;
1483#endif /* OPT_IC_FOR_IVAR */
1484}
1485
/* Look up class variable `id` starting at `klass`, then refresh the
 * class-variable inline cache `ic` with the (class, cref, state) triple
 * recorded in the defining class's CVC table. Returns the cvar value.
 * Both the CVC table and its entry are expected to exist (rb_cvar_find
 * populates them); their absence is a VM bug. */
static VALUE
update_classvariable_cache(const rb_iseq_t *iseq, VALUE klass, ID id, const rb_cref_t * cref, ICVARC ic)
{
    VALUE defined_class = 0;
    VALUE cvar_value = rb_cvar_find(klass, id, &defined_class);

    /* Normalize an iclass (included module wrapper) to the real module. */
    if (RB_TYPE_P(defined_class, T_ICLASS)) {
        defined_class = RBASIC(defined_class)->klass;
    }

    struct rb_id_table *rb_cvc_tbl = RCLASS_CVC_TBL(defined_class);
    if (!rb_cvc_tbl) {
        rb_bug("the cvc table should be set");
    }

    VALUE ent_data;
    if (!rb_id_table_lookup(rb_cvc_tbl, id, &ent_data)) {
        rb_bug("should have cvar cache entry");
    }

    struct rb_cvar_class_tbl_entry *ent = (void *)ent_data;

    ent->global_cvar_state = GET_GLOBAL_CVAR_STATE();
    ent->cref = cref;
    ic->entry = ent;

    RUBY_ASSERT(BUILTIN_TYPE((VALUE)cref) == T_IMEMO && IMEMO_TYPE_P(cref, imemo_cref));
    /* Write barriers: the iseq and class now reference the cref/class. */
    RB_OBJ_WRITTEN(iseq, Qundef, ent->cref);
    RB_OBJ_WRITTEN(iseq, Qundef, ent->class_value);
    RB_OBJ_WRITTEN(ent->class_value, Qundef, ent->cref);

    return cvar_value;
}
1519
1520static inline VALUE
1521vm_getclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *reg_cfp, ID id, ICVARC ic)
1522{
1523 const rb_cref_t *cref;
1524 cref = vm_get_cref(GET_EP());
1525
1526 if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
1527 RB_DEBUG_COUNTER_INC(cvar_read_inline_hit);
1528
1529 VALUE v = rb_ivar_lookup(ic->entry->class_value, id, Qundef);
1530 RUBY_ASSERT(!UNDEF_P(v));
1531
1532 return v;
1533 }
1534
1535 VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
1536
1537 return update_classvariable_cache(iseq, klass, id, cref, ic);
1538}
1539
/* Exported entry point for class-variable reads (used by JIT/other TUs). */
VALUE
rb_vm_getclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *cfp, ID id, ICVARC ic)
{
    return vm_getclassvariable(iseq, cfp, id, ic);
}
1545
1546static inline void
1547vm_setclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *reg_cfp, ID id, VALUE val, ICVARC ic)
1548{
1549 const rb_cref_t *cref;
1550 cref = vm_get_cref(GET_EP());
1551
1552 if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
1553 RB_DEBUG_COUNTER_INC(cvar_write_inline_hit);
1554
1555 rb_class_ivar_set(ic->entry->class_value, id, val);
1556 return;
1557 }
1558
1559 VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
1560
1561 rb_cvar_set(klass, id, val);
1562
1563 update_classvariable_cache(iseq, klass, id, cref, ic);
1564}
1565
/* Exported entry point for class-variable writes (used by JIT/other TUs). */
void
rb_vm_setclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *cfp, ID id, VALUE val, ICVARC ic)
{
    vm_setclassvariable(iseq, cfp, id, val, ic);
}
1571
/* getinstancevariable instruction helper: cached ivar read through the
 * iseq's ivar cache (no call cache involved). */
static inline VALUE
vm_getinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, IVC ic)
{
    return vm_getivar(obj, id, iseq, ic, NULL, FALSE);
}
1577
/* setinstancevariable instruction helper. Tries, in order: the T_OBJECT
 * fast path (vm_setivar), the generic-ivtbl cached path for other types
 * (vm_setivar_default), then the uncached slow path. */
static inline void
vm_setinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, VALUE val, IVC ic)
{
    /* Special constants cannot carry ivars. */
    if (RB_SPECIAL_CONST_P(obj)) {
        return;
    }

    shape_id_t dest_shape_id;
    attr_index_t index;
    vm_ic_atomic_shape_and_index(ic, &dest_shape_id, &index);

    if (UNLIKELY(UNDEF_P(vm_setivar(obj, id, val, dest_shape_id, index)))) {
        switch (BUILTIN_TYPE(obj)) {
          case T_OBJECT:
          case T_CLASS:
          case T_MODULE:
            /* these types go straight to the slow path below */
            break;
          default:
            /* generic-ivtbl objects get one more cached attempt */
            if (!UNDEF_P(vm_setivar_default(obj, id, val, dest_shape_id, index))) {
                return;
            }
        }
        vm_setivar_slowpath_ivar(obj, id, val, iseq, ic);
    }
}
1604
/* Exported entry point for ivar writes (used by JIT/other TUs). */
void
rb_vm_setinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, VALUE val, IVC ic)
{
    vm_setinstancevariable(iseq, obj, id, val, ic);
}
1610
1611static VALUE
1612vm_throw_continue(const rb_execution_context_t *ec, VALUE err)
1613{
1614 /* continue throw */
1615
1616 if (FIXNUM_P(err)) {
1617 ec->tag->state = FIX2INT(err);
1618 }
1619 else if (SYMBOL_P(err)) {
1620 ec->tag->state = TAG_THROW;
1621 }
1622 else if (THROW_DATA_P(err)) {
1623 ec->tag->state = THROW_DATA_STATE((struct vm_throw_data *)err);
1624 }
1625 else {
1626 ec->tag->state = TAG_RAISE;
1627 }
1628 return err;
1629}
1630
/* Begin a non-local jump (break/retry/return) from the current frame.
 * Determines the escape control frame the jump targets, validates that
 * the jump is legal (raising LocalJumpError for orphan break/return),
 * sets the tag state, and returns a new throw-data object describing
 * the jump. With a nonzero `flag` (VM_THROW_NO_ESCAPE_FLAG) no frame
 * search is performed. */
static VALUE
vm_throw_start(const rb_execution_context_t *ec, rb_control_frame_t *const reg_cfp, enum ruby_tag_type state,
               const int flag, const VALUE throwobj)
{
    const rb_control_frame_t *escape_cfp = NULL;
    const rb_control_frame_t * const eocfp = RUBY_VM_END_CONTROL_FRAME(ec); /* end of control frame pointer */

    if (flag != 0) {
        /* do nothing */
    }
    else if (state == TAG_BREAK) {
        int is_orphan = 1;
        const VALUE *ep = GET_EP();
        const rb_iseq_t *base_iseq = GET_ISEQ();
        escape_cfp = reg_cfp;

        /* Climb to the block iseq this break belongs to, following the
         * static (lexical) chain of environments. */
        while (ISEQ_BODY(base_iseq)->type != ISEQ_TYPE_BLOCK) {
            if (ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
                escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
                ep = escape_cfp->ep;
                base_iseq = escape_cfp->iseq;
            }
            else {
                ep = VM_ENV_PREV_EP(ep);
                base_iseq = ISEQ_BODY(base_iseq)->parent_iseq;
                escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);
                VM_ASSERT(escape_cfp->iseq == base_iseq);
            }
        }

        if (VM_FRAME_LAMBDA_P(escape_cfp)) {
            /* lambda{... break ...} */
            is_orphan = 0;
            state = TAG_RETURN;
        }
        else {
            ep = VM_ENV_PREV_EP(ep);

            /* Search live frames for the block's caller; the break is
             * valid only if that frame's catch table still covers the
             * call site (entry->cont == epc). */
            while (escape_cfp < eocfp) {
                if (escape_cfp->ep == ep) {
                    const rb_iseq_t *const iseq = escape_cfp->iseq;
                    const VALUE epc = escape_cfp->pc - ISEQ_BODY(iseq)->iseq_encoded;
                    const struct iseq_catch_table *const ct = ISEQ_BODY(iseq)->catch_table;
                    unsigned int i;

                    if (!ct) break;
                    for (i=0; i < ct->size; i++) {
                        const struct iseq_catch_table_entry *const entry =
                            UNALIGNED_MEMBER_PTR(ct, entries[i]);

                        if (entry->type == CATCH_TYPE_BREAK &&
                            entry->iseq == base_iseq &&
                            entry->start < epc && entry->end >= epc) {
                            if (entry->cont == epc) { /* found! */
                                is_orphan = 0;
                            }
                            break;
                        }
                    }
                    break;
                }

                escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
            }
        }

        if (is_orphan) {
            rb_vm_localjump_error("break from proc-closure", throwobj, TAG_BREAK);
        }
    }
    else if (state == TAG_RETRY) {
        const VALUE *ep = VM_ENV_PREV_EP(GET_EP());

        escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);
    }
    else if (state == TAG_RETURN) {
        const VALUE *current_ep = GET_EP();
        const VALUE *target_ep = NULL, *target_lep, *ep = current_ep;
        int in_class_frame = 0;
        int toplevel = 1;
        escape_cfp = reg_cfp;

        // find target_lep, target_ep
        while (!VM_ENV_LOCAL_P(ep)) {
            if (VM_ENV_FLAGS(ep, VM_FRAME_FLAG_LAMBDA) && target_ep == NULL) {
                target_ep = ep;
            }
            ep = VM_ENV_PREV_EP(ep);
        }
        target_lep = ep;

        /* Walk live frames looking for the method (or lambda/toplevel)
         * frame this `return` should unwind to. */
        while (escape_cfp < eocfp) {
            const VALUE *lep = VM_CF_LEP(escape_cfp);

            if (!target_lep) {
                target_lep = lep;
            }

            if (lep == target_lep &&
                VM_FRAME_RUBYFRAME_P(escape_cfp) &&
                ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
                in_class_frame = 1;
                target_lep = 0;
            }

            if (lep == target_lep) {
                if (VM_FRAME_LAMBDA_P(escape_cfp)) {
                    toplevel = 0;
                    if (in_class_frame) {
                        /* lambda {class A; ... return ...; end} */
                        goto valid_return;
                    }
                    else {
                        const VALUE *tep = current_ep;

                        while (target_lep != tep) {
                            if (escape_cfp->ep == tep) {
                                /* in lambda */
                                if (tep == target_ep) {
                                    goto valid_return;
                                }
                                else {
                                    goto unexpected_return;
                                }
                            }
                            tep = VM_ENV_PREV_EP(tep);
                        }
                    }
                }
                else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
                    switch (ISEQ_BODY(escape_cfp->iseq)->type) {
                      case ISEQ_TYPE_TOP:
                      case ISEQ_TYPE_MAIN:
                        if (toplevel) {
                            if (in_class_frame) goto unexpected_return;
                            if (target_ep == NULL) {
                                goto valid_return;
                            }
                            else {
                                goto unexpected_return;
                            }
                        }
                        break;
                      case ISEQ_TYPE_EVAL:
                      case ISEQ_TYPE_CLASS:
                        toplevel = 0;
                        break;
                      default:
                        break;
                    }
                }
            }

            if (escape_cfp->ep == target_lep && ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_METHOD) {
                if (target_ep == NULL) {
                    goto valid_return;
                }
                else {
                    goto unexpected_return;
                }
            }

            escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
        }
      unexpected_return:;
        rb_vm_localjump_error("unexpected return", throwobj, TAG_RETURN);

      valid_return:;
        /* do nothing */
    }
    else {
        rb_bug("isns(throw): unsupported throw type");
    }

    ec->tag->state = state;
    return (VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);
}
1808
1809static VALUE
1810vm_throw(const rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
1811 rb_num_t throw_state, VALUE throwobj)
1812{
1813 const int state = (int)(throw_state & VM_THROW_STATE_MASK);
1814 const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);
1815
1816 if (state != 0) {
1817 return vm_throw_start(ec, reg_cfp, state, flag, throwobj);
1818 }
1819 else {
1820 return vm_throw_continue(ec, throwobj);
1821 }
1822}
1823
/* expandarray instruction helper: destructure `ary` onto the VM stack.
 * `num` values are pushed (padded with nil); flag bit 0 requests a splat
 * of the remainder, flag bit 1 requests "post" order (values taken from
 * the tail of the array, splat first). Non-arrays are treated as a
 * one-element array (after a to_ary attempt). */
static inline void
vm_expandarray(VALUE *sp, VALUE ary, rb_num_t num, int flag)
{
    int is_splat = flag & 0x01;
    rb_num_t space_size = num + is_splat;
    VALUE *base = sp - 1;
    const VALUE *ptr;
    rb_num_t len;
    const VALUE obj = ary;

    /* Coerce to array; if to_ary yields nil, wrap the object itself. */
    if (!RB_TYPE_P(ary, T_ARRAY) && NIL_P(ary = rb_check_array_type(ary))) {
        ary = obj;
        ptr = &ary;
        len = 1;
    }
    else {
        ptr = RARRAY_CONST_PTR_TRANSIENT(ary);
        len = (rb_num_t)RARRAY_LEN(ary);
    }

    if (space_size == 0) {
        /* no space left on stack */
    }
    else if (flag & 0x02) {
        /* post: ..., nil ,ary[-1], ..., ary[0..-num] # top */
        rb_num_t i = 0, j;

        if (len < num) {
            for (i=0; i<num-len; i++) {
                *base++ = Qnil;
            }
        }
        for (j=0; i<num; i++, j++) {
            VALUE v = ptr[len - j - 1];
            *base++ = v;
        }
        if (is_splat) {
            *base = rb_ary_new4(len - j, ptr);
        }
    }
    else {
        /* normal: ary[num..-1], ary[num-2], ary[num-3], ..., ary[0] # top */
        rb_num_t i;
        VALUE *bptr = &base[space_size - 1];

        for (i=0; i<num; i++) {
            if (len <= i) {
                /* out of source elements: fill the rest with nil */
                for (; i<num; i++) {
                    *bptr-- = Qnil;
                }
                break;
            }
            *bptr-- = ptr[i];
        }
        if (is_splat) {
            if (num > len) {
                *bptr = rb_ary_new();
            }
            else {
                *bptr = rb_ary_new4(len - num, ptr + num);
            }
        }
    }
    /* keep `ary` alive across the raw-pointer reads above */
    RB_GC_GUARD(ary);
}
1889
1890static VALUE vm_call_general(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling);
1891
1892static VALUE vm_mtbl_dump(VALUE klass, ID target_mid);
1893
1894static struct rb_class_cc_entries *
1895vm_ccs_create(VALUE klass, struct rb_id_table *cc_tbl, ID mid, const rb_callable_method_entry_t *cme)
1896{
1897 struct rb_class_cc_entries *ccs = ALLOC(struct rb_class_cc_entries);
1898#if VM_CHECK_MODE > 0
1899 ccs->debug_sig = ~(VALUE)ccs;
1900#endif
1901 ccs->capa = 0;
1902 ccs->len = 0;
1903 ccs->cme = cme;
1904 METHOD_ENTRY_CACHED_SET((rb_callable_method_entry_t *)cme);
1905 ccs->entries = NULL;
1906
1907 rb_id_table_insert(cc_tbl, mid, (VALUE)ccs);
1908 RB_OBJ_WRITTEN(klass, Qundef, cme);
1909 return ccs;
1910}
1911
/* Append a (ci, cc) pair to a class's call-cache entry list, growing the
 * backing array geometrically. Unmarkable ci/cc are skipped entirely
 * (they cannot be kept alive via the klass write barrier). */
static void
vm_ccs_push(VALUE klass, struct rb_class_cc_entries *ccs, const struct rb_callinfo *ci, const struct rb_callcache *cc)
{
    if (! vm_cc_markable(cc)) {
        return;
    }
    else if (! vm_ci_markable(ci)) {
        return;
    }

    if (UNLIKELY(ccs->len == ccs->capa)) {
        if (ccs->capa == 0) {
            ccs->capa = 1;
            ccs->entries = ALLOC_N(struct rb_class_cc_entries_entry, ccs->capa);
        }
        else {
            ccs->capa *= 2;
            REALLOC_N(ccs->entries, struct rb_class_cc_entries_entry, ccs->capa);
        }
    }
    VM_ASSERT(ccs->len < ccs->capa);

    const int pos = ccs->len++;
    /* write barriers: klass keeps ci/cc alive through the entries array */
    RB_OBJ_WRITE(klass, &ccs->entries[pos].ci, ci);
    RB_OBJ_WRITE(klass, &ccs->entries[pos].cc, cc);

    if (RB_DEBUG_COUNTER_SETMAX(ccs_maxlen, ccs->len)) {
        // for tuning
        // vm_mtbl_dump(klass, 0);
    }
}
1943
1944#if VM_CHECK_MODE > 0
/* Debug helper (VM_CHECK_MODE only): print a ccs and all its entries. */
void
rb_vm_ccs_dump(struct rb_class_cc_entries *ccs)
{
    ruby_debug_printf("ccs:%p (%d,%d)\n", (void *)ccs, ccs->len, ccs->capa);
    for (int i=0; i<ccs->len; i++) {
        vm_ci_dump(ccs->entries[i].ci);
        rp(ccs->entries[i].cc);
    }
}
1954
/* Debug helper (VM_CHECK_MODE only): assert the internal invariants of a
 * ccs — every entry's ci targets `mid` and every cc belongs to `klass`
 * and shares the ccs's method entry. Always returns TRUE (used inside
 * VM_ASSERT). */
static int
vm_ccs_verify(struct rb_class_cc_entries *ccs, ID mid, VALUE klass)
{
    VM_ASSERT(vm_ccs_p(ccs));
    VM_ASSERT(ccs->len <= ccs->capa);

    for (int i=0; i<ccs->len; i++) {
        const struct rb_callinfo  *ci = ccs->entries[i].ci;
        const struct rb_callcache *cc = ccs->entries[i].cc;

        VM_ASSERT(vm_ci_p(ci));
        VM_ASSERT(vm_ci_mid(ci) == mid);
        VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
        VM_ASSERT(vm_cc_class_check(cc, klass));
        VM_ASSERT(vm_cc_check_cme(cc, ccs->cme));
    }
    return TRUE;
}
1973#endif
1974
1975#ifndef MJIT_HEADER
1976
1977static const rb_callable_method_entry_t *check_overloaded_cme(const rb_callable_method_entry_t *cme, const struct rb_callinfo * const ci);
1978
/* Find (or build) a call cache for calling `ci` on `klass`. Consults the
 * class's cc table first; on a miss, resolves the callable method entry,
 * creates/reuses the ccs, and pushes a new cc onto it. Returns the empty
 * cc for undefined/missing methods. Callers hold the VM lock (see
 * rb_vm_search_method_slowpath). */
static const struct rb_callcache *
vm_search_cc(const VALUE klass, const struct rb_callinfo * const ci)
{
    const ID mid = vm_ci_mid(ci);
    struct rb_id_table *cc_tbl = RCLASS_CC_TBL(klass);
    struct rb_class_cc_entries *ccs = NULL;
    VALUE ccs_data;

    if (cc_tbl) {
        if (rb_id_table_lookup(cc_tbl, mid, &ccs_data)) {
            ccs = (struct rb_class_cc_entries *)ccs_data;
            const int ccs_len = ccs->len;

            /* Drop the whole ccs if its method entry was invalidated. */
            if (UNLIKELY(METHOD_ENTRY_INVALIDATED(ccs->cme))) {
                rb_vm_ccs_free(ccs);
                rb_id_table_delete(cc_tbl, mid);
                ccs = NULL;
            }
            else {
                VM_ASSERT(vm_ccs_verify(ccs, mid, klass));

                for (int i=0; i<ccs_len; i++) {
                    const struct rb_callinfo  *ccs_ci = ccs->entries[i].ci;
                    const struct rb_callcache *ccs_cc = ccs->entries[i].cc;

                    VM_ASSERT(vm_ci_p(ccs_ci));
                    VM_ASSERT(IMEMO_TYPE_P(ccs_cc, imemo_callcache));

                    if (ccs_ci == ci) { // TODO: equality
                        RB_DEBUG_COUNTER_INC(cc_found_in_ccs);

                        VM_ASSERT(vm_cc_cme(ccs_cc)->called_id == mid);
                        VM_ASSERT(ccs_cc->klass == klass);
                        VM_ASSERT(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(ccs_cc)));

                        return ccs_cc;
                    }
                }
            }
        }
    }
    else {
        cc_tbl = RCLASS_CC_TBL(klass) = rb_id_table_create(2);
    }

    RB_DEBUG_COUNTER_INC(cc_not_found_in_ccs);

    const rb_callable_method_entry_t *cme;

    if (ccs) {
        /* the ccs already knows the (valid) method entry */
        cme = ccs->cme;
        cme = UNDEFINED_METHOD_ENTRY_P(cme) ? NULL : cme;

        VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
    }
    else {
        cme = rb_callable_method_entry(klass, mid);
    }

    VM_ASSERT(cme == NULL || IMEMO_TYPE_P(cme, imemo_ment));

    if (cme == NULL) {
        // undef or not found: can't cache the information
        VM_ASSERT(vm_cc_cme(&vm_empty_cc) == NULL);
        return &vm_empty_cc;
    }

    VM_ASSERT(cme == rb_callable_method_entry(klass, mid));

    METHOD_ENTRY_CACHED_SET((struct rb_callable_method_entry_struct *)cme);

    if (ccs == NULL) {
        VM_ASSERT(cc_tbl != NULL);

        if (LIKELY(rb_id_table_lookup(cc_tbl, mid, &ccs_data))) {
            // rb_callable_method_entry() prepares ccs.
            ccs = (struct rb_class_cc_entries *)ccs_data;
        }
        else {
            // TODO: required?
            ccs = vm_ccs_create(klass, cc_tbl, mid, cme);
        }
    }

    cme = check_overloaded_cme(cme, ci);

    const struct rb_callcache *cc = vm_cc_new(klass, cme, vm_call_general);
    vm_ccs_push(klass, ccs, ci, cc);

    VM_ASSERT(vm_cc_cme(cc) != NULL);
    VM_ASSERT(cme->called_id == mid);
    VM_ASSERT(vm_cc_cme(cc)->called_id == mid);

    return cc;
}
2074
/* Global-lock-protected method search: resolves a call cache for `ci` on
 * `klass` and asserts its invariants. Exported for MJIT. */
MJIT_FUNC_EXPORTED const struct rb_callcache *
rb_vm_search_method_slowpath(const struct rb_callinfo *ci, VALUE klass)
{
    const struct rb_callcache *cc;

    VM_ASSERT(RB_TYPE_P(klass, T_CLASS) || RB_TYPE_P(klass, T_ICLASS));

    RB_VM_LOCK_ENTER();
    {
        cc = vm_search_cc(klass, ci);

        VM_ASSERT(cc);
        VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
        VM_ASSERT(cc == vm_cc_empty() || cc->klass == klass);
        VM_ASSERT(cc == vm_cc_empty() || callable_method_entry_p(vm_cc_cme(cc)));
        VM_ASSERT(cc == vm_cc_empty() || !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)));
        VM_ASSERT(cc == vm_cc_empty() || vm_cc_cme(cc)->called_id == vm_ci_mid(ci));
    }
    RB_VM_LOCK_LEAVE();

    return cc;
}
2097#endif
2098
/* Inline-cache miss handler: run the locked slow-path search, then (when
 * inline caching is enabled) store the result back into the call data
 * with a write barrier against its owner. Debug counters classify the
 * kind of miss. */
static const struct rb_callcache *
vm_search_method_slowpath0(VALUE cd_owner, struct rb_call_data *cd, VALUE klass)
{
#if USE_DEBUG_COUNTER
    const struct rb_callcache *old_cc = cd->cc;
#endif

    const struct rb_callcache *cc = rb_vm_search_method_slowpath(cd->ci, klass);

#if OPT_INLINE_METHOD_CACHE
    cd->cc = cc;

    const struct rb_callcache *empty_cc =
#ifdef MJIT_HEADER
        rb_vm_empty_cc();
#else
        &vm_empty_cc;
#endif
    /* the empty cc is a shared static — never write-barrier it */
    if (cd_owner && cc != empty_cc) RB_OBJ_WRITTEN(cd_owner, Qundef, cc);

#if USE_DEBUG_COUNTER
    if (old_cc == empty_cc) {
        // empty
        RB_DEBUG_COUNTER_INC(mc_inline_miss_empty);
    }
    else if (old_cc == cc) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cc);
    }
    else if (vm_cc_cme(old_cc) == vm_cc_cme(cc)) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cme);
    }
    else if (vm_cc_cme(old_cc) && vm_cc_cme(cc) &&
             vm_cc_cme(old_cc)->def == vm_cc_cme(cc)->def) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_def);
    }
    else {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_diff);
    }
#endif
#endif // OPT_INLINE_METHOD_CACHE

    VM_ASSERT(vm_cc_cme(cc) == NULL ||
              vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));

    return cc;
}
2145
#ifndef MJIT_HEADER
ALWAYS_INLINE(static const struct rb_callcache *vm_search_method_fastpath(VALUE cd_owner, struct rb_call_data *cd, VALUE klass));
#endif
/* Inline method-cache probe: the cached cc hits when its class matches
 * and its method entry has not been invalidated; otherwise defer to the
 * slow path, which refills the cache. */
static const struct rb_callcache *
vm_search_method_fastpath(VALUE cd_owner, struct rb_call_data *cd, VALUE klass)
{
    const struct rb_callcache *cc = cd->cc;

#if OPT_INLINE_METHOD_CACHE
    if (LIKELY(vm_cc_class_check(cc, klass))) {
        if (LIKELY(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)))) {
            VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
            RB_DEBUG_COUNTER_INC(mc_inline_hit);
            VM_ASSERT(vm_cc_cme(cc) == NULL ||                        // not found
                      (vm_ci_flag(cd->ci) & VM_CALL_SUPER) ||         // search_super w/ define_method
                      vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci)); // cme->called_id == ci->mid

            return cc;
        }
        RB_DEBUG_COUNTER_INC(mc_inline_miss_invalidated);
    }
    else {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_klass);
    }
#endif

    return vm_search_method_slowpath0(cd_owner, cd, klass);
}
2174
/* Resolve the call cache for invoking `cd`'s method on `recv`, keyed by
 * the receiver's class. */
static const struct rb_callcache *
vm_search_method(VALUE cd_owner, struct rb_call_data *cd, VALUE recv)
{
    VALUE klass = CLASS_OF(recv);
    VM_ASSERT(klass != Qfalse);
    VM_ASSERT(RBASIC_CLASS(klass) == 0 || rb_obj_is_kind_of(klass, rb_cClass));

    return vm_search_method_fastpath(cd_owner, cd, klass);
}
2184
/* Function-pointer type used to compare C method implementations
 * (check_cfunc). With transparent_union support, any of the listed
 * arities converts implicitly, avoiding casts at call sites; otherwise a
 * plain ANYARGS pointer is used. */
#if __has_attribute(transparent_union)
typedef union {
    VALUE (*anyargs)(ANYARGS);
    VALUE (*f00)(VALUE);
    VALUE (*f01)(VALUE, VALUE);
    VALUE (*f02)(VALUE, VALUE, VALUE);
    VALUE (*f03)(VALUE, VALUE, VALUE, VALUE);
    VALUE (*f04)(VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f05)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f06)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f07)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    /* fm1: -1-arity C method (argc, argv, recv) */
    VALUE (*fm1)(int, union { VALUE *x; const VALUE *y; } __attribute__((__transparent_union__)), VALUE);
} __attribute__((__transparent_union__)) cfunc_type;
#else
typedef VALUE (*cfunc_type)(ANYARGS);
#endif
2209
2210static inline int
2211check_cfunc(const rb_callable_method_entry_t *me, cfunc_type func)
2212{
2213 if (! me) {
2214 return false;
2215 }
2216 else {
2217 VM_ASSERT(IMEMO_TYPE_P(me, imemo_ment));
2218 VM_ASSERT(callable_method_entry_p(me));
2219 VM_ASSERT(me->def);
2220 if (me->def->type != VM_METHOD_TYPE_CFUNC) {
2221 return false;
2222 }
2223 else {
2224#if __has_attribute(transparent_union)
2225 return me->def->body.cfunc.func == func.anyargs;
2226#else
2227 return me->def->body.cfunc.func == func;
2228#endif
2229 }
2230 }
2231}
2232
/* True iff calling `cd`'s method on `recv` would invoke exactly the C
 * function `func` (i.e. the built-in has not been redefined). */
static inline int
vm_method_cfunc_is(const rb_iseq_t *iseq, CALL_DATA cd, VALUE recv, cfunc_type func)
{
    VM_ASSERT(iseq != NULL);
    const struct rb_callcache *cc = vm_search_method((VALUE)iseq, cd, recv);
    return check_cfunc(vm_cc_cme(cc), func);
}
2240
/* True iff `==` is unredefined for the given built-in type (t is the
 * redefined-op flag prefix, e.g. INTEGER, FLOAT, STRING). */
#define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)
2242
2243static inline bool
2244FIXNUM_2_P(VALUE a, VALUE b)
2245{
2246 /* FIXNUM_P(a) && FIXNUM_P(b)
2247 * == ((a & 1) && (b & 1))
2248 * == a & b & 1 */
2249 SIGNED_VALUE x = a;
2250 SIGNED_VALUE y = b;
2251 SIGNED_VALUE z = x & y & 1;
2252 return z == 1;
2253}
2254
/* Are both values Flonums (immediate floats)? Always false when Flonums
 * are disabled. */
static inline bool
FLONUM_2_P(VALUE a, VALUE b)
{
#if USE_FLONUM
    /* FLONUM_P(a) && FLONUM_P(b)
     * == ((a & 3) == 2) && ((b & 3) == 2)
     * == ! (((a ^ 2) | (b ^ 2)) & 3)
     */
    SIGNED_VALUE x = a;
    SIGNED_VALUE y = b;
    SIGNED_VALUE z = ((x ^ 2) | (y ^ 2)) & 3;
    return !z;
#else
    return false;
#endif
}
2271
/* Fast paths for `==` on common types (Fixnum, Flonum, static Symbol,
 * Float, String), usable only while the corresponding `==` is not
 * redefined. Returns Qundef when no specialization applies and the
 * caller must do a real method dispatch. */
static VALUE
opt_equality_specialized(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) && EQ_UNREDEFINED_P(INTEGER)) {
        goto compare_by_identity;
    }
    else if (FLONUM_2_P(recv, obj) && EQ_UNREDEFINED_P(FLOAT)) {
        goto compare_by_identity;
    }
    else if (STATIC_SYM_P(recv) && STATIC_SYM_P(obj) && EQ_UNREDEFINED_P(SYMBOL)) {
        goto compare_by_identity;
    }
    else if (SPECIAL_CONST_P(recv)) {
        // other special constants: no fast path, fall through to Qundef
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RB_FLOAT_TYPE_P(obj) && EQ_UNREDEFINED_P(FLOAT)) {
        double a = RFLOAT_VALUE(recv);
        double b = RFLOAT_VALUE(obj);

#if MSC_VERSION_BEFORE(1300)
        /* old MSVC mishandles NaN comparison; check explicitly */
        if (isnan(a)) {
            return Qfalse;
        }
        else if (isnan(b)) {
            return Qfalse;
        }
        else
#endif
        return RBOOL(a == b);
    }
    else if (RBASIC_CLASS(recv) == rb_cString && EQ_UNREDEFINED_P(STRING)) {
        if (recv == obj) {
            return Qtrue;
        }
        else if (RB_TYPE_P(obj, T_STRING)) {
            return rb_str_eql_internal(obj, recv);
        }
    }
    return Qundef;

  compare_by_identity:
    return RBOOL(recv == obj);
}
2315
/* opt_eq instruction helper: try the specialized fast paths; if the
 * receiver's `==` is the default identity comparison (rb_obj_equal),
 * compare identities directly. Qundef means "do a full call". */
static VALUE
opt_equality(const rb_iseq_t *cd_owner, VALUE recv, VALUE obj, CALL_DATA cd)
{
    VM_ASSERT(cd_owner != NULL);

    VALUE val = opt_equality_specialized(recv, obj);
    if (!UNDEF_P(val)) return val;

    if (!vm_method_cfunc_is(cd_owner, cd, recv, rb_obj_equal)) {
        return Qundef;
    }
    else {
        return RBOOL(recv == obj);
    }
}
2331
2332#undef EQ_UNREDEFINED_P
2333
2334#ifndef MJIT_HEADER
2335
2336static inline const struct rb_callcache *gccct_method_search(rb_execution_context_t *ec, VALUE recv, ID mid, int argc); // vm_eval.c
2337NOINLINE(static VALUE opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid));
2338
/* Slow path for rb_equal_opt/rb_eql_opt: resolve `mid` through the global
 * call cache.  If the resolved method is the default Object#== (rb_obj_equal),
 * compare by identity; otherwise return Qundef so the caller performs a full
 * method call. */
static VALUE
opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid)
{
    const struct rb_callcache *cc = gccct_method_search(GET_EC(), recv, mid, 1);

    if (cc && check_cfunc(vm_cc_cme(cc), rb_obj_equal)) {
        return RBOOL(recv == obj);
    }
    else {
        return Qundef;
    }
}
2351
2352static VALUE
2353opt_equality_by_mid(VALUE recv, VALUE obj, ID mid)
2354{
2355 VALUE val = opt_equality_specialized(recv, obj);
2356 if (!UNDEF_P(val)) {
2357 return val;
2358 }
2359 else {
2360 return opt_equality_by_mid_slowpath(recv, obj, mid);
2361 }
2362}
2363
/* Optimized `==` entry point for the C API: returns Qtrue/Qfalse when the
 * comparison can be resolved without a full method call, Qundef otherwise. */
VALUE
rb_equal_opt(VALUE obj1, VALUE obj2)
{
    return opt_equality_by_mid(obj1, obj2, idEq);
}
2369
/* Optimized `eql?` entry point for the C API; same contract as rb_equal_opt. */
VALUE
rb_eql_opt(VALUE obj1, VALUE obj2)
{
    return opt_equality_by_mid(obj1, obj2, idEqlP);
}
2375
2376#endif // MJIT_HEADER
2377
2378extern VALUE rb_vm_call0(rb_execution_context_t *ec, VALUE, ID, int, const VALUE*, const rb_callable_method_entry_t *, int kw_splat);
2379extern VALUE rb_vm_call_with_refinements(rb_execution_context_t *, VALUE, ID, int, const VALUE *, int);
2380
/* Evaluate a `case`/`when`/`rescue` match of `target` against `pattern`.
 * - WHEN:   the literal pattern was already selected by compiled code; return it.
 * - RESCUE: pattern must be a class/module (TypeError otherwise), then treated
 *           like CASE (note the intentional fallthrough).
 * - CASE:   dispatch pattern#=== on target, honoring refinements. */
static VALUE
check_match(rb_execution_context_t *ec, VALUE pattern, VALUE target, enum vm_check_match_type type)
{
    switch (type) {
      case VM_CHECKMATCH_TYPE_WHEN:
        return pattern;
      case VM_CHECKMATCH_TYPE_RESCUE:
        if (!rb_obj_is_kind_of(pattern, rb_cModule)) {
            rb_raise(rb_eTypeError, "class or module required for rescue clause");
        }
        /* fall through */
      case VM_CHECKMATCH_TYPE_CASE: {
        return rb_vm_call_with_refinements(ec, pattern, idEqq, 1, &target, RB_NO_KEYWORDS);
      }
      default:
        rb_bug("check_match: unreachable");
    }
}
2399
2400
/* Old MSVC (<1300) compares NaN unreliably, so comparisons involving NaN must
 * return Qfalse explicitly there; on every other compiler this is a no-op. */
#if MSC_VERSION_BEFORE(1300)
#define CHECK_CMP_NAN(a, b) if (isnan(a) || isnan(b)) return Qfalse;
#else
#define CHECK_CMP_NAN(a, b) /* do nothing */
#endif
2406
2407static inline VALUE
2408double_cmp_lt(double a, double b)
2409{
2410 CHECK_CMP_NAN(a, b);
2411 return RBOOL(a < b);
2412}
2413
2414static inline VALUE
2415double_cmp_le(double a, double b)
2416{
2417 CHECK_CMP_NAN(a, b);
2418 return RBOOL(a <= b);
2419}
2420
2421static inline VALUE
2422double_cmp_gt(double a, double b)
2423{
2424 CHECK_CMP_NAN(a, b);
2425 return RBOOL(a > b);
2426}
2427
2428static inline VALUE
2429double_cmp_ge(double a, double b)
2430{
2431 CHECK_CMP_NAN(a, b);
2432 return RBOOL(a >= b);
2433}
2434
/* Return the base pointer (bottom of the locals area) for a control frame.
 * The computed variant below is intentionally disabled (#if 0); currently the
 * cached cfp->__bp__ is returned directly. */
static inline VALUE *
vm_base_ptr(const rb_control_frame_t *cfp)
{
#if 0 // we may optimize and use this once we confirm it does not spoil performance on JIT.
    const rb_control_frame_t *prev_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);

    if (cfp->iseq && VM_FRAME_RUBYFRAME_P(cfp)) {
        VALUE *bp = prev_cfp->sp + ISEQ_BODY(cfp->iseq)->local_table_size + VM_ENV_DATA_SIZE;
        if (ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_METHOD) {
            /* adjust `self' */
            bp += 1;
        }
#if VM_DEBUG_BP_CHECK
        if (bp != cfp->bp_check) {
            ruby_debug_printf("bp_check: %ld, bp: %ld\n",
                    (long)(cfp->bp_check - GET_EC()->vm_stack),
                    (long)(bp - GET_EC()->vm_stack));
            rb_bug("vm_base_ptr: unreachable");
        }
#endif
        return bp;
    }
    else {
        return NULL;
    }
#else
    return cfp->__bp__;
#endif
}
2464
2465/* method call processes with call_info */
2466
2467#include "vm_args.c"
2468
2469static inline VALUE vm_call_iseq_setup_2(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, int opt_pc, int param_size, int local_size);
2470ALWAYS_INLINE(static VALUE vm_call_iseq_setup_normal(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const rb_callable_method_entry_t *me, int opt_pc, int param_size, int local_size));
2471static inline VALUE vm_call_iseq_setup_tailcall(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, int opt_pc);
2472static VALUE vm_call_super_method(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling);
2473static VALUE vm_call_method_nome(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling);
2474static VALUE vm_call_method_each_type(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling);
2475static inline VALUE vm_call_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling);
2476
2477static vm_call_handler vm_call_iseq_setup_func(const struct rb_callinfo *ci, const int param_size, const int local_size);
2478
/* Fastpath: tailcall into an iseq with no optional parameters (opt_pc is always 0). */
static VALUE
vm_call_iseq_setup_tailcall_0start(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_iseq_setup_tailcall_0start);

    return vm_call_iseq_setup_tailcall(ec, cfp, calling, 0);
}
2486
2487static VALUE
2488vm_call_iseq_setup_normal_0start(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
2489{
2490 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_0start);
2491
2492 const struct rb_callcache *cc = calling->cc;
2493 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2494 int param = ISEQ_BODY(iseq)->param.size;
2495 int local = ISEQ_BODY(iseq)->local_table_size;
2496 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2497}
2498
2499MJIT_STATIC bool
2500rb_simple_iseq_p(const rb_iseq_t *iseq)
2501{
2502 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2503 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2504 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2505 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2506 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2507 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2508 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2509}
2510
2511MJIT_FUNC_EXPORTED bool
2512rb_iseq_only_optparam_p(const rb_iseq_t *iseq)
2513{
2514 return ISEQ_BODY(iseq)->param.flags.has_opt == TRUE &&
2515 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2516 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2517 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2518 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2519 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2520 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2521}
2522
2523MJIT_FUNC_EXPORTED bool
2524rb_iseq_only_kwparam_p(const rb_iseq_t *iseq)
2525{
2526 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2527 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2528 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2529 ISEQ_BODY(iseq)->param.flags.has_kw == TRUE &&
2530 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2531 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2532}
2533
2534// If true, cc->call needs to include `CALLER_SETUP_ARG` (i.e. can't be skipped in fastpath)
2535MJIT_STATIC bool
2536rb_splat_or_kwargs_p(const struct rb_callinfo *restrict ci)
2537{
2538 return IS_ARGS_SPLAT(ci) || IS_ARGS_KW_OR_KW_SPLAT(ci);
2539}
2540
2541
/* Normalize the caller's arguments on the VM stack before binding to callee
 * parameters: expand a splatted array in place, promote a trailing
 * keyword-flagged hash to a kw splat, materialize VM_CALL_KWARG keywords into
 * a hash, and dup non-mutable keyword-splat hashes.  NOTE: after this runs,
 * the splat/kwarg bits in vm_ci_flag(ci) no longer describe the stack layout. */
static inline void
CALLER_SETUP_ARG(struct rb_control_frame_struct *restrict cfp,
                 struct rb_calling_info *restrict calling,
                 const struct rb_callinfo *restrict ci)
{
    if (UNLIKELY(IS_ARGS_SPLAT(ci))) {
        VALUE final_hash;
        /* This expands the rest argument to the stack.
         * So, vm_ci_flag(ci) & VM_CALL_ARGS_SPLAT is now inconsistent.
         */
        vm_caller_setup_arg_splat(cfp, calling);
        /* if the expanded splat ends with a hash created by **kwargs
         * (RHASH_PASS_AS_KEYWORDS), treat it as a keyword splat */
        if (!IS_ARGS_KW_OR_KW_SPLAT(ci) &&
                calling->argc > 0 &&
                RB_TYPE_P((final_hash = *(cfp->sp - 1)), T_HASH) &&
                (((struct RHash *)final_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
            *(cfp->sp - 1) = rb_hash_dup(final_hash);
            calling->kw_splat = 1;
        }
    }
    if (UNLIKELY(IS_ARGS_KW_OR_KW_SPLAT(ci))) {
        if (IS_ARGS_KEYWORD(ci)) {
            /* This converts VM_CALL_KWARG style to VM_CALL_KW_SPLAT style
             * by creating a keyword hash.
             * So, vm_ci_flag(ci) & VM_CALL_KWARG is now inconsistent.
             */
            vm_caller_setup_arg_kw(cfp, calling, ci);
        }
        else {
            VALUE keyword_hash = cfp->sp[-1];
            if (!RB_TYPE_P(keyword_hash, T_HASH)) {
                /* Convert a non-hash keyword splat to a new hash */
                cfp->sp[-1] = rb_hash_dup(rb_to_hash_type(keyword_hash));
            }
            else if (!IS_ARGS_KW_SPLAT_MUT(ci)) {
                /* Convert a hash keyword splat to a new hash unless
                 * a mutable keyword splat was passed.
                 */
                cfp->sp[-1] = rb_hash_dup(keyword_hash);
            }
        }
    }
}
2584
/* Drop an empty keyword-splat hash from the top of the caller's stack so an
 * empty `**{}` behaves like no keywords at all.  After this, the
 * VM_CALL_KW_SPLAT bit in vm_ci_flag(ci) may not match the stack layout. */
static inline void
CALLER_REMOVE_EMPTY_KW_SPLAT(struct rb_control_frame_struct *restrict cfp,
                             struct rb_calling_info *restrict calling,
                             const struct rb_callinfo *restrict ci)
{
    if (UNLIKELY(calling->kw_splat)) {
        /* This removes the last Hash object if it is empty.
         * So, vm_ci_flag(ci) & VM_CALL_KW_SPLAT is now inconsistent.
         */
        if (RHASH_EMPTY_P(cfp->sp[-1])) {
            cfp->sp--;
            calling->argc--;
            calling->kw_splat = 0;
        }
    }
}
2601
/* Debug-only histogram of optional-parameter call entries, dumped at process
 * exit.  Disabled by default; flip USE_OPT_HIST to 1 to enable. */
#define USE_OPT_HIST 0

#if USE_OPT_HIST
#define OPT_HIST_MAX 64
static int opt_hist[OPT_HIST_MAX+1];

/* GCC/Clang destructor: print the collected histogram when the process exits. */
__attribute__((destructor))
static void
opt_hist_show_results_at_exit(void)
{
    for (int i=0; i<OPT_HIST_MAX; i++) {
        ruby_debug_printf("opt_hist\t%d\t%d\n", i, opt_hist[i]);
    }
}
#endif
2617
/* Fastpath: normal call into an iseq with only leading mandatory + optional
 * positional parameters.  Arity was validated when the fastpath was installed;
 * pick the entry pc from opt_table and shrink the frame's parameter area by
 * the number of optionals that were not supplied (`delta`). */
static VALUE
vm_call_iseq_setup_normal_opt_start(rb_execution_context_t *ec, rb_control_frame_t *cfp,
                                    struct rb_calling_info *calling)
{
    const struct rb_callcache *cc = calling->cc;
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    const int opt = calling->argc - lead_num;      /* number of optionals actually supplied */
    const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
    const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
    const int param = ISEQ_BODY(iseq)->param.size;
    const int local = ISEQ_BODY(iseq)->local_table_size;
    const int delta = opt_num - opt;               /* optionals NOT supplied */

    RB_DEBUG_COUNTER_INC(ccf_iseq_opt);

#if USE_OPT_HIST
    if (opt_pc < OPT_HIST_MAX) {
        opt_hist[opt]++;
    }
    else {
        opt_hist[OPT_HIST_MAX]++;
    }
#endif

    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param - delta, local);
}
2645
/* Fastpath: tailcall into an iseq with only leading mandatory + optional
 * positional parameters; same opt_table lookup as the normal variant. */
static VALUE
vm_call_iseq_setup_tailcall_opt_start(rb_execution_context_t *ec, rb_control_frame_t *cfp,
                                      struct rb_calling_info *calling)
{
    const struct rb_callcache *cc = calling->cc;
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    const int opt = calling->argc - lead_num;      /* number of optionals actually supplied */
    const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];

    RB_DEBUG_COUNTER_INC(ccf_iseq_opt);

#if USE_OPT_HIST
    if (opt_pc < OPT_HIST_MAX) {
        opt_hist[opt]++;
    }
    else {
        opt_hist[OPT_HIST_MAX]++;
    }
#endif

    return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
}
2669
2670static void
2671args_setup_kw_parameters(rb_execution_context_t *const ec, const rb_iseq_t *const iseq,
2672 VALUE *const passed_values, const int passed_keyword_len, const VALUE *const passed_keywords,
2673 VALUE *const locals);
2674
/* Fastpath: call with literal keyword arguments (VM_CALL_KWARG) into an iseq
 * whose only non-mandatory parameters are keywords.  Copies the keyword
 * values off the stack, binds them into the callee's keyword locals, then
 * pushes a normal frame (opt_pc == 0). */
static VALUE
vm_call_iseq_setup_kwparm_kwarg(rb_execution_context_t *ec, rb_control_frame_t *cfp,
                                struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->ci;
    const struct rb_callcache *cc = calling->cc;

    VM_ASSERT(vm_ci_flag(ci) & VM_CALL_KWARG);
    RB_DEBUG_COUNTER_INC(ccf_iseq_kw1);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
    const struct rb_callinfo_kwarg *kw_arg = vm_ci_kwarg(ci);
    const int ci_kw_len = kw_arg->keyword_len;
    const VALUE * const ci_keywords = kw_arg->keywords;
    VALUE *argv = cfp->sp - calling->argc;
    /* keyword locals live just below the kw-specified bitflag slot */
    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    /* copy the keyword values aside: args_setup_kw_parameters reorders them */
    VALUE * const ci_kws = ALLOCA_N(VALUE, ci_kw_len);
    MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);
    args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);

    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
}
2701
/* Fastpath: call with NO keyword arguments into an iseq whose only
 * non-mandatory parameters are keywords — fill every keyword local with its
 * default value, clear the kw-specified flag slot, and push a normal frame. */
static VALUE
vm_call_iseq_setup_kwparm_nokwarg(rb_execution_context_t *ec, rb_control_frame_t *cfp,
                                  struct rb_calling_info *calling)
{
    const struct rb_callinfo *MAYBE_UNUSED(ci) = calling->ci;
    const struct rb_callcache *cc = calling->cc;

    VM_ASSERT((vm_ci_flag(ci) & VM_CALL_KWARG) == 0);
    RB_DEBUG_COUNTER_INC(ccf_iseq_kw2);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
    VALUE * const argv = cfp->sp - calling->argc;
    VALUE * const klocals = argv + kw_param->bits_start - kw_param->num;

    int i;
    for (i=0; i<kw_param->num; i++) {
        klocals[i] = kw_param->default_values[i];
    }
    klocals[i] = INT2FIX(0); // kw specify flag
    // NOTE:
    //   nobody check this value, but it should be cleared because it can
    //   points invalid VALUE (T_NONE objects, raw pointer and so on).

    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
}
2730
/* Arrange the caller's arguments for a call into `iseq`, validate arity, and
 * install the most specific fastpath handler in the call cache so subsequent
 * calls skip this setup.  Returns the pc offset to start execution at
 * (non-zero only when optional parameters are skipped); complex signatures
 * fall through to setup_parameters_complex. */
static inline int
vm_callee_setup_arg(rb_execution_context_t *ec, struct rb_calling_info *calling,
                    const rb_iseq_t *iseq, VALUE *argv, int param_size, int local_size)
{
    const struct rb_callinfo *ci = calling->ci;
    const struct rb_callcache *cc = calling->cc;
    bool cacheable_ci = vm_ci_markable(ci);

    if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_KW_SPLAT))) {
        if (LIKELY(rb_simple_iseq_p(iseq))) {
            /* mandatory positionals only: exact arity match required */
            rb_control_frame_t *cfp = ec->cfp;
            CALLER_SETUP_ARG(cfp, calling, ci);
            CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);

            if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
                argument_arity_error(ec, iseq, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);
            }

            VM_ASSERT(ci == calling->ci);
            VM_ASSERT(cc == calling->cc);
            CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size), cacheable_ci && vm_call_iseq_optimizable_p(ci, cc));
            return 0;
        }
        else if (rb_iseq_only_optparam_p(iseq)) {
            /* mandatory + optional positionals only */
            rb_control_frame_t *cfp = ec->cfp;
            CALLER_SETUP_ARG(cfp, calling, ci);
            CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);

            const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
            const int argc = calling->argc;
            const int opt = argc - lead_num;

            if (opt < 0 || opt > opt_num) {
                argument_arity_error(ec, iseq, argc, lead_num, lead_num + opt_num);
            }

            if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
                CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,
                                !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
                                cacheable_ci && vm_call_cacheable(ci, cc));
            }
            else {
                CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,
                                !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
                                cacheable_ci && vm_call_cacheable(ci, cc));
            }

            /* initialize opt vars for self-references */
            VM_ASSERT((int)ISEQ_BODY(iseq)->param.size == lead_num + opt_num);
            for (int i=argc; i<lead_num + opt_num; i++) {
                argv[i] = Qnil;
            }
            return (int)ISEQ_BODY(iseq)->param.opt_table[opt];
        }
        else if (rb_iseq_only_kwparam_p(iseq) && !IS_ARGS_SPLAT(ci)) {
            /* mandatory positionals + keyword parameters only */
            const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            const int argc = calling->argc;
            const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;

            if (vm_ci_flag(ci) & VM_CALL_KWARG) {
                const struct rb_callinfo_kwarg *kw_arg = vm_ci_kwarg(ci);

                if (argc - kw_arg->keyword_len == lead_num) {
                    const int ci_kw_len = kw_arg->keyword_len;
                    const VALUE * const ci_keywords = kw_arg->keywords;
                    VALUE * const ci_kws = ALLOCA_N(VALUE, ci_kw_len);
                    MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);

                    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
                    args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);

                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,
                                    cacheable_ci && vm_call_cacheable(ci, cc));

                    return 0;
                }
            }
            else if (argc == lead_num) {
                /* no kwarg */
                VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
                args_setup_kw_parameters(ec, iseq, NULL, 0, NULL, klocals);

                if (klocals[kw_param->num] == INT2FIX(0)) {
                    /* copy from default_values */
                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,
                                    cacheable_ci && vm_call_cacheable(ci, cc));
                }

                return 0;
            }
        }
    }

    /* splat/kw-splat or a complex parameter signature: full slow-path setup */
    return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_method);
}
2827
2828static VALUE
2829vm_call_iseq_setup(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
2830{
2831 RB_DEBUG_COUNTER_INC(ccf_iseq_setup);
2832
2833 const struct rb_callcache *cc = calling->cc;
2834 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2835 const int param_size = ISEQ_BODY(iseq)->param.size;
2836 const int local_size = ISEQ_BODY(iseq)->local_table_size;
2837 const int opt_pc = vm_callee_setup_arg(ec, calling, def_iseq_ptr(vm_cc_cme(cc)->def), cfp->sp - calling->argc, param_size, local_size);
2838 return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
2839}
2840
2841static inline VALUE
2842vm_call_iseq_setup_2(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
2843 int opt_pc, int param_size, int local_size)
2844{
2845 const struct rb_callinfo *ci = calling->ci;
2846 const struct rb_callcache *cc = calling->cc;
2847
2848 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
2849 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param_size, local_size);
2850 }
2851 else {
2852 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
2853 }
2854}
2855
/* Push a METHOD frame for an iseq call.  The arguments already sit on the
 * caller's stack at argv; the callee's sp starts just past the parameter
 * slots, and the receiver slot below argv is reclaimed (cfp->sp = argv - 1). */
static inline VALUE
vm_call_iseq_setup_normal(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const rb_callable_method_entry_t *me,
                          int opt_pc, int param_size, int local_size)
{
    const rb_iseq_t *iseq = def_iseq_ptr(me->def);
    VALUE *argv = cfp->sp - calling->argc;
    VALUE *sp = argv + param_size;
    cfp->sp = argv - 1 /* recv */;

    vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL, calling->recv,
                  calling->block_handler, (VALUE)me,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
                  local_size - param_size,
                  ISEQ_BODY(iseq)->stack_max);
    return Qundef;
}
2872
/* Perform a tailcall: pop the current frame, copy the receiver and arguments
 * down to the popped frame's stack position, then push the callee's frame in
 * its place, preserving the replaced frame's FINISH flag. */
static inline VALUE
vm_call_iseq_setup_tailcall(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, int opt_pc)
{
    const struct rb_callcache *cc = calling->cc;
    unsigned int i;
    VALUE *argv = cfp->sp - calling->argc;
    const rb_callable_method_entry_t *me = vm_cc_cme(cc);
    const rb_iseq_t *iseq = def_iseq_ptr(me->def);
    VALUE *src_argv = argv;
    VALUE *sp_orig, *sp;
    VALUE finish_flag = VM_FRAME_FINISHED_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;

    if (VM_BH_FROM_CFP_P(calling->block_handler, cfp)) {
        /* the block handler points into the frame being popped; re-capture it
         * against the previous frame so it survives the pop below */
        struct rb_captured_block *dst_captured = VM_CFP_TO_CAPTURED_BLOCK(RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
        const struct rb_captured_block *src_captured = VM_BH_TO_CAPT_BLOCK(calling->block_handler);
        dst_captured->code.val = src_captured->code.val;
        if (VM_BH_ISEQ_BLOCK_P(calling->block_handler)) {
            calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);
        }
        else {
            calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);
        }
    }

    vm_pop_frame(ec, cfp, cfp->ep);
    cfp = ec->cfp;

    sp_orig = sp = cfp->sp;

    /* push self */
    sp[0] = calling->recv;
    sp++;

    /* copy arguments */
    for (i=0; i < ISEQ_BODY(iseq)->param.size; i++) {
        *sp++ = src_argv[i];
    }

    vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL | finish_flag,
                  calling->recv, calling->block_handler, (VALUE)me,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
                  ISEQ_BODY(iseq)->local_table_size - ISEQ_BODY(iseq)->param.size,
                  ISEQ_BODY(iseq)->stack_max);

    cfp->sp = sp_orig;

    return Qundef;
}
2921
2922static void
2923ractor_unsafe_check(void)
2924{
2925 if (!rb_ractor_main_p()) {
2926 rb_raise(rb_eRactorUnsafeError, "ractor unsafe method called from not main ractor");
2927 }
2928}
2929
2930static VALUE
2931call_cfunc_m2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2932{
2933 ractor_unsafe_check();
2934 VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
2935 return (*f)(recv, rb_ary_new4(argc, argv));
2936}
2937
2938static VALUE
2939call_cfunc_m1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2940{
2941 ractor_unsafe_check();
2942 VALUE(*f)(int, const VALUE *, VALUE) = (VALUE(*)(int, const VALUE *, VALUE))func;
2943 return (*f)(argc, argv, recv);
2944}
2945
2946static VALUE
2947call_cfunc_0(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2948{
2949 ractor_unsafe_check();
2950 VALUE(*f)(VALUE) = (VALUE(*)(VALUE))func;
2951 return (*f)(recv);
2952}
2953
2954static VALUE
2955call_cfunc_1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2956{
2957 ractor_unsafe_check();
2958 VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
2959 return (*f)(recv, argv[0]);
2960}
2961
2962static VALUE
2963call_cfunc_2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2964{
2965 ractor_unsafe_check();
2966 VALUE(*f)(VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE))func;
2967 return (*f)(recv, argv[0], argv[1]);
2968}
2969
2970static VALUE
2971call_cfunc_3(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2972{
2973 ractor_unsafe_check();
2974 VALUE(*f)(VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE))func;
2975 return (*f)(recv, argv[0], argv[1], argv[2]);
2976}
2977
2978static VALUE
2979call_cfunc_4(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2980{
2981 ractor_unsafe_check();
2982 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE))func;
2983 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
2984}
2985
2986static VALUE
2987call_cfunc_5(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2988{
2989 ractor_unsafe_check();
2990 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
2991 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
2992}
2993
2994static VALUE
2995call_cfunc_6(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2996{
2997 ractor_unsafe_check();
2999 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3000}
3001
3002static VALUE
3003call_cfunc_7(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3004{
3005 ractor_unsafe_check();
3007 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3008}
3009
3010static VALUE
3011call_cfunc_8(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3012{
3013 ractor_unsafe_check();
3015 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3016}
3017
3018static VALUE
3019call_cfunc_9(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3020{
3021 ractor_unsafe_check();
3023 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3024}
3025
3026static VALUE
3027call_cfunc_10(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3028{
3029 ractor_unsafe_check();
3031 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3032}
3033
3034static VALUE
3035call_cfunc_11(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3036{
3037 ractor_unsafe_check();
3039 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3040}
3041
3042static VALUE
3043call_cfunc_12(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3044{
3045 ractor_unsafe_check();
3047 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3048}
3049
3050static VALUE
3051call_cfunc_13(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3052{
3053 ractor_unsafe_check();
3055 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3056}
3057
3058static VALUE
3059call_cfunc_14(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3060{
3061 ractor_unsafe_check();
3063 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3064}
3065
3066static VALUE
3067call_cfunc_15(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3068{
3069 ractor_unsafe_check();
3071 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
3072}
3073
3074static VALUE
3075ractor_safe_call_cfunc_m2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3076{
3077 VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
3078 return (*f)(recv, rb_ary_new4(argc, argv));
3079}
3080
3081static VALUE
3082ractor_safe_call_cfunc_m1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3083{
3084 VALUE(*f)(int, const VALUE *, VALUE) = (VALUE(*)(int, const VALUE *, VALUE))func;
3085 return (*f)(argc, argv, recv);
3086}
3087
3088static VALUE
3089ractor_safe_call_cfunc_0(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3090{
3091 VALUE(*f)(VALUE) = (VALUE(*)(VALUE))func;
3092 return (*f)(recv);
3093}
3094
3095static VALUE
3096ractor_safe_call_cfunc_1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3097{
3098 VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
3099 return (*f)(recv, argv[0]);
3100}
3101
3102static VALUE
3103ractor_safe_call_cfunc_2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3104{
3105 VALUE(*f)(VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE))func;
3106 return (*f)(recv, argv[0], argv[1]);
3107}
3108
3109static VALUE
3110ractor_safe_call_cfunc_3(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3111{
3112 VALUE(*f)(VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE))func;
3113 return (*f)(recv, argv[0], argv[1], argv[2]);
3114}
3115
3116static VALUE
3117ractor_safe_call_cfunc_4(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3118{
3119 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE))func;
3120 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
3121}
3122
3123static VALUE
3124ractor_safe_call_cfunc_5(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3125{
3126 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3127 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3128}
3129
3130static VALUE
3131ractor_safe_call_cfunc_6(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3132{
3134 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3135}
3136
3137static VALUE
3138ractor_safe_call_cfunc_7(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3139{
3141 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3142}
3143
3144static VALUE
3145ractor_safe_call_cfunc_8(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3146{
3148 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3149}
3150
3151static VALUE
3152ractor_safe_call_cfunc_9(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3153{
3155 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3156}
3157
3158static VALUE
3159ractor_safe_call_cfunc_10(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3160{
3162 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3163}
3164
3165static VALUE
3166ractor_safe_call_cfunc_11(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3167{
3169 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3170}
3171
3172static VALUE
3173ractor_safe_call_cfunc_12(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3174{
3176 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3177}
3178
3179static VALUE
3180ractor_safe_call_cfunc_13(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3181{
3183 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3184}
3185
3186static VALUE
3187ractor_safe_call_cfunc_14(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3188{
3190 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3191}
3192
3193static VALUE
3194ractor_safe_call_cfunc_15(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3195{
3197 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
3198}
3199
3200static inline int
3201vm_cfp_consistent_p(rb_execution_context_t *ec, const rb_control_frame_t *reg_cfp)
3202{
3203 const int ov_flags = RAISED_STACKOVERFLOW;
3204 if (LIKELY(reg_cfp == ec->cfp + 1)) return TRUE;
3205 if (rb_ec_raised_p(ec, ov_flags)) {
3206 rb_ec_raised_reset(ec, ov_flags);
3207 return TRUE;
3208 }
3209 return FALSE;
3210}
3211
/* Abort with rb_bug if the control-frame pointers are inconsistent after a
 * cfunc call; `func` names the caller for the bug message. */
#define CHECK_CFP_CONSISTENCY(func) \
    (LIKELY(vm_cfp_consistent_p(ec, reg_cfp)) ? (void)0 : \
     rb_bug(func ": cfp consistency error (%p, %p)", (void *)reg_cfp, (void *)(ec->cfp+1)))
3215
/* Return the cfunc body of a method entry.  With VM_DEBUG_VERIFY_METHOD_CACHE
 * enabled, rb_bug on any method type other than CFUNC/NOTIMPLEMENTED. */
static inline
const rb_method_cfunc_t *
vm_method_cfunc_entry(const rb_callable_method_entry_t *me)
{
#if VM_DEBUG_VERIFY_METHOD_CACHE
    switch (me->def->type) {
      case VM_METHOD_TYPE_CFUNC:
      case VM_METHOD_TYPE_NOTIMPLEMENTED:
        break;
# define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
        METHOD_BUG(ISEQ);
        METHOD_BUG(ATTRSET);
        METHOD_BUG(IVAR);
        METHOD_BUG(BMETHOD);
        METHOD_BUG(ZSUPER);
        METHOD_BUG(UNDEF);
        METHOD_BUG(OPTIMIZED);
        METHOD_BUG(MISSING);
        METHOD_BUG(REFINED);
        METHOD_BUG(ALIAS);
# undef METHOD_BUG
      default:
        rb_bug("wrong method type: %d", me->def->type);
    }
#endif
    return UNALIGNED_MEMBER_PTR(me->def, body.cfunc);
}
3243
/* Call a C-implemented method: push a CFUNC frame, fire C_CALL hooks,
 * invoke the function through its arity-specific invoker, then verify the
 * frame pointer, pop the frame and fire C_RETURN hooks.  The receiver and
 * arguments are popped from the caller's stack (sp is rewound by argc+1). */
static VALUE
vm_call_cfunc_with_frame(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_cfunc_with_frame);
    const struct rb_callinfo *ci = calling->ci;
    const struct rb_callcache *cc = calling->cc;
    VALUE val;
    const rb_callable_method_entry_t *me = vm_cc_cme(cc);
    const rb_method_cfunc_t *cfunc = vm_method_cfunc_entry(me);
    int len = cfunc->argc;  /* declared arity; negative means variadic */

    VALUE recv = calling->recv;
    VALUE block_handler = calling->block_handler;
    VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
    int argc = calling->argc;
    int orig_argc = argc;

    if (UNLIKELY(calling->kw_splat)) {
        frame_type |= VM_FRAME_FLAG_CFRAME_KW;
    }

    RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, me->owner, me->def->original_id);
    EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, recv, me->def->original_id, vm_ci_mid(ci), me->owner, Qundef);

    /* dummy frame: no iseq, no locals; exists for backtraces and hooks */
    vm_push_frame(ec, NULL, frame_type, recv,
                  block_handler, (VALUE)me,
                  0, ec->cfp->sp, 0, 0);

    if (len >= 0) rb_check_arity(argc, len, len);

    /* pop receiver + args before the call so the C function sees a clean sp */
    reg_cfp->sp -= orig_argc + 1;
    val = (*cfunc->invoker)(recv, argc, reg_cfp->sp + 1, cfunc->func);

    /* the C function must not have left extra frames behind */
    CHECK_CFP_CONSISTENCY("vm_call_cfunc");

    rb_vm_pop_frame(ec);

    EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, recv, me->def->original_id, vm_ci_mid(ci), me->owner, val);
    RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, me->owner, me->def->original_id);

    return val;
}
3286
/* Slow-path entry for cfunc calls: normalize splat/keyword arguments first,
 * then install vm_call_cfunc_with_frame as the fast path when the call site
 * has no splat or kw-splat that would need re-normalizing on each call. */
static VALUE
vm_call_cfunc(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->ci;
    RB_DEBUG_COUNTER_INC(ccf_cfunc);

    CALLER_SETUP_ARG(reg_cfp, calling, ci);
    CALLER_REMOVE_EMPTY_KW_SPLAT(reg_cfp, calling, ci);
    CC_SET_FASTPATH(calling->cc, vm_call_cfunc_with_frame, !rb_splat_or_kwargs_p(ci) && !calling->kw_splat);
    return vm_call_cfunc_with_frame(ec, reg_cfp, calling);
}
3298
/* Fast path for attr_reader-style methods: read the instance variable named
 * by the method entry directly, bypassing a full frame push.
 * sp -= 1 pops the receiver that the call site pushed. */
static VALUE
vm_call_ivar(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const struct rb_callcache *cc = calling->cc;
    RB_DEBUG_COUNTER_INC(ccf_ivar);
    cfp->sp -= 1;
    VALUE ivar = vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE);
    return ivar;
}
3308
3309static VALUE
3310vm_call_attrset_direct(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_callcache *cc, VALUE obj)
3311{
3312 RB_DEBUG_COUNTER_INC(ccf_attrset);
3313 VALUE val = *(cfp->sp - 1);
3314 cfp->sp -= 2;
3315 attr_index_t index = vm_cc_attr_index(cc);
3316 shape_id_t dest_shape_id = vm_cc_attr_index_dest_shape_id(cc);
3317 ID id = vm_cc_cme(cc)->def->body.attr.id;
3319 VALUE res = vm_setivar(obj, id, val, dest_shape_id, index);
3320 if (UNDEF_P(res)) {
3321 switch (BUILTIN_TYPE(obj)) {
3322 case T_OBJECT:
3323 case T_CLASS:
3324 case T_MODULE:
3325 break;
3326 default:
3327 {
3328 res = vm_setivar_default(obj, id, val, dest_shape_id, index);
3329 if (!UNDEF_P(res)) {
3330 return res;
3331 }
3332 }
3333 }
3334 res = vm_setivar_slowpath_attr(obj, id, val, cc);
3335 }
3336 return res;
3337}
3338
3339static VALUE
3340vm_call_attrset(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
3341{
3342 return vm_call_attrset_direct(ec, cfp, calling->cc, calling->recv);
3343}
3344
3345bool
3346rb_vm_call_ivar_attrset_p(const vm_call_handler ch)
3347{
3348 return (ch == vm_call_ivar || ch == vm_call_attrset);
3349}
3350
/* Invoke a method defined by define_method: run the backing Proc as a
 * method body.  Raises when the Proc is unshareable and was defined in a
 * different Ractor, since running it here would leak unshareable state. */
static inline VALUE
vm_call_bmethod_body(rb_execution_context_t *ec, struct rb_calling_info *calling, const VALUE *argv)
{
    rb_proc_t *proc;
    VALUE val;
    const struct rb_callcache *cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
    VALUE procv = cme->def->body.bmethod.proc;

    if (!RB_OBJ_SHAREABLE_P(procv) &&
        cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
        rb_raise(rb_eRuntimeError, "defined with an un-shareable Proc in a different Ractor");
    }

    /* control block frame */
    GetProcPtr(procv, proc);
    val = rb_vm_invoke_bmethod(ec, proc, calling->recv, calling->argc, argv, calling->kw_splat, calling->block_handler, vm_cc_cme(cc));

    return val;
}
3371
/* Call handler for define_method-defined methods: copy the arguments off
 * the VM stack into a C stack buffer (the bmethod invocation pushes its own
 * frames), pop args + receiver, then run the Proc body. */
static VALUE
vm_call_bmethod(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_bmethod);

    VALUE *argv;
    int argc;
    const struct rb_callinfo *ci = calling->ci;

    CALLER_SETUP_ARG(cfp, calling, ci);
    argc = calling->argc;
    /* stash args on the machine stack before rewinding the VM stack */
    argv = ALLOCA_N(VALUE, argc);
    MEMCPY(argv, cfp->sp - argc, VALUE, argc);
    cfp->sp += - argc - 1;  /* pop arguments and receiver */

    return vm_call_bmethod_body(ec, calling, argv);
}
3389
3390MJIT_FUNC_EXPORTED VALUE
3391rb_find_defined_class_by_owner(VALUE current_class, VALUE target_owner)
3392{
3393 VALUE klass = current_class;
3394
3395 /* for prepended Module, then start from cover class */
3396 if (RB_TYPE_P(klass, T_ICLASS) && FL_TEST(klass, RICLASS_IS_ORIGIN) &&
3397 RB_TYPE_P(RBASIC_CLASS(klass), T_CLASS)) {
3398 klass = RBASIC_CLASS(klass);
3399 }
3400
3401 while (RTEST(klass)) {
3402 VALUE owner = RB_TYPE_P(klass, T_ICLASS) ? RBASIC_CLASS(klass) : klass;
3403 if (owner == target_owner) {
3404 return klass;
3405 }
3406 klass = RCLASS_SUPER(klass);
3407 }
3408
3409 return current_class; /* maybe module function */
3410}
3411
3412static const rb_callable_method_entry_t *
3413aliased_callable_method_entry(const rb_callable_method_entry_t *me)
3414{
3415 const rb_method_entry_t *orig_me = me->def->body.alias.original_me;
3416 const rb_callable_method_entry_t *cme;
3417
3418 if (orig_me->defined_class == 0) {
3419 VALUE defined_class = rb_find_defined_class_by_owner(me->defined_class, orig_me->owner);
3420 VM_ASSERT(RB_TYPE_P(orig_me->owner, T_MODULE));
3421 cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class);
3422
3423 if (me->def->reference_count == 1) {
3424 RB_OBJ_WRITE(me, &me->def->body.alias.original_me, cme);
3425 }
3426 else {
3428 rb_method_definition_create(VM_METHOD_TYPE_ALIAS, me->def->original_id);
3429 rb_method_definition_set((rb_method_entry_t *)me, def, (void *)cme);
3430 }
3431 }
3432 else {
3433 cme = (const rb_callable_method_entry_t *)orig_me;
3434 }
3435
3436 VM_ASSERT(callable_method_entry_p(cme));
3437 return cme;
3438}
3439
3441rb_aliased_callable_method_entry(const rb_callable_method_entry_t *me)
3442{
3443 return aliased_callable_method_entry(me);
3444}
3445
/* Call handler for aliased methods: swap in an on-stack call cache whose
 * cme is the resolved original method, then re-dispatch on its type. */
static VALUE
vm_call_alias(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    calling->cc = &VM_CC_ON_STACK(Qundef,
                                  vm_call_general,
                                  {{0}},
                                  aliased_callable_method_entry(vm_cc_cme(calling->cc)));

    return vm_call_method_each_type(ec, cfp, calling);
}
3456
3457static enum method_missing_reason
3458ci_missing_reason(const struct rb_callinfo *ci)
3459{
3460 enum method_missing_reason stat = MISSING_NOENTRY;
3461 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
3462 if (vm_ci_flag(ci) & VM_CALL_FCALL) stat |= MISSING_FCALL;
3463 if (vm_ci_flag(ci) & VM_CALL_SUPER) stat |= MISSING_SUPER;
3464 return stat;
3465}
3466
3467static VALUE vm_call_method_missing(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling);
3468
/* Dispatch a call whose method name arrives as a Symbol/String (e.g. via
 * Object#send).  If the name is not an interned ID, rewrite the call into
 * method_missing by inserting the name back onto the stack as argv[0].
 * For non-fcall sites, enforce visibility here since the normal dispatch
 * path (vm_call_method) would treat this as a plain private-allowed call. */
static VALUE
vm_call_symbol(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
               struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE symbol, int flags)
{
    ASSUME(calling->argc >= 0);
    /* Also assumes CALLER_SETUP_ARG is already done. */

    enum method_missing_reason missing_reason = MISSING_NOENTRY;
    int argc = calling->argc;
    VALUE recv = calling->recv;
    VALUE klass = CLASS_OF(recv);
    ID mid = rb_check_id(&symbol);  /* 0 when the name was never interned */
    flags |= VM_CALL_OPT_SEND | (calling->kw_splat ? VM_CALL_KW_SPLAT : 0);

    if (UNLIKELY(! mid)) {
        mid = idMethodMissing;
        missing_reason = ci_missing_reason(ci);
        ec->method_missing_reason = missing_reason;

        /* E.g. when argc == 2
         *
         *   |      |          |      |  TOPN
         *   |      |    +------+
         *   |      | +---> | arg1 |    0
         *   +------+ |     +------+
         *   | arg1 | -+ +-> | arg0 |    1
         *   +------+ |     +------+
         *   | arg0 | ---+  | sym  |    2
         *   +------+       +------+
         *   | recv |       | recv |    3
         * --+------+--------+------+------
         */
        /* open a slot at the bottom of the args for the method name */
        int i = argc;
        CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
        INC_SP(1);
        MEMMOVE(&TOPN(i - 1), &TOPN(i), VALUE, i);
        argc = ++calling->argc;

        if (rb_method_basic_definition_p(klass, idMethodMissing)) {
            /* Inadvertent symbol creation shall be forbidden, see [Feature #5112] */
            TOPN(i) = symbol;
            int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
            const VALUE *argv = STACK_ADDR_FROM_TOP(argc);
            VALUE exc = rb_make_no_method_exception(
                rb_eNoMethodError, 0, recv, argc, argv, priv);

            rb_exc_raise(exc);
        }
        else {
            /* user-defined method_missing will receive an interned symbol */
            TOPN(i) = rb_str_intern(symbol);
        }
    }

    /* rebuild the call info/cache on the C stack with the resolved mid */
    calling->ci = &VM_CI_ON_STACK(mid, flags, argc, vm_ci_kwarg(ci));
    calling->cc = &VM_CC_ON_STACK(klass,
                                  vm_call_general,
                                  { .method_missing_reason = missing_reason },
                                  rb_callable_method_entry_with_refinements(klass, mid, NULL));

    if (flags & VM_CALL_FCALL) {
        /* fcall sites may call private methods; use full dispatch */
        return vm_call_method(ec, reg_cfp, calling);
    }

    const struct rb_callcache *cc = calling->cc;
    VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));

    if (vm_cc_cme(cc) != NULL) {
        switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
          case METHOD_VISI_PUBLIC: /* likely */
            return vm_call_method_each_type(ec, reg_cfp, calling);
          case METHOD_VISI_PRIVATE:
            vm_cc_method_missing_reason_set(cc, MISSING_PRIVATE);
            break;
          case METHOD_VISI_PROTECTED:
            vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
            break;
          default:
            VM_UNREACHABLE(vm_call_method);
        }
        /* private/protected from a non-fcall site => method_missing */
        return vm_call_method_missing(ec, reg_cfp, calling);
    }

    return vm_call_method_nome(ec, reg_cfp, calling);
}
3553
/* Optimized handler for Kernel#send: pop the method-name argument off the
 * stack, shift the remaining arguments down one slot, and re-dispatch via
 * vm_call_symbol with FCALL semantics (send may call private methods). */
static VALUE
vm_call_opt_send(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_send);

    int i;
    VALUE sym;

    CALLER_SETUP_ARG(reg_cfp, calling, calling->ci);

    i = calling->argc - 1;

    if (calling->argc == 0) {
        rb_raise(rb_eArgError, "no method name given");
    }
    else {
        sym = TOPN(i);  /* deepest argument is the method name */
        /* E.g. when i == 2
         *
         *   |      |          |      |  TOPN
         *   +------+          |      |
         *   | arg1 | ---+     |      |    0
         *   +------+    |     +------+
         *   | arg0 | -+ +-> | arg1 |    1
         *   +------+ |     +------+
         *   | sym  | +---> | arg0 |    2
         *   +------+       +------+
         *   | recv |       | recv |    3
         * --+------+--------+------+------
         */
        /* shift arguments */
        if (i > 0) {
            MEMMOVE(&TOPN(i), &TOPN(i-1), VALUE, i);
        }
        calling->argc -= 1;
        DEC_SP(1);

        return vm_call_symbol(ec, reg_cfp, calling, calling->ci, sym, VM_CALL_FCALL);
    }
}
3594
/* Rewrite the current call into recv.method_missing(:mid, *args): shift the
 * arguments up one slot, insert the original method name as argv[0], record
 * the reason on the execution context, and dispatch to method_missing. */
static VALUE
vm_call_method_missing_body(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling,
                            const struct rb_callinfo *orig_ci, enum method_missing_reason reason)
{
    RB_DEBUG_COUNTER_INC(ccf_method_missing);

    VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
    unsigned int argc;

    CALLER_SETUP_ARG(reg_cfp, calling, orig_ci);
    argc = calling->argc + 1;  /* +1 for the method-name symbol */

    unsigned int flag = VM_CALL_FCALL | VM_CALL_OPT_SEND | (calling->kw_splat ? VM_CALL_KW_SPLAT : 0);
    calling->argc = argc;

    /* shift arguments: m(a, b, c) #=> method_missing(:m, a, b, c) */
    CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
    vm_check_canary(ec, reg_cfp->sp);
    if (argc > 1) {
        MEMMOVE(argv+1, argv, VALUE, argc-1);
    }
    argv[0] = ID2SYM(vm_ci_mid(orig_ci));
    INC_SP(1);

    ec->method_missing_reason = reason;
    calling->ci = &VM_CI_ON_STACK(idMethodMissing, flag, argc, vm_ci_kwarg(orig_ci));
    calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }},
                                  rb_callable_method_entry_without_refinements(CLASS_OF(calling->recv), idMethodMissing, NULL));
    return vm_call_method(ec, reg_cfp, calling);
}
3625
3626static VALUE
3627vm_call_method_missing(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
3628{
3629 return vm_call_method_missing_body(ec, reg_cfp, calling, calling->ci, vm_cc_cmethod_missing_reason(calling->cc));
3630}
3631
3632static const rb_callable_method_entry_t *refined_method_callable_without_refinement(const rb_callable_method_entry_t *me);
/* Call handler for zsuper (a bare `super` forwarded through an alias/zsuper
 * method entry): look the method up again starting from klass's superclass,
 * strip a refinement wrapper if present, and re-dispatch with an on-stack cc. */
static VALUE
vm_call_zsuper(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, VALUE klass)
{
    klass = RCLASS_SUPER(klass);

    const rb_callable_method_entry_t *cme = klass ? rb_callable_method_entry(klass, vm_ci_mid(calling->ci)) : NULL;
    if (cme == NULL) {
        return vm_call_method_nome(ec, cfp, calling);
    }
    if (cme->def->type == VM_METHOD_TYPE_REFINED &&
        cme->def->body.refined.orig_me) {
        /* ignore the refinement and call the original method */
        cme = refined_method_callable_without_refinement(cme);
    }

    calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, cme);

    return vm_call_method_each_type(ec, cfp, calling);
}
3651
3652static inline VALUE
3653find_refinement(VALUE refinements, VALUE klass)
3654{
3655 if (NIL_P(refinements)) {
3656 return Qnil;
3657 }
3658 return rb_hash_lookup(refinements, klass);
3659}
3660
PUREFUNC(static rb_control_frame_t * current_method_entry(const rb_execution_context_t *ec, rb_control_frame_t *cfp));
/* Return the control frame of the method enclosing `cfp`: for a block
 * frame, walk up to the frame running the block's local (method) iseq.
 * Falls back to the starting frame when the walk runs off the stack
 * (orphan block — see TODO below). */
static rb_control_frame_t *
current_method_entry(const rb_execution_context_t *ec, rb_control_frame_t *cfp)
{
    rb_control_frame_t *top_cfp = cfp;

    if (cfp->iseq && ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_BLOCK) {
        const rb_iseq_t *local_iseq = ISEQ_BODY(cfp->iseq)->local_iseq;

        do {
            cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
            if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(ec, cfp)) {
                /* TODO: orphan block */
                return top_cfp;
            }
        } while (cfp->iseq != local_iseq);
    }
    return cfp;
}
3680
3681static const rb_callable_method_entry_t *
3682refined_method_callable_without_refinement(const rb_callable_method_entry_t *me)
3683{
3684 const rb_method_entry_t *orig_me = me->def->body.refined.orig_me;
3685 const rb_callable_method_entry_t *cme;
3686
3687 if (orig_me->defined_class == 0) {
3688 cme = NULL;
3690 }
3691 else {
3692 cme = (const rb_callable_method_entry_t *)orig_me;
3693 }
3694
3695 VM_ASSERT(callable_method_entry_p(cme));
3696
3697 if (UNDEFINED_METHOD_ENTRY_P(cme)) {
3698 cme = NULL;
3699 }
3700
3701 return cme;
3702}
3703
/* Find the method entry to run for a call on a refined method: walk the
 * lexical cref chain looking for an active refinement of the owner that
 * defines mid.  Skips the currently-executing refinement method when called
 * via super (so super escapes the refinement).  When no refinement applies,
 * fall back to the original method or the owner's superclass chain. */
static const rb_callable_method_entry_t *
search_refined_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    ID mid = vm_ci_mid(calling->ci);
    const rb_cref_t *cref = vm_get_cref(cfp->ep);
    const struct rb_callcache * const cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);

    for (; cref; cref = CREF_NEXT(cref)) {
        const VALUE refinement = find_refinement(CREF_REFINEMENTS(cref), vm_cc_cme(cc)->owner);
        if (NIL_P(refinement)) continue;

        const rb_callable_method_entry_t *const ref_me =
            rb_callable_method_entry(refinement, mid);

        if (ref_me) {
            if (vm_cc_call(cc) == vm_call_super_method) {
                /* super: don't re-enter the refinement method we came from */
                const rb_control_frame_t *top_cfp = current_method_entry(ec, cfp);
                const rb_callable_method_entry_t *top_me = rb_vm_frame_method_entry(top_cfp);
                if (top_me && rb_method_definition_eq(ref_me->def, top_me->def)) {
                    continue;
                }
            }

            if (cme->def->type != VM_METHOD_TYPE_REFINED ||
                cme->def != ref_me->def) {
                cme = ref_me;
            }
            if (ref_me->def->type != VM_METHOD_TYPE_REFINED) {
                return cme;
            }
        }
        else {
            return NULL;
        }
    }

    if (vm_cc_cme(cc)->def->body.refined.orig_me) {
        /* no active refinement: run the original (pre-refinement) method */
        return refined_method_callable_without_refinement(vm_cc_cme(cc));
    }
    else {
        /* no original either: continue lookup in the superclass chain */
        VALUE klass = RCLASS_SUPER(vm_cc_cme(cc)->defined_class);
        const rb_callable_method_entry_t *cme = klass ? rb_callable_method_entry(klass, mid) : NULL;
        return cme;
    }
}
3750
/* Call handler for refined methods: resolve the effective method through
 * the active refinements and dispatch to it, or to method-missing handling
 * when nothing resolves. */
static VALUE
vm_call_refined(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    struct rb_callcache *ref_cc =  &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }},
                                                   search_refined_method(ec, cfp, calling));

    if (vm_cc_cme(ref_cc)) {
        calling->cc= ref_cc;
        return vm_call_method(ec, cfp, calling);
    }
    else {
        return vm_call_method_nome(ec, cfp, calling);
    }
}
3765
3766static inline VALUE vm_invoke_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, const struct rb_callinfo *ci, bool is_lambda, VALUE block_handler);
3767
3768NOINLINE(static VALUE
3769 vm_invoke_block_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
3770 struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE block_handler));
3771
/* Shared tail of Proc#call / #call on a captured block: drop the receiver
 * (self) from under the arguments, then invoke the block handler. */
static VALUE
vm_invoke_block_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                         struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE block_handler)
{
    int argc = calling->argc;

    /* remove self */
    if (argc > 0) MEMMOVE(&TOPN(argc), &TOPN(argc-1), VALUE, argc);
    DEC_SP(1);

    return vm_invoke_block(ec, reg_cfp, calling, ci, false, block_handler);
}
3784
/* Optimized handler for Proc#call: the receiver is the Proc; invoke it as
 * a block via its block handler. */
static VALUE
vm_call_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_call);

    const struct rb_callinfo *ci = calling->ci;
    VALUE procval = calling->recv;
    return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));
}
3794
/* Optimized handler for calling the frame's block parameter: when
 * Proc#call has not been redefined, invoke the block handler directly;
 * otherwise materialize the Proc and perform a normal method dispatch so
 * the redefinition is honored. */
static VALUE
vm_call_opt_block_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_block_call);

    VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));
    const struct rb_callinfo *ci = calling->ci;

    if (BASIC_OP_UNREDEFINED_P(BOP_CALL, PROC_REDEFINED_OP_FLAG)) {
        return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);
    }
    else {
        calling->recv = rb_vm_bh_to_procval(ec, block_handler);
        calling->cc = rb_vm_search_method_slowpath(ci, CLASS_OF(calling->recv));
        return vm_call_general(ec, reg_cfp, calling);
    }
}
3812
/* Read a Struct member by the fixed index recorded in the optimized method
 * entry (the body of a generated Struct reader). */
static VALUE
vm_call_opt_struct_aref0(rb_execution_context_t *ec, struct rb_calling_info *calling)
{
    VALUE recv = calling->recv;

    VM_ASSERT(RB_TYPE_P(recv, T_STRUCT));
    VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
    VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_AREF);

    const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
    return internal_RSTRUCT_GET(recv, off);
}
3825
/* Call handler for generated Struct readers: fetch the member, then pop
 * the receiver from the VM stack. */
static VALUE
vm_call_opt_struct_aref(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_struct_aref);

    VALUE ret = vm_call_opt_struct_aref0(ec, calling);
    reg_cfp->sp -= 1;  /* pop receiver */
    return ret;
}
3835
/* Write a Struct member by the fixed index recorded in the optimized
 * method entry (the body of a generated Struct writer).  Raises
 * FrozenError for frozen receivers; returns the assigned value. */
static VALUE
vm_call_opt_struct_aset0(rb_execution_context_t *ec, struct rb_calling_info *calling, VALUE val)
{
    VALUE recv = calling->recv;

    VM_ASSERT(RB_TYPE_P(recv, T_STRUCT));
    VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
    VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_ASET);

    rb_check_frozen(recv);

    const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
    internal_RSTRUCT_SET(recv, off, val);

    return val;
}
3852
/* Call handler for generated Struct writers: store the top-of-stack value,
 * then pop both the value and the receiver. */
static VALUE
vm_call_opt_struct_aset(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_struct_aset);

    VALUE ret = vm_call_opt_struct_aset0(ec, calling, *(reg_cfp->sp - 1));
    reg_cfp->sp -= 2;  /* pop value and receiver */
    return ret;
}
3862
3863NOINLINE(static VALUE vm_call_optimized(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
3864 const struct rb_callinfo *ci, const struct rb_callcache *cc));
3865
/* Dispatch an OPTIMIZED method entry (send / Proc#call / block.call /
 * Struct accessors) to its specialized handler, installing that handler as
 * the call-site fast path where safe.  Struct accessors additionally check
 * arity here and only take the fast path for simple argument shapes. */
static VALUE
vm_call_optimized(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
                  const struct rb_callinfo *ci, const struct rb_callcache *cc)
{
    switch (vm_cc_cme(cc)->def->body.optimized.type) {
      case OPTIMIZED_METHOD_TYPE_SEND:
        CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
        return vm_call_opt_send(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_CALL:
        CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
        return vm_call_opt_call(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
        CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
        return vm_call_opt_block_call(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_STRUCT_AREF:
        CALLER_SETUP_ARG(cfp, calling, ci);
        CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
        rb_check_arity(calling->argc, 0, 0);  /* readers take no arguments */
        CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE));
        return vm_call_opt_struct_aref(ec, cfp, calling);

      case OPTIMIZED_METHOD_TYPE_STRUCT_ASET:
        CALLER_SETUP_ARG(cfp, calling, ci);
        CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
        rb_check_arity(calling->argc, 1, 1);  /* writers take exactly one */
        CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE));
        return vm_call_opt_struct_aset(ec, cfp, calling);
      default:
        rb_bug("vm_call_method: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
    }
}
3897
/* Evaluate the attribute fast path `func`, storing its result in `var`.
 * When C-call tracing is active, wrap the evaluation in C_CALL / C_RETURN
 * event hooks; otherwise run `nohook` (typically a CC_SET_FASTPATH that
 * must be skipped while tracing, so traced sites stay on the slow path).
 * Relies on the `ec`, `ci`, `cc` and `calling` locals of the caller. */
#define VM_CALL_METHOD_ATTR(var, func, nohook) \
    if (UNLIKELY(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) { \
        EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
                        vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \
        var = func; \
        EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
                        vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \
    } \
    else { \
        nohook; \
        var = func; \
    }
3910
/* Dispatch a call by the resolved method entry's type, installing the
 * matching specialized handler as the call-site fast path where safe.
 * This is the central switch behind vm_call_method once visibility has
 * been checked. */
static VALUE
vm_call_method_each_type(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->ci;
    const struct rb_callcache *cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
    VALUE v;

    switch (cme->def->type) {
      case VM_METHOD_TYPE_ISEQ:
        CC_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
        return vm_call_iseq_setup(ec, cfp, calling);

      case VM_METHOD_TYPE_NOTIMPLEMENTED:
      case VM_METHOD_TYPE_CFUNC:
        CC_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
        return vm_call_cfunc(ec, cfp, calling);

      case VM_METHOD_TYPE_ATTRSET:
        CALLER_SETUP_ARG(cfp, calling, ci);
        CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);

        rb_check_arity(calling->argc, 1, 1);

        /* argument shapes that the attrset fast path cannot handle */
        const unsigned int aset_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_KWARG);

        if (vm_cc_markable(cc)) {
            vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
            VM_CALL_METHOD_ATTR(v,
                                vm_call_attrset_direct(ec, cfp, cc, calling->recv),
                                CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
        }
        else {
            /* unmarkable cc (e.g. on-stack): build a stack-local copy that
             * carries the attr-index slot the direct call needs */
            cc = &((struct rb_callcache) {
                .flags = T_IMEMO |
                    (imemo_callcache << FL_USHIFT) |
                    VM_CALLCACHE_UNMARKABLE |
                    ((VALUE)INVALID_SHAPE_ID << SHAPE_FLAG_SHIFT) |
                    VM_CALLCACHE_ON_STACK,
                .klass = cc->klass,
                .cme_  = cc->cme_,
                .call_ = cc->call_,
                .aux_  = {
                    .attr = {
                        .value = INVALID_SHAPE_ID << SHAPE_FLAG_SHIFT,
                    }
                },
            });

            VM_CALL_METHOD_ATTR(v,
                                vm_call_attrset_direct(ec, cfp, cc, calling->recv),
                                CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
        }
        return v;

      case VM_METHOD_TYPE_IVAR:
        CALLER_SETUP_ARG(cfp, calling, ci);
        CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
        rb_check_arity(calling->argc, 0, 0);
        vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
        const unsigned int ivar_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT);
        VM_CALL_METHOD_ATTR(v,
                            vm_call_ivar(ec, cfp, calling),
                            CC_SET_FASTPATH(cc, vm_call_ivar, !(vm_ci_flag(ci) & ivar_mask)));
        return v;

      case VM_METHOD_TYPE_MISSING:
        vm_cc_method_missing_reason_set(cc, 0);
        CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
        return vm_call_method_missing(ec, cfp, calling);

      case VM_METHOD_TYPE_BMETHOD:
        CC_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
        return vm_call_bmethod(ec, cfp, calling);

      case VM_METHOD_TYPE_ALIAS:
        CC_SET_FASTPATH(cc, vm_call_alias, TRUE);
        return vm_call_alias(ec, cfp, calling);

      case VM_METHOD_TYPE_OPTIMIZED:
        return vm_call_optimized(ec, cfp, calling, ci, cc);

      case VM_METHOD_TYPE_UNDEF:
        break;

      case VM_METHOD_TYPE_ZSUPER:
        return vm_call_zsuper(ec, cfp, calling, RCLASS_ORIGIN(vm_cc_cme(cc)->defined_class));

      case VM_METHOD_TYPE_REFINED:
        // CC_SET_FASTPATH(cc, vm_call_refined, TRUE);
        // should not set FASTPATH since vm_call_refined assumes cc->call is vm_call_super_method on invokesuper.
        return vm_call_refined(ec, cfp, calling);
    }

    rb_bug("vm_call_method: unsupported method type (%d)", vm_cc_cme(cc)->def->type);
}
4007
4008NORETURN(static void vm_raise_method_missing(rb_execution_context_t *ec, int argc, const VALUE *argv, VALUE obj, int call_status));
4009
/* Handle a call for which no method entry was found: raise NoMethodError
 * directly when method_missing itself is missing (avoids infinite
 * recursion), otherwise redirect the call to method_missing. */
static VALUE
vm_call_method_nome(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    /* method missing */
    const struct rb_callinfo *ci = calling->ci;
    const int stat = ci_missing_reason(ci);

    if (vm_ci_mid(ci) == idMethodMissing) {
        /* reg_cfp local is required by the STACK_ADDR_FROM_TOP macro */
        rb_control_frame_t *reg_cfp = cfp;
        VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
        vm_raise_method_missing(ec, calling->argc, argv, calling->recv, stat);
    }
    else {
        return vm_call_method_missing_body(ec, cfp, calling, ci, stat);
    }
}
4026
4027/* Protected method calls and super invocations need to check that the receiver
4028 * (self for super) inherits the module on which the method is defined.
4029 * In the case of refinements, it should consider the original class not the
4030 * refinement.
4031 */
4032static VALUE
4033vm_defined_class_for_protected_call(const rb_callable_method_entry_t *me)
4034{
4035 VALUE defined_class = me->defined_class;
4036 VALUE refined_class = RCLASS_REFINED_CLASS(defined_class);
4037 return NIL_P(refined_class) ? defined_class : refined_class;
4038}
4039
/* Full method dispatch with visibility enforcement: public calls go
 * straight to type dispatch; private calls require an fcall site;
 * protected calls require self to be kind_of? the defining class and are
 * dispatched through a stack-local cc copy so the protected check is not
 * cached into the shared call cache.  A missing entry goes to nome. */
static inline VALUE
vm_call_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->ci;
    const struct rb_callcache *cc = calling->cc;

    VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));

    if (vm_cc_cme(cc) != NULL) {
        switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
          case METHOD_VISI_PUBLIC: /* likely */
            return vm_call_method_each_type(ec, cfp, calling);

          case METHOD_VISI_PRIVATE:
            if (!(vm_ci_flag(ci) & VM_CALL_FCALL)) {
                /* private method called with an explicit receiver */
                enum method_missing_reason stat = MISSING_PRIVATE;
                if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;

                vm_cc_method_missing_reason_set(cc, stat);
                CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
                return vm_call_method_missing(ec, cfp, calling);
            }
            return vm_call_method_each_type(ec, cfp, calling);

          case METHOD_VISI_PROTECTED:
            if (!(vm_ci_flag(ci) & (VM_CALL_OPT_SEND | VM_CALL_FCALL))) {
                VALUE defined_class = vm_defined_class_for_protected_call(vm_cc_cme(cc));
                if (!rb_obj_is_kind_of(cfp->self, defined_class)) {
                    vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
                    return vm_call_method_missing(ec, cfp, calling);
                }
                else {
                    /* caching method info to dummy cc */
                    VM_ASSERT(vm_cc_cme(cc) != NULL);
                    struct rb_callcache cc_on_stack = *cc;
                    FL_SET_RAW((VALUE)&cc_on_stack, VM_CALLCACHE_UNMARKABLE);
                    calling->cc = &cc_on_stack;
                    return vm_call_method_each_type(ec, cfp, calling);
                }
            }
            return vm_call_method_each_type(ec, cfp, calling);

          default:
            rb_bug("unreachable");
        }
    }
    else {
        return vm_call_method_nome(ec, cfp, calling);
    }
}
4090
/* Generic call handler: no fast path, full dispatch every time.  Used as
 * the initial handler in fresh call caches. */
static VALUE
vm_call_general(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_general);
    return vm_call_method(ec, reg_cfp, calling);
}
4097
/* Reset a call cache's handler back to the generic path (invalidating any
 * installed fast path).  The const-cast is deliberate: call_ is declared
 * const but this is a sanctioned in-place invalidation. */
void
rb_vm_cc_general(const struct rb_callcache *cc)
{
    VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
    VM_ASSERT(cc != vm_cc_empty());

    *(vm_call_handler *)&cc->call_ = vm_call_general;
}
4106
/* Call handler used for `super` dispatch.  Behaviorally identical to
 * vm_call_general; it exists as a distinct function because its ADDRESS is
 * compared elsewhere (search_refined_method) to detect super calls. */
static VALUE
vm_call_super_method(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_super_method);

    // This line is introduced to make different from `vm_call_general` because some compilers (VC we found)
    // can merge the function and the address of the function becomes same.
    // The address of `vm_call_super_method` is used in `search_refined_method`, so it should be different.
    if (ec == NULL) rb_bug("unreachable");

    /* this check is required to distinguish with other functions. */
    VM_ASSERT(vm_cc_call(calling->cc) == vm_call_super_method);
    return vm_call_method(ec, reg_cfp, calling);
}
4121
4122/* super */
4123
4124static inline VALUE
4125vm_search_normal_superclass(VALUE klass)
4126{
4127 if (BUILTIN_TYPE(klass) == T_ICLASS &&
4128 RB_TYPE_P(RBASIC(klass)->klass, T_MODULE) &&
4129 FL_TEST_RAW(RBASIC(klass)->klass, RMODULE_IS_REFINEMENT)) {
4130 klass = RBASIC(klass)->klass;
4131 }
4132 klass = RCLASS_ORIGIN(klass);
4133 return RCLASS_SUPER(klass);
4134}
4135
4136NORETURN(static void vm_super_outside(void));
4137
/* Raise the NoMethodError used when `super` is executed outside a method. */
static void
vm_super_outside(void)
{
    rb_raise(rb_eNoMethodError, "super called outside of method");
}
4143
/* Return the shared empty call cache used for unresolved super calls.
 * The MJIT header cannot reference the file-static object directly, so it
 * goes through the exported accessor instead. */
static const struct rb_callcache *
empty_cc_for_super(void)
{
#ifdef MJIT_HEADER
    return rb_vm_empty_cc_for_super();
#else
    return &vm_empty_cc_for_super;
#endif
}
4153
4154static const struct rb_callcache *
4155vm_search_super_method(const rb_control_frame_t *reg_cfp, struct rb_call_data *cd, VALUE recv)
4156{
4157 VALUE current_defined_class;
4158 const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(reg_cfp);
4159
4160 if (!me) {
4161 vm_super_outside();
4162 }
4163
4164 current_defined_class = vm_defined_class_for_protected_call(me);
4165
4166 if (BUILTIN_TYPE(current_defined_class) != T_MODULE &&
4167 reg_cfp->iseq != method_entry_iseqptr(me) &&
4168 !rb_obj_is_kind_of(recv, current_defined_class)) {
4169 VALUE m = RB_TYPE_P(current_defined_class, T_ICLASS) ?
4170 RCLASS_INCLUDER(current_defined_class) : current_defined_class;
4171
4172 if (m) { /* not bound UnboundMethod */
4174 "self has wrong type to call super in this context: "
4175 "%"PRIsVALUE" (expected %"PRIsVALUE")",
4176 rb_obj_class(recv), m);
4177 }
4178 }
4179
4180 if (me->def->type == VM_METHOD_TYPE_BMETHOD && (vm_ci_flag(cd->ci) & VM_CALL_ZSUPER)) {
4182 "implicit argument passing of super from method defined"
4183 " by define_method() is not supported."
4184 " Specify all arguments explicitly.");
4185 }
4186
4187 ID mid = me->def->original_id;
4188
4189 // update iseq. really? (TODO)
4190 cd->ci = vm_ci_new_runtime(mid,
4191 vm_ci_flag(cd->ci),
4192 vm_ci_argc(cd->ci),
4193 vm_ci_kwarg(cd->ci));
4194
4195 RB_OBJ_WRITTEN(reg_cfp->iseq, Qundef, cd->ci);
4196
4197 const struct rb_callcache *cc;
4198
4199 VALUE klass = vm_search_normal_superclass(me->defined_class);
4200
4201 if (!klass) {
4202 /* bound instance method of module */
4203 cc = vm_cc_new(klass, NULL, vm_call_method_missing);
4204 RB_OBJ_WRITE(reg_cfp->iseq, &cd->cc, cc);
4205 }
4206 else {
4207 cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, klass);
4208 const rb_callable_method_entry_t *cached_cme = vm_cc_cme(cc);
4209
4210 // define_method can cache for different method id
4211 if (cached_cme == NULL) {
4212 // empty_cc_for_super is not markable object
4213 cd->cc = empty_cc_for_super();
4214 }
4215 else if (cached_cme->called_id != mid) {
4216 const rb_callable_method_entry_t *cme = rb_callable_method_entry(klass, mid);
4217 if (cme) {
4218 cc = vm_cc_new(klass, cme, vm_call_super_method);
4219 RB_OBJ_WRITE(reg_cfp->iseq, &cd->cc, cc);
4220 }
4221 else {
4222 cd->cc = cc = empty_cc_for_super();
4223 }
4224 }
4225 else {
4226 switch (cached_cme->def->type) {
4227 // vm_call_refined (search_refined_method) assumes cc->call is vm_call_super_method on invokesuper
4228 case VM_METHOD_TYPE_REFINED:
4229 // cc->klass is superclass of receiver class. Checking cc->klass is not enough to invalidate IVC for the receiver class.
4230 case VM_METHOD_TYPE_ATTRSET:
4231 case VM_METHOD_TYPE_IVAR:
4232 vm_cc_call_set(cc, vm_call_super_method); // invalidate fastpath
4233 break;
4234 default:
4235 break; // use fastpath
4236 }
4237 }
4238 }
4239
4240 VM_ASSERT((vm_cc_cme(cc), true));
4241
4242 return cc;
4243}
4244
4245/* yield */
4246
4247static inline int
4248block_proc_is_lambda(const VALUE procval)
4249{
4250 rb_proc_t *proc;
4251
4252 if (procval) {
4253 GetProcPtr(procval, proc);
4254 return proc->is_lambda;
4255 }
4256 else {
4257 return 0;
4258 }
4259}
4260
4261static VALUE
4262vm_yield_with_cfunc(rb_execution_context_t *ec,
4263 const struct rb_captured_block *captured,
4264 VALUE self, int argc, const VALUE *argv, int kw_splat, VALUE block_handler,
4266{
4267 int is_lambda = FALSE; /* TODO */
4268 VALUE val, arg, blockarg;
4269 int frame_flag;
4270 const struct vm_ifunc *ifunc = captured->code.ifunc;
4271
4272 if (is_lambda) {
4273 arg = rb_ary_new4(argc, argv);
4274 }
4275 else if (argc == 0) {
4276 arg = Qnil;
4277 }
4278 else {
4279 arg = argv[0];
4280 }
4281
4282 blockarg = rb_vm_bh_to_procval(ec, block_handler);
4283
4284 frame_flag = VM_FRAME_MAGIC_IFUNC | VM_FRAME_FLAG_CFRAME | (me ? VM_FRAME_FLAG_BMETHOD : 0);
4285 if (kw_splat) {
4286 frame_flag |= VM_FRAME_FLAG_CFRAME_KW;
4287 }
4288
4289 vm_push_frame(ec, (const rb_iseq_t *)captured->code.ifunc,
4290 frame_flag,
4291 self,
4292 VM_GUARDED_PREV_EP(captured->ep),
4293 (VALUE)me,
4294 0, ec->cfp->sp, 0, 0);
4295 val = (*ifunc->func)(arg, (VALUE)ifunc->data, argc, argv, blockarg);
4296 rb_vm_pop_frame(ec);
4297
4298 return val;
4299}
4300
/* Yield to a Symbol block handler (`&:sym`): dispatch through the
 * symbol-proc call path, forwarding any block handler as the block. */
static VALUE
vm_yield_with_symbol(rb_execution_context_t *ec, VALUE symbol, int argc, const VALUE *argv, int kw_splat, VALUE block_handler)
{
    return rb_sym_proc_call(SYM2ID(symbol), argc, argv, kw_splat, rb_vm_bh_to_procval(ec, block_handler));
}
4306
4307static inline int
4308vm_callee_setup_block_arg_arg0_splat(rb_control_frame_t *cfp, const rb_iseq_t *iseq, VALUE *argv, VALUE ary)
4309{
4310 int i;
4311 long len = RARRAY_LEN(ary);
4312
4313 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
4314
4315 for (i=0; i<len && i<ISEQ_BODY(iseq)->param.lead_num; i++) {
4316 argv[i] = RARRAY_AREF(ary, i);
4317 }
4318
4319 return i;
4320}
4321
/* If the single block argument is convertible to an Array (to_ary),
 * return the converted Array; otherwise return nil.
 * The disabled branch documents that argv[0] itself is intentionally
 * left untouched here; the assert checks the conversion did not clobber
 * the stack slot. */
static inline VALUE
vm_callee_setup_block_arg_arg0_check(VALUE *argv)
{
    VALUE ary, arg0 = argv[0];
    ary = rb_check_array_type(arg0);
#if 0
    argv[0] = arg0;
#else
    VM_ASSERT(argv[0] == arg0);
#endif
    return ary;
}
4334
/* Arrange the arguments on the stack for invoking a block iseq.
 * For "simple" iseqs (positional leading params only) this applies block
 * semantics in place: auto-splat of a single array argument, nil-padding
 * when too few args, truncation when too many.  Lambda/method semantics
 * (arg_setup_method) instead raise on arity mismatch.  Complex iseqs
 * (opt/rest/kw/block params) defer to setup_parameters_complex.
 * Returns the opt_pc offset to start execution at (0 for simple iseqs). */
static int
vm_callee_setup_block_arg(rb_execution_context_t *ec, struct rb_calling_info *calling, const struct rb_callinfo *ci, const rb_iseq_t *iseq, VALUE *argv, const enum arg_setup_type arg_setup_type)
{
    if (rb_simple_iseq_p(iseq)) {
        rb_control_frame_t *cfp = ec->cfp;
        VALUE arg0;

        CALLER_SETUP_ARG(cfp, calling, ci);
        CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);

        /* auto-splat: `{ |a, b| }` called with one array argument */
        if (arg_setup_type == arg_setup_block &&
            calling->argc == 1 &&
            ISEQ_BODY(iseq)->param.flags.has_lead &&
            !ISEQ_BODY(iseq)->param.flags.ambiguous_param0 &&
            !NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {
            calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);
        }

        if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
            if (arg_setup_type == arg_setup_block) {
                if (calling->argc < ISEQ_BODY(iseq)->param.lead_num) {
                    int i;
                    CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
                    for (i=calling->argc; i<ISEQ_BODY(iseq)->param.lead_num; i++) argv[i] = Qnil;
                    calling->argc = ISEQ_BODY(iseq)->param.lead_num; /* fill rest parameters */
                }
                else if (calling->argc > ISEQ_BODY(iseq)->param.lead_num) {
                    calling->argc = ISEQ_BODY(iseq)->param.lead_num; /* simply truncate arguments */
                }
            }
            else {
                argument_arity_error(ec, iseq, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);
            }
        }

        return 0;
    }
    else {
        return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_type);
    }
}
4376
/* Build a temporary calling-info on the C stack and run block-argument
 * setup for a yield.  Only the fields read by vm_callee_setup_block_arg
 * are initialized; a stack-allocated dummy callinfo carries the kw_splat
 * flag.  Returns the opt_pc from the setup. */
static int
vm_yield_setup_args(rb_execution_context_t *ec, const rb_iseq_t *iseq, const int argc, VALUE *argv, int kw_splat, VALUE block_handler, enum arg_setup_type arg_setup_type)
{
    struct rb_calling_info calling_entry, *calling;

    calling = &calling_entry;
    calling->argc = argc;
    calling->block_handler = block_handler;
    calling->kw_splat = kw_splat;
    calling->recv = Qundef;
    struct rb_callinfo dummy_ci = VM_CI_ON_STACK(0, (kw_splat ? VM_CALL_KW_SPLAT : 0), 0, 0);

    return vm_callee_setup_block_arg(ec, calling, &dummy_ci, iseq, argv, arg_setup_type);
}
4391
4392/* ruby iseq -> ruby block */
4393
/* Invoke an iseq-defined block: set up the block arguments in place on
 * the value stack, then push a BLOCK frame over them.  Returns Qundef to
 * signal the interpreter loop that a new frame was pushed (no immediate
 * result value). */
static VALUE
vm_invoke_iseq_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                     struct rb_calling_info *calling, const struct rb_callinfo *ci,
                     bool is_lambda, VALUE block_handler)
{
    const struct rb_captured_block *captured = VM_BH_TO_ISEQ_BLOCK(block_handler);
    const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
    const int arg_size = ISEQ_BODY(iseq)->param.size;
    /* arguments currently sit on top of the stack; rsp points at the first */
    VALUE * const rsp = GET_SP() - calling->argc;
    int opt_pc = vm_callee_setup_block_arg(ec, calling, ci, iseq, rsp, is_lambda ? arg_setup_method : arg_setup_block);

    SET_SP(rsp);

    vm_push_frame(ec, iseq,
                  VM_FRAME_MAGIC_BLOCK | (is_lambda ? VM_FRAME_FLAG_LAMBDA : 0),
                  captured->self,
                  VM_GUARDED_PREV_EP(captured->ep), 0,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
                  rsp + arg_size,
                  ISEQ_BODY(iseq)->local_table_size - arg_size, ISEQ_BODY(iseq)->stack_max);

    return Qundef;
}
4417
4418static VALUE
4419vm_invoke_symbol_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
4420 struct rb_calling_info *calling, const struct rb_callinfo *ci,
4421 MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
4422{
4423 if (calling->argc < 1) {
4424 rb_raise(rb_eArgError, "no receiver given");
4425 }
4426 else {
4427 VALUE symbol = VM_BH_TO_SYMBOL(block_handler);
4428 CALLER_SETUP_ARG(reg_cfp, calling, ci);
4429 calling->recv = TOPN(--calling->argc);
4430 return vm_call_symbol(ec, reg_cfp, calling, ci, symbol, 0);
4431 }
4432}
4433
/* Invoke an ifunc (C function) block handler: normalize the caller's
 * arguments, run the C function via vm_yield_with_cfunc, then pop the
 * consumed arguments off the value stack. */
static VALUE
vm_invoke_ifunc_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                      struct rb_calling_info *calling, const struct rb_callinfo *ci,
                      MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
{
    VALUE val;
    int argc;
    const struct rb_captured_block *captured = VM_BH_TO_IFUNC_BLOCK(block_handler);
    CALLER_SETUP_ARG(ec->cfp, calling, ci);
    CALLER_REMOVE_EMPTY_KW_SPLAT(ec->cfp, calling, ci);
    argc = calling->argc;
    val = vm_yield_with_cfunc(ec, captured, captured->self, argc, STACK_ADDR_FROM_TOP(argc), calling->kw_splat, calling->block_handler, NULL);
    POPN(argc); /* TODO: should put before C/yield? */
    return val;
}
4449
/* Unwrap a Proc object into the block handler for its underlying block
 * (iseq, ifunc, symbol, or another proc).  The switch is exhaustive over
 * vm_block_type; VM_UNREACHABLE guards against enum drift. */
static VALUE
vm_proc_to_block_handler(VALUE procval)
{
    const struct rb_block *block = vm_proc_block(procval);

    switch (vm_block_type(block)) {
      case block_type_iseq:
        return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
      case block_type_ifunc:
        return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
      case block_type_symbol:
        return VM_BH_FROM_SYMBOL(block->as.symbol);
      case block_type_proc:
        return VM_BH_FROM_PROC(block->as.proc);
    }
    VM_UNREACHABLE(vm_yield_with_proc);
    return Qundef;
}
4468
4469static VALUE
4470vm_invoke_proc_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
4471 struct rb_calling_info *calling, const struct rb_callinfo *ci,
4472 bool is_lambda, VALUE block_handler)
4473{
4474 while (vm_block_handler_type(block_handler) == block_handler_type_proc) {
4475 VALUE proc = VM_BH_TO_PROC(block_handler);
4476 is_lambda = block_proc_is_lambda(proc);
4477 block_handler = vm_proc_to_block_handler(proc);
4478 }
4479
4480 return vm_invoke_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);
4481}
4482
4483static inline VALUE
4484vm_invoke_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
4485 struct rb_calling_info *calling, const struct rb_callinfo *ci,
4486 bool is_lambda, VALUE block_handler)
4487{
4488 VALUE (*func)(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
4489 struct rb_calling_info *calling, const struct rb_callinfo *ci,
4490 bool is_lambda, VALUE block_handler);
4491
4492 switch (vm_block_handler_type(block_handler)) {
4493 case block_handler_type_iseq: func = vm_invoke_iseq_block; break;
4494 case block_handler_type_ifunc: func = vm_invoke_ifunc_block; break;
4495 case block_handler_type_proc: func = vm_invoke_proc_block; break;
4496 case block_handler_type_symbol: func = vm_invoke_symbol_block; break;
4497 default: rb_bug("vm_invoke_block: unreachable");
4498 }
4499
4500 return func(ec, reg_cfp, calling, ci, is_lambda, block_handler);
4501}
4502
/* Create a Proc whose block body is `blockiseq`, captured against the
 * nearest Ruby-level frame of the current execution context. */
static VALUE
vm_make_proc_with_iseq(const rb_iseq_t *blockiseq)
{
    const rb_execution_context_t *ec = GET_EC();
    const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);
    struct rb_captured_block *captured;

    if (cfp == 0) {
        rb_bug("vm_make_proc_with_iseq: unreachable");
    }

    captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);
    captured->code.iseq = blockiseq;

    return rb_vm_make_proc(ec, captured, rb_cProc);
}
4519
/* Body for the `once` instruction: wrap the iseq in a Proc and call it
 * with no arguments and no block. */
static VALUE
vm_once_exec(VALUE iseq)
{
    VALUE proc = vm_make_proc_with_iseq((rb_iseq_t *)iseq);
    return rb_proc_call_with_block(proc, 0, 0, Qnil);
}
4526
/* Ensure-handler for the `once` instruction: release the running-thread
 * marker so another thread may retry after an exception. */
static VALUE
vm_once_clear(VALUE data)
{
    union iseq_inline_storage_entry *is = (union iseq_inline_storage_entry *)data;
    is->once.running_thread = NULL;
    return Qnil;
}
4534
4535/* defined insn */
4536
4537static bool
4538check_respond_to_missing(VALUE obj, VALUE v)
4539{
4540 VALUE args[2];
4541 VALUE r;
4542
4543 args[0] = obj; args[1] = Qfalse;
4544 r = rb_check_funcall(v, idRespond_to_missing, 2, args);
4545 if (!UNDEF_P(r) && RTEST(r)) {
4546 return true;
4547 }
4548 else {
4549 return false;
4550 }
4551}
4552
/* Implement the `defined?` expression for one operand type.
 * `op_type` selects the kind of entity being tested, `obj` is usually
 * the symbol naming it, and `v` carries a type-specific extra operand
 * (receiver, cbase, etc.).  Returns true when the entity is defined. */
static bool
vm_defined(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t op_type, VALUE obj, VALUE v)
{
    VALUE klass;
    enum defined_type type = (enum defined_type)op_type;

    switch (type) {
      case DEFINED_IVAR:
        return rb_ivar_defined(GET_SELF(), SYM2ID(obj));
        break;
      case DEFINED_GVAR:
        return rb_gvar_defined(SYM2ID(obj));
        break;
      case DEFINED_CVAR: {
        const rb_cref_t *cref = vm_get_cref(GET_EP());
        klass = vm_get_cvar_base(cref, GET_CFP(), 0);
        return rb_cvar_defined(klass, SYM2ID(obj));
        break;
      }
      case DEFINED_CONST:
      case DEFINED_CONST_FROM: {
        bool allow_nil = type == DEFINED_CONST;
        klass = v;
        return vm_get_ev_const(ec, klass, SYM2ID(obj), allow_nil, true);
        break;
      }
      case DEFINED_FUNC:
        klass = CLASS_OF(v);
        return rb_ec_obj_respond_to(ec, v, SYM2ID(obj), TRUE);
        break;
      case DEFINED_METHOD:{
        VALUE klass = CLASS_OF(v);
        const rb_method_entry_t *me = rb_method_entry_with_refinements(klass, SYM2ID(obj), NULL);

        if (me) {
            switch (METHOD_ENTRY_VISI(me)) {
              case METHOD_VISI_PRIVATE:
                break;
              case METHOD_VISI_PROTECTED:
                if (!rb_obj_is_kind_of(GET_SELF(), rb_class_real(me->defined_class))) {
                    break;
                }
                /* deliberate fallthrough: protected is visible when self
                 * is kind_of the defining class */
              case METHOD_VISI_PUBLIC:
                return true;
                break;
              default:
                rb_bug("vm_defined: unreachable: %u", (unsigned int)METHOD_ENTRY_VISI(me));
            }
        }
        else {
            return check_respond_to_missing(obj, v);
        }
        break;
      }
      case DEFINED_YIELD:
        if (GET_BLOCK_HANDLER() != VM_BLOCK_HANDLER_NONE) {
            return true;
        }
        break;
      case DEFINED_ZSUPER:
        {
            const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(GET_CFP());

            if (me) {
                VALUE klass = vm_search_normal_superclass(me->defined_class);
                ID id = me->def->original_id;

                return rb_method_boundp(klass, id, 0);
            }
        }
        break;
      case DEFINED_REF:{
        /* back-reference / nth-match special variables ($~, $1, ...) */
        return vm_getspecial(ec, GET_LEP(), Qfalse, FIX2INT(obj)) != Qnil;
        break;
      }
      default:
        rb_bug("unimplemented defined? type (VM)");
        break;
    }

    return false;
}
4635
/* Exported wrapper around vm_defined for use outside this file
 * (e.g. by JIT-generated code). */
bool
rb_vm_defined(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t op_type, VALUE obj, VALUE v)
{
    return vm_defined(ec, reg_cfp, op_type, obj, v);
}
4641
4642static const VALUE *
4643vm_get_ep(const VALUE *const reg_ep, rb_num_t lv)
4644{
4645 rb_num_t i;
4646 const VALUE *ep = reg_ep;
4647 for (i = 0; i < lv; i++) {
4648 ep = GET_PREV_EP(ep);
4649 }
4650 return ep;
4651}
4652
/* Resolve the object pushed by the putspecialobject instruction:
 * the frozen VM core object, the current cref class (cbase), or the
 * base for constant definition. */
static VALUE
vm_get_special_object(const VALUE *const reg_ep,
                      enum vm_special_object_type type)
{
    switch (type) {
      case VM_SPECIAL_OBJECT_VMCORE:
        return rb_mRubyVMFrozenCore;
      case VM_SPECIAL_OBJECT_CBASE:
        return vm_get_cbase(reg_ep);
      case VM_SPECIAL_OBJECT_CONST_BASE:
        return vm_get_const_base(reg_ep);
      default:
        rb_bug("putspecialobject insn: unknown value_type %d", type);
    }
}
4668
4669static VALUE
4670vm_concat_array(VALUE ary1, VALUE ary2st)
4671{
4672 const VALUE ary2 = ary2st;
4673 VALUE tmp1 = rb_check_to_array(ary1);
4674 VALUE tmp2 = rb_check_to_array(ary2);
4675
4676 if (NIL_P(tmp1)) {
4677 tmp1 = rb_ary_new3(1, ary1);
4678 }
4679
4680 if (NIL_P(tmp2)) {
4681 tmp2 = rb_ary_new3(1, ary2);
4682 }
4683
4684 if (tmp1 == ary1) {
4685 tmp1 = rb_ary_dup(ary1);
4686 }
4687 return rb_ary_concat(tmp1, tmp2);
4688}
4689
// YJIT implementation is using the C function
// and needs to call a non-static function
/* Exported wrapper: see vm_concat_array for semantics. */
VALUE
rb_vm_concat_array(VALUE ary1, VALUE ary2st)
{
    return vm_concat_array(ary1, ary2st);
}
4697
4698static VALUE
4699vm_splat_array(VALUE flag, VALUE ary)
4700{
4701 VALUE tmp = rb_check_to_array(ary);
4702 if (NIL_P(tmp)) {
4703 return rb_ary_new3(1, ary);
4704 }
4705 else if (RTEST(flag)) {
4706 return rb_ary_dup(tmp);
4707 }
4708 else {
4709 return tmp;
4710 }
4711}
4712
// YJIT implementation is using the C function
// and needs to call a non-static function
/* Exported wrapper: see vm_splat_array for semantics. */
VALUE
rb_vm_splat_array(VALUE flag, VALUE ary)
{
    return vm_splat_array(flag, ary);
}
4720
4721static VALUE
4722vm_check_match(rb_execution_context_t *ec, VALUE target, VALUE pattern, rb_num_t flag)
4723{
4724 enum vm_check_match_type type = ((int)flag) & VM_CHECKMATCH_TYPE_MASK;
4725
4726 if (flag & VM_CHECKMATCH_ARRAY) {
4727 long i;
4728 const long n = RARRAY_LEN(pattern);
4729
4730 for (i = 0; i < n; i++) {
4731 VALUE v = RARRAY_AREF(pattern, i);
4732 VALUE c = check_match(ec, v, target, type);
4733
4734 if (RTEST(c)) {
4735 return c;
4736 }
4737 }
4738 return Qfalse;
4739 }
4740 else {
4741 return check_match(ec, pattern, target, type);
4742 }
4743}
4744
4745static VALUE
4746vm_check_keyword(lindex_t bits, lindex_t idx, const VALUE *ep)
4747{
4748 const VALUE kw_bits = *(ep - bits);
4749
4750 if (FIXNUM_P(kw_bits)) {
4751 unsigned int b = (unsigned int)FIX2ULONG(kw_bits);
4752 if ((idx < KW_SPECIFIED_BITS_MAX) && (b & (0x01 << idx)))
4753 return Qfalse;
4754 }
4755 else {
4756 VM_ASSERT(RB_TYPE_P(kw_bits, T_HASH));
4757 if (rb_hash_has_key(kw_bits, INT2FIX(idx))) return Qfalse;
4758 }
4759 return Qtrue;
4760}
4761
4762static void
4763vm_dtrace(rb_event_flag_t flag, rb_execution_context_t *ec)
4764{
4765 if (RUBY_DTRACE_METHOD_ENTRY_ENABLED() ||
4766 RUBY_DTRACE_METHOD_RETURN_ENABLED() ||
4767 RUBY_DTRACE_CMETHOD_ENTRY_ENABLED() ||
4768 RUBY_DTRACE_CMETHOD_RETURN_ENABLED()) {
4769
4770 switch (flag) {
4771 case RUBY_EVENT_CALL:
4772 RUBY_DTRACE_METHOD_ENTRY_HOOK(ec, 0, 0);
4773 return;
4774 case RUBY_EVENT_C_CALL:
4775 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, 0, 0);
4776 return;
4777 case RUBY_EVENT_RETURN:
4778 RUBY_DTRACE_METHOD_RETURN_HOOK(ec, 0, 0);
4779 return;
4781 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, 0, 0);
4782 return;
4783 }
4784 }
4785}
4786
4787static VALUE
4788vm_const_get_under(ID id, rb_num_t flags, VALUE cbase)
4789{
4790 if (!rb_const_defined_at(cbase, id)) {
4791 return 0;
4792 }
4793 else if (VM_DEFINECLASS_SCOPED_P(flags)) {
4794 return rb_public_const_get_at(cbase, id);
4795 }
4796 else {
4797 return rb_const_get_at(cbase, id);
4798 }
4799}
4800
4801static VALUE
4802vm_check_if_class(ID id, rb_num_t flags, VALUE super, VALUE klass)
4803{
4804 if (!RB_TYPE_P(klass, T_CLASS)) {
4805 return 0;
4806 }
4807 else if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags)) {
4808 VALUE tmp = rb_class_real(RCLASS_SUPER(klass));
4809
4810 if (tmp != super) {
4812 "superclass mismatch for class %"PRIsVALUE"",
4813 rb_id2str(id));
4814 }
4815 else {
4816 return klass;
4817 }
4818 }
4819 else {
4820 return klass;
4821 }
4822}
4823
4824static VALUE
4825vm_check_if_module(ID id, VALUE mod)
4826{
4827 if (!RB_TYPE_P(mod, T_MODULE)) {
4828 return 0;
4829 }
4830 else {
4831 return mod;
4832 }
4833}
4834
/* Register a freshly created class/module `c` as constant `id` under
 * `cbase`, setting its class path for #name/#inspect. */
static VALUE
declare_under(ID id, VALUE cbase, VALUE c)
{
    rb_set_class_path_string(c, cbase, rb_id2str(id));
    rb_const_set(cbase, id, c);
    return c;
}
4842
4843static VALUE
4844vm_declare_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
4845{
4846 /* new class declaration */
4847 VALUE s = VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) ? super : rb_cObject;
4848 VALUE c = declare_under(id, cbase, rb_define_class_id(id, s));
4850 rb_class_inherited(s, c);
4851 return c;
4852}
4853
/* Create a brand-new module named `id` under `cbase`. */
static VALUE
vm_declare_module(ID id, VALUE cbase)
{
    /* new module declaration */
    return declare_under(id, cbase, rb_module_new());
}
4860
4861NORETURN(static void unmatched_redefinition(const char *type, VALUE cbase, ID id, VALUE old));
4862static void
4863unmatched_redefinition(const char *type, VALUE cbase, ID id, VALUE old)
4864{
4865 VALUE name = rb_id2str(id);
4866 VALUE message = rb_sprintf("%"PRIsVALUE" is not a %s",
4867 name, type);
4868 VALUE location = rb_const_source_location_at(cbase, id);
4869 if (!NIL_P(location)) {
4870 rb_str_catf(message, "\n%"PRIsVALUE":%"PRIsVALUE":"
4871 " previous definition of %"PRIsVALUE" was here",
4872 rb_ary_entry(location, 0), rb_ary_entry(location, 1), name);
4873 }
4875}
4876
4877static VALUE
4878vm_define_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
4879{
4880 VALUE klass;
4881
4882 if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) && !RB_TYPE_P(super, T_CLASS)) {
4884 "superclass must be an instance of Class (given an instance of %"PRIsVALUE")",
4885 rb_obj_class(super));
4886 }
4887
4888 vm_check_if_namespace(cbase);
4889
4890 /* find klass */
4891 rb_autoload_load(cbase, id);
4892 if ((klass = vm_const_get_under(id, flags, cbase)) != 0) {
4893 if (!vm_check_if_class(id, flags, super, klass))
4894 unmatched_redefinition("class", cbase, id, klass);
4895 return klass;
4896 }
4897 else {
4898 return vm_declare_class(id, flags, cbase, super);
4899 }
4900}
4901
4902static VALUE
4903vm_define_module(ID id, rb_num_t flags, VALUE cbase)
4904{
4905 VALUE mod;
4906
4907 vm_check_if_namespace(cbase);
4908 if ((mod = vm_const_get_under(id, flags, cbase)) != 0) {
4909 if (!vm_check_if_module(id, mod))
4910 unmatched_redefinition("module", cbase, id, mod);
4911 return mod;
4912 }
4913 else {
4914 return vm_declare_module(id, cbase);
4915 }
4916}
4917
/* Entry point for the defineclass instruction: dispatch on the declared
 * kind (class, singleton class, module) and return the scope value the
 * class body will execute in. */
static VALUE
vm_find_or_create_class_by_id(ID id,
                              rb_num_t flags,
                              VALUE cbase,
                              VALUE super)
{
    rb_vm_defineclass_type_t type = VM_DEFINECLASS_TYPE(flags);

    switch (type) {
      case VM_DEFINECLASS_TYPE_CLASS:
        /* classdef returns class scope value */
        return vm_define_class(id, flags, cbase, super);

      case VM_DEFINECLASS_TYPE_SINGLETON_CLASS:
        /* classdef returns class scope value */
        return rb_singleton_class(cbase);

      case VM_DEFINECLASS_TYPE_MODULE:
        /* classdef returns class scope value */
        return vm_define_module(id, flags, cbase);

      default:
        rb_bug("unknown defineclass type: %d", (int)type);
    }
}
4943
4944static rb_method_visibility_t
4945vm_scope_visibility_get(const rb_execution_context_t *ec)
4946{
4947 const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);
4948
4949 if (!vm_env_cref_by_cref(cfp->ep)) {
4950 return METHOD_VISI_PUBLIC;
4951 }
4952 else {
4953 return CREF_SCOPE_VISI(vm_ec_cref(ec))->method_visi;
4954 }
4955}
4956
4957static int
4958vm_scope_module_func_check(const rb_execution_context_t *ec)
4959{
4960 const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);
4961
4962 if (!vm_env_cref_by_cref(cfp->ep)) {
4963 return FALSE;
4964 }
4965 else {
4966 return CREF_SCOPE_VISI(vm_ec_cref(ec))->module_func;
4967 }
4968}
4969
/* Implement the definemethod/definesmethod instructions: add the iseq as
 * a method `id` on the cref class (or on obj's singleton class), using
 * the scope's current visibility.  In module_function mode the method is
 * additionally defined public on the singleton class. */
static void
vm_define_method(const rb_execution_context_t *ec, VALUE obj, ID id, VALUE iseqval, int is_singleton)
{
    VALUE klass;
    rb_method_visibility_t visi;
    rb_cref_t *cref = vm_ec_cref(ec);

    if (is_singleton) {
        klass = rb_singleton_class(obj); /* class and frozen checked in this API */
        visi = METHOD_VISI_PUBLIC;
    }
    else {
        klass = CREF_CLASS_FOR_DEFINITION(cref);
        visi = vm_scope_visibility_get(ec);
    }

    if (NIL_P(klass)) {
        rb_raise(rb_eTypeError, "no class/module to add method");
    }

    rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, visi);
    // Set max_iv_count on klasses based on number of ivar sets that are in the initialize method
    if (id == rb_intern("initialize") && klass != rb_cObject && RB_TYPE_P(klass, T_CLASS) && (rb_get_alloc_func(klass) == rb_class_allocate_instance)) {

        RCLASS_EXT(klass)->max_iv_count = rb_estimate_iv_count(klass, (const rb_iseq_t *)iseqval);
    }

    if (!is_singleton && vm_scope_module_func_check(ec)) {
        klass = rb_singleton_class(klass);
        rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, METHOD_VISI_PUBLIC);
    }
}
5002
/* Call-handler form of `yield`: invoke the block handler of the current
 * frame, or raise LocalJumpError when no block was given.  The raising
 * branch does not return, so control always reaches vm_invoke_block. */
static VALUE
vm_invokeblock_i(struct rb_execution_context_struct *ec,
                 struct rb_control_frame_struct *reg_cfp,
                 struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->ci;
    VALUE block_handler = VM_CF_BLOCK_HANDLER(GET_CFP());

    if (block_handler == VM_BLOCK_HANDLER_NONE) {
        rb_vm_localjump_error("no block given (yield)", Qnil, 0);
    }
    else {
        return vm_invoke_block(ec, GET_CFP(), calling, ci, false, block_handler);
    }
}
5018
#ifdef MJIT_HEADER
/* In the MJIT header build, vm_sendish takes its method explorer as a
 * function pointer; these wrappers adapt the searchers to one shape. */
static const struct rb_callcache *
vm_search_method_wrap(const struct rb_control_frame_struct *reg_cfp, struct rb_call_data *cd, VALUE recv)
{
    return vm_search_method((VALUE)reg_cfp->iseq, cd, recv);
}

/* invokeblock needs no lookup: return a constant, unmarkable call cache
 * whose call handler is vm_invokeblock_i. */
static const struct rb_callcache *
vm_search_invokeblock(const struct rb_control_frame_struct *reg_cfp, struct rb_call_data *cd, VALUE recv)
{
    static const struct rb_callcache cc = {
        .flags = T_IMEMO | (imemo_callcache << FL_USHIFT) | VM_CALLCACHE_UNMARKABLE,
        .klass = 0,
        .cme_  = 0,
        .call_ = vm_invokeblock_i,
        .aux_  = {0},
    };
    return &cc;
}

# define mexp_search_method vm_search_method_wrap
# define mexp_search_super vm_search_super_method
# define mexp_search_invokeblock vm_search_invokeblock
#else
/* In the interpreter build the explorer is an enum, letting the compiler
 * inline each branch of vm_sendish. */
enum method_explorer_type {
    mexp_search_method,
    mexp_search_invokeblock,
    mexp_search_super,
};
#endif
5049
/* Common implementation of the send-like instructions (send,
 * opt_send_without_block, invokesuper, invokeblock): look up the callee
 * via `method_explorer`, invoke its call handler, and either return the
 * C-function result directly or run the newly pushed Ruby frame. */
static
#ifndef MJIT_HEADER
inline
#endif
VALUE
vm_sendish(
    struct rb_execution_context_struct *ec,
    struct rb_control_frame_struct *reg_cfp,
    struct rb_call_data *cd,
    VALUE block_handler,
#ifdef MJIT_HEADER
    const struct rb_callcache *(*method_explorer)(const struct rb_control_frame_struct *cfp, struct rb_call_data *cd, VALUE recv)
#else
    enum method_explorer_type method_explorer
#endif
) {
    VALUE val = Qundef;
    const struct rb_callinfo *ci = cd->ci;
    const struct rb_callcache *cc;
    int argc = vm_ci_argc(ci);
    VALUE recv = TOPN(argc);
    struct rb_calling_info calling = {
        .block_handler = block_handler,
        .kw_splat = IS_ARGS_KW_SPLAT(ci) > 0,
        .recv = recv,
        .argc = argc,
        .ci = ci,
    };

// The enum-based branch and inlining are faster in VM, but function pointers without inlining are faster in JIT.
#ifdef MJIT_HEADER
    calling.cc = cc = method_explorer(GET_CFP(), cd, recv);
    val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
#else
    switch (method_explorer) {
      case mexp_search_method:
        calling.cc = cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, CLASS_OF(recv));
        val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
        break;
      case mexp_search_super:
        calling.cc = cc = vm_search_super_method(reg_cfp, cd, recv);
        calling.ci = cd->ci; // TODO: does it safe?
        val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
        break;
      case mexp_search_invokeblock:
        val = vm_invokeblock_i(ec, GET_CFP(), &calling);
        break;
    }
#endif

    if (!UNDEF_P(val)) {
        return val; /* CFUNC normal return */
    }
    else {
        RESTORE_REGS(); /* CFP pushed in cc->call() */
    }

#ifdef MJIT_HEADER
    /* When calling ISeq which may catch an exception from JIT-ed
       code, we should not call jit_exec directly to prevent the
       caller frame from being canceled. That's because the caller
       frame may have stack values in the local variables and the
       cancelling the caller frame will purge them. But directly
       calling jit_exec is faster... */
    if (ISEQ_BODY(GET_ISEQ())->catch_except_p) {
        VM_ENV_FLAGS_SET(GET_EP(), VM_FRAME_FLAG_FINISH);
        return vm_exec(ec, true);
    }
    else if (UNDEF_P(val = jit_exec(ec))) {
        VM_ENV_FLAGS_SET(GET_EP(), VM_FRAME_FLAG_FINISH);
        return vm_exec(ec, false);
    }
    else {
        return val;
    }
#else
    /* When calling from VM, longjmp in the callee won't purge any
       JIT-ed caller frames. So it's safe to directly call jit_exec. */
    return jit_exec(ec);
#endif
}
5131
5132/* object.c */
5133VALUE rb_nil_to_s(VALUE);
5134VALUE rb_true_to_s(VALUE);
5135VALUE rb_false_to_s(VALUE);
5136/* numeric.c */
5137VALUE rb_int_to_s(int argc, VALUE *argv, VALUE x);
5138VALUE rb_fix_to_s(VALUE);
5139/* variable.c */
5140VALUE rb_mod_to_s(VALUE);
5142
/* Fast path for the objtostring instruction (string interpolation).
 * Returns a string directly when the receiver's #to_s is a known,
 * unredefined C implementation; returns Qundef to fall back to a full
 * method call otherwise. */
static VALUE
vm_objtostring(const rb_iseq_t *iseq, VALUE recv, CALL_DATA cd)
{
    int type = TYPE(recv);
    if (type == T_STRING) {
        return recv;
    }

    const struct rb_callcache *cc = vm_search_method((VALUE)iseq, cd, recv);

    switch (type) {
      case T_SYMBOL:
        if (check_cfunc(vm_cc_cme(cc), rb_sym_to_s)) {
            // rb_sym_to_s() allocates a mutable string, but since we are only
            // going to use this string for interpolation, it's fine to use the
            // frozen string.
            return rb_sym2str(recv);
        }
        break;
      case T_MODULE:
      case T_CLASS:
        if (check_cfunc(vm_cc_cme(cc), rb_mod_to_s)) {
            // rb_mod_to_s() allocates a mutable string, but since we are only
            // going to use this string for interpolation, it's fine to use the
            // frozen string.
            VALUE val = rb_mod_name(recv);
            if (NIL_P(val)) {
                /* anonymous class/module: fall back to the real to_s */
                val = rb_mod_to_s(recv);
            }
            return val;
        }
        break;
      case T_NIL:
        if (check_cfunc(vm_cc_cme(cc), rb_nil_to_s)) {
            return rb_nil_to_s(recv);
        }
        break;
      case T_TRUE:
        if (check_cfunc(vm_cc_cme(cc), rb_true_to_s)) {
            return rb_true_to_s(recv);
        }
        break;
      case T_FALSE:
        if (check_cfunc(vm_cc_cme(cc), rb_false_to_s)) {
            return rb_false_to_s(recv);
        }
        break;
      case T_FIXNUM:
        if (check_cfunc(vm_cc_cme(cc), rb_int_to_s)) {
            return rb_fix_to_s(recv);
        }
        break;
    }
    return Qundef;
}
5198
5199static VALUE
5200vm_opt_str_freeze(VALUE str, int bop, ID id)
5201{
5202 if (BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
5203 return str;
5204 }
5205 else {
5206 return Qundef;
5207 }
5208}
5209
5210/* this macro is mandatory to use OPTIMIZED_CMP. What a design! */
5211#define id_cmp idCmp
5212
5213static VALUE
5214vm_opt_newarray_max(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
5215{
5216 if (BASIC_OP_UNREDEFINED_P(BOP_MAX, ARRAY_REDEFINED_OP_FLAG)) {
5217 if (num == 0) {
5218 return Qnil;
5219 }
5220 else {
5221 VALUE result = *ptr;
5222 rb_snum_t i = num - 1;
5223 while (i-- > 0) {
5224 const VALUE v = *++ptr;
5225 if (OPTIMIZED_CMP(v, result) > 0) {
5226 result = v;
5227 }
5228 }
5229 return result;
5230 }
5231 }
5232 else {
5233 return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idMax, 0, NULL, RB_NO_KEYWORDS);
5234 }
5235}
5236
/* Exported wrapper for JIT-generated code: see vm_opt_newarray_max. */
VALUE
rb_vm_opt_newarray_max(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    return vm_opt_newarray_max(ec, num, ptr);
}
5242
5243static VALUE
5244vm_opt_newarray_min(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
5245{
5246 if (BASIC_OP_UNREDEFINED_P(BOP_MIN, ARRAY_REDEFINED_OP_FLAG)) {
5247 if (num == 0) {
5248 return Qnil;
5249 }
5250 else {
5251 VALUE result = *ptr;
5252 rb_snum_t i = num - 1;
5253 while (i-- > 0) {
5254 const VALUE v = *++ptr;
5255 if (OPTIMIZED_CMP(v, result) < 0) {
5256 result = v;
5257 }
5258 }
5259 return result;
5260 }
5261 }
5262 else {
5263 return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idMin, 0, NULL, RB_NO_KEYWORDS);
5264 }
5265}
5266
/* Exported wrapper for JIT-generated code: see vm_opt_newarray_min. */
VALUE
rb_vm_opt_newarray_min(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    return vm_opt_newarray_min(ec, num, ptr);
}
5272
5273#undef id_cmp
5274
5275#define IMEMO_CONST_CACHE_SHAREABLE IMEMO_FL_USER0
5276
/* Record that inline constant cache `ic` depends on the constant name `id`.
 * The VM-global `constant_cache` table maps each ID to an st_table used as
 * a set of ICs (the value is always Qtrue).  NOTE(review): presumably this
 * set is consulted to invalidate caches when `id` is (re)assigned; the
 * invalidation side lives elsewhere -- confirm. */
static void
vm_track_constant_cache(ID id, void *ic)
{
    struct rb_id_table *const_cache = GET_VM()->constant_cache;
    VALUE lookup_result;
    st_table *ics;

    if (rb_id_table_lookup(const_cache, id, &lookup_result)) {
        ics = (st_table *)lookup_result;
    }
    else {
        /* First tracker for this ID: create its IC set lazily. */
        ics = st_init_numtable();
        rb_id_table_insert(const_cache, id, (VALUE)ics);
    }

    st_insert(ics, (st_data_t) ic, (st_data_t) Qtrue);
}
5294
/* Register `ic` against every name of the constant path in `segments`
 * (terminated by a 0 entry); idNULL placeholders are skipped.  Runs under
 * the global VM lock because the tracking table is VM-wide shared state.
 * `cfp` is currently unused. */
static void
vm_ic_track_const_chain(rb_control_frame_t *cfp, IC ic, const ID *segments)
{
    RB_VM_LOCK_ENTER();

    for (int i = 0; segments[i]; i++) {
        ID id = segments[i];
        if (id == idNULL) continue;
        vm_track_constant_cache(id, ic);
    }

    RB_VM_LOCK_LEAVE();
}
5308
// For MJIT inlining
/* Validity test for a cached constant value, shared by the interpreter and
 * JIT-inlined code.  The cache may be used only when:
 *   - the value is ractor-shareable (IMEMO_CONST_CACHE_SHAREABLE set) or we
 *     are running on the main ractor, and
 *   - the entry recorded no cref, or the recorded cref matches the current
 *     lexical scope derived from `reg_ep`. */
static inline bool
vm_inlined_ic_hit_p(VALUE flags, VALUE value, const rb_cref_t *ic_cref, const VALUE *reg_ep)
{
    if ((flags & IMEMO_CONST_CACHE_SHAREABLE) || rb_ractor_main_p()) {
        VM_ASSERT(ractor_incidental_shareable_p(flags & IMEMO_CONST_CACHE_SHAREABLE, value));

        return (ic_cref == NULL || // no need to check CREF
                ic_cref == vm_get_cref(reg_ep));
    }
    return false;
}
5321
/* Check whether the inline-cache entry `ice` is still valid in the scope
 * given by `reg_ep` (see vm_inlined_ic_hit_p for the criteria). */
static bool
vm_ic_hit_p(const struct iseq_inline_constant_cache_entry *ice, const VALUE *reg_ep)
{
    VM_ASSERT(IMEMO_TYPE_P(ice, imemo_constcache));
    return vm_inlined_ic_hit_p(ice->flags, ice->value, ice->ic_cref, reg_ep);
}
5328
// YJIT needs this function to never allocate and never raise
bool
rb_vm_ic_hit_p(IC ic, const VALUE *reg_ep)
{
    /* `entry` is NULL until the first vm_ic_update() fills the cache. */
    return ic->entry && vm_ic_hit_p(ic->entry, reg_ep);
}
5335
/* (Re)populate inline constant cache `ic` with `val` for the constant
 * access at `pc`, then notify both JITs so they can patch generated code.
 * If const_missing ran during the lookup, the cache is cleared instead:
 * a const_missing result must be recomputed on every access. */
static void
vm_ic_update(const rb_iseq_t *iseq, IC ic, VALUE val, const VALUE *reg_ep, const VALUE *pc)
{
    if (ruby_vm_const_missing_count > 0) {
        ruby_vm_const_missing_count = 0;
        ic->entry = NULL;
        return;
    }

    struct iseq_inline_constant_cache_entry *ice = (struct iseq_inline_constant_cache_entry *)rb_imemo_new(imemo_constcache, 0, 0, 0, 0);
    RB_OBJ_WRITE(ice, &ice->value, val);
    ice->ic_cref = vm_get_const_key_cref(reg_ep);
    /* Shareable values may be served to non-main ractors, see vm_inlined_ic_hit_p. */
    if (rb_ractor_shareable_p(val)) ice->flags |= IMEMO_CONST_CACHE_SHAREABLE;
    RB_OBJ_WRITE(iseq, &ic->entry, ice);

    RUBY_ASSERT(pc >= ISEQ_BODY(iseq)->iseq_encoded);
    unsigned pos = (unsigned)(pc - ISEQ_BODY(iseq)->iseq_encoded);
    rb_yjit_constant_ic_update(iseq, ic, pos);
    rb_mjit_constant_ic_update(iseq, ic, pos);
}
5356
5357static VALUE
5358vm_once_dispatch(rb_execution_context_t *ec, ISEQ iseq, ISE is)
5359{
5360 rb_thread_t *th = rb_ec_thread_ptr(ec);
5361 rb_thread_t *const RUNNING_THREAD_ONCE_DONE = (rb_thread_t *)(0x1);
5362
5363 again:
5364 if (is->once.running_thread == RUNNING_THREAD_ONCE_DONE) {
5365 return is->once.value;
5366 }
5367 else if (is->once.running_thread == NULL) {
5368 VALUE val;
5369 is->once.running_thread = th;
5370 val = rb_ensure(vm_once_exec, (VALUE)iseq, vm_once_clear, (VALUE)is);
5371 RB_OBJ_WRITE(ec->cfp->iseq, &is->once.value, val);
5372 /* is->once.running_thread is cleared by vm_once_clear() */
5373 is->once.running_thread = RUNNING_THREAD_ONCE_DONE; /* success */
5374 return val;
5375 }
5376 else if (is->once.running_thread == th) {
5377 /* recursive once */
5378 return vm_once_exec((VALUE)iseq);
5379 }
5380 else {
5381 /* waiting for finish */
5382 RUBY_VM_CHECK_INTS(ec);
5384 goto again;
5385 }
5386}
5387
/* Fast dispatch for case/when whose clauses are all literals.  Returns the
 * branch offset stored in the CDHASH for `key`, `else_offset` when the key
 * is not present, or 0 when the fast path does not apply (unsupported key
 * type or a redefined ===), in which case the caller tests each when
 * clause normally. */
static OFFSET
vm_case_dispatch(CDHASH hash, OFFSET else_offset, VALUE key)
{
    switch (OBJ_BUILTIN_TYPE(key)) {
      case -1: /* special constants (immediates) */
      case T_FLOAT:
      case T_SYMBOL:
      case T_BIGNUM:
      case T_STRING:
        if (BASIC_OP_UNREDEFINED_P(BOP_EQQ,
                                   SYMBOL_REDEFINED_OP_FLAG |
                                   INTEGER_REDEFINED_OP_FLAG |
                                   FLOAT_REDEFINED_OP_FLAG |
                                   NIL_REDEFINED_OP_FLAG |
                                   TRUE_REDEFINED_OP_FLAG |
                                   FALSE_REDEFINED_OP_FLAG |
                                   STRING_REDEFINED_OP_FLAG)) {
            st_data_t val;
            if (RB_FLOAT_TYPE_P(key)) {
                double kval = RFLOAT_VALUE(key);
                if (!isinf(kval) && modf(kval, &kval) == 0.0) {
                    /* Integral Floats are looked up as Integer keys, so
                     * e.g. 1.0 can hit a `when 1` entry. */
                    key = FIXABLE(kval) ? LONG2FIX((long)kval) : rb_dbl2big(kval);
                }
            }
            if (rb_hash_stlike_lookup(hash, key, &val)) {
                return FIX2LONG((VALUE)val);
            }
            else {
                return else_offset;
            }
        }
    }
    return 0;
}
5422
5423NORETURN(static void
5424 vm_stack_consistency_error(const rb_execution_context_t *ec,
5425 const rb_control_frame_t *,
5426 const VALUE *));
5427static void
5428vm_stack_consistency_error(const rb_execution_context_t *ec,
5429 const rb_control_frame_t *cfp,
5430 const VALUE *bp)
5431{
5432 const ptrdiff_t nsp = VM_SP_CNT(ec, cfp->sp);
5433 const ptrdiff_t nbp = VM_SP_CNT(ec, bp);
5434 static const char stack_consistency_error[] =
5435 "Stack consistency error (sp: %"PRIdPTRDIFF", bp: %"PRIdPTRDIFF")";
5436#if defined RUBY_DEVEL
5437 VALUE mesg = rb_sprintf(stack_consistency_error, nsp, nbp);
5438 rb_str_cat_cstr(mesg, "\n");
5439 rb_str_append(mesg, rb_iseq_disasm(cfp->iseq));
5441#else
5442 rb_bug(stack_consistency_error, nsp, nbp);
5443#endif
5444}
5445
/* Fast path for `recv + obj`: Fixnum, flonum/Float, String and Array
 * operands while the respective #+ is unredefined.  Returns Qundef to make
 * the caller fall back to a full method dispatch.  The check order is
 * significant: immediates first, then a bail-out before any RBASIC_CLASS
 * access (invalid on special constants). */
static VALUE
vm_opt_plus(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_PLUS, INTEGER_REDEFINED_OP_FLAG)) {
        return rb_fix_plus_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) + RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) + RFLOAT_VALUE(obj));
    }
    else if (RBASIC_CLASS(recv) == rb_cString &&
             RBASIC_CLASS(obj) == rb_cString &&
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, STRING_REDEFINED_OP_FLAG)) {
        return rb_str_opt_plus(recv, obj);
    }
    else if (RBASIC_CLASS(recv) == rb_cArray &&
             RBASIC_CLASS(obj) == rb_cArray &&
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, ARRAY_REDEFINED_OP_FLAG)) {
        return rb_ary_plus(recv, obj);
    }
    else {
        return Qundef;
    }
}
5479
/* Fast path for `recv - obj` (Fixnum, flonum and heap Float operands).
 * Qundef => fall back to full method dispatch. */
static VALUE
vm_opt_minus(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_MINUS, INTEGER_REDEFINED_OP_FLAG)) {
        return rb_fix_minus_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) - RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        /* RBASIC_CLASS below is invalid on special constants. */
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) - RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}
5503
/* Fast path for `recv * obj` (Fixnum, flonum and heap Float operands).
 * Qundef => fall back to full method dispatch. */
static VALUE
vm_opt_mult(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_MULT, INTEGER_REDEFINED_OP_FLAG)) {
        return rb_fix_mul_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) * RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        /* RBASIC_CLASS below is invalid on special constants. */
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) * RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}
5527
/* Fast path for `recv / obj` (Fixnum, flonum and heap Float operands).
 * A zero Fixnum divisor returns Qundef so the real Integer#/ runs and
 * raises ZeroDivisionError.  Qundef => fall back to full dispatch. */
static VALUE
vm_opt_div(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_DIV, INTEGER_REDEFINED_OP_FLAG)) {
        return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_div_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
        return rb_flo_div_flo(recv, obj);
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        /* RBASIC_CLASS below is invalid on special constants. */
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
        return rb_flo_div_flo(recv, obj);
    }
    else {
        return Qundef;
    }
}
5551
/* Fast path for `recv % obj` (Fixnum, flonum and heap Float operands).
 * A zero Fixnum divisor returns Qundef so the real Integer#% runs and
 * raises ZeroDivisionError.  Qundef => fall back to full dispatch. */
static VALUE
vm_opt_mod(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_MOD, INTEGER_REDEFINED_OP_FLAG)) {
        return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_mod_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(ruby_float_mod(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj)));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        /* RBASIC_CLASS below is invalid on special constants. */
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(ruby_float_mod(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj)));
    }
    else {
        return Qundef;
    }
}
5575
/* Fast path for `recv != obj` while `!=` still resolves to the default
 * rb_obj_not_equal: evaluate `recv == obj` through the opt_equality fast
 * path (using the separate `==` call cache `cd_eq`) and negate the result.
 * Qundef => fall back to full dispatch. */
static VALUE
vm_opt_neq(const rb_iseq_t *iseq, CALL_DATA cd, CALL_DATA cd_eq, VALUE recv, VALUE obj)
{
    if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not_equal)) {
        VALUE val = opt_equality(iseq, recv, obj, cd_eq);

        if (!UNDEF_P(val)) {
            return RBOOL(!RTEST(val));
        }
    }

    return Qundef;
}
5589
/* Fast path for `recv < obj`.  Fixnums compare directly on their tagged
 * representation (tagging preserves order).  Qundef => full dispatch.
 * NOTE(review): CHECK_CMP_NAN appears to handle NaN operands before the
 * plain C comparison in the heap-Float branch -- confirm its definition. */
static VALUE
vm_opt_lt(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_LT, INTEGER_REDEFINED_OP_FLAG)) {
        return RBOOL((SIGNED_VALUE)recv < (SIGNED_VALUE)obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) < RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
        CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
        return RBOOL(RFLOAT_VALUE(recv) < RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}
5614
/* Fast path for `recv <= obj` (see vm_opt_lt for the structure).
 * Qundef => fall back to full method dispatch. */
static VALUE
vm_opt_le(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_LE, INTEGER_REDEFINED_OP_FLAG)) {
        return RBOOL((SIGNED_VALUE)recv <= (SIGNED_VALUE)obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) <= RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
        CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
        return RBOOL(RFLOAT_VALUE(recv) <= RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}
5639
/* Fast path for `recv > obj` (see vm_opt_lt for the structure).
 * Qundef => fall back to full method dispatch. */
static VALUE
vm_opt_gt(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_GT, INTEGER_REDEFINED_OP_FLAG)) {
        return RBOOL((SIGNED_VALUE)recv > (SIGNED_VALUE)obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) > RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
        CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
        return RBOOL(RFLOAT_VALUE(recv) > RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}
5664
/* Fast path for `recv >= obj` (see vm_opt_lt for the structure).
 * Qundef => fall back to full method dispatch. */
static VALUE
vm_opt_ge(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_GE, INTEGER_REDEFINED_OP_FLAG)) {
        return RBOOL((SIGNED_VALUE)recv >= (SIGNED_VALUE)obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) >= RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
        CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
        return RBOOL(RFLOAT_VALUE(recv) >= RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}
5689
5690
/* Fast path for `recv << obj` on String and Array receivers.  For String,
 * a String argument is appended directly; anything else goes through
 * rb_str_concat (which also handles Integer codepoints).
 * Qundef => fall back to full method dispatch. */
static VALUE
vm_opt_ltlt(VALUE recv, VALUE obj)
{
    if (SPECIAL_CONST_P(recv)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cString &&
             BASIC_OP_UNREDEFINED_P(BOP_LTLT, STRING_REDEFINED_OP_FLAG)) {
        if (LIKELY(RB_TYPE_P(obj, T_STRING))) {
            return rb_str_buf_append(recv, obj);
        }
        else {
            return rb_str_concat(recv, obj);
        }
    }
    else if (RBASIC_CLASS(recv) == rb_cArray &&
             BASIC_OP_UNREDEFINED_P(BOP_LTLT, ARRAY_REDEFINED_OP_FLAG)) {
        return rb_ary_push(recv, obj);
    }
    else {
        return Qundef;
    }
}
5714
/* Fast path for Integer#& exploiting Fixnum tag bits (see comment below):
 * a single machine AND both computes the result and proves both operands
 * were Fixnums.  Qundef => fall back to full method dispatch. */
static VALUE
vm_opt_and(VALUE recv, VALUE obj)
{
    // If recv and obj are both fixnums, then the bottom tag bit
    // will be 1 on both. 1 & 1 == 1, so the result value will also
    // be a fixnum. If either side is *not* a fixnum, then the tag bit
    // will be 0, and we return Qundef.
    VALUE ret = ((SIGNED_VALUE) recv) & ((SIGNED_VALUE) obj);

    if (FIXNUM_P(ret) &&
        BASIC_OP_UNREDEFINED_P(BOP_AND, INTEGER_REDEFINED_OP_FLAG)) {
        return ret;
    }
    else {
        return Qundef;
    }
}
5732
5733static VALUE
5734vm_opt_or(VALUE recv, VALUE obj)
5735{
5736 if (FIXNUM_2_P(recv, obj) &&
5737 BASIC_OP_UNREDEFINED_P(BOP_OR, INTEGER_REDEFINED_OP_FLAG)) {
5738 return recv | obj;
5739 }
5740 else {
5741 return Qundef;
5742 }
5743}
5744
/* Fast path for `recv[obj]` on Fixnum (bit access), Array and Hash
 * receivers.  Qundef => fall back to full method dispatch. */
static VALUE
vm_opt_aref(VALUE recv, VALUE obj)
{
    if (SPECIAL_CONST_P(recv)) {
        if (FIXNUM_2_P(recv, obj) &&
            BASIC_OP_UNREDEFINED_P(BOP_AREF, INTEGER_REDEFINED_OP_FLAG)) {
            return rb_fix_aref(recv, obj);
        }
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cArray &&
             BASIC_OP_UNREDEFINED_P(BOP_AREF, ARRAY_REDEFINED_OP_FLAG)) {
        if (FIXNUM_P(obj)) {
            return rb_ary_entry_internal(recv, FIX2LONG(obj));
        }
        else {
            /* Non-Fixnum index: Range/Float etc. handled by the full aref. */
            return rb_ary_aref1(recv, obj);
        }
    }
    else if (RBASIC_CLASS(recv) == rb_cHash &&
             BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG)) {
        return rb_hash_aref(recv, obj);
    }
    else {
        return Qundef;
    }
}
5772
/* Fast path for `recv[obj] = set` on Array (Fixnum index only) and Hash
 * receivers.  Returns the assigned value, as #[]= expressions do.
 * Qundef => fall back to full method dispatch. */
static VALUE
vm_opt_aset(VALUE recv, VALUE obj, VALUE set)
{
    if (SPECIAL_CONST_P(recv)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cArray &&
             BASIC_OP_UNREDEFINED_P(BOP_ASET, ARRAY_REDEFINED_OP_FLAG) &&
             FIXNUM_P(obj)) {
        rb_ary_store(recv, FIX2LONG(obj), set);
        return set;
    }
    else if (RBASIC_CLASS(recv) == rb_cHash &&
             BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG)) {
        rb_hash_aset(recv, obj, set);
        return set;
    }
    else {
        return Qundef;
    }
}
5794
/* Fast path for `recv[key]` with a String-literal key (opt_aref_with
 * instruction).  compare_by_identity hashes and hashes with a default
 * proc are excluded from the fast path.  Qundef => full dispatch. */
static VALUE
vm_opt_aref_with(VALUE recv, VALUE key)
{
    if (!SPECIAL_CONST_P(recv) && RBASIC_CLASS(recv) == rb_cHash &&
        BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG) &&
        rb_hash_compare_by_id_p(recv) == Qfalse &&
        !FL_TEST(recv, RHASH_PROC_DEFAULT)) {
        return rb_hash_aref(recv, key);
    }
    else {
        return Qundef;
    }
}
5808
/* Fast path for `recv[key] = val` with a String-literal key (opt_aset_with
 * instruction).  compare_by_identity hashes are excluded from the fast
 * path.  Qundef => full dispatch. */
static VALUE
vm_opt_aset_with(VALUE recv, VALUE key, VALUE val)
{
    if (!SPECIAL_CONST_P(recv) && RBASIC_CLASS(recv) == rb_cHash &&
        BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG) &&
        rb_hash_compare_by_id_p(recv) == Qfalse) {
        return rb_hash_aset(recv, key, val);
    }
    else {
        return Qundef;
    }
}
5821
/* Fast path for #length/#size/#empty? (selected via `bop`) on String,
 * Array and Hash receivers.  For BOP_EMPTY_P the String case returns the
 * byte length (RSTRING_LEN): the caller vm_opt_empty_p only compares the
 * result against zero, so the cheaper byte count suffices; other bops get
 * the character length via rb_str_length.  Qundef => full dispatch. */
static VALUE
vm_opt_length(VALUE recv, int bop)
{
    if (SPECIAL_CONST_P(recv)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cString &&
             BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
        if (bop == BOP_EMPTY_P) {
            return LONG2NUM(RSTRING_LEN(recv));
        }
        else {
            return rb_str_length(recv);
        }
    }
    else if (RBASIC_CLASS(recv) == rb_cArray &&
             BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
        return LONG2NUM(RARRAY_LEN(recv));
    }
    else if (RBASIC_CLASS(recv) == rb_cHash &&
             BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
        return INT2FIX(RHASH_SIZE(recv));
    }
    else {
        return Qundef;
    }
}
5849
5850static VALUE
5851vm_opt_empty_p(VALUE recv)
5852{
5853 switch (vm_opt_length(recv, BOP_EMPTY_P)) {
5854 case Qundef: return Qundef;
5855 case INT2FIX(0): return Qtrue;
5856 default: return Qfalse;
5857 }
5858}
5859
5860VALUE rb_false(VALUE obj);
5861
/* Fast path for #nil?: nil answers true while NilClass#nil? is
 * unredefined; a receiver whose #nil? resolves to the C function rb_false
 * answers false; anything else falls back to full dispatch (Qundef). */
static VALUE
vm_opt_nil_p(const rb_iseq_t *iseq, CALL_DATA cd, VALUE recv)
{
    if (NIL_P(recv) &&
        BASIC_OP_UNREDEFINED_P(BOP_NIL_P, NIL_REDEFINED_OP_FLAG)) {
        return Qtrue;
    }
    else if (vm_method_cfunc_is(iseq, cd, recv, rb_false)) {
        return Qfalse;
    }
    else {
        return Qundef;
    }
}
5876
/* Compute the successor of a Fixnum-tagged VALUE `x` without untagging.
 * Handles the two edge tags explicitly; the default case adds 2 directly
 * to the tagged representation (derivation in the comment below). */
static VALUE
fix_succ(VALUE x)
{
    switch (x) {
      case ~0UL:
        /* 0xFFFF_FFFF == INT2FIX(-1)
         * `-1.succ` is of course 0. */
        return INT2FIX(0);
      case RSHIFT(~0UL, 1):
        /* 0x7FFF_FFFF == LONG2FIX(0x3FFF_FFFF)
         * 0x3FFF_FFFF + 1 == 0x4000_0000, which is a Bignum. */
        return rb_uint2big(1UL << (SIZEOF_LONG * CHAR_BIT - 2));
      default:
        /* LONG2FIX(FIX2LONG(x)+FIX2LONG(y))
         * == ((lx*2+1)/2 + (ly*2+1)/2)*2+1
         * == lx*2 + ly*2 + 1
         * == (lx*2+1) + (ly*2+1) - 1
         * == x + y - 1
         *
         * Here, if we put y := INT2FIX(1):
         *
         * == x + INT2FIX(1) - 1
         * == x + 2 .
         */
        return x + 2;
    }
}
5904
/* Fast path for #succ on Fixnum and String receivers.
 * Qundef => fall back to full method dispatch. */
static VALUE
vm_opt_succ(VALUE recv)
{
    if (FIXNUM_P(recv) &&
        BASIC_OP_UNREDEFINED_P(BOP_SUCC, INTEGER_REDEFINED_OP_FLAG)) {
        return fix_succ(recv);
    }
    else if (SPECIAL_CONST_P(recv)) {
        /* RBASIC_CLASS below is invalid on special constants. */
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cString &&
             BASIC_OP_UNREDEFINED_P(BOP_SUCC, STRING_REDEFINED_OP_FLAG)) {
        return rb_str_succ(recv);
    }
    else {
        return Qundef;
    }
}
5923
5924static VALUE
5925vm_opt_not(const rb_iseq_t *iseq, CALL_DATA cd, VALUE recv)
5926{
5927 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not)) {
5928 return RBOOL(!RTEST(recv));
5929 }
5930 else {
5931 return Qundef;
5932 }
5933}
5934
/* Fast path for `recv =~ obj`.  Note the operand swap in the String case:
 * String#=~ delegates to the Regexp argument, so rb_reg_match is called
 * with (obj, recv).  Qundef => fall back to full method dispatch. */
static VALUE
vm_opt_regexpmatch2(VALUE recv, VALUE obj)
{
    if (SPECIAL_CONST_P(recv)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cString &&
             CLASS_OF(obj) == rb_cRegexp &&
             BASIC_OP_UNREDEFINED_P(BOP_MATCH, STRING_REDEFINED_OP_FLAG)) {
        return rb_reg_match(obj, recv);
    }
    else if (RBASIC_CLASS(recv) == rb_cRegexp &&
             BASIC_OP_UNREDEFINED_P(BOP_MATCH, REGEXP_REDEFINED_OP_FLAG)) {
        return rb_reg_match(recv, obj);
    }
    else {
        return Qundef;
    }
}
5954
5955rb_event_flag_t rb_iseq_event_flags(const rb_iseq_t *iseq, size_t pos);
5956
5957NOINLINE(static void vm_trace(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp));
5958
/* Fire the single event `target_event` (already known to be a member of
 * `pc_events`) at the current pc: ractor-global hooks first, then the
 * iseq-/bmethod-local hooks.  `local_hooks_ptr` is double-indirect because
 * running the global hooks may add to or free the local list; it is
 * re-read after the global hooks return. */
static inline void
vm_trace_hook(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, const VALUE *pc,
              rb_event_flag_t pc_events, rb_event_flag_t target_event,
              rb_hook_list_t *global_hooks, rb_hook_list_t *const *local_hooks_ptr, VALUE val)
{
    rb_event_flag_t event = pc_events & target_event;
    VALUE self = GET_SELF();

    VM_ASSERT(rb_popcount64((uint64_t)event) == 1);

    if (event & global_hooks->events) {
        /* increment PC because source line is calculated with PC-1 */
        reg_cfp->pc++;
        vm_dtrace(event, ec);
        rb_exec_event_hook_orig(ec, global_hooks, event, self, 0, 0, 0 , val, 0);
        reg_cfp->pc--;
    }

    // Load here since global hook above can add and free local hooks
    rb_hook_list_t *local_hooks = *local_hooks_ptr;
    if (local_hooks != NULL) {
        if (event & local_hooks->events) {
            /* increment PC because source line is calculated with PC-1 */
            reg_cfp->pc++;
            rb_exec_event_hook_orig(ec, local_hooks, event, self, 0, 0, 0 , val, 0);
            reg_cfp->pc--;
        }
    }
}
5988
// Return true if given cc has cfunc which is NOT handled by opt_send_without_block.
bool
rb_vm_opt_cfunc_p(CALL_CACHE cc, int insn)
{
    switch (insn) {
      case BIN(opt_eq):
        /* opt_eq assumes the default Object#== (rb_obj_equal). */
        return check_cfunc(vm_cc_cme(cc), rb_obj_equal);
      case BIN(opt_nil_p):
        /* opt_nil_p assumes rb_false, the cfunc behind non-nil #nil?. */
        return check_cfunc(vm_cc_cme(cc), rb_false);
      case BIN(opt_not):
        /* opt_not assumes the default BasicObject#! (rb_obj_not). */
        return check_cfunc(vm_cc_cme(cc), rb_obj_not);
      default:
        return false;
    }
}
6004
6005#define VM_TRACE_HOOK(target_event, val) do { \
6006 if ((pc_events & (target_event)) & enabled_flags) { \
6007 vm_trace_hook(ec, reg_cfp, pc, pc_events, (target_event), global_hooks, local_hooks_ptr, (val)); \
6008 } \
6009} while (0)
6010
6011static void
6012vm_trace(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp)
6013{
6014 const VALUE *pc = reg_cfp->pc;
6015 rb_event_flag_t enabled_flags = ruby_vm_event_flags & ISEQ_TRACE_EVENTS;
6016 rb_event_flag_t global_events = enabled_flags;
6017
6018 if (enabled_flags == 0 && ruby_vm_event_local_num == 0) {
6019 return;
6020 }
6021 else {
6022 const rb_iseq_t *iseq = reg_cfp->iseq;
6023 VALUE iseq_val = (VALUE)iseq;
6024 size_t pos = pc - ISEQ_BODY(iseq)->iseq_encoded;
6025 rb_event_flag_t pc_events = rb_iseq_event_flags(iseq, pos);
6026 rb_hook_list_t *local_hooks = iseq->aux.exec.local_hooks;
6027 rb_hook_list_t *const *local_hooks_ptr = &iseq->aux.exec.local_hooks;
6028 rb_event_flag_t iseq_local_events = local_hooks != NULL ? local_hooks->events : 0;
6029 rb_hook_list_t *bmethod_local_hooks = NULL;
6030 rb_hook_list_t **bmethod_local_hooks_ptr = NULL;
6031 rb_event_flag_t bmethod_local_events = 0;
6032 const bool bmethod_frame = VM_FRAME_BMETHOD_P(reg_cfp);
6033 enabled_flags |= iseq_local_events;
6034
6035 VM_ASSERT((iseq_local_events & ~ISEQ_TRACE_EVENTS) == 0);
6036
6037 if (bmethod_frame) {
6038 const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(reg_cfp);
6039 VM_ASSERT(me->def->type == VM_METHOD_TYPE_BMETHOD);
6040 bmethod_local_hooks = me->def->body.bmethod.hooks;
6041 bmethod_local_hooks_ptr = &me->def->body.bmethod.hooks;
6042 if (bmethod_local_hooks) {
6043 bmethod_local_events = bmethod_local_hooks->events;
6044 }
6045 }
6046
6047
6048 if ((pc_events & enabled_flags) == 0 && !bmethod_frame) {
6049#if 0
6050 /* disable trace */
6051 /* TODO: incomplete */
6052 rb_iseq_trace_set(iseq, vm_event_flags & ISEQ_TRACE_EVENTS);
6053#else
6054 /* do not disable trace because of performance problem
6055 * (re-enable overhead)
6056 */
6057#endif
6058 return;
6059 }
6060 else if (ec->trace_arg != NULL) {
6061 /* already tracing */
6062 return;
6063 }
6064 else {
6065 rb_hook_list_t *global_hooks = rb_ec_ractor_hooks(ec);
6066 /* Note, not considering iseq local events here since the same
6067 * iseq could be used in multiple bmethods. */
6068 rb_event_flag_t bmethod_events = global_events | bmethod_local_events;
6069
6070 if (0) {
6071 ruby_debug_printf("vm_trace>>%4d (%4x) - %s:%d %s\n",
6072 (int)pos,
6073 (int)pc_events,
6074 RSTRING_PTR(rb_iseq_path(iseq)),
6075 (int)rb_iseq_line_no(iseq, pos),
6076 RSTRING_PTR(rb_iseq_label(iseq)));
6077 }
6078 VM_ASSERT(reg_cfp->pc == pc);
6079 VM_ASSERT(pc_events != 0);
6080
6081 /* check traces */
6082 if ((pc_events & RUBY_EVENT_B_CALL) && bmethod_frame && (bmethod_events & RUBY_EVENT_CALL)) {
6083 /* b_call instruction running as a method. Fire call event. */
6084 vm_trace_hook(ec, reg_cfp, pc, RUBY_EVENT_CALL, RUBY_EVENT_CALL, global_hooks, bmethod_local_hooks_ptr, Qundef);
6085 }
6087 VM_TRACE_HOOK(RUBY_EVENT_LINE, Qundef);
6088 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_LINE, Qundef);
6089 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_BRANCH, Qundef);
6090 VM_TRACE_HOOK(RUBY_EVENT_END | RUBY_EVENT_RETURN | RUBY_EVENT_B_RETURN, TOPN(0));
6091 if ((pc_events & RUBY_EVENT_B_RETURN) && bmethod_frame && (bmethod_events & RUBY_EVENT_RETURN)) {
6092 /* b_return instruction running as a method. Fire return event. */
6093 vm_trace_hook(ec, reg_cfp, pc, RUBY_EVENT_RETURN, RUBY_EVENT_RETURN, global_hooks, bmethod_local_hooks_ptr, TOPN(0));
6094 }
6095
6096 // Pin the iseq since `local_hooks_ptr` points inside the iseq's slot on the GC heap.
6097 // We need the pointer to stay valid in case compaction happens in a trace hook.
6098 //
6099 // Similar treatment is unnecessary for `bmethod_local_hooks_ptr` since
6100 // storage for `rb_method_definition_t` is not on the GC heap.
6101 RB_GC_GUARD(iseq_val);
6102 }
6103 }
6104}
6105#undef VM_TRACE_HOOK
6106
6107#if VM_CHECK_MODE > 0
6108NORETURN( NOINLINE( COLDFUNC
6109void rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)));
6110
/* Initialize the VM stack canary with random bytes (VM_CHECK_MODE only).
 * The low bit is forced to 1 so the canary reads as a Fixnum-tagged VALUE
 * -- presumably so a stray read of it off the stack is harmless (it is
 * never a heap pointer); confirm against the canary check sites. */
void
Init_vm_stack_canary(void)
{
    /* This has to be called _after_ our PRNG is properly set up. */
    int n = ruby_fill_random_bytes(&vm_stack_canary, sizeof vm_stack_canary, false);
    vm_stack_canary |= 0x01; // valid VALUE (Fixnum)

    vm_stack_canary_was_born = true;
    /* n is only consulted in assertion-enabled builds. */
    VM_ASSERT(n == 0);
}
6121
6122#ifndef MJIT_HEADER
/* Fatal diagnostic: called when instruction `i` clobbered the stack canary
 * with value `c`.  Reports the instruction name and the inspected value,
 * then aborts via rb_bug(). */
MJIT_FUNC_EXPORTED void
rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)
{
    /* Because a method has already been called, why not call
     * another one. */
    const char *insn = rb_insns_name(i);
    VALUE inspection = rb_inspect(c);
    const char *str = StringValueCStr(inspection);

    rb_bug("dead canary found at %s: %s", insn, str);
}
6134#endif
6135
6136#else
6137void Init_vm_stack_canary(void) { /* nothing to do */ }
6138#endif
6139
6140
6141/* a part of the following code is generated by this ruby script:
6142
614316.times{|i|
6144 typedef_args = (0...i).map{|j| "VALUE v#{j+1}"}.join(", ")
6145 typedef_args.prepend(", ") if i != 0
6146 call_args = (0...i).map{|j| "argv[#{j}]"}.join(", ")
6147 call_args.prepend(", ") if i != 0
6148 puts %Q{
6149static VALUE
6150builtin_invoker#{i}(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
6151{
6152 typedef VALUE (*rb_invoke_funcptr#{i}_t)(rb_execution_context_t *ec, VALUE self#{typedef_args});
6153 return (*(rb_invoke_funcptr#{i}_t)funcptr)(ec, self#{call_args});
6154}}
6155}
6156
6157puts
6158puts "static VALUE (* const cfunc_invokers[])(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr) = {"
615916.times{|i|
6160 puts " builtin_invoker#{i},"
6161}
6162puts "};"
6163*/
6164
/* builtin_invoker0 .. builtin_invoker15: trampolines that unpack the VM's
 * argv array into 0..15 positional VALUE arguments and call the given
 * native function.  Generated by the Ruby script in the comment above;
 * change the script rather than editing these bodies by hand. */
static VALUE
builtin_invoker0(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr0_t)(rb_execution_context_t *ec, VALUE self);
    return (*(rb_invoke_funcptr0_t)funcptr)(ec, self);
}

static VALUE
builtin_invoker1(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr1_t)(rb_execution_context_t *ec, VALUE self, VALUE v1);
    return (*(rb_invoke_funcptr1_t)funcptr)(ec, self, argv[0]);
}

static VALUE
builtin_invoker2(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr2_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2);
    return (*(rb_invoke_funcptr2_t)funcptr)(ec, self, argv[0], argv[1]);
}

static VALUE
builtin_invoker3(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr3_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3);
    return (*(rb_invoke_funcptr3_t)funcptr)(ec, self, argv[0], argv[1], argv[2]);
}

static VALUE
builtin_invoker4(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr4_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4);
    return (*(rb_invoke_funcptr4_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3]);
}

static VALUE
builtin_invoker5(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr5_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5);
    return (*(rb_invoke_funcptr5_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4]);
}

static VALUE
builtin_invoker6(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr6_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6);
    return (*(rb_invoke_funcptr6_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
}

static VALUE
builtin_invoker7(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr7_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7);
    return (*(rb_invoke_funcptr7_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
}

static VALUE
builtin_invoker8(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr8_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8);
    return (*(rb_invoke_funcptr8_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
}

static VALUE
builtin_invoker9(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr9_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9);
    return (*(rb_invoke_funcptr9_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
}

static VALUE
builtin_invoker10(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr10_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10);
    return (*(rb_invoke_funcptr10_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
}

static VALUE
builtin_invoker11(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr11_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11);
    return (*(rb_invoke_funcptr11_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
}

static VALUE
builtin_invoker12(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr12_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12);
    return (*(rb_invoke_funcptr12_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
}

static VALUE
builtin_invoker13(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr13_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13);
    return (*(rb_invoke_funcptr13_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
}

static VALUE
builtin_invoker14(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr14_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14);
    return (*(rb_invoke_funcptr14_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
}

static VALUE
builtin_invoker15(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr15_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14, VALUE v15);
    return (*(rb_invoke_funcptr15_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
}
6276
6277typedef VALUE (*builtin_invoker)(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr);
6278
6279static builtin_invoker
6280lookup_builtin_invoker(int argc)
6281{
6282 static const builtin_invoker invokers[] = {
6283 builtin_invoker0,
6284 builtin_invoker1,
6285 builtin_invoker2,
6286 builtin_invoker3,
6287 builtin_invoker4,
6288 builtin_invoker5,
6289 builtin_invoker6,
6290 builtin_invoker7,
6291 builtin_invoker8,
6292 builtin_invoker9,
6293 builtin_invoker10,
6294 builtin_invoker11,
6295 builtin_invoker12,
6296 builtin_invoker13,
6297 builtin_invoker14,
6298 builtin_invoker15,
6299 };
6300
6301 return invokers[argc];
6302}
6303
6304static inline VALUE
6305invoke_bf(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, const struct rb_builtin_function* bf, const VALUE *argv)
6306{
6307 const bool canary_p = ISEQ_BODY(reg_cfp->iseq)->builtin_inline_p; // Verify an assumption of `Primitive.attr! 'inline'`
6308 SETUP_CANARY(canary_p);
6309 VALUE ret = (*lookup_builtin_invoker(bf->argc))(ec, reg_cfp->self, argv, (rb_insn_func_t)bf->func_ptr);
6310 CHECK_CANARY(canary_p, BIN(invokebuiltin));
6311 return ret;
6312}
6313
6314static VALUE
6315vm_invoke_builtin(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_builtin_function* bf, const VALUE *argv)
6316{
6317 return invoke_bf(ec, cfp, bf, argv);
6318}
6319
6320static VALUE
6321vm_invoke_builtin_delegate(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_builtin_function *bf, unsigned int start_index)
6322{
6323 if (0) { // debug print
6324 fputs("vm_invoke_builtin_delegate: passing -> ", stderr);
6325 for (int i=0; i<bf->argc; i++) {
6326 ruby_debug_printf(":%s ", rb_id2name(ISEQ_BODY(cfp->iseq)->local_table[i+start_index]));
6327 }
6328 ruby_debug_printf("\n" "%s %s(%d):%p\n", RUBY_FUNCTION_NAME_STRING, bf->name, bf->argc, bf->func_ptr);
6329 }
6330
6331 if (bf->argc == 0) {
6332 return invoke_bf(ec, cfp, bf, NULL);
6333 }
6334 else {
6335 const VALUE *argv = cfp->ep - ISEQ_BODY(cfp->iseq)->local_table_size - VM_ENV_DATA_SIZE + 1 + start_index;
6336 return invoke_bf(ec, cfp, bf, argv);
6337 }
6338}
6339
6340// for __builtin_inline!()
6341
6342VALUE
6343rb_vm_lvar_exposed(rb_execution_context_t *ec, int index)
6344{
6345 const rb_control_frame_t *cfp = ec->cfp;
6346 return cfp->ep[index];
6347}
#define RUBY_ASSERT(expr)
Asserts that the given expression is truthy if and only if RUBY_DEBUG is truthy.
Definition assert.h:177
#define RUBY_EVENT_END
Encountered the end of a class clause.
Definition event.h:36
#define RUBY_EVENT_C_CALL
A method, written in C, is called.
Definition event.h:39
#define RUBY_EVENT_B_RETURN
Encountered a next statement.
Definition event.h:52
#define RUBY_EVENT_CLASS
Encountered a new class.
Definition event.h:35
#define RUBY_EVENT_LINE
Encountered a new line.
Definition event.h:34
#define RUBY_EVENT_RETURN
Encountered a return statement.
Definition event.h:38
#define RUBY_EVENT_C_RETURN
Return from a method, written in C.
Definition event.h:40
#define RUBY_EVENT_B_CALL
Encountered a yield statement.
Definition event.h:51
uint32_t rb_event_flag_t
Represents event(s).
Definition event.h:103
#define RUBY_EVENT_CALL
A method, written in Ruby, is called.
Definition event.h:37
VALUE rb_singleton_class(VALUE obj)
Finds or creates the singleton class of the passed object.
Definition class.c:2236
VALUE rb_module_new(void)
Creates a new, anonymous module.
Definition class.c:1014
VALUE rb_class_inherited(VALUE super, VALUE klass)
Calls Class::inherited.
Definition class.c:914
VALUE rb_define_class_id(ID id, VALUE super)
This is a very badly designed API that creates an anonymous class.
Definition class.c:893
#define TYPE(_)
Old name of rb_type.
Definition value_type.h:107
#define FL_SINGLETON
Old name of RUBY_FL_SINGLETON.
Definition fl_type.h:58
#define FL_EXIVAR
Old name of RUBY_FL_EXIVAR.
Definition fl_type.h:67
#define REALLOC_N
Old name of RB_REALLOC_N.
Definition memory.h:397
#define ALLOC
Old name of RB_ALLOC.
Definition memory.h:394
#define RFLOAT_VALUE
Old name of rb_float_value.
Definition double.h:28
#define T_STRING
Old name of RUBY_T_STRING.
Definition value_type.h:78
#define Qundef
Old name of RUBY_Qundef.
#define INT2FIX
Old name of RB_INT2FIX.
Definition long.h:48
#define T_NIL
Old name of RUBY_T_NIL.
Definition value_type.h:72
#define T_FLOAT
Old name of RUBY_T_FLOAT.
Definition value_type.h:64
#define T_IMEMO
Old name of RUBY_T_IMEMO.
Definition value_type.h:67
#define ID2SYM
Old name of RB_ID2SYM.
Definition symbol.h:44
#define T_BIGNUM
Old name of RUBY_T_BIGNUM.
Definition value_type.h:57
#define SPECIAL_CONST_P
Old name of RB_SPECIAL_CONST_P.
#define T_STRUCT
Old name of RUBY_T_STRUCT.
Definition value_type.h:79
#define T_FIXNUM
Old name of RUBY_T_FIXNUM.
Definition value_type.h:63
#define SYM2ID
Old name of RB_SYM2ID.
Definition symbol.h:45
#define CLASS_OF
Old name of rb_class_of.
Definition globals.h:203
#define rb_ary_new4
Old name of rb_ary_new_from_values.
Definition array.h:653
#define FIXABLE
Old name of RB_FIXABLE.
Definition fixnum.h:25
#define LONG2FIX
Old name of RB_INT2FIX.
Definition long.h:49
#define FIX2INT
Old name of RB_FIX2INT.
Definition int.h:41
#define T_MODULE
Old name of RUBY_T_MODULE.
Definition value_type.h:70
#define STATIC_SYM_P
Old name of RB_STATIC_SYM_P.
#define ASSUME
Old name of RBIMPL_ASSUME.
Definition assume.h:27
#define FIX2ULONG
Old name of RB_FIX2ULONG.
Definition long.h:47
#define T_TRUE
Old name of RUBY_T_TRUE.
Definition value_type.h:81
#define T_ICLASS
Old name of RUBY_T_ICLASS.
Definition value_type.h:66
#define T_HASH
Old name of RUBY_T_HASH.
Definition value_type.h:65
#define ALLOC_N
Old name of RB_ALLOC_N.
Definition memory.h:393
#define FL_TEST_RAW
Old name of RB_FL_TEST_RAW.
Definition fl_type.h:140
#define rb_ary_new3
Old name of rb_ary_new_from_args.
Definition array.h:652
#define LONG2NUM
Old name of RB_LONG2NUM.
Definition long.h:50
#define rb_exc_new3
Old name of rb_exc_new_str.
Definition error.h:38
#define T_FALSE
Old name of RUBY_T_FALSE.
Definition value_type.h:61
#define Qtrue
Old name of RUBY_Qtrue.
#define Qnil
Old name of RUBY_Qnil.
#define Qfalse
Old name of RUBY_Qfalse.
#define FIX2LONG
Old name of RB_FIX2LONG.
Definition long.h:46
#define T_ARRAY
Old name of RUBY_T_ARRAY.
Definition value_type.h:56
#define T_OBJECT
Old name of RUBY_T_OBJECT.
Definition value_type.h:75
#define NIL_P
Old name of RB_NIL_P.
#define T_SYMBOL
Old name of RUBY_T_SYMBOL.
Definition value_type.h:80
#define DBL2NUM
Old name of rb_float_new.
Definition double.h:29
#define T_CLASS
Old name of RUBY_T_CLASS.
Definition value_type.h:58
#define BUILTIN_TYPE
Old name of RB_BUILTIN_TYPE.
Definition value_type.h:85
#define FL_TEST
Old name of RB_FL_TEST.
Definition fl_type.h:139
#define FIXNUM_P
Old name of RB_FIXNUM_P.
#define FL_USHIFT
Old name of RUBY_FL_USHIFT.
Definition fl_type.h:70
#define FL_SET_RAW
Old name of RB_FL_SET_RAW.
Definition fl_type.h:138
#define SYMBOL_P
Old name of RB_SYMBOL_P.
Definition value_type.h:88
void rb_notimplement(void)
Definition error.c:3193
void rb_raise(VALUE exc, const char *fmt,...)
Exception entry point.
Definition error.c:3150
void rb_exc_raise(VALUE mesg)
Raises an exception in the current thread.
Definition eval.c:688
void rb_bug(const char *fmt,...)
Interpreter panic switch.
Definition error.c:794
VALUE rb_eTypeError
TypeError exception.
Definition error.c:1091
VALUE rb_eFatal
fatal exception.
Definition error.c:1087
VALUE rb_eNoMethodError
NoMethodError exception.
Definition error.c:1099
void rb_exc_fatal(VALUE mesg)
Raises a fatal error in the current thread.
Definition eval.c:701
VALUE rb_eRuntimeError
RuntimeError exception.
Definition error.c:1089
void rb_warn(const char *fmt,...)
Identical to rb_warning(), except it reports always regardless of runtime -W flag.
Definition error.c:411
void rb_error_frozen_object(VALUE frozen_obj)
Identical to rb_error_frozen(), except it takes arbitrary Ruby object instead of C's string.
Definition error.c:3498
VALUE rb_exc_new_str(VALUE etype, VALUE str)
Identical to rb_exc_new_cstr(), except it takes a Ruby's string instead of C's.
Definition error.c:1142
VALUE rb_eArgError
ArgumentError exception.
Definition error.c:1092
VALUE rb_cClass
Class class.
Definition object.c:54
VALUE rb_cArray
Array class.
Definition array.c:40
VALUE rb_obj_alloc(VALUE klass)
Allocates an instance of the given class.
Definition object.c:1939
VALUE rb_cRegexp
Regexp class.
Definition re.c:2544
VALUE rb_obj_frozen_p(VALUE obj)
Just calls RB_OBJ_FROZEN() inside.
Definition object.c:1194
VALUE rb_cHash
Hash class.
Definition hash.c:94
VALUE rb_obj_class(VALUE obj)
Queries the class of an object.
Definition object.c:190
VALUE rb_inspect(VALUE obj)
Generates a human-readable textual representation of the given object.
Definition object.c:600
VALUE rb_cBasicObject
BasicObject class.
Definition object.c:50
VALUE rb_cModule
Module class.
Definition object.c:53
VALUE rb_class_real(VALUE klass)
Finds a "real" class.
Definition object.c:180
VALUE rb_obj_is_kind_of(VALUE obj, VALUE klass)
Queries if the given object is an instance (of possibly descendants) of the given class.
Definition object.c:787
VALUE rb_cFloat
Float class.
Definition numeric.c:191
VALUE rb_cProc
Proc class.
Definition proc.c:52
VALUE rb_cString
String class.
Definition string.c:79
#define RB_OBJ_WRITTEN(old, oldv, young)
Identical to RB_OBJ_WRITE(), except it doesn't write any values, but only a WB declaration.
Definition rgengc.h:232
#define RB_OBJ_WRITE(old, slot, young)
Declaration of a "back" pointer.
Definition rgengc.h:220
#define UNLIMITED_ARGUMENTS
This macro is used in conjunction with rb_check_arity().
Definition error.h:35
#define rb_check_frozen
Just another name of rb_check_frozen.
Definition error.h:264
static int rb_check_arity(int argc, int min, int max)
Ensures that the passed integer is in the passed range.
Definition error.h:280
#define rb_check_frozen_internal(obj)
Definition error.h:247
VALUE rb_proc_call_with_block(VALUE recv, int argc, const VALUE *argv, VALUE proc)
Identical to rb_proc_call(), except you can additionally pass another proc object,...
Definition proc.c:1027
VALUE rb_reg_last_match(VALUE md)
This just returns the argument, stringified.
Definition re.c:1886
VALUE rb_reg_match(VALUE re, VALUE str)
This is the match operator.
Definition re.c:3597
VALUE rb_reg_nth_match(int n, VALUE md)
Queries the nth captured substring.
Definition re.c:1861
VALUE rb_reg_match_post(VALUE md)
The portion of the original string after the given match.
Definition re.c:1943
VALUE rb_reg_match_pre(VALUE md)
The portion of the original string before the given match.
Definition re.c:1910
VALUE rb_reg_match_last(VALUE md)
The portion of the original string that captured at the very last.
Definition re.c:1960
VALUE rb_str_append(VALUE dst, VALUE src)
Identical to rb_str_buf_append(), except it converts the right hand side before concatenating.
Definition string.c:3353
VALUE rb_sym_to_s(VALUE sym)
This is an rb_sym2str() + rb_str_dup() combo.
Definition string.c:11595
VALUE rb_str_succ(VALUE orig)
Searches for the "successor" of a string.
Definition string.c:4826
VALUE rb_str_buf_append(VALUE dst, VALUE src)
Identical to rb_str_cat_cstr(), except it takes Ruby's string instead of C's.
Definition string.c:3319
VALUE rb_str_concat(VALUE dst, VALUE src)
Identical to rb_str_append(), except it also accepts an integer as a codepoint.
Definition string.c:3453
#define rb_str_cat_cstr(buf, str)
Identical to rb_str_cat(), except it assumes the passed pointer is a pointer to a C string.
Definition string.h:1656
VALUE rb_str_length(VALUE)
Identical to rb_str_strlen(), except it returns the value in rb_cInteger.
Definition string.c:2163
VALUE rb_str_intern(VALUE str)
Identical to rb_to_symbol(), except it assumes the receiver being an instance of RString.
Definition symbol.c:851
void rb_thread_schedule(void)
Tries to switch to another thread.
Definition thread.c:1472
VALUE rb_const_get(VALUE space, ID name)
Identical to rb_const_defined(), except it returns the actual defined value.
Definition variable.c:2896
VALUE rb_attr_get(VALUE obj, ID name)
Identical to rb_ivar_get()
Definition variable.c:1226
VALUE rb_ivar_set(VALUE obj, ID name, VALUE val)
Identical to rb_iv_set(), except it accepts the name as an ID instead of a C string.
Definition variable.c:1606
void rb_cvar_set(VALUE klass, ID name, VALUE val)
Assigns a value to a class variable.
Definition variable.c:3677
VALUE rb_cvar_find(VALUE klass, ID name, VALUE *front)
Identical to rb_cvar_get(), except it takes additional "front" pointer.
Definition variable.c:3732
VALUE rb_ivar_get(VALUE obj, ID name)
Identical to rb_iv_get(), except it accepts the name as an ID instead of a C string.
Definition variable.c:1218
void rb_const_set(VALUE space, ID name, VALUE val)
Names a constant.
Definition variable.c:3346
VALUE rb_autoload_load(VALUE space, ID name)
Kicks the autoload procedure as if it was "touched".
Definition variable.c:2731
VALUE rb_mod_name(VALUE mod)
Queries the name of a module.
Definition variable.c:137
VALUE rb_const_get_at(VALUE space, ID name)
Identical to rb_const_defined_at(), except it returns the actual defined value.
Definition variable.c:2902
void rb_set_class_path_string(VALUE klass, VALUE space, VALUE name)
Identical to rb_set_class_path(), except it accepts the name as Ruby's string instead of C's.
Definition variable.c:231
VALUE rb_ivar_defined(VALUE obj, ID name)
Queries if the instance variable is defined at the object.
Definition variable.c:1623
int rb_const_defined_at(VALUE space, ID name)
Identical to rb_const_defined(), except it doesn't look for parent classes.
Definition variable.c:3210
VALUE rb_cvar_defined(VALUE klass, ID name)
Queries if the given class has the given class variable.
Definition variable.c:3754
VALUE rb_class_path(VALUE mod)
Identical to rb_mod_name(), except it returns #<Class: ...> style inspection for anonymous modules.
Definition variable.c:188
int rb_const_defined(VALUE space, ID name)
Queries if the constant is defined at the namespace.
Definition variable.c:3204
VALUE rb_check_funcall(VALUE recv, ID mid, int argc, const VALUE *argv)
Identical to rb_funcallv(), except it returns RUBY_Qundef instead of raising rb_eNoMethodError.
Definition vm_eval.c:664
rb_alloc_func_t rb_get_alloc_func(VALUE klass)
Queries the allocator function of a class.
Definition vm_method.c:1148
void rb_define_alloc_func(VALUE klass, rb_alloc_func_t func)
Sets the allocator function of a class.
int rb_method_boundp(VALUE klass, ID id, int ex)
Queries if the klass has this method.
Definition vm_method.c:1681
ID rb_check_id(volatile VALUE *namep)
Detects if the given name is already interned or not.
Definition symbol.c:1085
ID rb_intern(const char *name)
Finds or creates a symbol of the given name.
Definition symbol.c:796
VALUE rb_sym2str(VALUE id)
Identical to rb_id2str(), except it takes an instance of rb_cSymbol rather than an ID.
Definition symbol.c:943
const char * rb_id2name(ID id)
Retrieves the name mapped to the given id.
Definition symbol.c:960
static bool rb_ractor_shareable_p(VALUE obj)
Queries if multiple Ractors can share the passed object or not.
Definition ractor.h:249
#define RB_OBJ_SHAREABLE_P(obj)
Queries if the passed object has previously classified as shareable or not.
Definition ractor.h:235
VALUE rb_sprintf(const char *fmt,...)
Ruby's extended sprintf(3).
Definition sprintf.c:1219
VALUE rb_str_catf(VALUE dst, const char *fmt,...)
Identical to rb_sprintf(), except it renders the output to the specified object rather than creating ...
Definition sprintf.c:1242
#define MEMCPY(p1, p2, type, n)
Handy macro to call memcpy.
Definition memory.h:366
#define ALLOCA_N(type, n)
Definition memory.h:286
#define RB_GC_GUARD(v)
Prevents premature destruction of local objects.
Definition memory.h:161
#define MEMMOVE(p1, p2, type, n)
Handy macro to call memmove.
Definition memory.h:378
VALUE type(ANYARGS)
ANYARGS-ed function type.
VALUE rb_ensure(type *q, VALUE w, type *e, VALUE r)
An equivalent of ensure clause.
#define RARRAY_LEN
Just another name of rb_array_len.
Definition rarray.h:68
#define RARRAY_CONST_PTR_TRANSIENT
Just another name of rb_array_const_ptr_transient.
Definition rarray.h:70
#define RARRAY_AREF(a, i)
Definition rarray.h:583
static VALUE RBASIC_CLASS(VALUE obj)
Queries the class of an object.
Definition rbasic.h:152
#define RBASIC(obj)
Convenient casting macro.
Definition rbasic.h:40
#define RCLASS_SUPER
Just another name of rb_class_get_superclass.
Definition rclass.h:44
#define RHASH_SIZE(h)
Queries the size of the hash.
Definition rhash.h:82
#define RHASH_EMPTY_P(h)
Checks if the hash is empty.
Definition rhash.h:92
static VALUE * ROBJECT_IVPTR(VALUE obj)
Queries the instance variables.
Definition robject.h:162
static long RSTRING_LEN(VALUE str)
Queries the length of the string.
Definition rstring.h:484
static char * RSTRING_PTR(VALUE str)
Queries the contents pointer of the string.
Definition rstring.h:498
#define StringValueCStr(v)
Identical to StringValuePtr, except it additionally checks for the contents for viability as a C stri...
Definition rstring.h:95
#define RB_NO_KEYWORDS
Do not pass keywords.
Definition scan_args.h:69
static bool RB_SPECIAL_CONST_P(VALUE obj)
Checks if the given object is of enum ruby_special_consts.
#define RTEST
This is an old name of RB_TEST.
#define ANYARGS
Functions declared using this macro take arbitrary arguments, including void.
Definition stdarg.h:64
Definition hash.h:43
Definition iseq.h:263
Definition vm_core.h:247
Definition vm_core.h:281
Definition vm_core.h:276
Definition method.h:62
Definition constant.h:33
CREF (Class REFerence)
Definition method.h:44
Definition class.h:32
Definition method.h:54
rb_cref_t * cref
class reference, should be marked
Definition method.h:136
const rb_iseq_t * iseqptr
iseq pointer, should be separated from iseqval
Definition method.h:135
Definition st.h:79
IFUNC (Internal FUNCtion)
Definition imemo.h:84
SVAR (Special VARiable)
Definition imemo.h:53
const VALUE cref_or_me
class reference or rb_method_entry_t
Definition imemo.h:55
THROW_DATA.
Definition imemo.h:62
Definition vm_core.h:285
intptr_t SIGNED_VALUE
A signed integer type that has the same width with VALUE.
Definition value.h:63
uintptr_t ID
Type that represents a Ruby identifier such as a variable name.
Definition value.h:52
uintptr_t VALUE
Type that represents a Ruby object.
Definition value.h:40
static enum ruby_value_type RB_BUILTIN_TYPE(VALUE obj)
Queries the type of the object.
Definition value_type.h:181
static bool RB_FLOAT_TYPE_P(VALUE obj)
Queries if the object is an instance of rb_cFloat.
Definition value_type.h:263
static bool RB_TYPE_P(VALUE obj, enum ruby_value_type t)
Queries if the given object is of given type.
Definition value_type.h:375