#ifndef RUBY_VM_CALLINFO_H
#define RUBY_VM_CALLINFO_H

#include "debug_counter.h"
#include "internal/class.h"
enum vm_call_flag_bits {
    VM_CALL_ARGS_SPLAT_bit,     // m(*args)
    VM_CALL_ARGS_BLOCKARG_bit,  // m(&block)
    VM_CALL_FCALL_bit,          // m(args)   receiver-less call
    VM_CALL_VCALL_bit,          // m         receiver-less call without parentheses
    VM_CALL_ARGS_SIMPLE_bit,    // no splat, kwarg, or block argument at the call site
    VM_CALL_BLOCKISEQ_bit,      // has an attached block iseq
    VM_CALL_KWARG_bit,          // m(k1: v1, k2: v2)
    VM_CALL_KW_SPLAT_bit,       // m(**opts)
    VM_CALL_TAILCALL_bit,       // located at the tail position
    VM_CALL_SUPER_bit,          // super
    VM_CALL_ZSUPER_bit,         // zsuper (super without explicit arguments)
    VM_CALL_OPT_SEND_bit,       // internal flag for optimized send
    VM_CALL_KW_SPLAT_MUT_bit,   // kw splat hash may be mutated (to avoid allocating a new one)
    VM_CALL__END
};
#define VM_CALL_ARGS_SPLAT      (0x01 << VM_CALL_ARGS_SPLAT_bit)
#define VM_CALL_ARGS_BLOCKARG   (0x01 << VM_CALL_ARGS_BLOCKARG_bit)
#define VM_CALL_FCALL           (0x01 << VM_CALL_FCALL_bit)
#define VM_CALL_VCALL           (0x01 << VM_CALL_VCALL_bit)
#define VM_CALL_ARGS_SIMPLE     (0x01 << VM_CALL_ARGS_SIMPLE_bit)
#define VM_CALL_BLOCKISEQ       (0x01 << VM_CALL_BLOCKISEQ_bit)
#define VM_CALL_KWARG           (0x01 << VM_CALL_KWARG_bit)
#define VM_CALL_KW_SPLAT        (0x01 << VM_CALL_KW_SPLAT_bit)
#define VM_CALL_TAILCALL        (0x01 << VM_CALL_TAILCALL_bit)
#define VM_CALL_SUPER           (0x01 << VM_CALL_SUPER_bit)
#define VM_CALL_ZSUPER          (0x01 << VM_CALL_ZSUPER_bit)
#define VM_CALL_OPT_SEND        (0x01 << VM_CALL_OPT_SEND_bit)
#define VM_CALL_KW_SPLAT_MUT    (0x01 << VM_CALL_KW_SPLAT_MUT_bit)
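
/* Illustrative sketch (not part of the original header): call-site properties
 * are tested by masking these flags against vm_ci_flag(ci), e.g.
 *
 *     if (vm_ci_flag(ci) & VM_CALL_ARGS_SPLAT) {
 *         // the call site looks like m(*args)
 *     }
 *     if ((vm_ci_flag(ci) & (VM_CALL_KWARG | VM_CALL_KW_SPLAT)) == 0) {
 *         // no keyword arguments of any kind at this call site
 *     }
 */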
static inline size_t
rb_callinfo_kwarg_bytes(int keyword_len)
{
    return rb_size_mul_add_or_raise(keyword_len, sizeof(VALUE),
                                    sizeof(struct rb_callinfo_kwarg), rb_eRuntimeError);
}
#if SIZEOF_VALUE == 8
#define CI_EMBED_TAG_bits   1
#define CI_EMBED_ARGC_bits 15
#define CI_EMBED_FLAG_bits 16
#define CI_EMBED_ID_bits   32
#elif SIZEOF_VALUE == 4
#define CI_EMBED_TAG_bits   1
#define CI_EMBED_ARGC_bits  3
#define CI_EMBED_FLAG_bits 13
#define CI_EMBED_ID_bits   15
#endif

#if (CI_EMBED_TAG_bits + CI_EMBED_ARGC_bits + CI_EMBED_FLAG_bits + CI_EMBED_ID_bits) != (SIZEOF_VALUE * 8)
#error "packed callinfo field widths must add up to the size of VALUE"
#endif
#define CI_EMBED_FLAG 0x01
#define CI_EMBED_ARGC_SHFT (CI_EMBED_TAG_bits)
#define CI_EMBED_ARGC_MASK ((((VALUE)1)<<CI_EMBED_ARGC_bits) - 1)
#define CI_EMBED_FLAG_SHFT (CI_EMBED_TAG_bits + CI_EMBED_ARGC_bits)
#define CI_EMBED_FLAG_MASK ((((VALUE)1)<<CI_EMBED_FLAG_bits) - 1)
#define CI_EMBED_ID_SHFT   (CI_EMBED_TAG_bits + CI_EMBED_ARGC_bits + CI_EMBED_FLAG_bits)
#define CI_EMBED_ID_MASK   ((((VALUE)1)<<CI_EMBED_ID_bits) - 1)
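
/* Layout sketch of a "packed" callinfo, derived from the widths and shifts
 * above (64-bit VALUE case):
 *
 *     bit  0      : tag, always 1 for a packed ci (see vm_ci_packed_p)
 *     bits 1..15  : argc
 *     bits 16..31 : flag
 *     bits 32..63 : method id (mid)
 *
 * A heap-allocated callinfo is an aligned imemo pointer, so its low bit is 0.
 */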
static inline bool
vm_ci_packed_p(const struct rb_callinfo *ci)
{
    if (LIKELY(((VALUE)ci) & 0x01)) {
        return true;
    }
    VM_ASSERT(IMEMO_TYPE_P(ci, imemo_callinfo));
    return false;
}
    if (vm_ci_packed_p(ci) || IMEMO_TYPE_P(ci, imemo_callinfo)) {
    if (vm_ci_packed_p(ci)) {
        return (((VALUE)ci) >> CI_EMBED_ID_SHFT) & CI_EMBED_ID_MASK;
static inline unsigned int
vm_ci_flag(const struct rb_callinfo *ci)
{
    if (vm_ci_packed_p(ci)) {
        return (unsigned int)((((VALUE)ci) >> CI_EMBED_FLAG_SHFT) & CI_EMBED_FLAG_MASK);
    }
    else {
        return (unsigned int)ci->flag;
    }
}
static inline unsigned int
vm_ci_argc(const struct rb_callinfo *ci)
{
    if (vm_ci_packed_p(ci)) {
        return (unsigned int)((((VALUE)ci) >> CI_EMBED_ARGC_SHFT) & CI_EMBED_ARGC_MASK);
    }
    else {
        return (unsigned int)ci->argc;
    }
}
    if (vm_ci_packed_p(ci)) {
    if (vm_ci_packed_p(ci)) {
        ruby_debug_printf("packed_ci ID:%s flag:%x argc:%u\n",
                          rb_id2name(vm_ci_mid(ci)), vm_ci_flag(ci), vm_ci_argc(ci));
#define vm_ci_new(mid, flag, argc, kwarg)         vm_ci_new_(mid, flag, argc, kwarg, __FILE__, __LINE__)
#define vm_ci_new_runtime(mid, flag, argc, kwarg) vm_ci_new_runtime_(mid, flag, argc, kwarg, __FILE__, __LINE__)
#define VM_CI_EMBEDDABLE_P(mid, flag, argc, kwarg) \
    (((mid ) & ~CI_EMBED_ID_MASK)   ? false :      \
     ((flag) & ~CI_EMBED_FLAG_MASK) ? false :      \
     ((argc) & ~CI_EMBED_ARGC_MASK) ? false :      \
     (kwarg) ? false : true)
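
/* Hedged example of VM_CI_EMBEDDABLE_P: a callinfo can be packed only when
 * every field fits the embedded widths above and there are no keyword args.
 *
 *     VM_CI_EMBEDDABLE_P(rb_intern("foo"), VM_CALL_FCALL, 2, NULL)  // typically true on 64-bit builds
 *     VM_CI_EMBEDDABLE_P(mid, flag, argc, kw_args)                  // false whenever kw_args != NULL
 */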
#define vm_ci_new_id(mid, flag, argc, must_zero)  \
    ((const struct rb_callinfo *)                 \
     ((((VALUE)(mid )) << CI_EMBED_ID_SHFT)   |   \
      (((VALUE)(flag)) << CI_EMBED_FLAG_SHFT) |   \
      (((VALUE)(argc)) << CI_EMBED_ARGC_SHFT) |   \
      RUBY_FIXNUM_FLAG))
static inline const struct rb_callinfo *
vm_ci_new_(ID mid, unsigned int flag, unsigned int argc,
           const struct rb_callinfo_kwarg *kwarg, const char *file, int line)
{
    if (VM_CI_EMBEDDABLE_P(mid, flag, argc, kwarg)) {
        RB_DEBUG_COUNTER_INC(ci_packed);
        return vm_ci_new_id(mid, flag, argc, kwarg);
    }

    const bool debug = 0;
    if (debug) ruby_debug_printf("%s:%d ", file, line);

    const struct rb_callinfo *ci = (const struct rb_callinfo *)
        rb_imemo_new(imemo_callinfo, (VALUE)mid, (VALUE)flag, (VALUE)argc, (VALUE)kwarg);

    if (kwarg) {
        RB_DEBUG_COUNTER_INC(ci_kw);
    }
    else {
        RB_DEBUG_COUNTER_INC(ci_nokw);
    }

    VM_ASSERT(vm_ci_flag(ci) == flag);
    VM_ASSERT(vm_ci_argc(ci) == argc);

    return ci;
}
static inline const struct rb_callinfo *
vm_ci_new_runtime_(ID mid, unsigned int flag, unsigned int argc,
                   const struct rb_callinfo_kwarg *kwarg, const char *file, int line)
{
    RB_DEBUG_COUNTER_INC(ci_runtime);
    return vm_ci_new_(mid, flag, argc, kwarg, file, line);
}
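
/* Usage sketch (hypothetical caller, not from the original header): runtime
 * construction goes through the vm_ci_new_runtime() wrapper above, which
 * passes __FILE__/__LINE__ along for the debug output in vm_ci_new_().
 *
 *     const struct rb_callinfo *ci =
 *         vm_ci_new_runtime(rb_intern("each"), VM_CALL_ARGS_SIMPLE, 0, NULL);
 */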
#define VM_CALLINFO_NOT_UNDER_GC IMEMO_FL_USER0

    else if (vm_ci_packed_p(ci)) {

        VM_ASSERT(IMEMO_TYPE_P(ci, imemo_callinfo));
#define VM_CI_ON_STACK(mid_, flags_, argc_, kwarg_) \
    (struct rb_callinfo) {                          \
        .flags = T_IMEMO |                          \
            (imemo_callinfo << FL_USHIFT) |         \
            VM_CALLINFO_NOT_UNDER_GC,               \
typedef VALUE (*vm_call_handler)(
    struct rb_execution_context_struct *ec,
    struct rb_control_frame_struct *cfp,
    struct rb_calling_info *calling);

    const vm_call_handler call_;

    const enum method_missing_reason method_missing_reason;
#define VM_CALLCACHE_UNMARKABLE FL_FREEZE
#define VM_CALLCACHE_ON_STACK   FL_EXIVAR

extern const struct rb_callcache *rb_vm_empty_cc(void);
extern const struct rb_callcache *rb_vm_empty_cc_for_super(void);

#define vm_cc_empty() rb_vm_empty_cc()
static inline void vm_cc_attr_index_set(const struct rb_callcache *cc, attr_index_t index, shape_id_t dest_shape_id);

static inline void
vm_cc_attr_index_initialize(const struct rb_callcache *cc, shape_id_t shape_id)
{
    vm_cc_attr_index_set(cc, (attr_index_t)-1, shape_id);
}
static inline const struct rb_callcache *
vm_cc_new(VALUE klass, const struct rb_callable_method_entry_struct *cme, vm_call_handler call)
{
    const struct rb_callcache *cc =
        (const struct rb_callcache *)rb_imemo_new(imemo_callcache, (VALUE)cme, (VALUE)call, 0, klass);
    vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
    RB_DEBUG_COUNTER_INC(cc_new);
    return cc;
}
#define VM_CC_ON_STACK(clazz, call, aux, cme)  \
    (struct rb_callcache) {                    \
        .flags = T_IMEMO |                     \
            (imemo_callcache << FL_USHIFT) |   \
            VM_CALLCACHE_UNMARKABLE |          \
            VM_CALLCACHE_ON_STACK,             \
    VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
    VM_ASSERT(cc->klass == 0 ||
              RB_TYPE_P(cc->klass, T_CLASS) || RB_TYPE_P(cc->klass, T_ICLASS));
    return cc->klass == klass;
    VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
    VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
    VM_ASSERT(cc->call_ == NULL || // not initialized yet
              !vm_cc_markable(cc) ||
              cc->cme_ != NULL);

    return cc->cme_;
static inline vm_call_handler
vm_cc_call(const struct rb_callcache *cc)
{
    VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
    VM_ASSERT(cc->call_ != NULL);
    return cc->call_;
}
static inline attr_index_t
vm_cc_attr_index(const struct rb_callcache *cc)
{
    VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
    return (attr_index_t)((cc->aux_.attr.value & SHAPE_FLAG_MASK) - 1);
}
static inline shape_id_t
vm_cc_attr_index_dest_shape_id(const struct rb_callcache *cc)
{
    VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
    return cc->aux_.attr.value >> SHAPE_FLAG_SHIFT;
}
static inline void
vm_cc_atomic_shape_and_index(const struct rb_callcache *cc, shape_id_t *shape_id, attr_index_t *index)
{
    uintptr_t cache_value = cc->aux_.attr.value; // load shape id and index together
    *shape_id = (shape_id_t)(cache_value >> SHAPE_FLAG_SHIFT);
    *index = (attr_index_t)(cache_value & SHAPE_FLAG_MASK) - 1;
}
    uintptr_t cache_value = ic->value;
    *shape_id = (shape_id_t)(cache_value >> SHAPE_FLAG_SHIFT);
    *index = (attr_index_t)(cache_value & SHAPE_FLAG_MASK) - 1;
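
/* Sketch of the attribute-cache encoding implied by the reads above and the
 * writers further below: one word packs (index + 1) in the low SHAPE_FLAG_MASK
 * bits and the destination shape id above SHAPE_FLAG_SHIFT.
 *
 *     value    = ((uintptr_t)dest_shape_id << SHAPE_FLAG_SHIFT) | (attr_index_t)(index + 1);
 *     shape_id = (shape_id_t)(value >> SHAPE_FLAG_SHIFT);
 *     index    = (attr_index_t)(value & SHAPE_FLAG_MASK) - 1;  // (attr_index_t)-1 means "no index cached"
 */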
static inline shape_id_t

    return (shape_id_t)(ic->value >> SHAPE_FLAG_SHIFT);
static inline unsigned int
vm_cc_cmethod_missing_reason(const struct rb_callcache *cc)
{
    VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
    return cc->aux_.method_missing_reason;
}
    if (cc->klass && !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc))) {
    VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
    if (cc->klass == klass && !METHOD_ENTRY_INVALIDATED(cc_cme)) {
static inline void
vm_cc_call_set(const struct rb_callcache *cc, vm_call_handler call)
{
    VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
    VM_ASSERT(cc != vm_cc_empty());
    *(vm_call_handler *)&cc->call_ = call;
}
static inline void
vm_cc_attr_index_set(const struct rb_callcache *cc, attr_index_t index, shape_id_t dest_shape_id)
{
    uintptr_t *attr_value = (uintptr_t *)&cc->aux_.attr.value;
    if (!vm_cc_markable(cc)) {
        *attr_value = (uintptr_t)INVALID_SHAPE_ID << SHAPE_FLAG_SHIFT;
        return;
    }
    VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
    VM_ASSERT(cc != vm_cc_empty());
    *attr_value = (attr_index_t)(index + 1) | ((uintptr_t)(dest_shape_id) << SHAPE_FLAG_SHIFT);
}
    *(uintptr_t *)&ic->value = ((uintptr_t)dest_shape_id << SHAPE_FLAG_SHIFT) | (attr_index_t)(index + 1);
    *(uintptr_t *)&ic->value = (uintptr_t)shape_id << SHAPE_FLAG_SHIFT;
static inline void
vm_cc_method_missing_reason_set(const struct rb_callcache *cc, enum method_missing_reason reason)
{
    VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
    VM_ASSERT(cc != vm_cc_empty());
    *(enum method_missing_reason *)&cc->aux_.method_missing_reason = reason;
}
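
/* Pairing sketch: the reason stored here is what vm_cc_cmethod_missing_reason()
 * returns when the cached entry dispatches to method_missing. The enumerator
 * below is assumed from enum method_missing_reason (vm_core.h), e.g.
 *
 *     vm_cc_method_missing_reason_set(cc, MISSING_NOENTRY);
 *     enum method_missing_reason r = vm_cc_cmethod_missing_reason(cc);
 */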
static inline void
vm_cc_invalidate(const struct rb_callcache *cc)
{
    VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
    VM_ASSERT(cc != vm_cc_empty());
    VM_ASSERT(cc->klass != 0);

    *(VALUE *)&cc->klass = 0;
    RB_DEBUG_COUNTER_INC(cc_ent_invalidate);
}
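
/* Hedged usage note: once vm_cc_invalidate() has zeroed cc->klass, a guard of
 * the following shape (sketch only) can never take the fast path again, so
 * the caller must fall back to a fresh method lookup:
 *
 *     if (vm_cc_class_check(cc, klass) && !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc))) {
 *         // cache hit: dispatch through vm_cc_call(cc)
 *     }
 */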
void rb_vm_dump_overloaded_cme_table(void);
    return ccs->debug_sig == ~(VALUE)ccs;
    if (vm_cc_cme(cc) == cme ||
        (cme->def->iseq_overload && vm_cc_cme(cc) == rb_vm_lookup_overloaded_cme(cme))) {

        fprintf(stderr, "iseq_overload:%d\n", (int)cme->def->iseq_overload);
        rb_vm_lookup_overloaded_cme(cme);