1 /**********************************************************************
2
3 vm_insnhelper.c - instruction helper functions.
4
5 $Author: usa $
6
7 Copyright (C) 2007 Koichi Sasada
8
9 **********************************************************************/
10
11 /* finish iseq array */
12 #include "insns.inc"
13 #ifndef MJIT_HEADER
14 #include "insns_info.inc"
15 #endif
16 #include <math.h>
17 #include "constant.h"
18 #include "internal.h"
19 #include "ruby/config.h"
20 #include "debug_counter.h"
21
22 /* control stack frame */
23
24 static rb_control_frame_t *vm_get_ruby_level_caller_cfp(const rb_execution_context_t *ec, const rb_control_frame_t *cfp);
25
26 MJIT_STATIC VALUE
ruby_vm_special_exception_copy(VALUE exc)27 ruby_vm_special_exception_copy(VALUE exc)
28 {
29 VALUE e = rb_obj_alloc(rb_class_real(RBASIC_CLASS(exc)));
30 rb_obj_copy_ivar(e, exc);
31 return e;
32 }
33
34 NORETURN(static void ec_stack_overflow(rb_execution_context_t *ec, int));
35 static void
ec_stack_overflow(rb_execution_context_t * ec,int setup)36 ec_stack_overflow(rb_execution_context_t *ec, int setup)
37 {
38 VALUE mesg = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_sysstack];
39 ec->raised_flag = RAISED_STACKOVERFLOW;
40 if (setup) {
41 VALUE at = rb_ec_backtrace_object(ec);
42 mesg = ruby_vm_special_exception_copy(mesg);
43 rb_ivar_set(mesg, idBt, at);
44 rb_ivar_set(mesg, idBt_locations, at);
45 }
46 ec->errinfo = mesg;
47 EC_JUMP_TAG(ec, TAG_RAISE);
48 }
49
NORETURN(static void vm_stackoverflow(void));

/* Raise SystemStackError (with backtrace) on the current execution
 * context.  Does not return. */
static void
vm_stackoverflow(void)
{
    ec_stack_overflow(GET_EC(), TRUE);
}
57
NORETURN(MJIT_STATIC void rb_ec_stack_overflow(rb_execution_context_t *ec, int crit));
/* Raise a stack-overflow error on `ec`.  `crit` (or an overflow during GC)
 * selects the "fatal" pre-built exception and skips backtrace construction,
 * since the machine stack cannot be trusted in that state.  Does not return. */
MJIT_STATIC void
rb_ec_stack_overflow(rb_execution_context_t *ec, int crit)
{
    if (crit || rb_during_gc()) {
        ec->raised_flag = RAISED_STACKOVERFLOW;
        ec->errinfo = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_stackfatal];
        EC_JUMP_TAG(ec, TAG_RAISE);
    }
#ifdef USE_SIGALTSTACK
    /* an alternate signal stack leaves room to build a backtrace */
    ec_stack_overflow(ec, TRUE);
#else
    ec_stack_overflow(ec, FALSE);
#endif
}
73
74
75 #if VM_CHECK_MODE > 0
/* Debug check: return TRUE if `klass` is acceptable as the defined_class of
 * a callable method entry (a module, a module-wrapping iclass, or a class
 * whose ancestry reaches BasicObject). */
static int
callable_class_p(VALUE klass)
{
#if VM_CHECK_MODE >= 2
    if (!klass) return FALSE;
    switch (RB_BUILTIN_TYPE(klass)) {
      case T_ICLASS:
        if (!RB_TYPE_P(RCLASS_SUPER(klass), T_MODULE)) break;
        /* fall through: iclass of a module is callable */
      case T_MODULE:
        return TRUE;
    }
    /* walk the superclass chain; a real class must reach BasicObject */
    while (klass) {
        if (klass == rb_cBasicObject) {
            return TRUE;
        }
        klass = RCLASS_SUPER(klass);
    }
    return FALSE;
#else
    return klass != 0;
#endif
}
98
99 static int
callable_method_entry_p(const rb_callable_method_entry_t * me)100 callable_method_entry_p(const rb_callable_method_entry_t *me)
101 {
102 if (me == NULL || callable_class_p(me->defined_class)) {
103 return TRUE;
104 }
105 else {
106 return FALSE;
107 }
108 }
109
/* Consistency checks for one frame being pushed (VM_CHECK_MODE only).
 * `req_block`/`req_me`/`req_cref` describe what this frame magic requires in
 * the env slots; `is_cframe` says whether the magic denotes a C-level frame.
 * Aborts via rb_bug() on any violation. */
static void
vm_check_frame_detail(VALUE type, int req_block, int req_me, int req_cref, VALUE specval, VALUE cref_or_me, int is_cframe, const rb_iseq_t *iseq)
{
    unsigned int magic = (unsigned int)(type & VM_FRAME_MAGIC_MASK);
    enum imemo_type cref_or_me_type = imemo_env; /* impossible value */

    if (RB_TYPE_P(cref_or_me, T_IMEMO)) {
        cref_or_me_type = imemo_type(cref_or_me);
    }
    if (type & VM_FRAME_FLAG_BMETHOD) {
        /* bmethod frames always carry a method entry */
        req_me = TRUE;
    }

    /* a frame that requires a block must be a local env, and vice versa */
    if (req_block && (type & VM_ENV_FLAG_LOCAL) == 0) {
        rb_bug("vm_push_frame: specval (%p) should be a block_ptr on %x frame", (void *)specval, magic);
    }
    if (!req_block && (type & VM_ENV_FLAG_LOCAL) != 0) {
        rb_bug("vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (void *)specval, magic);
    }

    if (req_me) {
        if (cref_or_me_type != imemo_ment) {
            rb_bug("vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
        }
    }
    else {
        if (req_cref && cref_or_me_type != imemo_cref) {
            rb_bug("vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
        }
        else { /* cref or Qfalse */
            if (cref_or_me != Qfalse && cref_or_me_type != imemo_cref) {
                if (((type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC) && (cref_or_me_type == imemo_ment)) {
                    /* ignore: lambda and ifunc frames may legitimately hold a method entry */
                }
                else {
                    rb_bug("vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);
                }
            }
        }
    }

    if (cref_or_me_type == imemo_ment) {
        const rb_callable_method_entry_t *me = (const rb_callable_method_entry_t *)cref_or_me;

        if (!callable_method_entry_p(me)) {
            rb_bug("vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);
        }
    }

    if ((type & VM_FRAME_MAGIC_MASK) == VM_FRAME_MAGIC_DUMMY) {
        VM_ASSERT(iseq == NULL ||
                  RUBY_VM_NORMAL_ISEQ_P(iseq) /* argument error. it should be fixed */);
    }
    else {
        /* C frames have no normal iseq; Ruby frames must have one */
        VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));
    }
}
167
/* Dispatch frame-consistency checking by frame magic (VM_CHECK_MODE only).
 * The CHECK table row encodes, per magic: whether a block handler, method
 * entry, or CREF is required, and whether the frame is a C frame. */
static void
vm_check_frame(VALUE type,
               VALUE specval,
               VALUE cref_or_me,
               const rb_iseq_t *iseq)
{
    VALUE given_magic = type & VM_FRAME_MAGIC_MASK;
    VM_ASSERT(FIXNUM_P(type));

#define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
    case magic: \
      vm_check_frame_detail(type, req_block, req_me, req_cref, \
                            specval, cref_or_me, is_cframe, iseq); \
      break
    switch (given_magic) {
        /*                           BLK    ME     CREF   CFRAME */
        CHECK(VM_FRAME_MAGIC_METHOD, TRUE,  TRUE,  FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_CLASS,  TRUE,  FALSE, TRUE,  FALSE);
        CHECK(VM_FRAME_MAGIC_TOP,    TRUE,  FALSE, TRUE,  FALSE);
        CHECK(VM_FRAME_MAGIC_CFUNC,  TRUE,  TRUE,  FALSE, TRUE);
        CHECK(VM_FRAME_MAGIC_BLOCK,  FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_IFUNC,  FALSE, FALSE, FALSE, TRUE);
        CHECK(VM_FRAME_MAGIC_EVAL,   FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_RESCUE, FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_DUMMY,  TRUE,  FALSE, FALSE, FALSE);
      default:
        rb_bug("vm_push_frame: unknown type (%x)", (unsigned int)given_magic);
    }
#undef CHECK
}
198 #else
199 #define vm_check_frame(a, b, c, d)
200 #endif /* VM_CHECK_MODE > 0 */
201
/* Push a new control frame onto `ec`'s VM stack and return it.
 *
 * Layout built on the value stack (growing upward through `sp`):
 *   local_size slots initialized to Qnil, then the three env-data slots
 *   ep[-2]=cref_or_me, ep[-1]=specval (block handler or prev env ptr),
 *   ep[0]=type (env flags).  cfp->ep/bp point at the flags slot and
 *   cfp->sp one past it.
 *
 * NOTE: the caller must ensure GC-visible arguments stay alive; this
 * function only writes raw slots. */
static inline rb_control_frame_t *
vm_push_frame(rb_execution_context_t *ec,
              const rb_iseq_t *iseq,
              VALUE type,
              VALUE self,
              VALUE specval,
              VALUE cref_or_me,
              const VALUE *pc,
              VALUE *sp,
              int local_size,
              int stack_max)
{
    /* control frames grow downward; the next frame sits just below cfp */
    rb_control_frame_t *const cfp = ec->cfp - 1;
    int i;

    vm_check_frame(type, specval, cref_or_me, iseq);
    VM_ASSERT(local_size >= 0);

    /* check stack overflow */
    CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);

    ec->cfp = cfp;

    /* setup new frame */
    cfp->pc = (VALUE *)pc;
    cfp->iseq = (rb_iseq_t *)iseq;
    cfp->self = self;
    cfp->block_code = NULL;

    /* setup vm value stack */

    /* initialize local variables */
    for (i=0; i < local_size; i++) {
        *sp++ = Qnil;
    }

    /* setup ep with managing data */
    VM_ASSERT(VM_ENV_DATA_INDEX_ME_CREF == -2);
    VM_ASSERT(VM_ENV_DATA_INDEX_SPECVAL == -1);
    VM_ASSERT(VM_ENV_DATA_INDEX_FLAGS   == -0);
    *sp++ = cref_or_me; /* ep[-2] / Qnil or T_IMEMO(cref) or T_IMEMO(ment) */
    *sp++ = specval     /* ep[-1] / block handler or prev env ptr */;
    *sp   = type;       /* ep[-0] / ENV_FLAGS */

    /* Store initial value of ep as bp to skip calculation cost of bp on JIT cancellation. */
    cfp->ep = cfp->bp = sp;
    cfp->sp = sp + 1;

#if VM_DEBUG_BP_CHECK
    cfp->bp_check = sp + 1;
#endif

    if (VMDEBUG == 2) {
        SDR();
    }

#if USE_DEBUG_COUNTER
    RB_DEBUG_COUNTER_INC(frame_push);
    switch (type & VM_FRAME_MAGIC_MASK) {
      case VM_FRAME_MAGIC_METHOD: RB_DEBUG_COUNTER_INC(frame_push_method); break;
      case VM_FRAME_MAGIC_BLOCK:  RB_DEBUG_COUNTER_INC(frame_push_block);  break;
      case VM_FRAME_MAGIC_CLASS:  RB_DEBUG_COUNTER_INC(frame_push_class);  break;
      case VM_FRAME_MAGIC_TOP:    RB_DEBUG_COUNTER_INC(frame_push_top);    break;
      case VM_FRAME_MAGIC_CFUNC:  RB_DEBUG_COUNTER_INC(frame_push_cfunc);  break;
      case VM_FRAME_MAGIC_IFUNC:  RB_DEBUG_COUNTER_INC(frame_push_ifunc);  break;
      case VM_FRAME_MAGIC_EVAL:   RB_DEBUG_COUNTER_INC(frame_push_eval);   break;
      case VM_FRAME_MAGIC_RESCUE: RB_DEBUG_COUNTER_INC(frame_push_rescue); break;
      case VM_FRAME_MAGIC_DUMMY:  RB_DEBUG_COUNTER_INC(frame_push_dummy);  break;
      default: rb_bug("unreachable");
    }
    {
        /* count Ruby<->C frame transitions (R2R/R2C/C2R/C2C) */
        rb_control_frame_t *prev_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
        if (RUBY_VM_END_CONTROL_FRAME(ec) != prev_cfp) {
            int cur_ruby_frame = VM_FRAME_RUBYFRAME_P(cfp);
            int pre_ruby_frame = VM_FRAME_RUBYFRAME_P(prev_cfp);

            pre_ruby_frame ? (cur_ruby_frame ? RB_DEBUG_COUNTER_INC(frame_R2R) :
                                               RB_DEBUG_COUNTER_INC(frame_R2C)):
                             (cur_ruby_frame ? RB_DEBUG_COUNTER_INC(frame_C2R) :
                                               RB_DEBUG_COUNTER_INC(frame_C2C));
        }
    }
#endif

    return cfp;
}
288
/* Externally-linked wrapper around vm_push_frame() so code outside this
 * translation unit (e.g. the JIT) can push frames.  See vm_push_frame()
 * for the parameter semantics. */
rb_control_frame_t *
rb_vm_push_frame(rb_execution_context_t *ec,
                 const rb_iseq_t *iseq,
                 VALUE type,
                 VALUE self,
                 VALUE specval,
                 VALUE cref_or_me,
                 const VALUE *pc,
                 VALUE *sp,
                 int local_size,
                 int stack_max)
{
    return vm_push_frame(ec, iseq, type, self, specval, cref_or_me, pc, sp, local_size, stack_max);
}
303
/* Pop `cfp` from `ec`'s control-frame stack.
 * Returns non-zero if the popped frame carried VM_FRAME_FLAG_FINISH,
 * i.e. the interpreter loop for this invocation is finished. */
static inline int
vm_pop_frame(rb_execution_context_t *ec, rb_control_frame_t *cfp, const VALUE *ep)
{
    /* read the flags before the frame becomes dead */
    VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];

    if (VM_CHECK_MODE >= 4) rb_gc_verify_internal_consistency();
    if (VMDEBUG == 2)       SDR();

    ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);

    return flags & VM_FRAME_FLAG_FINISH;
}
317
/* Pop the current (top) control frame of `ec`; the FINISH flag result
 * is intentionally discarded here. */
MJIT_STATIC void
rb_vm_pop_frame(rb_execution_context_t *ec)
{
    vm_pop_frame(ec, ec->cfp, ec->cfp->ep);
}
323
324 /* method dispatch */
325 static inline VALUE
rb_arity_error_new(int argc,int min,int max)326 rb_arity_error_new(int argc, int min, int max)
327 {
328 VALUE err_mess = 0;
329 if (min == max) {
330 err_mess = rb_sprintf("wrong number of arguments (given %d, expected %d)", argc, min);
331 }
332 else if (max == UNLIMITED_ARGUMENTS) {
333 err_mess = rb_sprintf("wrong number of arguments (given %d, expected %d+)", argc, min);
334 }
335 else {
336 err_mess = rb_sprintf("wrong number of arguments (given %d, expected %d..%d)", argc, min, max);
337 }
338 return rb_exc_new3(rb_eArgError, err_mess);
339 }
340
/* Raise an ArgumentError for an arity mismatch (`argc` given,
 * `min`..`max` expected).  Does not return. */
MJIT_STATIC void
rb_error_arity(int argc, int min, int max)
{
    rb_exc_raise(rb_arity_error_new(argc, min, max));
}
346
347 /* lvar */
348
NOINLINE(static void vm_env_write_slowpath(const VALUE *ep, int index, VALUE v));

/* Slow path for writing into an escaped env: remember the env object for
 * the generational GC write barrier, then store without a per-write
 * barrier and clear the WB-required flag. */
static void
vm_env_write_slowpath(const VALUE *ep, int index, VALUE v)
{
    /* remember env value forcibly */
    rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
    VM_FORCE_WRITE(&ep[index], v);
    VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
    RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
}
360
361 static inline void
vm_env_write(const VALUE * ep,int index,VALUE v)362 vm_env_write(const VALUE *ep, int index, VALUE v)
363 {
364 VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
365 if (LIKELY((flags & VM_ENV_FLAG_WB_REQUIRED) == 0)) {
366 VM_STACK_ENV_WRITE(ep, index, v);
367 }
368 else {
369 vm_env_write_slowpath(ep, index, v);
370 }
371 }
372
/* Convert a block handler into a Proc VALUE (or nil when there is no
 * block).  iseq/ifunc handlers are materialized via rb_vm_make_proc;
 * symbols via Symbol#to_proc; proc handlers are returned directly.
 * NOTE(review): the default arm is VM_UNREACHABLE with no return value —
 * relies on unreachable-path semantics; do not add reachable cases. */
MJIT_STATIC VALUE
rb_vm_bh_to_procval(const rb_execution_context_t *ec, VALUE block_handler)
{
    if (block_handler == VM_BLOCK_HANDLER_NONE) {
        return Qnil;
    }
    else {
        switch (vm_block_handler_type(block_handler)) {
          case block_handler_type_iseq:
          case block_handler_type_ifunc:
            return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler), rb_cProc);
          case block_handler_type_symbol:
            return rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
          case block_handler_type_proc:
            return VM_BH_TO_PROC(block_handler);
          default:
            VM_UNREACHABLE(rb_vm_bh_to_procval);
        }
    }
}
393
394 /* svar */
395
#if VM_CHECK_MODE > 0
/* Debug check: the ME/CREF slot may legally hold an svar, cref, or method
 * entry imemo; anything else aborts.  The trailing return is unreachable
 * (rb_bug does not return) but keeps compilers quiet. */
static int
vm_svar_valid_p(VALUE svar)
{
    if (RB_TYPE_P((VALUE)svar, T_IMEMO)) {
        switch (imemo_type(svar)) {
          case imemo_svar:
          case imemo_cref:
          case imemo_ment:
            return TRUE;
          default:
            break;
        }
    }
    rb_bug("vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));
    return FALSE;
}
#endif
414
/* Fetch the special-variable (svar) holder for the local EP `lep`.
 * Ordinary frames keep it in the ME/CREF env slot; the root frame keeps
 * it in ec->root_svar.  May return Qfalse cast to the pointer type when
 * no svar has been created yet.
 * NOTE(review): if both `ec` and `lep` are NULL the else-branch would
 * dereference NULL — callers presumably never pass that combination;
 * verify at call sites. */
static inline struct vm_svar *
lep_svar(const rb_execution_context_t *ec, const VALUE *lep)
{
    VALUE svar;

    if (lep && (ec == NULL || ec->root_lep != lep)) {
        svar = lep[VM_ENV_DATA_INDEX_ME_CREF];
    }
    else {
        svar = ec->root_svar;
    }

    VM_ASSERT(svar == Qfalse || vm_svar_valid_p(svar));

    return (struct vm_svar *)svar;
}
431
/* Store `svar` as the special-variable holder for local EP `lep`,
 * mirroring the lookup logic of lep_svar(): env slot for ordinary
 * frames, ec->root_svar (with write barrier) for the root frame. */
static inline void
lep_svar_write(const rb_execution_context_t *ec, const VALUE *lep, const struct vm_svar *svar)
{
    VM_ASSERT(vm_svar_valid_p((VALUE)svar));

    if (lep && (ec == NULL || ec->root_lep != lep)) {
        vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (VALUE)svar);
    }
    else {
        RB_OBJ_WRITE(rb_ec_thread_ptr(ec)->self, &ec->root_svar, svar);
    }
}
444
/* Read special variable `key` ($_ / $~ / flip-flop extras) for local EP
 * `lep`.  Returns nil when no svar exists yet or the slot holds a
 * non-svar imemo (cref/ment). */
static VALUE
lep_svar_get(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key)
{
    const struct vm_svar *svar = lep_svar(ec, lep);

    if ((VALUE)svar == Qfalse || imemo_type((VALUE)svar) != imemo_svar) return Qnil;

    switch (key) {
      case VM_SVAR_LASTLINE:
        return svar->lastline;
      case VM_SVAR_BACKREF:
        return svar->backref;
      default: {
        /* keys >= VM_SVAR_EXTRA_START index into the `others` array */
        const VALUE ary = svar->others;

        if (NIL_P(ary)) {
            return Qnil;
        }
        else {
            return rb_ary_entry(ary, key - VM_SVAR_EXTRA_START);
        }
      }
    }
}
469
/* Allocate a fresh svar imemo with empty lastline/backref/others.
 * `obj` is stored as the fourth imemo slot (the previous cref_or_me). */
static struct vm_svar *
svar_new(VALUE obj)
{
    return (struct vm_svar *)rb_imemo_new(imemo_svar, Qnil, Qnil, Qnil, obj);
}
475
/* Write special variable `key` = `val` for local EP `lep`, creating the
 * svar holder on first use (preserving whatever cref/ment occupied the
 * slot by threading it into the new svar). */
static void
lep_svar_set(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key, VALUE val)
{
    struct vm_svar *svar = lep_svar(ec, lep);

    if ((VALUE)svar == Qfalse || imemo_type((VALUE)svar) != imemo_svar) {
        lep_svar_write(ec, lep, svar = svar_new((VALUE)svar));
    }

    switch (key) {
      case VM_SVAR_LASTLINE:
        RB_OBJ_WRITE(svar, &svar->lastline, val);
        return;
      case VM_SVAR_BACKREF:
        RB_OBJ_WRITE(svar, &svar->backref, val);
        return;
      default: {
        /* extra keys live in a lazily-created array */
        VALUE ary = svar->others;

        if (NIL_P(ary)) {
            RB_OBJ_WRITE(svar, &svar->others, ary = rb_ary_new());
        }
        rb_ary_store(ary, key - VM_SVAR_EXTRA_START, val);
      }
    }
}
502
/* Implement the `getspecial` instruction.
 * type == 0            : plain svar read by `key`.
 * type odd (bit0 set)  : named back-ref ($&, $`, $', $+), selector in
 *                        type >> 1.
 * type even, non-zero  : numbered back-ref $N, N in type >> 1. */
static inline VALUE
vm_getspecial(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key, rb_num_t type)
{
    VALUE val;

    if (type == 0) {
        val = lep_svar_get(ec, lep, key);
    }
    else {
        VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);

        if (type & 0x01) {
            switch (type >> 1) {
              case '&':
                val = rb_reg_last_match(backref);
                break;
              case '`':
                val = rb_reg_match_pre(backref);
                break;
              case '\'':
                val = rb_reg_match_post(backref);
                break;
              case '+':
                val = rb_reg_match_last(backref);
                break;
              default:
                rb_bug("unexpected back-ref");
            }
        }
        else {
            val = rb_reg_nth_match((int)(type >> 1), backref);
        }
    }
    return val;
}
538
PUREFUNC(static rb_callable_method_entry_t *check_method_entry(VALUE obj, int can_be_svar));
/* Interpret the ME/CREF env slot `obj` as a method entry.
 * Returns the entry for imemo_ment, NULL for Qfalse/cref, and for
 * imemo_svar (only legal when `can_be_svar`) recurses into the value the
 * svar displaced.  Any other content is a VM bug. */
static rb_callable_method_entry_t *
check_method_entry(VALUE obj, int can_be_svar)
{
    if (obj == Qfalse) return NULL;

#if VM_CHECK_MODE > 0
    if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_method_entry: unknown type: %s", rb_obj_info(obj));
#endif

    switch (imemo_type(obj)) {
      case imemo_ment:
        return (rb_callable_method_entry_t *)obj;
      case imemo_cref:
        return NULL;
      case imemo_svar:
        if (can_be_svar) {
            return check_method_entry(((struct vm_svar *)obj)->cref_or_me, FALSE);
        }
        /* fall through: svar where it must not appear */
      default:
#if VM_CHECK_MODE > 0
        rb_bug("check_method_entry: svar should not be there:");
#endif
        return NULL;
    }
}
565
/* Find the method entry governing `cfp`: walk the env chain outward from
 * cfp->ep until a frame-local env is reached, returning the first method
 * entry found.  Only the final (local) env may store it behind an svar. */
MJIT_STATIC const rb_callable_method_entry_t *
rb_vm_frame_method_entry(const rb_control_frame_t *cfp)
{
    const VALUE *ep = cfp->ep;
    rb_callable_method_entry_t *me;

    while (!VM_ENV_LOCAL_P(ep)) {
        if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return me;
        ep = VM_ENV_PREV_EP(ep);
    }

    return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
}
579
580 static rb_cref_t *
method_entry_cref(rb_callable_method_entry_t * me)581 method_entry_cref(rb_callable_method_entry_t *me)
582 {
583 switch (me->def->type) {
584 case VM_METHOD_TYPE_ISEQ:
585 return me->def->body.iseq.cref;
586 default:
587 return NULL;
588 }
589 }
590
591 #if VM_CHECK_MODE == 0
592 PUREFUNC(static rb_cref_t *check_cref(VALUE, int));
593 #endif
594 static rb_cref_t *
check_cref(VALUE obj,int can_be_svar)595 check_cref(VALUE obj, int can_be_svar)
596 {
597 if (obj == Qfalse) return NULL;
598
599 #if VM_CHECK_MODE > 0
600 if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_cref: unknown type: %s", rb_obj_info(obj));
601 #endif
602
603 switch (imemo_type(obj)) {
604 case imemo_ment:
605 return method_entry_cref((rb_callable_method_entry_t *)obj);
606 case imemo_cref:
607 return (rb_cref_t *)obj;
608 case imemo_svar:
609 if (can_be_svar) {
610 return check_cref(((struct vm_svar *)obj)->cref_or_me, FALSE);
611 }
612 default:
613 #if VM_CHECK_MODE > 0
614 rb_bug("check_method_entry: svar should not be there:");
615 #endif
616 return NULL;
617 }
618 }
619
/* Find the CREF governing env `ep`: walk outward to the frame-local env,
 * returning the first cref found.  Mirrors rb_vm_frame_method_entry();
 * only the final (local) env may store the value behind an svar. */
static inline rb_cref_t *
vm_env_cref(const VALUE *ep)
{
    rb_cref_t *cref;

    while (!VM_ENV_LOCAL_P(ep)) {
        if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return cref;
        ep = VM_ENV_PREV_EP(ep);
    }

    return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
}
632
633 static int
is_cref(const VALUE v,int can_be_svar)634 is_cref(const VALUE v, int can_be_svar)
635 {
636 if (RB_TYPE_P(v, T_IMEMO)) {
637 switch (imemo_type(v)) {
638 case imemo_cref:
639 return TRUE;
640 case imemo_svar:
641 if (can_be_svar) return is_cref(((struct vm_svar *)v)->cref_or_me, FALSE);
642 default:
643 break;
644 }
645 }
646 return FALSE;
647 }
648
/* Return TRUE if the env chain of `ep` carries an explicit cref (rather
 * than deriving one from a method entry).  Same walk as vm_env_cref(). */
static int
vm_env_cref_by_cref(const VALUE *ep)
{
    while (!VM_ENV_LOCAL_P(ep)) {
        if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) return TRUE;
        ep = VM_ENV_PREV_EP(ep);
    }
    return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
}
658
659 static rb_cref_t *
cref_replace_with_duplicated_cref_each_frame(const VALUE * vptr,int can_be_svar,VALUE parent)660 cref_replace_with_duplicated_cref_each_frame(const VALUE *vptr, int can_be_svar, VALUE parent)
661 {
662 const VALUE v = *vptr;
663 rb_cref_t *cref, *new_cref;
664
665 if (RB_TYPE_P(v, T_IMEMO)) {
666 switch (imemo_type(v)) {
667 case imemo_cref:
668 cref = (rb_cref_t *)v;
669 new_cref = vm_cref_dup(cref);
670 if (parent) {
671 RB_OBJ_WRITE(parent, vptr, new_cref);
672 }
673 else {
674 VM_FORCE_WRITE(vptr, (VALUE)new_cref);
675 }
676 return (rb_cref_t *)new_cref;
677 case imemo_svar:
678 if (can_be_svar) {
679 return cref_replace_with_duplicated_cref_each_frame((const VALUE *)&((struct vm_svar *)v)->cref_or_me, FALSE, v);
680 }
681 case imemo_ment:
682 rb_bug("cref_replace_with_duplicated_cref_each_frame: unreachable");
683 default:
684 break;
685 }
686 }
687 return FALSE;
688 }
689
/* Walk the env chain of `ep` and replace its governing cref with a
 * duplicate (so e.g. refinements can be activated without mutating a
 * shared cref).  Escaped envs pass their env object as write-barrier
 * parent; on-stack envs pass Qfalse.  Precondition: the chain carries an
 * explicit cref (vm_env_cref_by_cref), otherwise it is a VM bug. */
static rb_cref_t *
vm_cref_replace_with_duplicated_cref(const VALUE *ep)
{
    if (vm_env_cref_by_cref(ep)) {
        rb_cref_t *cref;
        VALUE envval;

        while (!VM_ENV_LOCAL_P(ep)) {
            envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
            if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {
                return cref;
            }
            ep = VM_ENV_PREV_EP(ep);
        }
        /* frame-local env: the cref may sit behind an svar */
        envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
        return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);
    }
    else {
        rb_bug("vm_cref_dup: unreachable");
    }
}
711
712
713 static rb_cref_t *
rb_vm_get_cref(const VALUE * ep)714 rb_vm_get_cref(const VALUE *ep)
715 {
716 rb_cref_t *cref = vm_env_cref(ep);
717
718 if (cref != NULL) {
719 return cref;
720 }
721 else {
722 rb_bug("rb_vm_get_cref: unreachable");
723 }
724 }
725
726 static const rb_cref_t *
vm_get_const_key_cref(const VALUE * ep)727 vm_get_const_key_cref(const VALUE *ep)
728 {
729 const rb_cref_t *cref = rb_vm_get_cref(ep);
730 const rb_cref_t *key_cref = cref;
731
732 while (cref) {
733 if (FL_TEST(CREF_CLASS(cref), FL_SINGLETON)) {
734 return key_cref;
735 }
736 cref = CREF_NEXT(cref);
737 }
738
739 /* does not include singleton class */
740 return NULL;
741 }
742
/* Rebuild the cref chain `cref`, replacing the first entry whose class is
 * `old_klass` with one for `new_klass` (keeping that entry's tail), and
 * store the new chain head through `new_cref_ptr`.  Entries before the
 * match are copied; `new_cref_ptr` is re-aimed at each copy's `next`
 * field to chain the copies together. */
void
rb_vm_rewrite_cref(rb_cref_t *cref, VALUE old_klass, VALUE new_klass, rb_cref_t **new_cref_ptr)
{
    rb_cref_t *new_cref;

    while (cref) {
        if (CREF_CLASS(cref) == old_klass) {
            /* found: substitute new_klass and reuse the rest of the chain */
            new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
            *new_cref_ptr = new_cref;
            return;
        }
        new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
        cref = CREF_NEXT(cref);
        *new_cref_ptr = new_cref;
        new_cref_ptr = (rb_cref_t **)&new_cref->next;
    }
    /* old_klass never found: chain ends here */
    *new_cref_ptr = NULL;
}
761
762 static rb_cref_t *
vm_cref_push(const rb_execution_context_t * ec,VALUE klass,const VALUE * ep,int pushed_by_eval)763 vm_cref_push(const rb_execution_context_t *ec, VALUE klass, const VALUE *ep, int pushed_by_eval)
764 {
765 rb_cref_t *prev_cref = NULL;
766
767 if (ep) {
768 prev_cref = vm_env_cref(ep);
769 }
770 else {
771 rb_control_frame_t *cfp = vm_get_ruby_level_caller_cfp(ec, ec->cfp);
772
773 if (cfp) {
774 prev_cref = vm_env_cref(cfp->ep);
775 }
776 }
777
778 return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval);
779 }
780
781 static inline VALUE
vm_get_cbase(const VALUE * ep)782 vm_get_cbase(const VALUE *ep)
783 {
784 const rb_cref_t *cref = rb_vm_get_cref(ep);
785 VALUE klass = Qundef;
786
787 while (cref) {
788 if ((klass = CREF_CLASS(cref)) != 0) {
789 break;
790 }
791 cref = CREF_NEXT(cref);
792 }
793
794 return klass;
795 }
796
797 static inline VALUE
vm_get_const_base(const VALUE * ep)798 vm_get_const_base(const VALUE *ep)
799 {
800 const rb_cref_t *cref = rb_vm_get_cref(ep);
801 VALUE klass = Qundef;
802
803 while (cref) {
804 if (!CREF_PUSHED_BY_EVAL(cref) &&
805 (klass = CREF_CLASS(cref)) != 0) {
806 break;
807 }
808 cref = CREF_NEXT(cref);
809 }
810
811 return klass;
812 }
813
/* Raise TypeError unless `klass` can serve as a constant namespace
 * (a Class or Module). */
static inline void
vm_check_if_namespace(VALUE klass)
{
    if (!RB_TYPE_P(klass, T_CLASS) && !RB_TYPE_P(klass, T_MODULE)) {
        rb_raise(rb_eTypeError, "%+"PRIsVALUE" is not a class/module", klass);
    }
}
821
/* Warn (do not raise) when a definition happens with a refinement module
 * as `self`: the definition lands on the outer class/module instead. */
static inline void
vm_ensure_not_refinement_module(VALUE self)
{
    if (RB_TYPE_P(self, T_MODULE) && FL_TEST(self, RMODULE_IS_REFINEMENT)) {
        rb_warn("not defined at the refinement, but at the outer class/module");
    }
}
829
/* Resolve `klass` to the receiver-appropriate class for frame `cfp`.
 * Currently the identity function (`cfp` intentionally unused); kept as
 * a hook point for iclass resolution. */
static inline VALUE
vm_get_iclass(rb_control_frame_t *cfp, VALUE klass)
{
    return klass;
}
835
/* Constant lookup for the VM.
 * orig_klass == Qnil: lexical lookup through the current cref chain
 * (skipping eval-pushed entries and firing autoload as needed), then
 * through the ancestors of self's class.  Otherwise: qualified lookup
 * `orig_klass::id` via the public-constant API.
 * When `is_defined` is non-zero, return a truthy/0 "defined?" answer
 * instead of the value. */
static inline VALUE
vm_get_ev_const(rb_execution_context_t *ec, VALUE orig_klass, ID id, int is_defined)
{
    void rb_const_warn_if_deprecated(const rb_const_entry_t *ce, VALUE klass, ID id);
    VALUE val;

    if (orig_klass == Qnil) {
        /* in current lexical scope */
        const rb_cref_t *root_cref = rb_vm_get_cref(ec->cfp->ep);
        const rb_cref_t *cref;
        VALUE klass = Qnil;

        while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
            root_cref = CREF_NEXT(root_cref);
        }
        cref = root_cref;
        /* stop before the last cref: it is searched with ancestors below */
        while (cref && CREF_NEXT(cref)) {
            if (CREF_PUSHED_BY_EVAL(cref)) {
                klass = Qnil;
            }
            else {
                klass = CREF_CLASS(cref);
            }
            cref = CREF_NEXT(cref);

            if (!NIL_P(klass)) {
                VALUE av, am = 0;
                rb_const_entry_t *ce;
              search_continue:
                if ((ce = rb_const_lookup(klass, id))) {
                    rb_const_warn_if_deprecated(ce, klass, id);
                    val = ce->value;
                    if (val == Qundef) {
                        /* Qundef marks an autoload stub */
                        if (am == klass) break; /* already autoloaded here: avoid infinite loop */
                        am = klass;
                        if (is_defined) return 1;
                        if (rb_autoloading_value(klass, id, &av, NULL)) return av;
                        rb_autoload_load(klass, id);
                        goto search_continue;
                    }
                    else {
                        if (is_defined) {
                            return 1;
                        }
                        else {
                            return val;
                        }
                    }
                }
            }
        }

        /* search self */
        if (root_cref && !NIL_P(CREF_CLASS(root_cref))) {
            klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));
        }
        else {
            klass = CLASS_OF(ec->cfp->self);
        }

        if (is_defined) {
            return rb_const_defined(klass, id);
        }
        else {
            return rb_const_get(klass, id);
        }
    }
    else {
        vm_check_if_namespace(orig_klass);
        if (is_defined) {
            return rb_public_const_defined_from(orig_klass, id);
        }
        else {
            return rb_public_const_get_from(orig_klass, id);
        }
    }
}
913
/* Find the class to attach a class variable to: the outermost cref entry
 * that is a real (non-singleton, non-eval-pushed) class/module.  Warns on
 * toplevel access and raises TypeError if no class is available. */
static inline VALUE
vm_get_cvar_base(const rb_cref_t *cref, rb_control_frame_t *cfp)
{
    VALUE klass;

    if (!cref) {
        rb_bug("vm_get_cvar_base: no cref");
    }

    /* skip singleton classes and eval-pushed entries */
    while (CREF_NEXT(cref) &&
           (NIL_P(CREF_CLASS(cref)) || FL_TEST(CREF_CLASS(cref), FL_SINGLETON) ||
            CREF_PUSHED_BY_EVAL(cref))) {
        cref = CREF_NEXT(cref);
    }
    if (!CREF_NEXT(cref)) {
        rb_warn("class variable access from toplevel");
    }

    klass = vm_get_iclass(cfp, CREF_CLASS(cref));

    if (NIL_P(klass)) {
        rb_raise(rb_eTypeError, "no class variables available");
    }
    return klass;
}
939
940 static VALUE
vm_search_const_defined_class(const VALUE cbase,ID id)941 vm_search_const_defined_class(const VALUE cbase, ID id)
942 {
943 if (rb_const_defined_at(cbase, id)) return cbase;
944 if (cbase == rb_cObject) {
945 VALUE tmp = RCLASS_SUPER(cbase);
946 while (tmp) {
947 if (rb_const_defined_at(tmp, id)) return tmp;
948 tmp = RCLASS_SUPER(tmp);
949 }
950 }
951 return 0;
952 }
953
ALWAYS_INLINE(static VALUE vm_getivar(VALUE, ID, IC, struct rb_call_cache *, int));
/* Read instance variable `id` from `obj` with inline caching.
 * `is_attr` selects the cache: the call cache `cc` (attr_reader path,
 * index stored +1 so 0 means "unset") versus the instruction cache `ic`
 * (keyed by the class serial).  Falls back to the generic ivar API for
 * non-T_OBJECT receivers or when caching is compiled out. */
static inline VALUE
vm_getivar(VALUE obj, ID id, IC ic, struct rb_call_cache *cc, int is_attr)
{
#if OPT_IC_FOR_IVAR
    if (LIKELY(RB_TYPE_P(obj, T_OBJECT))) {
        VALUE val = Qundef;
        if (LIKELY(is_attr ?
                   RB_DEBUG_COUNTER_INC_UNLESS(ivar_get_ic_miss_unset, cc->aux.index > 0) :
                   RB_DEBUG_COUNTER_INC_UNLESS(ivar_get_ic_miss_serial,
                                               ic->ic_serial == RCLASS_SERIAL(RBASIC(obj)->klass)))) {
            /* cache hit: index is valid for this class shape */
            st_index_t index = !is_attr ? ic->ic_value.index : (cc->aux.index - 1);
            if (LIKELY(index < ROBJECT_NUMIV(obj))) {
                val = ROBJECT_IVPTR(obj)[index];
            }
        }
        else {
            /* cache miss: look the index up and (re)fill the cache */
            st_data_t index;
            struct st_table *iv_index_tbl = ROBJECT_IV_INDEX_TBL(obj);

            if (iv_index_tbl) {
                if (st_lookup(iv_index_tbl, id, &index)) {
                    if (index < ROBJECT_NUMIV(obj)) {
                        val = ROBJECT_IVPTR(obj)[index];
                    }
                    if (!is_attr) {
                        ic->ic_value.index = index;
                        ic->ic_serial = RCLASS_SERIAL(RBASIC(obj)->klass);
                    }
                    else { /* call_info */
                        cc->aux.index = (int)index + 1;
                    }
                }
            }
        }
        if (UNLIKELY(val == Qundef)) {
            /* unset ivar reads as nil (with optional -W warning) */
            if (!is_attr && RTEST(ruby_verbose))
                rb_warning("instance variable %"PRIsVALUE" not initialized", QUOTE_ID(id));
            val = Qnil;
        }
        RB_DEBUG_COUNTER_INC(ivar_get_ic_hit);
        return val;
    }
    else {
        RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_noobject);
    }
#endif /* OPT_IC_FOR_IVAR */
    RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);

    if (is_attr)
        return rb_attr_get(obj, id);
    return rb_ivar_get(obj, id);
}
1007
/* Write instance variable `id` = `val` on `obj` with inline caching;
 * returns `val`.  Cache selection mirrors vm_getivar(): `cc` for the
 * attr_writer path (index stored +1), `ic` keyed by class serial
 * otherwise.  Raises on frozen receivers.  A cache miss that finds the
 * index only refills the cache and falls through to the generic
 * rb_ivar_set (which may need to grow the ivar array). */
static inline VALUE
vm_setivar(VALUE obj, ID id, VALUE val, IC ic, struct rb_call_cache *cc, int is_attr)
{
#if OPT_IC_FOR_IVAR
    rb_check_frozen_internal(obj);

    if (LIKELY(RB_TYPE_P(obj, T_OBJECT))) {
        VALUE klass = RBASIC(obj)->klass;
        st_data_t index;

        if (LIKELY(
            (!is_attr && RB_DEBUG_COUNTER_INC_UNLESS(ivar_set_ic_miss_serial, ic->ic_serial == RCLASS_SERIAL(klass))) ||
            ( is_attr && RB_DEBUG_COUNTER_INC_UNLESS(ivar_set_ic_miss_unset, cc->aux.index > 0)))) {
            /* cache hit: write straight into the ivar slot */
            VALUE *ptr = ROBJECT_IVPTR(obj);
            index = !is_attr ? ic->ic_value.index : cc->aux.index-1;

            if (RB_DEBUG_COUNTER_INC_UNLESS(ivar_set_ic_miss_oorange, index < ROBJECT_NUMIV(obj))) {
                RB_OBJ_WRITE(obj, &ptr[index], val);
                RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
                return val; /* inline cache hit */
            }
        }
        else {
            struct st_table *iv_index_tbl = ROBJECT_IV_INDEX_TBL(obj);

            if (iv_index_tbl && st_lookup(iv_index_tbl, (st_data_t)id, &index)) {
                if (!is_attr) {
                    ic->ic_value.index = index;
                    ic->ic_serial = RCLASS_SERIAL(klass);
                }
                else if (index >= INT_MAX) {
                    /* cc->aux.index is an int; +1 encoding must not overflow */
                    rb_raise(rb_eArgError, "too many instance variables");
                }
                else {
                    cc->aux.index = (int)(index + 1);
                }
            }
            /* fall through */
        }
    }
    else {
        RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
    }
#endif /* OPT_IC_FOR_IVAR */
    RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);
    return rb_ivar_set(obj, id, val);
}
1055
/* `getinstancevariable` instruction body: cached ivar read via the
 * instruction cache `ic` (no call cache; not an attr_reader path). */
static inline VALUE
vm_getinstancevariable(VALUE obj, ID id, IC ic)
{
    return vm_getivar(obj, id, ic, NULL, FALSE);
}
1061
1062 static inline void
vm_setinstancevariable(VALUE obj,ID id,VALUE val,IC ic)1063 vm_setinstancevariable(VALUE obj, ID id, VALUE val, IC ic)
1064 {
1065 vm_setivar(obj, id, val, ic, 0, 0);
1066 }
1067
1068 static VALUE
vm_throw_continue(const rb_execution_context_t * ec,VALUE err)1069 vm_throw_continue(const rb_execution_context_t *ec, VALUE err)
1070 {
1071 /* continue throw */
1072
1073 if (FIXNUM_P(err)) {
1074 ec->tag->state = FIX2INT(err);
1075 }
1076 else if (SYMBOL_P(err)) {
1077 ec->tag->state = TAG_THROW;
1078 }
1079 else if (THROW_DATA_P(err)) {
1080 ec->tag->state = THROW_DATA_STATE((struct vm_throw_data *)err);
1081 }
1082 else {
1083 ec->tag->state = TAG_RAISE;
1084 }
1085 return err;
1086 }
1087
1088 static VALUE
vm_throw_start(const rb_execution_context_t * ec,rb_control_frame_t * const reg_cfp,enum ruby_tag_type state,const int flag,const VALUE throwobj)1089 vm_throw_start(const rb_execution_context_t *ec, rb_control_frame_t *const reg_cfp, enum ruby_tag_type state,
1090 const int flag, const VALUE throwobj)
1091 {
1092 const rb_control_frame_t *escape_cfp = NULL;
1093 const rb_control_frame_t * const eocfp = RUBY_VM_END_CONTROL_FRAME(ec); /* end of control frame pointer */
1094
1095 if (flag != 0) {
1096 /* do nothing */
1097 }
1098 else if (state == TAG_BREAK) {
1099 int is_orphan = 1;
1100 const VALUE *ep = GET_EP();
1101 const rb_iseq_t *base_iseq = GET_ISEQ();
1102 escape_cfp = reg_cfp;
1103
1104 while (base_iseq->body->type != ISEQ_TYPE_BLOCK) {
1105 if (escape_cfp->iseq->body->type == ISEQ_TYPE_CLASS) {
1106 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1107 ep = escape_cfp->ep;
1108 base_iseq = escape_cfp->iseq;
1109 }
1110 else {
1111 ep = VM_ENV_PREV_EP(ep);
1112 base_iseq = base_iseq->body->parent_iseq;
1113 escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);
1114 VM_ASSERT(escape_cfp->iseq == base_iseq);
1115 }
1116 }
1117
1118 if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1119 /* lambda{... break ...} */
1120 is_orphan = 0;
1121 state = TAG_RETURN;
1122 }
1123 else {
1124 ep = VM_ENV_PREV_EP(ep);
1125
1126 while (escape_cfp < eocfp) {
1127 if (escape_cfp->ep == ep) {
1128 const rb_iseq_t *const iseq = escape_cfp->iseq;
1129 const VALUE epc = escape_cfp->pc - iseq->body->iseq_encoded;
1130 const struct iseq_catch_table *const ct = iseq->body->catch_table;
1131 unsigned int i;
1132
1133 if (!ct) break;
1134 for (i=0; i < ct->size; i++) {
1135 const struct iseq_catch_table_entry * const entry = &ct->entries[i];
1136
1137 if (entry->type == CATCH_TYPE_BREAK &&
1138 entry->iseq == base_iseq &&
1139 entry->start < epc && entry->end >= epc) {
1140 if (entry->cont == epc) { /* found! */
1141 is_orphan = 0;
1142 }
1143 break;
1144 }
1145 }
1146 break;
1147 }
1148
1149 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1150 }
1151 }
1152
1153 if (is_orphan) {
1154 rb_vm_localjump_error("break from proc-closure", throwobj, TAG_BREAK);
1155 }
1156 }
1157 else if (state == TAG_RETRY) {
1158 const VALUE *ep = VM_ENV_PREV_EP(GET_EP());
1159
1160 escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);
1161 }
1162 else if (state == TAG_RETURN) {
1163 const VALUE *current_ep = GET_EP();
1164 const VALUE *target_lep = VM_EP_LEP(current_ep);
1165 int in_class_frame = 0;
1166 int toplevel = 1;
1167 escape_cfp = reg_cfp;
1168
1169 while (escape_cfp < eocfp) {
1170 const VALUE *lep = VM_CF_LEP(escape_cfp);
1171
1172 if (!target_lep) {
1173 target_lep = lep;
1174 }
1175
1176 if (lep == target_lep &&
1177 VM_FRAME_RUBYFRAME_P(escape_cfp) &&
1178 escape_cfp->iseq->body->type == ISEQ_TYPE_CLASS) {
1179 in_class_frame = 1;
1180 target_lep = 0;
1181 }
1182
1183 if (lep == target_lep) {
1184 if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1185 toplevel = 0;
1186 if (in_class_frame) {
1187 /* lambda {class A; ... return ...; end} */
1188 goto valid_return;
1189 }
1190 else {
1191 const VALUE *tep = current_ep;
1192
1193 while (target_lep != tep) {
1194 if (escape_cfp->ep == tep) {
1195 /* in lambda */
1196 goto valid_return;
1197 }
1198 tep = VM_ENV_PREV_EP(tep);
1199 }
1200 }
1201 }
1202 else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
1203 switch (escape_cfp->iseq->body->type) {
1204 case ISEQ_TYPE_TOP:
1205 case ISEQ_TYPE_MAIN:
1206 if (toplevel) goto valid_return;
1207 break;
1208 case ISEQ_TYPE_EVAL:
1209 case ISEQ_TYPE_CLASS:
1210 toplevel = 0;
1211 break;
1212 default:
1213 break;
1214 }
1215 }
1216 }
1217
1218 if (escape_cfp->ep == target_lep && escape_cfp->iseq->body->type == ISEQ_TYPE_METHOD) {
1219 goto valid_return;
1220 }
1221
1222 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1223 }
1224 rb_vm_localjump_error("unexpected return", throwobj, TAG_RETURN);
1225
1226 valid_return:;
1227 /* do nothing */
1228 }
1229 else {
1230 rb_bug("isns(throw): unsupport throw type");
1231 }
1232
1233 ec->tag->state = state;
1234 return (VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);
1235 }
1236
1237 static VALUE
vm_throw(const rb_execution_context_t * ec,rb_control_frame_t * reg_cfp,rb_num_t throw_state,VALUE throwobj)1238 vm_throw(const rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
1239 rb_num_t throw_state, VALUE throwobj)
1240 {
1241 const int state = (int)(throw_state & VM_THROW_STATE_MASK);
1242 const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);
1243
1244 if (state != 0) {
1245 return vm_throw_start(ec, reg_cfp, state, flag, throwobj);
1246 }
1247 else {
1248 return vm_throw_continue(ec, throwobj);
1249 }
1250 }
1251
static inline void
vm_expandarray(VALUE *sp, VALUE ary, rb_num_t num, int flag)
{
    /* Implements the `expandarray` instruction: unpack `ary` into `num`
     * stack slots starting at sp-1.  flag bit 0 requests a trailing splat
     * (rest array); flag bit 1 selects "post" order (reversed, used for
     * trailing assignment targets). */
    int is_splat = flag & 0x01;
    rb_num_t space_size = num + is_splat;
    VALUE *base = sp - 1;
    const VALUE *ptr;
    rb_num_t len;
    const VALUE obj = ary;

    if (!RB_TYPE_P(ary, T_ARRAY) && NIL_P(ary = rb_check_array_type(ary))) {
        /* not an array and won't convert: treat as a 1-element array */
        ary = obj;
        ptr = &ary;
        len = 1;
    }
    else {
        ptr = RARRAY_CONST_PTR_TRANSIENT(ary);
        len = (rb_num_t)RARRAY_LEN(ary);
    }

    if (space_size == 0) {
        /* no space left on stack */
    }
    else if (flag & 0x02) {
        /* post: ..., nil ,ary[-1], ..., ary[0..-num] # top */
        rb_num_t i = 0, j;

        if (len < num) {
            /* pad with nil when the array is shorter than requested */
            for (i=0; i<num-len; i++) {
                *base++ = Qnil;
            }
        }
        for (j=0; i<num; i++, j++) {
            VALUE v = ptr[len - j - 1];
            *base++ = v;
        }
        if (is_splat) {
            /* splat receives whatever elements were not consumed above */
            *base = rb_ary_new4(len - j, ptr);
        }
    }
    else {
        /* normal: ary[num..-1], ary[num-2], ary[num-3], ..., ary[0] # top */
        rb_num_t i;
        VALUE *bptr = &base[space_size - 1];

        for (i=0; i<num; i++) {
            if (len <= i) {
                /* array exhausted: fill the remaining slots with nil */
                for (; i<num; i++) {
                    *bptr-- = Qnil;
                }
                break;
            }
            *bptr-- = ptr[i];
        }
        if (is_splat) {
            if (num > len) {
                *bptr = rb_ary_new();
            }
            else {
                *bptr = rb_ary_new4(len - num, ptr + num);
            }
        }
    }
    /* keep `ary` (and its transient buffer) alive across the copies */
    RB_GC_GUARD(ary);
}
1317
1318 static VALUE vm_call_general(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc);
1319
MJIT_FUNC_EXPORTED void
rb_vm_search_method_slowpath(const struct rb_call_info *ci, struct rb_call_cache *cc, VALUE klass)
{
    /* Cache-miss path of method dispatch: resolve the callable method
     * entry for ci->mid on `klass`, reset the handler to the generic
     * dispatcher, and record the serials that validate the refilled
     * inline cache. */
    cc->me = rb_callable_method_entry(klass, ci->mid);
    VM_ASSERT(callable_method_entry_p(cc->me));
    cc->call = vm_call_general;
#if OPT_INLINE_METHOD_CACHE
    cc->method_state = GET_GLOBAL_METHOD_STATE();
    cc->class_serial = RCLASS_SERIAL(klass);
#endif
}
1331
static void
vm_search_method(const struct rb_call_info *ci, struct rb_call_cache *cc, VALUE recv)
{
    /* Fill `cc` with the method entry for ci->mid on recv's class.
     * With OPT_INLINE_METHOD_CACHE, the cached entry is reused as long
     * as both the global method state and the class serial still match. */
    VALUE klass = CLASS_OF(recv);

    VM_ASSERT(klass != Qfalse);
    VM_ASSERT(RBASIC_CLASS(klass) == 0 || rb_obj_is_kind_of(klass, rb_cClass));

#if OPT_INLINE_METHOD_CACHE
    if (LIKELY(RB_DEBUG_COUNTER_INC_UNLESS(mc_global_state_miss,
                                           GET_GLOBAL_METHOD_STATE() == cc->method_state) &&
               RB_DEBUG_COUNTER_INC_UNLESS(mc_class_serial_miss,
                                           RCLASS_SERIAL(klass) == cc->class_serial))) {
        /* cache hit! */
        VM_ASSERT(cc->call != NULL);
        RB_DEBUG_COUNTER_INC(mc_inline_hit);
        return;
    }
    RB_DEBUG_COUNTER_INC(mc_inline_miss);
#endif
    rb_vm_search_method_slowpath(ci, cc, klass);
}
1354
1355 static inline int
check_cfunc(const rb_callable_method_entry_t * me,VALUE (* func)())1356 check_cfunc(const rb_callable_method_entry_t *me, VALUE (*func)())
1357 {
1358 if (me && me->def->type == VM_METHOD_TYPE_CFUNC &&
1359 me->def->body.cfunc.func == func) {
1360 return 1;
1361 }
1362 else {
1363 return 0;
1364 }
1365 }
1366
static inline int
vm_method_cfunc_is(CALL_INFO ci, CALL_CACHE cc,
                   VALUE recv, VALUE (*func)())
{
    /* Resolve ci->mid on `recv` into `cc`, then test whether the method
     * is (still) the C function `func` — i.e. not redefined. */
    vm_search_method(ci, cc, recv);
    return check_cfunc(cc->me, func);
}
1374
1375 static VALUE
opt_equal_fallback(VALUE recv,VALUE obj,CALL_INFO ci,CALL_CACHE cc)1376 opt_equal_fallback(VALUE recv, VALUE obj, CALL_INFO ci, CALL_CACHE cc)
1377 {
1378 if (vm_method_cfunc_is(ci, cc, recv, rb_obj_equal)) {
1379 return recv == obj ? Qtrue : Qfalse;
1380 }
1381
1382 return Qundef;
1383 }
1384
1385 #define BUILTIN_CLASS_P(x, k) (!SPECIAL_CONST_P(x) && RBASIC_CLASS(x) == k)
1386 #define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)
1387
1388 /* 1: compare by identity, 0: not applicable, -1: redefined */
1389 static inline int
comparable_by_identity(VALUE recv,VALUE obj)1390 comparable_by_identity(VALUE recv, VALUE obj)
1391 {
1392 if (FIXNUM_2_P(recv, obj)) {
1393 return (EQ_UNREDEFINED_P(INTEGER) != 0) * 2 - 1;
1394 }
1395 if (FLONUM_2_P(recv, obj)) {
1396 return (EQ_UNREDEFINED_P(FLOAT) != 0) * 2 - 1;
1397 }
1398 if (SYMBOL_P(recv) && SYMBOL_P(obj)) {
1399 return (EQ_UNREDEFINED_P(SYMBOL) != 0) * 2 - 1;
1400 }
1401 return 0;
1402 }
1403
1404 static
1405 #ifndef NO_BIG_INLINE
1406 inline
1407 #endif
1408 VALUE
opt_eq_func(VALUE recv,VALUE obj,CALL_INFO ci,CALL_CACHE cc)1409 opt_eq_func(VALUE recv, VALUE obj, CALL_INFO ci, CALL_CACHE cc)
1410 {
1411 switch (comparable_by_identity(recv, obj)) {
1412 case 1:
1413 return (recv == obj) ? Qtrue : Qfalse;
1414 case -1:
1415 goto fallback;
1416 }
1417 if (0) {
1418 }
1419 else if (BUILTIN_CLASS_P(recv, rb_cFloat)) {
1420 if (EQ_UNREDEFINED_P(FLOAT)) {
1421 return rb_float_equal(recv, obj);
1422 }
1423 }
1424 else if (BUILTIN_CLASS_P(recv, rb_cString)) {
1425 if (EQ_UNREDEFINED_P(STRING)) {
1426 return rb_str_equal(recv, obj);
1427 }
1428 }
1429
1430 fallback:
1431 return opt_equal_fallback(recv, obj, ci, cc);
1432 }
1433
1434 static
1435 #ifndef NO_BIG_INLINE
1436 inline
1437 #endif
1438 VALUE
opt_eql_func(VALUE recv,VALUE obj,CALL_INFO ci,CALL_CACHE cc)1439 opt_eql_func(VALUE recv, VALUE obj, CALL_INFO ci, CALL_CACHE cc)
1440 {
1441 switch (comparable_by_identity(recv, obj)) {
1442 case 1:
1443 return (recv == obj) ? Qtrue : Qfalse;
1444 case -1:
1445 goto fallback;
1446 }
1447 if (0) {
1448 }
1449 else if (BUILTIN_CLASS_P(recv, rb_cFloat)) {
1450 if (EQ_UNREDEFINED_P(FLOAT)) {
1451 return rb_float_eql(recv, obj);
1452 }
1453 }
1454 else if (BUILTIN_CLASS_P(recv, rb_cString)) {
1455 if (EQ_UNREDEFINED_P(STRING)) {
1456 return rb_str_eql(recv, obj);
1457 }
1458 }
1459
1460 fallback:
1461 return opt_equal_fallback(recv, obj, ci, cc);
1462 }
1463 #undef BUILTIN_CLASS_P
1464 #undef EQ_UNREDEFINED_P
1465
VALUE
rb_equal_opt(VALUE obj1, VALUE obj2)
{
    /* Fast-path `==` for use from C: run the optimized comparison with a
     * throwaway, empty call info/cache.  Returns Qundef when no fast
     * path applies and the caller must dispatch #== itself. */
    struct rb_call_info ci;
    struct rb_call_cache cc;

    ci.mid = idEq;
    /* zeroed cache fields force a method lookup on the fallback path */
    cc.method_state = 0;
    cc.class_serial = 0;
    cc.me = NULL;
    return opt_eq_func(obj1, obj2, &ci, &cc);
}
1478
VALUE
rb_eql_opt(VALUE obj1, VALUE obj2)
{
    /* Fast-path `eql?` for use from C; same contract as rb_equal_opt:
     * Qundef means "no fast path, dispatch #eql? normally". */
    struct rb_call_info ci;
    struct rb_call_cache cc;

    ci.mid = idEqlP;
    /* zeroed cache fields force a method lookup on the fallback path */
    cc.method_state = 0;
    cc.class_serial = 0;
    cc.me = NULL;
    return opt_eql_func(obj1, obj2, &ci, &cc);
}
1491
1492 extern VALUE rb_vm_call0(rb_execution_context_t *ec, VALUE, ID, int, const VALUE*, const rb_callable_method_entry_t *);
1493
static VALUE
check_match(rb_execution_context_t *ec, VALUE pattern, VALUE target, enum vm_check_match_type type)
{
    /* Implements the `checkmatch` instruction: `when` clauses return the
     * pattern itself; `rescue` clauses must name a class/module; `case`
     * and `rescue` both dispatch pattern === target. */
    switch (type) {
      case VM_CHECKMATCH_TYPE_WHEN:
        return pattern;
      case VM_CHECKMATCH_TYPE_RESCUE:
        if (!rb_obj_is_kind_of(pattern, rb_cModule)) {
            rb_raise(rb_eTypeError, "class or module required for rescue clause");
        }
        /* fall through */
      case VM_CHECKMATCH_TYPE_CASE: {
        const rb_callable_method_entry_t *me =
            rb_callable_method_entry_with_refinements(CLASS_OF(pattern), idEqq, NULL);
        if (me) {
            return rb_vm_call0(ec, pattern, idEqq, 1, &target, me);
        }
        else {
            /* fallback to funcall (e.g. method_missing) */
            return rb_funcallv(pattern, idEqq, 1, &target);
        }
      }
      default:
        rb_bug("check_match: unreachable");
    }
}
1520
1521
1522 #if defined(_MSC_VER) && _MSC_VER < 1300
1523 #define CHECK_CMP_NAN(a, b) if (isnan(a) || isnan(b)) return Qfalse;
1524 #else
1525 #define CHECK_CMP_NAN(a, b) /* do nothing */
1526 #endif
1527
1528 static inline VALUE
double_cmp_lt(double a,double b)1529 double_cmp_lt(double a, double b)
1530 {
1531 CHECK_CMP_NAN(a, b);
1532 return a < b ? Qtrue : Qfalse;
1533 }
1534
1535 static inline VALUE
double_cmp_le(double a,double b)1536 double_cmp_le(double a, double b)
1537 {
1538 CHECK_CMP_NAN(a, b);
1539 return a <= b ? Qtrue : Qfalse;
1540 }
1541
1542 static inline VALUE
double_cmp_gt(double a,double b)1543 double_cmp_gt(double a, double b)
1544 {
1545 CHECK_CMP_NAN(a, b);
1546 return a > b ? Qtrue : Qfalse;
1547 }
1548
1549 static inline VALUE
double_cmp_ge(double a,double b)1550 double_cmp_ge(double a, double b)
1551 {
1552 CHECK_CMP_NAN(a, b);
1553 return a >= b ? Qtrue : Qfalse;
1554 }
1555
static VALUE *
vm_base_ptr(const rb_control_frame_t *cfp)
{
    /* Compute the base pointer (start of the value-stack area) of a Ruby
     * frame: caller's sp + local table + env data, plus one extra slot
     * for `self` in METHOD frames.  Returns NULL for C frames, which
     * have no Ruby-level base. */
    const rb_control_frame_t *prev_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);

    if (cfp->iseq && VM_FRAME_RUBYFRAME_P(cfp)) {
        VALUE *bp = prev_cfp->sp + cfp->iseq->body->local_table_size + VM_ENV_DATA_SIZE;
        if (cfp->iseq->body->type == ISEQ_TYPE_METHOD) {
            /* adjust `self' */
            bp += 1;
        }
#if VM_DEBUG_BP_CHECK
        /* debug build: cross-check against the bp recorded at frame push */
        if (bp != cfp->bp_check) {
            fprintf(stderr, "bp_check: %ld, bp: %ld\n",
                    (long)(cfp->bp_check - GET_EC()->vm_stack),
                    (long)(bp - GET_EC()->vm_stack));
            rb_bug("vm_base_ptr: unreachable");
        }
#endif
        return bp;
    }
    else {
        return NULL;
    }
}
1581
1582 /* method call processes with call_info */
1583
1584 #include "vm_args.c"
1585
1586 static inline VALUE vm_call_iseq_setup_2(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc, int opt_pc, int param_size, int local_size);
1587 ALWAYS_INLINE(static VALUE vm_call_iseq_setup_normal(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const rb_callable_method_entry_t *me, int opt_pc, int param_size, int local_size));
1588 static inline VALUE vm_call_iseq_setup_tailcall(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc, int opt_pc);
1589 static VALUE vm_call_super_method(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc);
1590 static VALUE vm_call_method_nome(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc);
1591 static VALUE vm_call_method_each_type(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc);
1592 static inline VALUE vm_call_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc);
1593
1594 static vm_call_handler vm_call_iseq_setup_func(const struct rb_call_info *ci, const int param_size, const int local_size);
1595
static VALUE
vm_call_iseq_setup_tailcall_0start(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc)
{
    /* Cached fastpath handler: tailcall frame setup entering at opt_pc 0. */
    return vm_call_iseq_setup_tailcall(ec, cfp, calling, ci, cc, 0);
}
1601
1602 static VALUE
vm_call_iseq_setup_normal_0start(rb_execution_context_t * ec,rb_control_frame_t * cfp,struct rb_calling_info * calling,const struct rb_call_info * ci,struct rb_call_cache * cc)1603 vm_call_iseq_setup_normal_0start(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc)
1604 {
1605 const rb_iseq_t *iseq = def_iseq_ptr(cc->me->def);
1606 int param = iseq->body->param.size;
1607 int local = iseq->body->local_table_size;
1608 return vm_call_iseq_setup_normal(ec, cfp, calling, cc->me, 0, param, local);
1609 }
1610
1611 MJIT_STATIC int
rb_simple_iseq_p(const rb_iseq_t * iseq)1612 rb_simple_iseq_p(const rb_iseq_t *iseq)
1613 {
1614 return iseq->body->param.flags.has_opt == FALSE &&
1615 iseq->body->param.flags.has_rest == FALSE &&
1616 iseq->body->param.flags.has_post == FALSE &&
1617 iseq->body->param.flags.has_kw == FALSE &&
1618 iseq->body->param.flags.has_kwrest == FALSE &&
1619 iseq->body->param.flags.has_block == FALSE;
1620 }
1621
static inline int
vm_callee_setup_arg(rb_execution_context_t *ec, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc,
                    const rb_iseq_t *iseq, VALUE *argv, int param_size, int local_size)
{
    /* Arrange the caller's arguments for an ISeq method and return the
     * opt_pc (bytecode offset) execution should start at.  Simple
     * signatures take the fast path and may also install a specialized
     * call handler into `cc` for subsequent calls. */
    if (LIKELY(rb_simple_iseq_p(iseq) && !(ci->flag & VM_CALL_KW_SPLAT))) {
        rb_control_frame_t *cfp = ec->cfp;

        CALLER_SETUP_ARG(cfp, calling, ci); /* splat arg */

        if (calling->argc != iseq->body->param.lead_num) {
            argument_arity_error(ec, iseq, calling->argc, iseq->body->param.lead_num, iseq->body->param.lead_num);
        }

        /* fastpath cacheable only without splat/keyword massaging or a
         * protected-visibility check on each call */
        CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size),
                        (!IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
                         !(METHOD_ENTRY_VISI(cc->me) == METHOD_VISI_PROTECTED)));
        return 0;
    }
    else {
        return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_method);
    }
}
1644
1645 static VALUE
vm_call_iseq_setup(rb_execution_context_t * ec,rb_control_frame_t * cfp,struct rb_calling_info * calling,const struct rb_call_info * ci,struct rb_call_cache * cc)1646 vm_call_iseq_setup(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc)
1647 {
1648 const rb_iseq_t *iseq = def_iseq_ptr(cc->me->def);
1649 const int param_size = iseq->body->param.size;
1650 const int local_size = iseq->body->local_table_size;
1651 const int opt_pc = vm_callee_setup_arg(ec, calling, ci, cc, def_iseq_ptr(cc->me->def), cfp->sp - calling->argc, param_size, local_size);
1652 return vm_call_iseq_setup_2(ec, cfp, calling, ci, cc, opt_pc, param_size, local_size);
1653 }
1654
1655 static inline VALUE
vm_call_iseq_setup_2(rb_execution_context_t * ec,rb_control_frame_t * cfp,struct rb_calling_info * calling,const struct rb_call_info * ci,struct rb_call_cache * cc,int opt_pc,int param_size,int local_size)1656 vm_call_iseq_setup_2(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc,
1657 int opt_pc, int param_size, int local_size)
1658 {
1659 if (LIKELY(!(ci->flag & VM_CALL_TAILCALL))) {
1660 return vm_call_iseq_setup_normal(ec, cfp, calling, cc->me, opt_pc, param_size, local_size);
1661 }
1662 else {
1663 return vm_call_iseq_setup_tailcall(ec, cfp, calling, ci, cc, opt_pc);
1664 }
1665 }
1666
static inline VALUE
vm_call_iseq_setup_normal(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const rb_callable_method_entry_t *me,
                          int opt_pc, int param_size, int local_size)
{
    /* Push a METHOD frame for `me`, reusing the receiver and arguments
     * already laid out on the caller's stack.  Returns Qundef so the
     * interpreter continues in the new frame. */
    const rb_iseq_t *iseq = def_iseq_ptr(me->def);
    VALUE *argv = cfp->sp - calling->argc;
    VALUE *sp = argv + param_size;
    /* roll the caller's sp back below recv+args; they now belong to the callee */
    cfp->sp = argv - 1 /* recv */;

    vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL, calling->recv,
                  calling->block_handler, (VALUE)me,
                  iseq->body->iseq_encoded + opt_pc, sp,
                  local_size - param_size,
                  iseq->body->stack_max);
    return Qundef;
}
1683
static inline VALUE
vm_call_iseq_setup_tailcall(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc,
                            int opt_pc)
{
    /* Perform a tailcall: pop the caller's frame and push the callee's
     * frame in its place, copying receiver and arguments down into the
     * freed stack region first. */
    unsigned int i;
    VALUE *argv = cfp->sp - calling->argc;
    const rb_callable_method_entry_t *me = cc->me;
    const rb_iseq_t *iseq = def_iseq_ptr(me->def);
    VALUE *src_argv = argv;
    VALUE *sp_orig, *sp;
    /* preserve the FINISH flag so the outer vm_exec loop still unwinds here */
    VALUE finish_flag = VM_FRAME_FINISHED_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;

    if (VM_BH_FROM_CFP_P(calling->block_handler, cfp)) {
        /* the block handler points into the frame being popped; migrate
         * it to the previous frame so it stays valid after the pop */
        struct rb_captured_block *dst_captured = VM_CFP_TO_CAPTURED_BLOCK(RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
        const struct rb_captured_block *src_captured = VM_BH_TO_CAPT_BLOCK(calling->block_handler);
        dst_captured->code.val = src_captured->code.val;
        if (VM_BH_ISEQ_BLOCK_P(calling->block_handler)) {
            calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);
        }
        else {
            calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);
        }
    }

    vm_pop_frame(ec, cfp, cfp->ep);
    cfp = ec->cfp;

    sp_orig = sp = cfp->sp;

    /* push self */
    sp[0] = calling->recv;
    sp++;

    /* copy arguments */
    for (i=0; i < iseq->body->param.size; i++) {
        *sp++ = src_argv[i];
    }

    vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL | finish_flag,
                  calling->recv, calling->block_handler, (VALUE)me,
                  iseq->body->iseq_encoded + opt_pc, sp,
                  iseq->body->local_table_size - iseq->body->param.size,
                  iseq->body->stack_max);

    cfp->sp = sp_orig;
    RUBY_VM_CHECK_INTS(ec);

    return Qundef;
}
1733
1734 static VALUE
call_cfunc_m2(VALUE (* func)(ANYARGS),VALUE recv,int argc,const VALUE * argv)1735 call_cfunc_m2(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1736 {
1737 return (*func)(recv, rb_ary_new4(argc, argv));
1738 }
1739
1740 static VALUE
call_cfunc_m1(VALUE (* func)(ANYARGS),VALUE recv,int argc,const VALUE * argv)1741 call_cfunc_m1(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1742 {
1743 return (*func)(argc, argv, recv);
1744 }
1745
1746 static VALUE
call_cfunc_0(VALUE (* func)(ANYARGS),VALUE recv,int argc,const VALUE * argv)1747 call_cfunc_0(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1748 {
1749 return (*func)(recv);
1750 }
1751
1752 static VALUE
call_cfunc_1(VALUE (* func)(ANYARGS),VALUE recv,int argc,const VALUE * argv)1753 call_cfunc_1(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1754 {
1755 return (*func)(recv, argv[0]);
1756 }
1757
1758 static VALUE
call_cfunc_2(VALUE (* func)(ANYARGS),VALUE recv,int argc,const VALUE * argv)1759 call_cfunc_2(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1760 {
1761 return (*func)(recv, argv[0], argv[1]);
1762 }
1763
1764 static VALUE
call_cfunc_3(VALUE (* func)(ANYARGS),VALUE recv,int argc,const VALUE * argv)1765 call_cfunc_3(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1766 {
1767 return (*func)(recv, argv[0], argv[1], argv[2]);
1768 }
1769
1770 static VALUE
call_cfunc_4(VALUE (* func)(ANYARGS),VALUE recv,int argc,const VALUE * argv)1771 call_cfunc_4(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1772 {
1773 return (*func)(recv, argv[0], argv[1], argv[2], argv[3]);
1774 }
1775
1776 static VALUE
call_cfunc_5(VALUE (* func)(ANYARGS),VALUE recv,int argc,const VALUE * argv)1777 call_cfunc_5(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1778 {
1779 return (*func)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
1780 }
1781
1782 static VALUE
call_cfunc_6(VALUE (* func)(ANYARGS),VALUE recv,int argc,const VALUE * argv)1783 call_cfunc_6(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1784 {
1785 return (*func)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
1786 }
1787
1788 static VALUE
call_cfunc_7(VALUE (* func)(ANYARGS),VALUE recv,int argc,const VALUE * argv)1789 call_cfunc_7(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1790 {
1791 return (*func)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
1792 }
1793
1794 static VALUE
call_cfunc_8(VALUE (* func)(ANYARGS),VALUE recv,int argc,const VALUE * argv)1795 call_cfunc_8(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1796 {
1797 return (*func)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
1798 }
1799
1800 static VALUE
call_cfunc_9(VALUE (* func)(ANYARGS),VALUE recv,int argc,const VALUE * argv)1801 call_cfunc_9(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1802 {
1803 return (*func)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
1804 }
1805
1806 static VALUE
call_cfunc_10(VALUE (* func)(ANYARGS),VALUE recv,int argc,const VALUE * argv)1807 call_cfunc_10(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1808 {
1809 return (*func)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
1810 }
1811
1812 static VALUE
call_cfunc_11(VALUE (* func)(ANYARGS),VALUE recv,int argc,const VALUE * argv)1813 call_cfunc_11(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1814 {
1815 return (*func)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
1816 }
1817
1818 static VALUE
call_cfunc_12(VALUE (* func)(ANYARGS),VALUE recv,int argc,const VALUE * argv)1819 call_cfunc_12(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1820 {
1821 return (*func)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
1822 }
1823
1824 static VALUE
call_cfunc_13(VALUE (* func)(ANYARGS),VALUE recv,int argc,const VALUE * argv)1825 call_cfunc_13(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1826 {
1827 return (*func)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
1828 }
1829
1830 static VALUE
call_cfunc_14(VALUE (* func)(ANYARGS),VALUE recv,int argc,const VALUE * argv)1831 call_cfunc_14(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1832 {
1833 return (*func)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
1834 }
1835
1836 static VALUE
call_cfunc_15(VALUE (* func)(ANYARGS),VALUE recv,int argc,const VALUE * argv)1837 call_cfunc_15(VALUE (*func)(ANYARGS), VALUE recv, int argc, const VALUE *argv)
1838 {
1839 return (*func)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
1840 }
1841
1842 static inline int
vm_cfp_consistent_p(rb_execution_context_t * ec,const rb_control_frame_t * reg_cfp)1843 vm_cfp_consistent_p(rb_execution_context_t *ec, const rb_control_frame_t *reg_cfp)
1844 {
1845 const int ov_flags = RAISED_STACKOVERFLOW;
1846 if (LIKELY(reg_cfp == ec->cfp + 1)) return TRUE;
1847 if (rb_ec_raised_p(ec, ov_flags)) {
1848 rb_ec_raised_reset(ec, ov_flags);
1849 return TRUE;
1850 }
1851 return FALSE;
1852 }
1853
1854 #define CHECK_CFP_CONSISTENCY(func) \
1855 (LIKELY(vm_cfp_consistent_p(ec, reg_cfp)) ? (void)0 : \
1856 rb_bug(func ": cfp consistency error (%p, %p)", (void *)reg_cfp, (void *)(ec->cfp+1)))
1857
static inline
const rb_method_cfunc_t *
vm_method_cfunc_entry(const rb_callable_method_entry_t *me)
{
    /* Return the cfunc body of `me`.  Under VM_DEBUG_VERIFY_METHOD_CACHE,
     * crash loudly (naming the offending type) if the cached entry is not
     * actually a cfunc. */
#if VM_DEBUG_VERIFY_METHOD_CACHE
    switch (me->def->type) {
      case VM_METHOD_TYPE_CFUNC:
      case VM_METHOD_TYPE_NOTIMPLEMENTED:
        break;
# define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
        METHOD_BUG(ISEQ);
        METHOD_BUG(ATTRSET);
        METHOD_BUG(IVAR);
        METHOD_BUG(BMETHOD);
        METHOD_BUG(ZSUPER);
        METHOD_BUG(UNDEF);
        METHOD_BUG(OPTIMIZED);
        METHOD_BUG(MISSING);
        METHOD_BUG(REFINED);
        METHOD_BUG(ALIAS);
# undef METHOD_BUG
      default:
        rb_bug("wrong method type: %d", me->def->type);
    }
#endif
    return &me->def->body.cfunc;
}
1885
static VALUE
vm_call_cfunc_with_frame(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc)
{
    /* Call a C-implemented method: push a CFUNC marker frame (visible to
     * backtraces and hooks), check arity, invoke through the
     * arity-specific invoker, then pop the frame.  c-call/c-return
     * events and DTrace hooks fire around the invocation. */
    VALUE val;
    const rb_callable_method_entry_t *me = cc->me;
    const rb_method_cfunc_t *cfunc = vm_method_cfunc_entry(me);
    int len = cfunc->argc;

    VALUE recv = calling->recv;
    VALUE block_handler = calling->block_handler;
    int argc = calling->argc;

    RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, me->owner, me->def->original_id);
    EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, recv, me->def->original_id, ci->mid, me->owner, Qundef);

    vm_push_frame(ec, NULL, VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL, recv,
                  block_handler, (VALUE)me,
                  0, ec->cfp->sp, 0, 0);

    /* negative len means variadic; skip the fixed-arity check */
    if (len >= 0) rb_check_arity(argc, len, len);

    /* pop receiver + args off the caller's stack before invoking */
    reg_cfp->sp -= argc + 1;
    val = (*cfunc->invoker)(cfunc->func, recv, argc, reg_cfp->sp + 1);

    /* the cfunc must leave the frame it was given on top of the stack */
    CHECK_CFP_CONSISTENCY("vm_call_cfunc");

    rb_vm_pop_frame(ec);

    EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, recv, me->def->original_id, ci->mid, me->owner, val);
    RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, me->owner, me->def->original_id);

    return val;
}
1919
static VALUE
vm_call_cfunc(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc)
{
    /* Entry point for calling a C-implemented method: normalize the
     * caller's arguments (splat etc.), then invoke with a CFUNC frame. */
    CALLER_SETUP_ARG(reg_cfp, calling, ci);
    return vm_call_cfunc_with_frame(ec, reg_cfp, calling, ci, cc);
}
1926
static VALUE
vm_call_ivar(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc)
{
    /* Optimized attr_reader call: pop the receiver from the stack and
     * read the ivar through the attr index cached in `cc` (is_attr). */
    cfp->sp -= 1;
    return vm_getivar(calling->recv, cc->me->def->body.attr.id, NULL, cc, TRUE);
}
1933
/* Fast path for attr_writer-style methods: take the single argument from
 * the top of the VM stack, pop argument + receiver, and store the ivar. */
static VALUE
vm_call_attrset(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc)
{
    VALUE val = *(cfp->sp - 1);  /* the assigned value (single argument) */
    cfp->sp -= 2;                /* drop argument and receiver */
    return vm_setivar(calling->recv, cc->me->def->body.attr.id, val, NULL, cc, 1);
}
1941
/* Execute a bmethod (a method defined via define_method from a Proc):
 * fetch the underlying proc from the method definition and invoke it
 * with the already-copied argument vector. */
static inline VALUE
vm_call_bmethod_body(rb_execution_context_t *ec, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc, const VALUE *argv)
{
    rb_proc_t *proc;
    VALUE val;

    /* control block frame */
    GetProcPtr(cc->me->def->body.bmethod.proc, proc);
    val = rb_vm_invoke_bmethod(ec, proc, calling->recv, calling->argc, argv, calling->block_handler, cc->me);

    return val;
}
1954
/* `call` handler for bmethods: copy the arguments off the VM stack into a
 * stack-allocated buffer (the VM stack slots are popped before the proc
 * runs), then delegate to vm_call_bmethod_body. */
static VALUE
vm_call_bmethod(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc)
{
    VALUE *argv;
    int argc;

    CALLER_SETUP_ARG(cfp, calling, ci);
    argc = calling->argc;
    argv = ALLOCA_N(VALUE, argc);
    MEMCPY(argv, cfp->sp - argc, VALUE, argc);
    cfp->sp += - argc - 1;  /* pop arguments and receiver */

    return vm_call_bmethod_body(ec, calling, ci, cc, argv);
}
1969
1970 static enum method_missing_reason
ci_missing_reason(const struct rb_call_info * ci)1971 ci_missing_reason(const struct rb_call_info *ci)
1972 {
1973 enum method_missing_reason stat = MISSING_NOENTRY;
1974 if (ci->flag & VM_CALL_VCALL) stat |= MISSING_VCALL;
1975 if (ci->flag & VM_CALL_FCALL) stat |= MISSING_FCALL;
1976 if (ci->flag & VM_CALL_SUPER) stat |= MISSING_SUPER;
1977 return stat;
1978 }
1979
/* Optimized `send`/`__send__`: peel the method-name argument off the stack,
 * build fresh call info/cache on the C stack (the originals are shared and
 * must not be mutated), and re-dispatch through vm_call_method.  If the
 * name is not an interned symbol, fall through to method_missing. */
static VALUE
vm_call_opt_send(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, const struct rb_call_info *orig_ci, struct rb_call_cache *orig_cc)
{
    int i;
    VALUE sym;
    struct rb_call_info *ci;
    struct rb_call_info_with_kwarg ci_entry;
    struct rb_call_cache cc_entry, *cc;

    CALLER_SETUP_ARG(reg_cfp, calling, orig_ci);

    i = calling->argc - 1;  /* stack depth of the method-name argument */

    if (calling->argc == 0) {
        rb_raise(rb_eArgError, "no method name given");
    }

    /* setup new ci */
    if (orig_ci->flag & VM_CALL_KWARG) {
        /* copy the wider kwarg variant so kw_arg data survives */
        ci = (struct rb_call_info *)&ci_entry;
        ci_entry = *(struct rb_call_info_with_kwarg *)orig_ci;
    }
    else {
        ci = &ci_entry.ci;
        ci_entry.ci = *orig_ci;
    }
    ci->flag = ci->flag & ~VM_CALL_KWARG; /* TODO: delegate kw_arg without making a Hash object */

    /* setup new cc */
    cc_entry = *orig_cc;
    cc = &cc_entry;

    sym = TOPN(i);

    if (!(ci->mid = rb_check_id(&sym))) {
        /* name is not an already-interned symbol */
        if (rb_method_basic_definition_p(CLASS_OF(calling->recv), idMethodMissing)) {
            /* no custom method_missing: raise NoMethodError directly */
            VALUE exc =
                rb_make_no_method_exception(rb_eNoMethodError, 0, calling->recv,
                                            rb_long2int(calling->argc), &TOPN(i),
                                            ci->flag & (VM_CALL_FCALL|VM_CALL_VCALL));
            rb_exc_raise(exc);
        }
        /* keep the name on the stack (as a Symbol) and call method_missing */
        TOPN(i) = rb_str_intern(sym);
        ci->mid = idMethodMissing;
        ec->method_missing_reason = cc->aux.method_missing_reason = ci_missing_reason(ci);
    }
    else {
        /* shift arguments: drop the method-name slot from the stack */
        if (i > 0) {
            MEMMOVE(&TOPN(i), &TOPN(i-1), VALUE, i);
        }
        calling->argc -= 1;
        DEC_SP(1);
    }

    cc->me = rb_callable_method_entry_with_refinements(CLASS_OF(calling->recv), ci->mid, NULL);
    ci->flag = VM_CALL_FCALL | VM_CALL_OPT_SEND;
    return vm_call_method(ec, reg_cfp, calling, ci, cc);
}
2039
2040 static inline VALUE vm_invoke_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, VALUE block_handler);
2041
2042 NOINLINE(static VALUE
2043 vm_invoke_block_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
2044 struct rb_calling_info *calling, const struct rb_call_info *ci, VALUE block_handler));
2045
/* Shared tail of the Proc#call optimizations: remove `self` (the proc)
 * from the argument region of the stack and yield to the block handler. */
static VALUE
vm_invoke_block_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                         struct rb_calling_info *calling, const struct rb_call_info *ci, VALUE block_handler)
{
    int argc = calling->argc;

    /* remove self */
    if (argc > 0) MEMMOVE(&TOPN(argc), &TOPN(argc-1), VALUE, argc);
    DEC_SP(1);

    return vm_invoke_block(ec, reg_cfp, calling, ci, block_handler);
}
2058
2059 static VALUE
vm_call_opt_call(rb_execution_context_t * ec,rb_control_frame_t * reg_cfp,struct rb_calling_info * calling,const struct rb_call_info * ci,struct rb_call_cache * cc)2060 vm_call_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc)
2061 {
2062 VALUE procval = calling->recv;
2063 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));
2064 }
2065
/* Optimized call of the frame's block: take the fast yield path while
 * Proc#call is unredefined; otherwise materialize the block as a Proc and
 * perform a normal method dispatch on it. */
static VALUE
vm_call_opt_block_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc)
{
    VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));

    if (BASIC_OP_UNREDEFINED_P(BOP_CALL, PROC_REDEFINED_OP_FLAG)) {
        return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);
    }
    else {
        /* Proc#call was redefined: go through the regular method search */
        calling->recv = rb_vm_bh_to_procval(ec, block_handler);
        vm_search_method(ci, cc, calling->recv);
        return vm_call_general(ec, reg_cfp, calling, ci, cc);
    }
}
2080
/* Redirect a call to the receiver's method_missing: build new call
 * info/cache for idMethodMissing, push the original method name as the
 * first argument (shifting the existing arguments up one slot), and
 * re-dispatch through vm_call_method. */
static VALUE
vm_call_method_missing(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, const struct rb_call_info *orig_ci, struct rb_call_cache *orig_cc)
{
    VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
    struct rb_call_info ci_entry;
    const struct rb_call_info *ci;
    struct rb_call_cache cc_entry, *cc;
    unsigned int argc;

    CALLER_SETUP_ARG(reg_cfp, calling, orig_ci);
    argc = calling->argc+1;  /* +1 for the method-name symbol */

    ci_entry.flag = VM_CALL_FCALL | VM_CALL_OPT_SEND;
    ci_entry.mid = idMethodMissing;
    ci_entry.orig_argc = argc;
    ci = &ci_entry;

    /* method_missing is looked up without refinements */
    cc_entry = *orig_cc;
    cc_entry.me =
        rb_callable_method_entry_without_refinements(CLASS_OF(calling->recv),
                                                     idMethodMissing, NULL);
    cc = &cc_entry;

    calling->argc = argc;

    /* shift arguments: m(a, b, c) #=> method_missing(:m, a, b, c) */
    CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
    if (argc > 1) {
        MEMMOVE(argv+1, argv, VALUE, argc-1);
    }
    argv[0] = ID2SYM(orig_ci->mid);
    INC_SP(1);

    ec->method_missing_reason = orig_cc->aux.method_missing_reason;
    return vm_call_method(ec, reg_cfp, calling, ci, cc);
}
2117
2118 static const rb_callable_method_entry_t *refined_method_callable_without_refinement(const rb_callable_method_entry_t *me);
/* Dispatch a zsuper (argument-forwarding super) call: look the method up
 * in the superclass of `klass`, unwrapping a refined entry to its original
 * method if needed, then continue per-type dispatch. */
static VALUE
vm_call_zsuper(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc, VALUE klass)
{
    klass = RCLASS_SUPER(klass);
    cc->me = klass ? rb_callable_method_entry(klass, ci->mid) : NULL;

    if (!cc->me) {
        /* no implementation found up the chain: method_missing path */
        return vm_call_method_nome(ec, cfp, calling, ci, cc);
    }
    if (cc->me->def->type == VM_METHOD_TYPE_REFINED &&
        cc->me->def->body.refined.orig_me) {
        cc->me = refined_method_callable_without_refinement(cc->me);
    }
    return vm_call_method_each_type(ec, cfp, calling, ci, cc);
}
2134
2135 static inline VALUE
find_refinement(VALUE refinements,VALUE klass)2136 find_refinement(VALUE refinements, VALUE klass)
2137 {
2138 if (NIL_P(refinements)) {
2139 return Qnil;
2140 }
2141 return rb_hash_lookup(refinements, klass);
2142 }
2143
PUREFUNC(static rb_control_frame_t * current_method_entry(const rb_execution_context_t *ec, rb_control_frame_t *cfp));
/* Walk from `cfp` to the control frame of the method that lexically owns
 * it: for a block frame, step outward until the frame running the block's
 * local (method-level) iseq is found.  Falls back to the starting frame if
 * the owner is not on the stack (orphan block). */
static rb_control_frame_t *
current_method_entry(const rb_execution_context_t *ec, rb_control_frame_t *cfp)
{
    rb_control_frame_t *top_cfp = cfp;

    if (cfp->iseq && cfp->iseq->body->type == ISEQ_TYPE_BLOCK) {
        const rb_iseq_t *local_iseq = cfp->iseq->body->local_iseq;

        do {
            cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
            if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(ec, cfp)) {
                /* TODO: orphan block */
                return top_cfp;
            }
        } while (cfp->iseq != local_iseq);
    }
    return cfp;
}
2163
/* Walk the ancestor chain of `current_class` looking for the class/iclass
 * whose owner module is `target_owner`; returns `current_class` unchanged
 * when no ancestor matches (e.g. a module function). */
MJIT_FUNC_EXPORTED VALUE
rb_find_defined_class_by_owner(VALUE current_class, VALUE target_owner)
{
    VALUE klass = current_class;

    /* for prepended Module, then start from cover class */
    if (RB_TYPE_P(klass, T_ICLASS) && FL_TEST(klass, RICLASS_IS_ORIGIN)) klass = RBASIC_CLASS(klass);

    while (RTEST(klass)) {
        /* an iclass represents an included module; compare its module */
        VALUE owner = RB_TYPE_P(klass, T_ICLASS) ? RBASIC_CLASS(klass) : klass;
        if (owner == target_owner) {
            return klass;
        }
        klass = RCLASS_SUPER(klass);
    }

    return current_class; /* maybe module function */
}
2182
/* Resolve an alias method entry to a callable entry for its original
 * method, complementing the original's defined_class when it has none.
 * The resolved entry may be cached back into the alias definition when
 * the definition is not shared. */
static const rb_callable_method_entry_t *
aliased_callable_method_entry(const rb_callable_method_entry_t *me)
{
    const rb_method_entry_t *orig_me = me->def->body.alias.original_me;
    const rb_callable_method_entry_t *cme;

    if (orig_me->defined_class == 0) {
        VALUE defined_class = rb_find_defined_class_by_owner(me->defined_class, orig_me->owner);
        VM_ASSERT(RB_TYPE_P(orig_me->owner, T_MODULE));
        cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class);

        if (me->def->alias_count + me->def->complemented_count == 0) {
            /* definition is not shared: safe to cache the resolved entry */
            RB_OBJ_WRITE(me, &me->def->body.alias.original_me, cme);
        }
        else {
            /* shared definition: give this entry its own alias definition
             * pointing at the resolved entry */
            rb_method_definition_t *def =
                rb_method_definition_create(VM_METHOD_TYPE_ALIAS, me->def->original_id);
            rb_method_definition_set((rb_method_entry_t *)me, def, (void *)cme);
        }
    }
    else {
        cme = (const rb_callable_method_entry_t *)orig_me;
    }

    VM_ASSERT(callable_method_entry_p(cme));
    return cme;
}
2210
/* Unwrap a refined method entry to the callable entry of the method it
 * refines; returns NULL when the original method is itself undefined.
 * The defined_class == 0 case is currently unimplemented. */
static const rb_callable_method_entry_t *
refined_method_callable_without_refinement(const rb_callable_method_entry_t *me)
{
    const rb_method_entry_t *orig_me = me->def->body.refined.orig_me;
    const rb_callable_method_entry_t *cme;

    if (orig_me->defined_class == 0) {
        cme = NULL;
        rb_notimplement();  /* does not return */
    }
    else {
        cme = (const rb_callable_method_entry_t *)orig_me;
    }

    VM_ASSERT(callable_method_entry_p(cme));

    if (UNDEFINED_METHOD_ENTRY_P(cme)) {
        cme = NULL;
    }

    return cme;
}
2233
/* Dispatch a call according to the method definition's type, installing
 * the appropriate fast-path handler into the call cache along the way.
 * ALIAS/ZSUPER/REFINED entries are resolved and re-dispatched recursively. */
static VALUE
vm_call_method_each_type(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc)
{
    switch (cc->me->def->type) {
      case VM_METHOD_TYPE_ISEQ:
        /* ordinary Ruby-defined method */
        CC_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
        return vm_call_iseq_setup(ec, cfp, calling, ci, cc);

      case VM_METHOD_TYPE_NOTIMPLEMENTED:
      case VM_METHOD_TYPE_CFUNC:
        CC_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
        return vm_call_cfunc(ec, cfp, calling, ci, cc);

      case VM_METHOD_TYPE_ATTRSET:
        /* attr_writer: exactly one argument; fast path only without splat/kwargs */
        CALLER_SETUP_ARG(cfp, calling, ci);
        rb_check_arity(calling->argc, 1, 1);
        cc->aux.index = 0;
        CC_SET_FASTPATH(cc, vm_call_attrset, !((ci->flag & VM_CALL_ARGS_SPLAT) || (ci->flag & VM_CALL_KWARG)));
        return vm_call_attrset(ec, cfp, calling, ci, cc);

      case VM_METHOD_TYPE_IVAR:
        /* attr_reader: no arguments */
        CALLER_SETUP_ARG(cfp, calling, ci);
        rb_check_arity(calling->argc, 0, 0);
        cc->aux.index = 0;
        CC_SET_FASTPATH(cc, vm_call_ivar, !(ci->flag & VM_CALL_ARGS_SPLAT));
        return vm_call_ivar(ec, cfp, calling, ci, cc);

      case VM_METHOD_TYPE_MISSING:
        cc->aux.method_missing_reason = 0;
        CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
        return vm_call_method_missing(ec, cfp, calling, ci, cc);

      case VM_METHOD_TYPE_BMETHOD:
        CC_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
        return vm_call_bmethod(ec, cfp, calling, ci, cc);

      case VM_METHOD_TYPE_ALIAS:
        /* resolve to the original method and dispatch on its type */
        cc->me = aliased_callable_method_entry(cc->me);
        VM_ASSERT(cc->me != NULL);
        return vm_call_method_each_type(ec, cfp, calling, ci, cc);

      case VM_METHOD_TYPE_OPTIMIZED:
        switch (cc->me->def->body.optimize_type) {
          case OPTIMIZED_METHOD_TYPE_SEND:
            CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
            return vm_call_opt_send(ec, cfp, calling, ci, cc);
          case OPTIMIZED_METHOD_TYPE_CALL:
            CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
            return vm_call_opt_call(ec, cfp, calling, ci, cc);
          case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
            CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
            return vm_call_opt_block_call(ec, cfp, calling, ci, cc);
          default:
            rb_bug("vm_call_method: unsupported optimized method type (%d)",
                   cc->me->def->body.optimize_type);
        }

      case VM_METHOD_TYPE_UNDEF:
        break;  /* falls through to the rb_bug below — TODO confirm intent vs. nome path */

      case VM_METHOD_TYPE_ZSUPER:
        return vm_call_zsuper(ec, cfp, calling, ci, cc, RCLASS_ORIGIN(cc->me->defined_class));

      case VM_METHOD_TYPE_REFINED: {
        const rb_cref_t *cref = rb_vm_get_cref(cfp->ep);
        VALUE refinements = cref ? CREF_REFINEMENTS(cref) : Qnil;
        VALUE refinement;
        const rb_callable_method_entry_t *ref_me;

        refinement = find_refinement(refinements, cc->me->owner);

        if (NIL_P(refinement)) {
            /* refinement not active in this lexical scope */
            goto no_refinement_dispatch;
        }
        ref_me = rb_callable_method_entry(refinement, ci->mid);

        if (ref_me) {
            if (cc->call == vm_call_super_method) {
                /* don't re-enter the same refined method via super */
                const rb_control_frame_t *top_cfp = current_method_entry(ec, cfp);
                const rb_callable_method_entry_t *top_me = rb_vm_frame_method_entry(top_cfp);
                if (top_me && rb_method_definition_eq(ref_me->def, top_me->def)) {
                    goto no_refinement_dispatch;
                }
            }
            if (cc->me->def->type != VM_METHOD_TYPE_REFINED ||
                cc->me->def != ref_me->def) {
                cc->me = ref_me;
            }
            if (ref_me->def->type != VM_METHOD_TYPE_REFINED) {
                return vm_call_method(ec, cfp, calling, ci, cc);
            }
        }
        else {
            cc->me = NULL;
            return vm_call_method_nome(ec, cfp, calling, ci, cc);
        }

      no_refinement_dispatch:
        if (cc->me->def->body.refined.orig_me) {
            /* call the method the refinement wraps */
            cc->me = refined_method_callable_without_refinement(cc->me);
        }
        else {
            /* refined entry with no original: continue lookup in superclass */
            VALUE klass = RCLASS_SUPER(cc->me->defined_class);
            cc->me = klass ? rb_callable_method_entry(klass, ci->mid) : NULL;
        }
        return vm_call_method(ec, cfp, calling, ci, cc);
      }
    }

    rb_bug("vm_call_method: unsupported method type (%d)", cc->me->def->type);
}
2345
2346 NORETURN(static void vm_raise_method_missing(rb_execution_context_t *ec, int argc, const VALUE *argv, VALUE obj, int call_status));
2347
/* Handle a call for which no method entry was found: raise directly when
 * method_missing itself is missing, otherwise dispatch to method_missing
 * with the reason recorded in the call cache. */
static VALUE
vm_call_method_nome(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc)
{
    /* method missing */
    const int stat = ci_missing_reason(ci);

    if (ci->mid == idMethodMissing) {
        /* reg_cfp is required by the STACK_ADDR_FROM_TOP macro */
        rb_control_frame_t *reg_cfp = cfp;
        VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
        /* NORETURN: raises NoMethodError, so no value is returned here */
        vm_raise_method_missing(ec, calling->argc, argv, calling->recv, stat);
    }
    else {
        cc->aux.method_missing_reason = stat;
        CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
        return vm_call_method_missing(ec, cfp, calling, ci, cc);
    }
}
2365
/* Top of the method dispatch chain: enforce visibility (public/private/
 * protected) for the resolved method entry, routing violations to
 * method_missing, then continue with per-type dispatch. */
static inline VALUE
vm_call_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc)
{
    VM_ASSERT(callable_method_entry_p(cc->me));

    if (cc->me != NULL) {
        switch (METHOD_ENTRY_VISI(cc->me)) {
          case METHOD_VISI_PUBLIC: /* likely */
            return vm_call_method_each_type(ec, cfp, calling, ci, cc);

          case METHOD_VISI_PRIVATE:
            if (!(ci->flag & VM_CALL_FCALL)) {
                /* private method called with an explicit receiver */
                enum method_missing_reason stat = MISSING_PRIVATE;
                if (ci->flag & VM_CALL_VCALL) stat |= MISSING_VCALL;

                cc->aux.method_missing_reason = stat;
                CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
                return vm_call_method_missing(ec, cfp, calling, ci, cc);
            }
            return vm_call_method_each_type(ec, cfp, calling, ci, cc);

          case METHOD_VISI_PROTECTED:
            if (!(ci->flag & VM_CALL_OPT_SEND)) {
                /* caller's self must be kind_of? the defining class */
                if (!rb_obj_is_kind_of(cfp->self, cc->me->defined_class)) {
                    cc->aux.method_missing_reason = MISSING_PROTECTED;
                    return vm_call_method_missing(ec, cfp, calling, ci, cc);
                }
                else {
                    /* caching method info to dummy cc */
                    struct rb_call_cache cc_entry;
                    cc_entry = *cc;
                    cc = &cc_entry;

                    VM_ASSERT(cc->me != NULL);
                    return vm_call_method_each_type(ec, cfp, calling, ci, cc);
                }
            }
            return vm_call_method_each_type(ec, cfp, calling, ci, cc);

          default:
            rb_bug("unreachable");
        }
    }
    else {
        /* no method entry found: method_missing path */
        return vm_call_method_nome(ec, cfp, calling, ci, cc);
    }
}
2413
/* Generic `call` handler: full dispatch with visibility checks.  Exists as
 * a distinct function so call caches can identify the general path. */
static VALUE
vm_call_general(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc)
{
    return vm_call_method(ec, reg_cfp, calling, ci, cc);
}
2419
/* `call` handler used for super dispatch; identical to vm_call_general but
 * kept separate so the cache's handler pointer identifies super calls
 * (see the vm_call_super_method comparison in the refinement logic). */
static VALUE
vm_call_super_method(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, const struct rb_call_info *ci, struct rb_call_cache *cc)
{
    /* this check is required to distinguish with other functions. */
    if (cc->call != vm_call_super_method) rb_bug("bug");
    return vm_call_method(ec, reg_cfp, calling, ci, cc);
}
2427
2428 /* super */
2429
/* Find the superclass to start a normal `super` search from: unwrap a
 * refinement iclass to the refinement module itself, skip prepended-origin
 * wrappers via RCLASS_ORIGIN, then take the superclass. */
static inline VALUE
vm_search_normal_superclass(VALUE klass)
{
    if (BUILTIN_TYPE(klass) == T_ICLASS &&
        FL_TEST(RBASIC(klass)->klass, RMODULE_IS_REFINEMENT)) {
        klass = RBASIC(klass)->klass;
    }
    klass = RCLASS_ORIGIN(klass);
    return RCLASS_SUPER(klass);
}
2440
2441 NORETURN(static void vm_super_outside(void));
2442
/* Raise NoMethodError for `super` used where no method frame exists. */
static void
vm_super_outside(void)
{
    rb_raise(rb_eNoMethodError, "super called outside of method");
}
2448
/* Resolve the target of a `super` call: validate that self is compatible
 * with the current method's defining class, reject implicit zsuper from
 * define_method bodies, then look up ci->mid in the normal superclass and
 * install the result (or the method_missing path) into the call cache. */
static void
vm_search_super_method(const rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                       struct rb_calling_info *calling, struct rb_call_info *ci, struct rb_call_cache *cc)
{
    VALUE current_defined_class, klass;
    const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(reg_cfp);

    if (!me) {
        vm_super_outside();  /* NORETURN: raises NoMethodError */
    }

    current_defined_class = me->defined_class;

    /* for a refinement module, check against the class being refined */
    if (!NIL_P(RCLASS_REFINED_CLASS(current_defined_class))) {
        current_defined_class = RCLASS_REFINED_CLASS(current_defined_class);
    }

    if (BUILTIN_TYPE(current_defined_class) != T_MODULE &&
        BUILTIN_TYPE(current_defined_class) != T_ICLASS && /* bound UnboundMethod */
        !FL_TEST(current_defined_class, RMODULE_INCLUDED_INTO_REFINEMENT) &&
        !rb_obj_is_kind_of(calling->recv, current_defined_class)) {
        VALUE m = RB_TYPE_P(current_defined_class, T_ICLASS) ?
            RBASIC(current_defined_class)->klass : current_defined_class;

        rb_raise(rb_eTypeError,
                 "self has wrong type to call super in this context: "
                 "%"PRIsVALUE" (expected %"PRIsVALUE")",
                 rb_obj_class(calling->recv), m);
    }

    if (me->def->type == VM_METHOD_TYPE_BMETHOD && (ci->flag & VM_CALL_ZSUPER)) {
        rb_raise(rb_eRuntimeError,
                 "implicit argument passing of super from method defined"
                 " by define_method() is not supported."
                 " Specify all arguments explicitly.");
    }

    /* search by the original name, so super works through aliases */
    ci->mid = me->def->original_id;
    klass = vm_search_normal_superclass(me->defined_class);

    if (!klass) {
        /* bound instance method of module */
        cc->aux.method_missing_reason = MISSING_SUPER;
        CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
    }
    else {
        /* TODO: use inline cache */
        cc->me = rb_callable_method_entry(klass, ci->mid);
        CC_SET_FASTPATH(cc, vm_call_super_method, TRUE);
    }
}
2500
2501 /* yield */
2502
2503 static inline int
block_proc_is_lambda(const VALUE procval)2504 block_proc_is_lambda(const VALUE procval)
2505 {
2506 rb_proc_t *proc;
2507
2508 if (procval) {
2509 GetProcPtr(procval, proc);
2510 return proc->is_lambda;
2511 }
2512 else {
2513 return 0;
2514 }
2515 }
2516
/* Yield to a C-implemented block (ifunc): push an IFUNC frame, call the
 * ifunc with a single argument (or an argument array in the lambda case),
 * then pop the frame.  `me` is non-NULL when this runs as a bmethod body. */
static VALUE
vm_yield_with_cfunc(rb_execution_context_t *ec,
                    const struct rb_captured_block *captured,
                    VALUE self, int argc, const VALUE *argv, VALUE block_handler,
                    const rb_callable_method_entry_t *me)
{
    int is_lambda = FALSE; /* TODO */
    VALUE val, arg, blockarg;
    const struct vm_ifunc *ifunc = captured->code.ifunc;

    if (is_lambda) {
        /* lambda semantics: pass all arguments packed in an Array */
        arg = rb_ary_new4(argc, argv);
    }
    else if (argc == 0) {
        arg = Qnil;
    }
    else {
        /* block semantics: only the first argument is passed directly */
        arg = argv[0];
    }

    blockarg = rb_vm_bh_to_procval(ec, block_handler);

    vm_push_frame(ec, (const rb_iseq_t *)captured->code.ifunc,
                  VM_FRAME_MAGIC_IFUNC | VM_FRAME_FLAG_CFRAME |
                  (me ? VM_FRAME_FLAG_BMETHOD : 0),
                  self,
                  VM_GUARDED_PREV_EP(captured->ep),
                  (VALUE)me,
                  0, ec->cfp->sp, 0, 0);
    val = (*ifunc->func)(arg, ifunc->data, argc, argv, blockarg);
    rb_vm_pop_frame(ec);

    return val;
}
2551
/* Yield to a Symbol block handler (`&:sym`): call the symbol's implicit
 * proc with the given arguments and block. */
static VALUE
vm_yield_with_symbol(rb_execution_context_t *ec, VALUE symbol, int argc, const VALUE *argv, VALUE block_handler)
{
    return rb_sym_proc_call(SYM2ID(symbol), argc, argv, rb_vm_bh_to_procval(ec, block_handler));
}
2557
/* Spread a single Array argument across the block's leading parameters
 * (`{|a, b| ...}` called with [1, 2]).  Copies at most lead_num elements
 * into argv and returns the number written (the new argc). */
static inline int
vm_callee_setup_block_arg_arg0_splat(rb_control_frame_t *cfp, const rb_iseq_t *iseq, VALUE *argv, VALUE ary)
{
    int i;
    long len = RARRAY_LEN(ary);

    CHECK_VM_STACK_OVERFLOW(cfp, iseq->body->param.lead_num);

    for (i=0; i<len && i<iseq->body->param.lead_num; i++) {
        argv[i] = RARRAY_AREF(ary, i);
    }

    return i;
}
2572
/* Check whether the block's single argument is (convertible to) an Array;
 * returns the Array or Qnil.  to_ary must not replace the value in argv[0]
 * — the disabled branch and the assert document that invariant. */
static inline VALUE
vm_callee_setup_block_arg_arg0_check(VALUE *argv)
{
    VALUE ary, arg0 = argv[0];
    ary = rb_check_array_type(arg0);
#if 0
    argv[0] = arg0;
#else
    VM_ASSERT(argv[0] == arg0);
#endif
    return ary;
}
2585
/* Arrange the arguments in argv to match the block/lambda's parameters and
 * return the opt_pc to start execution at.  Simple iseqs (only leading
 * params) are handled inline with block-style arity adjustment (splat of a
 * lone array arg, nil-padding, truncation); everything else goes through
 * setup_parameters_complex. */
static int
vm_callee_setup_block_arg(rb_execution_context_t *ec, struct rb_calling_info *calling, const struct rb_call_info *ci, const rb_iseq_t *iseq, VALUE *argv, const enum arg_setup_type arg_setup_type)
{
    if (rb_simple_iseq_p(iseq)) {
        rb_control_frame_t *cfp = ec->cfp;
        VALUE arg0;

        CALLER_SETUP_ARG(cfp, calling, ci); /* splat arg */

        /* a single Array argument is spread over multiple block params,
         * unless the block was written with an unambiguous single param */
        if (arg_setup_type == arg_setup_block &&
            calling->argc == 1 &&
            iseq->body->param.flags.has_lead &&
            !iseq->body->param.flags.ambiguous_param0 &&
            !NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {
            calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);
        }

        if (calling->argc != iseq->body->param.lead_num) {
            if (arg_setup_type == arg_setup_block) {
                if (calling->argc < iseq->body->param.lead_num) {
                    int i;
                    CHECK_VM_STACK_OVERFLOW(cfp, iseq->body->param.lead_num);
                    for (i=calling->argc; i<iseq->body->param.lead_num; i++) argv[i] = Qnil;
                    calling->argc = iseq->body->param.lead_num; /* fill rest parameters */
                }
                else if (calling->argc > iseq->body->param.lead_num) {
                    calling->argc = iseq->body->param.lead_num; /* simply truncate arguments */
                }
            }
            else {
                /* lambda/method semantics: strict arity */
                argument_arity_error(ec, iseq, calling->argc, iseq->body->param.lead_num, iseq->body->param.lead_num);
            }
        }

        return 0;
    }
    else {
        return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_type);
    }
}
2626
/* Entry point for setting up yield arguments from C callers: wrap argc and
 * the block handler in throwaway calling/call-info structs and delegate to
 * vm_callee_setup_block_arg. */
static int
vm_yield_setup_args(rb_execution_context_t *ec, const rb_iseq_t *iseq, const int argc, VALUE *argv, VALUE block_handler, enum arg_setup_type arg_setup_type)
{
    struct rb_calling_info calling_entry, *calling;
    struct rb_call_info ci_entry, *ci;

    calling = &calling_entry;
    calling->argc = argc;
    calling->block_handler = block_handler;

    ci_entry.flag = 0;  /* no splat/kwarg/etc. from this path */
    ci = &ci_entry;

    return vm_callee_setup_block_arg(ec, calling, ci, iseq, argv, arg_setup_type);
}
2642
2643 /* ruby iseq -> ruby block */
2644
/* Invoke a Ruby-defined (iseq) block: set up its arguments in place on the
 * VM stack and push a BLOCK frame.  Returns Qundef — the actual execution
 * happens when the interpreter loop runs the newly pushed frame. */
static VALUE
vm_invoke_iseq_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                     struct rb_calling_info *calling, const struct rb_call_info *ci,
                     int is_lambda, const struct rb_captured_block *captured)
{
    const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
    const int arg_size = iseq->body->param.size;
    VALUE * const rsp = GET_SP() - calling->argc;  /* bottom of the argument region */
    int opt_pc = vm_callee_setup_block_arg(ec, calling, ci, iseq, rsp, is_lambda ? arg_setup_method : arg_setup_block);

    SET_SP(rsp);  /* arguments become the new frame's locals */

    vm_push_frame(ec, iseq,
                  VM_FRAME_MAGIC_BLOCK | (is_lambda ? VM_FRAME_FLAG_LAMBDA : 0),
                  captured->self,
                  VM_GUARDED_PREV_EP(captured->ep), 0,
                  iseq->body->iseq_encoded + opt_pc,
                  rsp + arg_size,
                  iseq->body->local_table_size - arg_size, iseq->body->stack_max);

    return Qundef;
}
2667
/* Invoke a Symbol block handler: yield through the symbol's implicit proc
 * and pop the consumed arguments from the VM stack. */
static VALUE
vm_invoke_symbol_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                       struct rb_calling_info *calling, const struct rb_call_info *ci,
                       VALUE symbol)
{
    VALUE val;
    int argc;
    CALLER_SETUP_ARG(ec->cfp, calling, ci);
    argc = calling->argc;
    val = vm_yield_with_symbol(ec, symbol, argc, STACK_ADDR_FROM_TOP(argc), calling->block_handler);
    POPN(argc);
    return val;
}
2681
/* Invoke a C-implemented (ifunc) block handler: yield through
 * vm_yield_with_cfunc and pop the consumed arguments from the VM stack. */
static VALUE
vm_invoke_ifunc_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                      struct rb_calling_info *calling, const struct rb_call_info *ci,
                      const struct rb_captured_block *captured)
{
    VALUE val;
    int argc;
    CALLER_SETUP_ARG(ec->cfp, calling, ci);
    argc = calling->argc;
    val = vm_yield_with_cfunc(ec, captured, captured->self, argc, STACK_ADDR_FROM_TOP(argc), calling->block_handler, NULL);
    POPN(argc); /* TODO: should put before C/yield? */
    return val;
}
2695
/* Convert a Proc object to the tagged block-handler representation
 * matching the kind of block the proc wraps. */
static VALUE
vm_proc_to_block_handler(VALUE procval)
{
    const struct rb_block *block = vm_proc_block(procval);

    switch (vm_block_type(block)) {
      case block_type_iseq:
        return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
      case block_type_ifunc:
        return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
      case block_type_symbol:
        return VM_BH_FROM_SYMBOL(block->as.symbol);
      case block_type_proc:
        return VM_BH_FROM_PROC(block->as.proc);
    }
    VM_UNREACHABLE(vm_yield_with_proc);
    return Qundef;
}
2714
/* Dispatch a yield on the given block handler by its type.  A Proc handler
 * is unwrapped (remembering lambda-ness) and re-dispatched, so at most one
 * `goto again` iteration occurs. */
static inline VALUE
vm_invoke_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                struct rb_calling_info *calling, const struct rb_call_info *ci, VALUE block_handler)
{
    int is_lambda = FALSE;

  again:
    switch (vm_block_handler_type(block_handler)) {
      case block_handler_type_iseq:
        {
            const struct rb_captured_block *captured = VM_BH_TO_ISEQ_BLOCK(block_handler);
            return vm_invoke_iseq_block(ec, reg_cfp, calling, ci, is_lambda, captured);
        }
      case block_handler_type_ifunc:
        {
            const struct rb_captured_block *captured = VM_BH_TO_IFUNC_BLOCK(block_handler);
            return vm_invoke_ifunc_block(ec, reg_cfp, calling, ci, captured);
        }
      case block_handler_type_proc:
        /* unwrap the proc to its underlying block and retry */
        is_lambda = block_proc_is_lambda(VM_BH_TO_PROC(block_handler));
        block_handler = vm_proc_to_block_handler(VM_BH_TO_PROC(block_handler));
        goto again;
      case block_handler_type_symbol:
        return vm_invoke_symbol_block(ec, reg_cfp, calling, ci, VM_BH_TO_SYMBOL(block_handler));
    }
    VM_UNREACHABLE(vm_invoke_block: unreachable);
    return Qnil;
}
2743
2744 static VALUE
vm_make_proc_with_iseq(const rb_iseq_t * blockiseq)2745 vm_make_proc_with_iseq(const rb_iseq_t *blockiseq)
2746 {
2747 const rb_execution_context_t *ec = GET_EC();
2748 const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);
2749 struct rb_captured_block *captured;
2750
2751 if (cfp == 0) {
2752 rb_bug("vm_make_proc_with_iseq: unreachable");
2753 }
2754
2755 captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);
2756 captured->code.iseq = blockiseq;
2757
2758 return rb_vm_make_proc(ec, captured, rb_cProc);
2759 }
2760
2761 static VALUE
vm_once_exec(VALUE iseq)2762 vm_once_exec(VALUE iseq)
2763 {
2764 VALUE proc = vm_make_proc_with_iseq((rb_iseq_t *)iseq);
2765 return rb_proc_call_with_block(proc, 0, 0, Qnil);
2766 }
2767
2768 static VALUE
vm_once_clear(VALUE data)2769 vm_once_clear(VALUE data)
2770 {
2771 union iseq_inline_storage_entry *is = (union iseq_inline_storage_entry *)data;
2772 is->once.running_thread = NULL;
2773 return Qnil;
2774 }
2775
/* Specialized fast path for a Struct member reader.
 * Replaces the value on the stack top with the member fetched via
 * rb_struct_aref and leaves the frame unchanged.
 * NOTE(review): assumes TOPN(0) holds the member selector expected by
 * rb_struct_aref — confirm against the call-site that installs this. */
rb_control_frame_t *
FUNC_FASTCALL(rb_vm_opt_struct_aref)(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp)
{
    TOPN(0) = rb_struct_aref(GET_SELF(), TOPN(0));
    return reg_cfp;
}
2782
/* Specialized fast path for a Struct member writer.
 * NOTE(review): passes TOPN(0) as the member selector and TOPN(1) as the
 * stored value to rb_struct_aset; stack popping is left to the caller —
 * confirm the operand layout against the installing instruction. */
rb_control_frame_t *
FUNC_FASTCALL(rb_vm_opt_struct_aset)(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp)
{
    rb_struct_aset(GET_SELF(), TOPN(0), TOPN(1));
    return reg_cfp;
}
2789
2790 /* defined insn */
2791
2792 static enum defined_type
check_respond_to_missing(VALUE obj,VALUE v)2793 check_respond_to_missing(VALUE obj, VALUE v)
2794 {
2795 VALUE args[2];
2796 VALUE r;
2797
2798 args[0] = obj; args[1] = Qfalse;
2799 r = rb_check_funcall(v, idRespond_to_missing, 2, args);
2800 if (r != Qundef && RTEST(r)) {
2801 return DEFINED_METHOD;
2802 }
2803 else {
2804 return DEFINED_NOT_DEFINED;
2805 }
2806 }
2807
/* Implements the `defined?` keyword (the "defined" instruction).
 *
 * op_type selects the expression kind being tested; `obj` usually carries
 * a Symbol naming the entity and `v` a type-specific operand (e.g. the
 * receiver or the constant lookup base). Returns the descriptive string
 * (via rb_iseq_defined_string) when needstr is truthy, Qtrue when the
 * expression is defined and needstr is false, and Qnil otherwise. */
static VALUE
vm_defined(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t op_type, VALUE obj, VALUE needstr, VALUE v)
{
    VALUE klass;
    enum defined_type expr_type = DEFINED_NOT_DEFINED;
    enum defined_type type = (enum defined_type)op_type;

    switch (type) {
      case DEFINED_IVAR:
        if (rb_ivar_defined(GET_SELF(), SYM2ID(obj))) {
            expr_type = DEFINED_IVAR;
        }
        break;
      case DEFINED_IVAR2:
        /* NOTE(review): klass is computed but never consulted, so this
         * case always reports "not defined" — looks vestigial; confirm
         * against the compiler before removing. */
        klass = vm_get_cbase(GET_EP());
        break;
      case DEFINED_GVAR:
        if (rb_gvar_defined(rb_global_entry(SYM2ID(obj)))) {
            expr_type = DEFINED_GVAR;
        }
        break;
      case DEFINED_CVAR: {
        /* class variables resolve against the cref's cvar base */
        const rb_cref_t *cref = rb_vm_get_cref(GET_EP());
        klass = vm_get_cvar_base(cref, GET_CFP());
        if (rb_cvar_defined(klass, SYM2ID(obj))) {
            expr_type = DEFINED_CVAR;
        }
        break;
      }
      case DEFINED_CONST:
        /* v is the lookup base pushed by the compiler */
        klass = v;
        if (vm_get_ev_const(ec, klass, SYM2ID(obj), 1)) {
            expr_type = DEFINED_CONST;
        }
        break;
      case DEFINED_FUNC:
        /* function-call style: any bound method counts, then fall back to
         * respond_to_missing? */
        klass = CLASS_OF(v);
        if (rb_method_boundp(klass, SYM2ID(obj), 0)) {
            expr_type = DEFINED_METHOD;
        }
        else {
            expr_type = check_respond_to_missing(obj, v);
        }
        break;
      case DEFINED_METHOD:{
        /* receiver-call style: visibility is honored from `self` */
        VALUE klass = CLASS_OF(v);
        const rb_method_entry_t *me = rb_method_entry(klass, SYM2ID(obj));

        if (me) {
            switch (METHOD_ENTRY_VISI(me)) {
              case METHOD_VISI_PRIVATE:
                break;
              case METHOD_VISI_PROTECTED:
                if (!rb_obj_is_kind_of(GET_SELF(), rb_class_real(me->defined_class))) {
                    break;
                }
                /* fallthrough: protected is visible from a kindred self */
              case METHOD_VISI_PUBLIC:
                expr_type = DEFINED_METHOD;
                break;
              default:
                rb_bug("vm_defined: unreachable: %u", (unsigned int)METHOD_ENTRY_VISI(me));
            }
        }
        else {
            expr_type = check_respond_to_missing(obj, v);
        }
        break;
      }
      case DEFINED_YIELD:
        if (GET_BLOCK_HANDLER() != VM_BLOCK_HANDLER_NONE) {
            expr_type = DEFINED_YIELD;
        }
        break;
      case DEFINED_ZSUPER:
        {
            /* defined?(super): the current method's original name must be
             * bound somewhere up the normal superclass chain */
            const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(GET_CFP());

            if (me) {
                VALUE klass = vm_search_normal_superclass(me->defined_class);
                ID id = me->def->original_id;

                if (rb_method_boundp(klass, id, 0)) {
                    expr_type = DEFINED_ZSUPER;
                }
            }
        }
        break;
      case DEFINED_REF:{
        /* special variables ($~, $1, ...); reported as "global-variable" */
        if (vm_getspecial(ec, GET_LEP(), Qfalse, FIX2INT(obj)) != Qnil) {
            expr_type = DEFINED_GVAR;
        }
        break;
      }
      default:
        rb_bug("unimplemented defined? type (VM)");
        break;
    }

    if (expr_type != 0) {
        if (needstr != Qfalse) {
            return rb_iseq_defined_string(expr_type);
        }
        else {
            return Qtrue;
        }
    }
    else {
        return Qnil;
    }
}
2918
2919 static const VALUE *
vm_get_ep(const VALUE * const reg_ep,rb_num_t lv)2920 vm_get_ep(const VALUE *const reg_ep, rb_num_t lv)
2921 {
2922 rb_num_t i;
2923 const VALUE *ep = reg_ep;
2924 for (i = 0; i < lv; i++) {
2925 ep = GET_PREV_EP(ep);
2926 }
2927 return ep;
2928 }
2929
/* putspecialobject instruction: return one of the VM's special objects —
 * the frozen core object, or the class/module base used for constant and
 * cvar resolution. An unknown type indicates a compiler bug. */
static VALUE
vm_get_special_object(const VALUE *const reg_ep,
                      enum vm_special_object_type type)
{
    switch (type) {
      case VM_SPECIAL_OBJECT_VMCORE:
        return rb_mRubyVMFrozenCore;
      case VM_SPECIAL_OBJECT_CBASE:
        return vm_get_cbase(reg_ep);
      case VM_SPECIAL_OBJECT_CONST_BASE:
        return vm_get_const_base(reg_ep);
      default:
        rb_bug("putspecialobject insn: unknown value_type %d", type);
    }
}
2945
2946 static void
vm_freezestring(VALUE str,VALUE debug)2947 vm_freezestring(VALUE str, VALUE debug)
2948 {
2949 if (!NIL_P(debug)) {
2950 rb_ivar_set(str, id_debug_created_info, debug);
2951 }
2952 rb_str_freeze(str);
2953 }
2954
2955 static VALUE
vm_concat_array(VALUE ary1,VALUE ary2st)2956 vm_concat_array(VALUE ary1, VALUE ary2st)
2957 {
2958 const VALUE ary2 = ary2st;
2959 VALUE tmp1 = rb_check_to_array(ary1);
2960 VALUE tmp2 = rb_check_to_array(ary2);
2961
2962 if (NIL_P(tmp1)) {
2963 tmp1 = rb_ary_new3(1, ary1);
2964 }
2965
2966 if (NIL_P(tmp2)) {
2967 tmp2 = rb_ary_new3(1, ary2);
2968 }
2969
2970 if (tmp1 == ary1) {
2971 tmp1 = rb_ary_dup(ary1);
2972 }
2973 return rb_ary_concat(tmp1, tmp2);
2974 }
2975
2976 static VALUE
vm_splat_array(VALUE flag,VALUE ary)2977 vm_splat_array(VALUE flag, VALUE ary)
2978 {
2979 VALUE tmp = rb_check_to_array(ary);
2980 if (NIL_P(tmp)) {
2981 return rb_ary_new3(1, ary);
2982 }
2983 else if (RTEST(flag)) {
2984 return rb_ary_dup(tmp);
2985 }
2986 else {
2987 return tmp;
2988 }
2989 }
2990
2991 static VALUE
vm_check_match(rb_execution_context_t * ec,VALUE target,VALUE pattern,rb_num_t flag)2992 vm_check_match(rb_execution_context_t *ec, VALUE target, VALUE pattern, rb_num_t flag)
2993 {
2994 enum vm_check_match_type type = ((int)flag) & VM_CHECKMATCH_TYPE_MASK;
2995
2996 if (flag & VM_CHECKMATCH_ARRAY) {
2997 long i;
2998 const long n = RARRAY_LEN(pattern);
2999
3000 for (i = 0; i < n; i++) {
3001 VALUE v = RARRAY_AREF(pattern, i);
3002 VALUE c = check_match(ec, v, target, type);
3003
3004 if (RTEST(c)) {
3005 return c;
3006 }
3007 }
3008 return Qfalse;
3009 }
3010 else {
3011 return check_match(ec, pattern, target, type);
3012 }
3013 }
3014
/* checkkeyword instruction: report whether keyword argument `idx` was NOT
 * supplied by the caller — Qtrue means "unspecified, evaluate the default".
 * The kw_bits local lives at *(ep - bits): a Fixnum bitmap while at most
 * KW_SPECIFIED_BITS_MAX keywords are tracked, otherwise a Hash whose keys
 * are the supplied indices. */
static VALUE
vm_check_keyword(lindex_t bits, lindex_t idx, const VALUE *ep)
{
    const VALUE kw_bits = *(ep - bits);

    if (FIXNUM_P(kw_bits)) {
        unsigned int b = (unsigned int)FIX2ULONG(kw_bits);
        if ((idx < KW_SPECIFIED_BITS_MAX) && (b & (0x01 << idx)))
            return Qfalse;
    }
    else {
        VM_ASSERT(RB_TYPE_P(kw_bits, T_HASH));
        if (rb_hash_has_key(kw_bits, INT2FIX(idx))) return Qfalse;
    }
    return Qtrue;
}
3031
/* Fire the dtrace/systemtap probe corresponding to a method entry/exit
 * event. The outer check makes this a cheap no-op when no probe is
 * enabled; events other than the four handled kinds are ignored. */
static void
vm_dtrace(rb_event_flag_t flag, rb_execution_context_t *ec)
{
    if (RUBY_DTRACE_METHOD_ENTRY_ENABLED() ||
        RUBY_DTRACE_METHOD_RETURN_ENABLED() ||
        RUBY_DTRACE_CMETHOD_ENTRY_ENABLED() ||
        RUBY_DTRACE_CMETHOD_RETURN_ENABLED()) {

        switch (flag) {
          case RUBY_EVENT_CALL:
            RUBY_DTRACE_METHOD_ENTRY_HOOK(ec, 0, 0);
            return;
          case RUBY_EVENT_C_CALL:
            RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, 0, 0);
            return;
          case RUBY_EVENT_RETURN:
            RUBY_DTRACE_METHOD_RETURN_HOOK(ec, 0, 0);
            return;
          case RUBY_EVENT_C_RETURN:
            RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, 0, 0);
            return;
        }
    }
}
3056
3057 static VALUE
vm_const_get_under(ID id,rb_num_t flags,VALUE cbase)3058 vm_const_get_under(ID id, rb_num_t flags, VALUE cbase)
3059 {
3060 VALUE ns;
3061
3062 if ((ns = vm_search_const_defined_class(cbase, id)) == 0) {
3063 return ns;
3064 }
3065 else if (VM_DEFINECLASS_SCOPED_P(flags)) {
3066 return rb_public_const_get_at(ns, id);
3067 }
3068 else {
3069 return rb_const_get_at(ns, id);
3070 }
3071 }
3072
/* Validate reopening constant `id` as a class. Raises TypeError when the
 * existing constant is not a Class, or when a superclass was written and
 * klass's real superclass differs; otherwise returns klass. Every path
 * that does not return raises, so control never falls off the end. */
static VALUE
vm_check_if_class(ID id, rb_num_t flags, VALUE super, VALUE klass)
{
    if (!RB_TYPE_P(klass, T_CLASS)) {
        rb_raise(rb_eTypeError, "%"PRIsVALUE" is not a class", rb_id2str(id));
    }
    else if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags)) {
        /* compare against the real (non-singleton, non-iclass) superclass */
        VALUE tmp = rb_class_real(RCLASS_SUPER(klass));

        if (tmp != super) {
            rb_raise(rb_eTypeError,
                     "superclass mismatch for class %"PRIsVALUE"",
                     rb_id2str(id));
        }
        else {
            return klass;
        }
    }
    else {
        return klass;
    }
}
3095
3096 static VALUE
vm_check_if_module(ID id,VALUE mod)3097 vm_check_if_module(ID id, VALUE mod)
3098 {
3099 if (!RB_TYPE_P(mod, T_MODULE)) {
3100 rb_raise(rb_eTypeError, "%"PRIsVALUE" is not a module", rb_id2str(id));
3101 }
3102 else {
3103 return mod;
3104 }
3105 }
3106
3107 static VALUE
vm_declare_class(ID id,rb_num_t flags,VALUE cbase,VALUE super)3108 vm_declare_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
3109 {
3110 /* new class declaration */
3111 VALUE s = VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) ? super : rb_cObject;
3112 VALUE c = rb_define_class_id(id, s);
3113
3114 rb_set_class_path_string(c, cbase, rb_id2str(id));
3115 rb_const_set(cbase, id, c);
3116 rb_class_inherited(s, c);
3117 return c;
3118 }
3119
3120 static VALUE
vm_declare_module(ID id,VALUE cbase)3121 vm_declare_module(ID id, VALUE cbase)
3122 {
3123 /* new module declaration */
3124 VALUE mod = rb_define_module_id(id);
3125 rb_set_class_path_string(mod, cbase, rb_id2str(id));
3126 rb_const_set(cbase, id, mod);
3127 return mod;
3128 }
3129
/* `class Foo < Bar` implementation: reopen an existing class constant
 * under cbase (validating its superclass) or declare a new one. Raises
 * TypeError for a non-Class superclass operand or a non-namespace cbase. */
static VALUE
vm_define_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
{
    VALUE klass;

    if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) && !RB_TYPE_P(super, T_CLASS)) {
        rb_raise(rb_eTypeError,
                 "superclass must be a Class (%"PRIsVALUE" given)",
                 rb_obj_class(super));
    }

    vm_check_if_namespace(cbase);

    /* find klass */
    /* trigger any pending autoload for `id` before looking it up */
    rb_autoload_load(cbase, id);
    if ((klass = vm_const_get_under(id, flags, cbase)) != 0) {
        /* reopening: verify the constant really is a compatible class */
        return vm_check_if_class(id, flags, super, klass);
    }
    else {
        return vm_declare_class(id, flags, cbase, super);
    }
}
3152
3153 static VALUE
vm_define_module(ID id,rb_num_t flags,VALUE cbase)3154 vm_define_module(ID id, rb_num_t flags, VALUE cbase)
3155 {
3156 VALUE mod;
3157
3158 vm_check_if_namespace(cbase);
3159 if ((mod = vm_const_get_under(id, flags, cbase)) != 0) {
3160 return vm_check_if_module(id, mod);
3161 }
3162 else {
3163 return vm_declare_module(id, cbase);
3164 }
3165 }
3166
/* defineclass instruction entry point: dispatch on the defineclass type
 * encoded in `flags` (class, singleton class, or module). Each branch
 * returns the VALUE that becomes the class-body scope. */
static VALUE
vm_find_or_create_class_by_id(ID id,
                              rb_num_t flags,
                              VALUE cbase,
                              VALUE super)
{
    rb_vm_defineclass_type_t type = VM_DEFINECLASS_TYPE(flags);

    switch (type) {
      case VM_DEFINECLASS_TYPE_CLASS:
        /* classdef returns class scope value */
        return vm_define_class(id, flags, cbase, super);

      case VM_DEFINECLASS_TYPE_SINGLETON_CLASS:
        /* classdef returns class scope value */
        return rb_singleton_class(cbase);

      case VM_DEFINECLASS_TYPE_MODULE:
        /* classdef returns class scope value */
        return vm_define_module(id, flags, cbase);

      default:
        rb_bug("unknown defineclass type: %d", (int)type);
    }
}
3192
3193 static VALUE
vm_opt_str_freeze(VALUE str,int bop,ID id)3194 vm_opt_str_freeze(VALUE str, int bop, ID id)
3195 {
3196 if (BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
3197 return str;
3198 }
3199 else {
3200 return Qundef;
3201 }
3202 }
3203
3204 /* this macro is mandatory to use OPTIMIZED_CMP. What a design! */
3205 #define id_cmp idCmp
3206
3207 static VALUE
vm_opt_newarray_max(rb_num_t num,const VALUE * ptr)3208 vm_opt_newarray_max(rb_num_t num, const VALUE *ptr)
3209 {
3210 if (BASIC_OP_UNREDEFINED_P(BOP_MAX, ARRAY_REDEFINED_OP_FLAG)) {
3211 if (num == 0) {
3212 return Qnil;
3213 }
3214 else {
3215 struct cmp_opt_data cmp_opt = { 0, 0 };
3216 VALUE result = *ptr;
3217 rb_snum_t i = num - 1;
3218 while (i-- > 0) {
3219 const VALUE v = *++ptr;
3220 if (OPTIMIZED_CMP(v, result, cmp_opt) > 0) {
3221 result = v;
3222 }
3223 }
3224 return result;
3225 }
3226 }
3227 else {
3228 VALUE ary = rb_ary_new4(num, ptr);
3229 return rb_funcall(ary, idMax, 0);
3230 }
3231 }
3232
3233 static VALUE
vm_opt_newarray_min(rb_num_t num,const VALUE * ptr)3234 vm_opt_newarray_min(rb_num_t num, const VALUE *ptr)
3235 {
3236 if (BASIC_OP_UNREDEFINED_P(BOP_MIN, ARRAY_REDEFINED_OP_FLAG)) {
3237 if (num == 0) {
3238 return Qnil;
3239 }
3240 else {
3241 struct cmp_opt_data cmp_opt = { 0, 0 };
3242 VALUE result = *ptr;
3243 rb_snum_t i = num - 1;
3244 while (i-- > 0) {
3245 const VALUE v = *++ptr;
3246 if (OPTIMIZED_CMP(v, result, cmp_opt) < 0) {
3247 result = v;
3248 }
3249 }
3250 return result;
3251 }
3252 }
3253 else {
3254 VALUE ary = rb_ary_new4(num, ptr);
3255 return rb_funcall(ary, idMin, 0);
3256 }
3257 }
3258
3259 #undef id_cmp
3260
3261 static int
vm_ic_hit_p(IC ic,const VALUE * reg_ep)3262 vm_ic_hit_p(IC ic, const VALUE *reg_ep)
3263 {
3264 if (ic->ic_serial == GET_GLOBAL_CONSTANT_STATE()) {
3265 return (ic->ic_cref == NULL || ic->ic_cref == rb_vm_get_cref(reg_ep));
3266 }
3267 return FALSE;
3268 }
3269
/* Store a freshly computed constant value into inline cache `ic`.
 * The serial is recorded minus ruby_vm_const_missing_count so a lookup
 * that went through const_missing will not validate on the next hit; the
 * counter is then reset for the next lookup.
 * NOTE(review): the VM_ASSERT inspects the slot *before* the store —
 * presumably guarding against a reserved Qundef sentinel; confirm. */
static void
vm_ic_update(IC ic, VALUE val, const VALUE *reg_ep)
{
    VM_ASSERT(ic->ic_value.value != Qundef);
    ic->ic_value.value = val;
    ic->ic_serial = GET_GLOBAL_CONSTANT_STATE() - ruby_vm_const_missing_count;
    ic->ic_cref = vm_get_const_key_cref(reg_ep);
    ruby_vm_const_missing_count = 0;
}
3279
/* `once` instruction: evaluate `iseq` at most once, caching the result in
 * the inline storage `is`. running_thread doubles as the state word:
 *   NULL                     — not yet run
 *   RUNNING_THREAD_ONCE_DONE — finished; is->once.value is the cached result
 *   some thread pointer      — that thread is currently executing the body
 * Other threads spin (yielding the CPU) until the owner finishes; the same
 * thread re-entering simply re-executes the body. */
static VALUE
vm_once_dispatch(rb_execution_context_t *ec, ISEQ iseq, ISE is)
{
    rb_thread_t *th = rb_ec_thread_ptr(ec);
    rb_thread_t *const RUNNING_THREAD_ONCE_DONE = (rb_thread_t *)(0x1);

  again:
    if (is->once.running_thread == RUNNING_THREAD_ONCE_DONE) {
        return is->once.value;
    }
    else if (is->once.running_thread == NULL) {
        VALUE val;
        is->once.running_thread = th;
        /* vm_once_clear resets running_thread if the body raises */
        val = rb_ensure(vm_once_exec, (VALUE)iseq, vm_once_clear, (VALUE)is);
        /* write barrier: the cached value is reachable from the iseq */
        RB_OBJ_WRITE(ec->cfp->iseq, &is->once.value, val);
        /* is->once.running_thread is cleared by vm_once_clear() */
        is->once.running_thread = RUNNING_THREAD_ONCE_DONE; /* success */
        return val;
    }
    else if (is->once.running_thread == th) {
        /* recursive once */
        return vm_once_exec((VALUE)iseq);
    }
    else {
        /* waiting for finish */
        RUBY_VM_CHECK_INTS(ec);
        rb_thread_schedule();
        goto again;
    }
}
3310
/* opt_case_dispatch: O(1) `case/when` dispatch through the compiler-built
 * CDHASH. Only literal-comparable key kinds participate, and only while
 * #=== is unredefined for all of them; integral Floats are normalized to
 * Integer keys so e.g. 1.0 hits `when 1`. Returns the branch offset on a
 * hit, else_offset on a miss, or 0 to fall back to sequential === tests. */
static OFFSET
vm_case_dispatch(CDHASH hash, OFFSET else_offset, VALUE key)
{
    switch (OBJ_BUILTIN_TYPE(key)) {
      case -1: /* special const (immediates, nil, true, false, ...) */
      case T_FLOAT:
      case T_SYMBOL:
      case T_BIGNUM:
      case T_STRING:
        if (BASIC_OP_UNREDEFINED_P(BOP_EQQ,
                                   SYMBOL_REDEFINED_OP_FLAG |
                                   INTEGER_REDEFINED_OP_FLAG |
                                   FLOAT_REDEFINED_OP_FLAG |
                                   NIL_REDEFINED_OP_FLAG |
                                   TRUE_REDEFINED_OP_FLAG |
                                   FALSE_REDEFINED_OP_FLAG |
                                   STRING_REDEFINED_OP_FLAG)) {
            st_data_t val;
            if (RB_FLOAT_TYPE_P(key)) {
                double kval = RFLOAT_VALUE(key);
                /* finite, integral floats compare equal to Integers */
                if (!isinf(kval) && modf(kval, &kval) == 0.0) {
                    key = FIXABLE(kval) ? LONG2FIX((long)kval) : rb_dbl2big(kval);
                }
            }
            if (rb_hash_stlike_lookup(hash, key, &val)) {
                return FIX2LONG((VALUE)val);
            }
            else {
                return else_offset;
            }
        }
    }
    return 0;
}
3345
3346 NORETURN(static void
3347 vm_stack_consistency_error(const rb_execution_context_t *ec,
3348 const rb_control_frame_t *,
3349 const VALUE *));
/* Abort on a VM stack pointer / base pointer mismatch (an interpreter
 * bug, not a user error). Developer builds attach a disassembly of the
 * offending iseq to the fatal exception; release builds call rb_bug. */
static void
vm_stack_consistency_error(const rb_execution_context_t *ec,
                           const rb_control_frame_t *cfp,
                           const VALUE *bp)
{
    /* express sp/bp as counts from the stack base for readable output */
    const ptrdiff_t nsp = VM_SP_CNT(ec, cfp->sp);
    const ptrdiff_t nbp = VM_SP_CNT(ec, bp);
    static const char stack_consistency_error[] =
        "Stack consistency error (sp: %"PRIdPTRDIFF", bp: %"PRIdPTRDIFF")";
#if defined RUBY_DEVEL
    VALUE mesg = rb_sprintf(stack_consistency_error, nsp, nbp);
    rb_str_cat_cstr(mesg, "\n");
    rb_str_append(mesg, rb_iseq_disasm(cfp->iseq));
    rb_exc_fatal(rb_exc_new3(rb_eFatal, mesg));
#else
    rb_bug(stack_consistency_error, nsp, nbp);
#endif
}
3368
3369 static VALUE
vm_opt_plus(VALUE recv,VALUE obj)3370 vm_opt_plus(VALUE recv, VALUE obj)
3371 {
3372 if (FIXNUM_2_P(recv, obj) &&
3373 BASIC_OP_UNREDEFINED_P(BOP_PLUS, INTEGER_REDEFINED_OP_FLAG)) {
3374 return rb_fix_plus_fix(recv, obj);
3375 }
3376 else if (FLONUM_2_P(recv, obj) &&
3377 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
3378 return DBL2NUM(RFLOAT_VALUE(recv) + RFLOAT_VALUE(obj));
3379 }
3380 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
3381 return Qundef;
3382 }
3383 else if (RBASIC_CLASS(recv) == rb_cFloat &&
3384 RBASIC_CLASS(obj) == rb_cFloat &&
3385 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
3386 return DBL2NUM(RFLOAT_VALUE(recv) + RFLOAT_VALUE(obj));
3387 }
3388 else if (RBASIC_CLASS(recv) == rb_cString &&
3389 RBASIC_CLASS(obj) == rb_cString &&
3390 BASIC_OP_UNREDEFINED_P(BOP_PLUS, STRING_REDEFINED_OP_FLAG)) {
3391 return rb_str_plus(recv, obj);
3392 }
3393 else if (RBASIC_CLASS(recv) == rb_cArray &&
3394 BASIC_OP_UNREDEFINED_P(BOP_PLUS, ARRAY_REDEFINED_OP_FLAG)) {
3395 return rb_ary_plus(recv, obj);
3396 }
3397 else {
3398 return Qundef;
3399 }
3400 }
3401
3402 static VALUE
vm_opt_minus(VALUE recv,VALUE obj)3403 vm_opt_minus(VALUE recv, VALUE obj)
3404 {
3405 if (FIXNUM_2_P(recv, obj) &&
3406 BASIC_OP_UNREDEFINED_P(BOP_MINUS, INTEGER_REDEFINED_OP_FLAG)) {
3407 return rb_fix_minus_fix(recv, obj);
3408 }
3409 else if (FLONUM_2_P(recv, obj) &&
3410 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
3411 return DBL2NUM(RFLOAT_VALUE(recv) - RFLOAT_VALUE(obj));
3412 }
3413 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
3414 return Qundef;
3415 }
3416 else if (RBASIC_CLASS(recv) == rb_cFloat &&
3417 RBASIC_CLASS(obj) == rb_cFloat &&
3418 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
3419 return DBL2NUM(RFLOAT_VALUE(recv) - RFLOAT_VALUE(obj));
3420 }
3421 else {
3422 return Qundef;
3423 }
3424 }
3425
3426 static VALUE
vm_opt_mult(VALUE recv,VALUE obj)3427 vm_opt_mult(VALUE recv, VALUE obj)
3428 {
3429 if (FIXNUM_2_P(recv, obj) &&
3430 BASIC_OP_UNREDEFINED_P(BOP_MULT, INTEGER_REDEFINED_OP_FLAG)) {
3431 return rb_fix_mul_fix(recv, obj);
3432 }
3433 else if (FLONUM_2_P(recv, obj) &&
3434 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
3435 return DBL2NUM(RFLOAT_VALUE(recv) * RFLOAT_VALUE(obj));
3436 }
3437 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
3438 return Qundef;
3439 }
3440 else if (RBASIC_CLASS(recv) == rb_cFloat &&
3441 RBASIC_CLASS(obj) == rb_cFloat &&
3442 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
3443 return DBL2NUM(RFLOAT_VALUE(recv) * RFLOAT_VALUE(obj));
3444 }
3445 else {
3446 return Qundef;
3447 }
3448 }
3449
3450 static VALUE
vm_opt_div(VALUE recv,VALUE obj)3451 vm_opt_div(VALUE recv, VALUE obj)
3452 {
3453 if (FIXNUM_2_P(recv, obj) &&
3454 BASIC_OP_UNREDEFINED_P(BOP_DIV, INTEGER_REDEFINED_OP_FLAG)) {
3455 return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_div_fix(recv, obj);
3456 }
3457 else if (FLONUM_2_P(recv, obj) &&
3458 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
3459 return rb_flo_div_flo(recv, obj);
3460 }
3461 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
3462 return Qundef;
3463 }
3464 else if (RBASIC_CLASS(recv) == rb_cFloat &&
3465 RBASIC_CLASS(obj) == rb_cFloat &&
3466 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
3467 return rb_flo_div_flo(recv, obj);
3468 }
3469 else {
3470 return Qundef;
3471 }
3472 }
3473
3474 static VALUE
vm_opt_mod(VALUE recv,VALUE obj)3475 vm_opt_mod(VALUE recv, VALUE obj)
3476 {
3477 if (FIXNUM_2_P(recv, obj) &&
3478 BASIC_OP_UNREDEFINED_P(BOP_MOD, INTEGER_REDEFINED_OP_FLAG)) {
3479 return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_mod_fix(recv, obj);
3480 }
3481 else if (FLONUM_2_P(recv, obj) &&
3482 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
3483 return DBL2NUM(ruby_float_mod(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj)));
3484 }
3485 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
3486 return Qundef;
3487 }
3488 else if (RBASIC_CLASS(recv) == rb_cFloat &&
3489 RBASIC_CLASS(obj) == rb_cFloat &&
3490 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
3491 return DBL2NUM(ruby_float_mod(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj)));
3492 }
3493 else {
3494 return Qundef;
3495 }
3496 }
3497
3498 static VALUE
vm_opt_neq(CALL_INFO ci,CALL_CACHE cc,CALL_INFO ci_eq,CALL_CACHE cc_eq,VALUE recv,VALUE obj)3499 vm_opt_neq(CALL_INFO ci, CALL_CACHE cc,
3500 CALL_INFO ci_eq, CALL_CACHE cc_eq,
3501 VALUE recv, VALUE obj)
3502 {
3503 if (vm_method_cfunc_is(ci, cc, recv, rb_obj_not_equal)) {
3504 VALUE val = opt_eq_func(recv, obj, ci_eq, cc_eq);
3505
3506 if (val != Qundef) {
3507 return RTEST(val) ? Qfalse : Qtrue;
3508 }
3509 }
3510
3511 return Qundef;
3512 }
3513
3514 static VALUE
vm_opt_lt(VALUE recv,VALUE obj)3515 vm_opt_lt(VALUE recv, VALUE obj)
3516 {
3517 if (FIXNUM_2_P(recv, obj) &&
3518 BASIC_OP_UNREDEFINED_P(BOP_LT, INTEGER_REDEFINED_OP_FLAG)) {
3519 return (SIGNED_VALUE)recv < (SIGNED_VALUE)obj ? Qtrue : Qfalse;
3520 }
3521 else if (FLONUM_2_P(recv, obj) &&
3522 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
3523 return RFLOAT_VALUE(recv) < RFLOAT_VALUE(obj) ? Qtrue : Qfalse;
3524 }
3525 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
3526 return Qundef;
3527 }
3528 else if (RBASIC_CLASS(recv) == rb_cFloat &&
3529 RBASIC_CLASS(obj) == rb_cFloat &&
3530 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
3531 CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
3532 return RFLOAT_VALUE(recv) < RFLOAT_VALUE(obj) ? Qtrue : Qfalse;
3533 }
3534 else {
3535 return Qundef;
3536 }
3537 }
3538
3539 static VALUE
vm_opt_le(VALUE recv,VALUE obj)3540 vm_opt_le(VALUE recv, VALUE obj)
3541 {
3542 if (FIXNUM_2_P(recv, obj) &&
3543 BASIC_OP_UNREDEFINED_P(BOP_LE, INTEGER_REDEFINED_OP_FLAG)) {
3544 return (SIGNED_VALUE)recv <= (SIGNED_VALUE)obj ? Qtrue : Qfalse;
3545 }
3546 else if (FLONUM_2_P(recv, obj) &&
3547 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
3548 return RFLOAT_VALUE(recv) <= RFLOAT_VALUE(obj) ? Qtrue : Qfalse;
3549 }
3550 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
3551 return Qundef;
3552 }
3553 else if (RBASIC_CLASS(recv) == rb_cFloat &&
3554 RBASIC_CLASS(obj) == rb_cFloat &&
3555 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
3556 CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
3557 return RFLOAT_VALUE(recv) <= RFLOAT_VALUE(obj) ? Qtrue : Qfalse;
3558 }
3559 else {
3560 return Qundef;
3561 }
3562 }
3563
3564 static VALUE
vm_opt_gt(VALUE recv,VALUE obj)3565 vm_opt_gt(VALUE recv, VALUE obj)
3566 {
3567 if (FIXNUM_2_P(recv, obj) &&
3568 BASIC_OP_UNREDEFINED_P(BOP_GT, INTEGER_REDEFINED_OP_FLAG)) {
3569 return (SIGNED_VALUE)recv > (SIGNED_VALUE)obj ? Qtrue : Qfalse;
3570 }
3571 else if (FLONUM_2_P(recv, obj) &&
3572 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
3573 return RFLOAT_VALUE(recv) > RFLOAT_VALUE(obj) ? Qtrue : Qfalse;
3574 }
3575 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
3576 return Qundef;
3577 }
3578 else if (RBASIC_CLASS(recv) == rb_cFloat &&
3579 RBASIC_CLASS(obj) == rb_cFloat &&
3580 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
3581 CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
3582 return RFLOAT_VALUE(recv) > RFLOAT_VALUE(obj) ? Qtrue : Qfalse;
3583 }
3584 else {
3585 return Qundef;
3586 }
3587 }
3588
3589 static VALUE
vm_opt_ge(VALUE recv,VALUE obj)3590 vm_opt_ge(VALUE recv, VALUE obj)
3591 {
3592 if (FIXNUM_2_P(recv, obj) &&
3593 BASIC_OP_UNREDEFINED_P(BOP_GE, INTEGER_REDEFINED_OP_FLAG)) {
3594 return (SIGNED_VALUE)recv >= (SIGNED_VALUE)obj ? Qtrue : Qfalse;
3595 }
3596 else if (FLONUM_2_P(recv, obj) &&
3597 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
3598 return RFLOAT_VALUE(recv) >= RFLOAT_VALUE(obj) ? Qtrue : Qfalse;
3599 }
3600 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
3601 return Qundef;
3602 }
3603 else if (RBASIC_CLASS(recv) == rb_cFloat &&
3604 RBASIC_CLASS(obj) == rb_cFloat &&
3605 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
3606 CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
3607 return RFLOAT_VALUE(recv) >= RFLOAT_VALUE(obj) ? Qtrue : Qfalse;
3608 }
3609 else {
3610 return Qundef;
3611 }
3612 }
3613
3614
3615 static VALUE
vm_opt_ltlt(VALUE recv,VALUE obj)3616 vm_opt_ltlt(VALUE recv, VALUE obj)
3617 {
3618 if (SPECIAL_CONST_P(recv)) {
3619 return Qundef;
3620 }
3621 else if (RBASIC_CLASS(recv) == rb_cString &&
3622 BASIC_OP_UNREDEFINED_P(BOP_LTLT, STRING_REDEFINED_OP_FLAG)) {
3623 return rb_str_concat(recv, obj);
3624 }
3625 else if (RBASIC_CLASS(recv) == rb_cArray &&
3626 BASIC_OP_UNREDEFINED_P(BOP_LTLT, ARRAY_REDEFINED_OP_FLAG)) {
3627 return rb_ary_push(recv, obj);
3628 }
3629 else {
3630 return Qundef;
3631 }
3632 }
3633
3634 static VALUE
vm_opt_and(VALUE recv,VALUE obj)3635 vm_opt_and(VALUE recv, VALUE obj)
3636 {
3637 if (FIXNUM_2_P(recv, obj) &&
3638 BASIC_OP_UNREDEFINED_P(BOP_AND, INTEGER_REDEFINED_OP_FLAG)) {
3639 return LONG2NUM(FIX2LONG(recv) & FIX2LONG(obj));
3640 }
3641 else {
3642 return Qundef;
3643 }
3644 }
3645
3646 static VALUE
vm_opt_or(VALUE recv,VALUE obj)3647 vm_opt_or(VALUE recv, VALUE obj)
3648 {
3649 if (FIXNUM_2_P(recv, obj) &&
3650 BASIC_OP_UNREDEFINED_P(BOP_OR, INTEGER_REDEFINED_OP_FLAG)) {
3651 return LONG2NUM(FIX2LONG(recv) | FIX2LONG(obj));
3652 }
3653 else {
3654 return Qundef;
3655 }
3656 }
3657
3658 static VALUE
vm_opt_aref(VALUE recv,VALUE obj)3659 vm_opt_aref(VALUE recv, VALUE obj)
3660 {
3661 if (SPECIAL_CONST_P(recv)) {
3662 return Qundef;
3663 }
3664 else if (RBASIC_CLASS(recv) == rb_cArray &&
3665 BASIC_OP_UNREDEFINED_P(BOP_AREF, ARRAY_REDEFINED_OP_FLAG)) {
3666 if (FIXNUM_P(obj)) {
3667 return rb_ary_entry_internal(recv, FIX2LONG(obj));
3668 }
3669 else {
3670 return rb_ary_aref1(recv, obj);
3671 }
3672 }
3673 else if (RBASIC_CLASS(recv) == rb_cHash &&
3674 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG)) {
3675 return rb_hash_aref(recv, obj);
3676 }
3677 else {
3678 return Qundef;
3679 }
3680 }
3681
3682 static VALUE
vm_opt_aset(VALUE recv,VALUE obj,VALUE set)3683 vm_opt_aset(VALUE recv, VALUE obj, VALUE set)
3684 {
3685 if (SPECIAL_CONST_P(recv)) {
3686 return Qundef;
3687 }
3688 else if (RBASIC_CLASS(recv) == rb_cArray &&
3689 BASIC_OP_UNREDEFINED_P(BOP_ASET, ARRAY_REDEFINED_OP_FLAG) &&
3690 FIXNUM_P(obj)) {
3691 rb_ary_store(recv, FIX2LONG(obj), set);
3692 return set;
3693 }
3694 else if (RBASIC_CLASS(recv) == rb_cHash &&
3695 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG)) {
3696 rb_hash_aset(recv, obj, set);
3697 return set;
3698 }
3699 else {
3700 return Qundef;
3701 }
3702 }
3703
3704 static VALUE
vm_opt_aref_with(VALUE recv,VALUE key)3705 vm_opt_aref_with(VALUE recv, VALUE key)
3706 {
3707 if (!SPECIAL_CONST_P(recv) && RBASIC_CLASS(recv) == rb_cHash &&
3708 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG) &&
3709 rb_hash_compare_by_id_p(recv) == Qfalse) {
3710 return rb_hash_aref(recv, key);
3711 }
3712 else {
3713 return Qundef;
3714 }
3715 }
3716
3717 static VALUE
vm_opt_aset_with(VALUE recv,VALUE key,VALUE val)3718 vm_opt_aset_with(VALUE recv, VALUE key, VALUE val)
3719 {
3720 if (!SPECIAL_CONST_P(recv) && RBASIC_CLASS(recv) == rb_cHash &&
3721 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG) &&
3722 rb_hash_compare_by_id_p(recv) == Qfalse) {
3723 return rb_hash_aset(recv, key, val);
3724 }
3725 else {
3726 return Qundef;
3727 }
3728 }
3729
3730 static VALUE
vm_opt_length(VALUE recv,int bop)3731 vm_opt_length(VALUE recv, int bop)
3732 {
3733 if (SPECIAL_CONST_P(recv)) {
3734 return Qundef;
3735 }
3736 else if (RBASIC_CLASS(recv) == rb_cString &&
3737 BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
3738 if (bop == BOP_EMPTY_P) {
3739 return LONG2NUM(RSTRING_LEN(recv));
3740 }
3741 else {
3742 return rb_str_length(recv);
3743 }
3744 }
3745 else if (RBASIC_CLASS(recv) == rb_cArray &&
3746 BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
3747 return LONG2NUM(RARRAY_LEN(recv));
3748 }
3749 else if (RBASIC_CLASS(recv) == rb_cHash &&
3750 BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
3751 return INT2FIX(RHASH_SIZE(recv));
3752 }
3753 else {
3754 return Qundef;
3755 }
3756 }
3757
3758 static VALUE
vm_opt_empty_p(VALUE recv)3759 vm_opt_empty_p(VALUE recv)
3760 {
3761 switch (vm_opt_length(recv, BOP_EMPTY_P)) {
3762 case Qundef: return Qundef;
3763 case INT2FIX(0): return Qtrue;
3764 default: return Qfalse;
3765 }
3766 }
3767
/* Fixnum#succ computed directly on the tagged representation, with the
 * two edge cases (-1 and the Fixnum maximum) handled explicitly. `x` is
 * expected to be a Fixnum VALUE (the caller checks FIXNUM_P). */
static VALUE
fix_succ(VALUE x)
{
    switch (x) {
      case ~0UL:
        /* 0xFFFF_FFFF == INT2FIX(-1)
         * `-1.succ` is of course 0. */
        return INT2FIX(0);
      case RSHIFT(~0UL, 1):
        /* 0x7FFF_FFFF == LONG2FIX(0x3FFF_FFFF)
         * 0x3FFF_FFFF + 1 == 0x4000_0000, which is a Bignum. */
        return rb_uint2big(1UL << (SIZEOF_LONG * CHAR_BIT - 2));
      default:
        /* LONG2FIX(FIX2LONG(x)+FIX2LONG(y))
         * == ((lx*2+1)/2 + (ly*2+1)/2)*2+1
         * == lx*2 + ly*2 + 1
         * == (lx*2+1) + (ly*2+1) - 1
         * == x + y - 1
         *
         * Here, if we put y := INT2FIX(1):
         *
         * == x + INT2FIX(1) - 1
         * == x + 2 .
         */
        return x + 2;
    }
}
3795
3796 static VALUE
vm_opt_succ(VALUE recv)3797 vm_opt_succ(VALUE recv)
3798 {
3799 if (FIXNUM_P(recv) &&
3800 BASIC_OP_UNREDEFINED_P(BOP_SUCC, INTEGER_REDEFINED_OP_FLAG)) {
3801 return fix_succ(recv);
3802 }
3803 else if (SPECIAL_CONST_P(recv)) {
3804 return Qundef;
3805 }
3806 else if (RBASIC_CLASS(recv) == rb_cString &&
3807 BASIC_OP_UNREDEFINED_P(BOP_SUCC, STRING_REDEFINED_OP_FLAG)) {
3808 return rb_str_succ(recv);
3809 }
3810 else {
3811 return Qundef;
3812 }
3813 }
3814
3815 static VALUE
vm_opt_not(CALL_INFO ci,CALL_CACHE cc,VALUE recv)3816 vm_opt_not(CALL_INFO ci, CALL_CACHE cc, VALUE recv)
3817 {
3818 if (vm_method_cfunc_is(ci, cc, recv, rb_obj_not)) {
3819 return RTEST(recv) ? Qfalse : Qtrue;
3820 }
3821 else {
3822 return Qundef;
3823 }
3824 }
3825
3826 static VALUE
vm_opt_regexpmatch1(VALUE recv,VALUE obj)3827 vm_opt_regexpmatch1(VALUE recv, VALUE obj)
3828 {
3829 if (BASIC_OP_UNREDEFINED_P(BOP_MATCH, REGEXP_REDEFINED_OP_FLAG)) {
3830 return rb_reg_match(recv, obj);
3831 }
3832 else {
3833 return rb_funcall(recv, idEqTilde, 1, obj);
3834 }
3835 }
3836
3837 static VALUE
vm_opt_regexpmatch2(VALUE recv,VALUE obj)3838 vm_opt_regexpmatch2(VALUE recv, VALUE obj)
3839 {
3840 if (CLASS_OF(recv) == rb_cString &&
3841 BASIC_OP_UNREDEFINED_P(BOP_MATCH, STRING_REDEFINED_OP_FLAG)) {
3842 return rb_reg_match(obj, recv);
3843 }
3844 else {
3845 return Qundef;
3846 }
3847 }
3848
3849 rb_event_flag_t rb_iseq_event_flags(const rb_iseq_t *iseq, size_t pos);
3850
3851 NOINLINE(static void vm_trace(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, const VALUE *pc));
3852
/* Fire one trace event (`target_event`, masked against the events
 * actually flagged at this pc) on the global hook list and, if present,
 * the iseq-local hook list.
 *
 * ec           - current execution context
 * reg_cfp      - current control frame; its pc is temporarily bumped
 * pc           - program counter the event is reported for
 * pc_events    - all events flagged at this pc
 * target_event - the single event class being dispatched now
 * global_hooks - VM-wide hook list
 * local_hooks  - per-iseq hook list, may be NULL
 * val          - return value to report (Qundef when not applicable)
 */
static inline void
vm_trace_hook(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, const VALUE *pc,
              rb_event_flag_t pc_events, rb_event_flag_t target_event,
              rb_hook_list_t *global_hooks, rb_hook_list_t *local_hooks, VALUE val)
{
    rb_event_flag_t event = pc_events & target_event;
    VALUE self = GET_SELF();

    /* callers dispatch one event class at a time */
    VM_ASSERT(rb_popcount64((uint64_t)event) == 1);

    if (event & global_hooks->events) {
        /* increment PC because source line is calculated with PC-1 */
        reg_cfp->pc++;
        vm_dtrace(event, ec);
        rb_exec_event_hook_orig(ec, global_hooks, event, self, 0, 0, 0 , val, 0);
        reg_cfp->pc--;
    }

    if (local_hooks != NULL) {
        if (event & local_hooks->events) {
            /* increment PC because source line is calculated with PC-1 */
            reg_cfp->pc++;
            rb_exec_event_hook_orig(ec, local_hooks, event, self, 0, 0, 0 , val, 0);
            reg_cfp->pc--;
        }
    }
}
3880
/* Dispatch helper used by vm_trace: fire `target_event` only if it is
 * both flagged at this pc and enabled by some hook list.  Relies on the
 * locals (ec, reg_cfp, pc, pc_events, enabled_flags, global_hooks,
 * local_hooks) of the enclosing vm_trace body. */
#define VM_TRACE_HOOK(target_event, val) do { \
    if ((pc_events & (target_event)) & enabled_flags) { \
        vm_trace_hook(ec, reg_cfp, pc, pc_events, (target_event), global_hooks, local_hooks, (val)); \
    } \
} while (0)
3886
/* Per-instruction trace dispatcher, invoked from the interpreter loop
 * on trace-enabled instructions.  Collects the events flagged at the
 * current pc, merges global and iseq-local hook masks, and fires each
 * event class in a fixed order (call-ish, line, coverage, return-ish). */
static void
vm_trace(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, const VALUE *pc)
{
    rb_event_flag_t enabled_flags = ruby_vm_event_flags & ISEQ_TRACE_EVENTS;

    /* fast exit: no global trace events and no iseq-local hooks anywhere */
    if (enabled_flags == 0 && ruby_vm_event_local_num == 0) {
        return;
    }
    else {
        const rb_iseq_t *iseq = reg_cfp->iseq;
        size_t pos = pc - iseq->body->iseq_encoded;
        rb_event_flag_t pc_events = rb_iseq_event_flags(iseq, pos);
        rb_hook_list_t *local_hooks = iseq->aux.exec.local_hooks;
        rb_event_flag_t local_hook_events = local_hooks != NULL ? local_hooks->events : 0;
        enabled_flags |= local_hook_events;

        VM_ASSERT((local_hook_events & ~ISEQ_TRACE_EVENTS) == 0);

        if ((pc_events & enabled_flags) == 0) {
#if 0
            /* disable trace */
            /* TODO: incomplete */
            rb_iseq_trace_set(iseq, vm_event_flags & ISEQ_TRACE_EVENTS);
#else
            /* do not disable trace because of performance problem
             * (re-enable overhead)
             */
#endif
            return;
        }
        else if (ec->trace_arg != NULL) {
            /* already tracing: avoid re-entrant hook invocation */
            return;
        }
        else {
            rb_hook_list_t *global_hooks = rb_vm_global_hooks(ec);

            /* debugging aid: flip to `if (1)` to log every traced pc */
            if (0) {
                fprintf(stderr, "vm_trace>>%4d (%4x) - %s:%d %s\n",
                        (int)pos,
                        (int)pc_events,
                        RSTRING_PTR(rb_iseq_path(iseq)),
                        (int)rb_iseq_line_no(iseq, pos),
                        RSTRING_PTR(rb_iseq_label(iseq)));
            }
            VM_ASSERT(reg_cfp->pc == pc);
            VM_ASSERT(pc_events != 0);
            VM_ASSERT(enabled_flags & pc_events);

            /* check traces; return-ish events report the value on
             * top of the stack, the others report nothing (Qundef) */
            VM_TRACE_HOOK(RUBY_EVENT_CLASS | RUBY_EVENT_CALL | RUBY_EVENT_B_CALL, Qundef);
            VM_TRACE_HOOK(RUBY_EVENT_LINE, Qundef);
            VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_LINE, Qundef);
            VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_BRANCH, Qundef);
            VM_TRACE_HOOK(RUBY_EVENT_END | RUBY_EVENT_RETURN | RUBY_EVENT_B_RETURN, TOPN(0));
        }
    }
}
3945
#if VM_CHECK_MODE > 0
/* Stack-canary machinery (debug builds only): a random sentinel is
 * written onto the VM stack and checked after each instruction to
 * detect stack clobbering by instruction implementations. */
static NORETURN( NOINLINE( COLDFUNC
void vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)));
static VALUE vm_stack_canary;

/* Seed the canary with random bytes so it cannot collide with any
 * predictable VALUE. */
void
Init_vm_stack_canary(void)
{
    /* This has to be called _after_ our PRNG is properly set up. */
    int n = ruby_fill_random_bytes(&vm_stack_canary, sizeof vm_stack_canary, false);

    VM_ASSERT(n == 0);
}

/* Abort with diagnostics when the canary was overwritten: reports the
 * instruction name and an inspection of the clobbering value. */
static void
vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)
{
    /* Because a method has already been called, why not call
     * another one. */
    const char *insn = rb_insns_name(i);
    VALUE inspection = rb_inspect(c);
    const char *str = StringValueCStr(inspection);

    rb_bug("dead canary found at %s: %s", insn, str);
}

#else
void Init_vm_stack_canary(void) { /* nothing to do */ }
#endif
3975