12#include "ruby/internal/config.h"
22#include "internal/array.h"
23#include "internal/compile.h"
24#include "internal/complex.h"
25#include "internal/encoding.h"
26#include "internal/error.h"
27#include "internal/gc.h"
28#include "internal/hash.h"
29#include "internal/io.h"
30#include "internal/numeric.h"
31#include "internal/object.h"
32#include "internal/rational.h"
33#include "internal/re.h"
34#include "internal/ruby_parser.h"
35#include "internal/symbol.h"
36#include "internal/thread.h"
37#include "internal/variable.h"
43#include "vm_callinfo.h"
49#include "insns_info.inc"
/* Add integer i to a Fixnum-encoded VALUE n without un-tagging:
 * INT2FIX(i) carries the tag bit, which is masked off so the sum
 * keeps exactly one tag bit. */
#define FIXNUM_INC(n, i) ((n)+(INT2FIX(i)&~FIXNUM_FLAG))
85 unsigned int rescued: 2;
86 unsigned int unremovable: 1;
91 enum ruby_vminsn_type insn_id;
121 const void *ensure_node;
126const ID rb_iseq_shared_exc_local_tbl[] = {idERROR_INFO};
146#define compile_debug CPDEBUG
148#define compile_debug ISEQ_COMPILE_DATA(iseq)->option->debug_level
/* Debug-print helpers (active when CPDEBUG is enabled).  Each macro
 * prints only when the current compile_debug level reaches its
 * numeric threshold (1 = normal, 2 = verbose, 10 = per-node). */
#define compile_debug_print_indent(level) \
    ruby_debug_print_indent((level), compile_debug, gl_node_level * 2)

#define debugp(header, value) (void) \
  (compile_debug_print_indent(1) && \
   ruby_debug_print_value(1, compile_debug, (header), (value)))

#define debugi(header, id) (void) \
  (compile_debug_print_indent(1) && \
   ruby_debug_print_id(1, compile_debug, (header), (id)))

#define debugp_param(header, value) (void) \
  (compile_debug_print_indent(1) && \
   ruby_debug_print_value(1, compile_debug, (header), (value)))

#define debugp_verbose(header, value) (void) \
  (compile_debug_print_indent(2) && \
   ruby_debug_print_value(2, compile_debug, (header), (value)))

#define debugp_verbose_node(header, value) (void) \
  (compile_debug_print_indent(10) && \
   ruby_debug_print_value(10, compile_debug, (header), (value)))
176#define debug_node_start(node) ((void) \
177 (compile_debug_print_indent(1) && \
178 (ruby_debug_print_node(1, CPDEBUG, "", (const NODE *)(node)), gl_node_level)), \
181#define debug_node_end() gl_node_level --
/* CPDEBUG disabled: all debug helpers compile away to no-ops. */
#define debugi(header, id)                 ((void)0)
#define debugp(header, value)              ((void)0)
#define debugp_verbose(header, value)      ((void)0)
#define debugp_verbose_node(header, value) ((void)0)
#define debugp_param(header, value)        ((void)0)
#define debug_node_start(node)             ((void)0)
#define debug_node_end()                   ((void)0)
194#if CPDEBUG > 1 || CPDEBUG < 0
196#define printf ruby_debug_printf
197#define debugs if (compile_debug_print_indent(1)) ruby_debug_printf
198#define debug_compile(msg, v) ((void)(compile_debug_print_indent(1) && fputs((msg), stderr)), (v))
200#define debugs if(0)printf
201#define debug_compile(msg, v) (v)
/* Local index of the error-info local in exception (rescue/ensure)
 * iseqs; matches the single entry of rb_iseq_shared_exc_local_tbl. */
#define LVAR_ERRINFO (1)

/* Create a fresh label owned by the current iseq. */
#define NEW_LABEL(l) new_label_body(iseq, (l))
#define LABEL_FORMAT "<L%03d>"

/* Child iseq constructors: NEW_ISEQ has no parent (0);
 * NEW_CHILD_ISEQ parents the new iseq under the current one. */
#define NEW_ISEQ(node, name, type, line_no) \
  new_child_iseq(iseq, (node), rb_fstring(name), 0, (type), (line_no))

#define NEW_CHILD_ISEQ(node, name, type, line_no) \
  new_child_iseq(iseq, (node), rb_fstring(name), iseq, (type), (line_no))

#define NEW_CHILD_ISEQ_WITH_CALLBACK(callback_func, name, type, line_no) \
  new_child_iseq_with_callback(iseq, (callback_func), (name), iseq, (type), (line_no))
/* Append list seq2 onto seq1. */
#define ADD_SEQ(seq1, seq2) \
  APPEND_LIST((seq1), (seq2))

/* Add an instruction, taking line/node-id from a NODE. */
#define ADD_INSN(seq, line_node, insn) \
  ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_body(iseq, nd_line(line_node), nd_node_id(line_node), BIN(insn), 0))

/* Add an instruction with an explicit line number and node id. */
#define ADD_SYNTHETIC_INSN(seq, line_no, node_id, insn) \
  ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_body(iseq, (line_no), (node_id), BIN(insn), 0))

/* Insert an instruction immediately before/after an existing element. */
#define INSERT_BEFORE_INSN(next, line_no, node_id, insn) \
  ELEM_INSERT_PREV(&(next)->link, (LINK_ELEMENT *) new_insn_body(iseq, line_no, node_id, BIN(insn), 0))

#define INSERT_AFTER_INSN(prev, line_no, node_id, insn) \
  ELEM_INSERT_NEXT(&(prev)->link, (LINK_ELEMENT *) new_insn_body(iseq, line_no, node_id, BIN(insn), 0))

/* Instruction with one operand. */
#define ADD_INSN1(seq, line_node, insn, op1) \
  ADD_ELEM((seq), (LINK_ELEMENT *) \
           new_insn_body(iseq, nd_line(line_node), nd_node_id(line_node), BIN(insn), 1, (VALUE)(op1)))

#define INSERT_BEFORE_INSN1(next, line_no, node_id, insn, op1) \
  ELEM_INSERT_PREV(&(next)->link, (LINK_ELEMENT *) \
                   new_insn_body(iseq, line_no, node_id, BIN(insn), 1, (VALUE)(op1)))

#define INSERT_AFTER_INSN1(prev, line_no, node_id, insn, op1) \
  ELEM_INSERT_NEXT(&(prev)->link, (LINK_ELEMENT *) \
                   new_insn_body(iseq, line_no, node_id, BIN(insn), 1, (VALUE)(op1)))

/* Take another reference to a label. */
#define LABEL_REF(label) ((label)->refcnt++)

/* Instruction whose sole operand is a jump-target label (bumps refcnt). */
#define ADD_INSNL(seq, line_node, insn, label) (ADD_INSN1(seq, line_node, insn, label), LABEL_REF(label))

/* Instructions with two / three operands. */
#define ADD_INSN2(seq, line_node, insn, op1, op2) \
  ADD_ELEM((seq), (LINK_ELEMENT *) \
           new_insn_body(iseq, nd_line(line_node), nd_node_id(line_node), BIN(insn), 2, (VALUE)(op1), (VALUE)(op2)))

#define ADD_INSN3(seq, line_node, insn, op1, op2, op3) \
  ADD_ELEM((seq), (LINK_ELEMENT *) \
           new_insn_body(iseq, nd_line(line_node), nd_node_id(line_node), BIN(insn), 3, (VALUE)(op1), (VALUE)(op2), (VALUE)(op3)))

/* Method-call shorthands; all funnel through ADD_SEND_R. */
#define ADD_SEND(seq, line_node, id, argc) \
  ADD_SEND_R((seq), (line_node), (id), (argc), NULL, (VALUE)INT2FIX(0), NULL)

#define ADD_SEND_WITH_FLAG(seq, line_node, id, argc, flag) \
  ADD_SEND_R((seq), (line_node), (id), (argc), NULL, (VALUE)(flag), NULL)

#define ADD_SEND_WITH_BLOCK(seq, line_node, id, argc, block) \
  ADD_SEND_R((seq), (line_node), (id), (argc), (block), (VALUE)INT2FIX(0), NULL)

#define ADD_CALL_RECEIVER(seq, line_node) \
  ADD_INSN((seq), (line_node), putself)

/* Function-style (receiverless) calls carry VM_CALL_FCALL. */
#define ADD_CALL(seq, line_node, id, argc) \
  ADD_SEND_R((seq), (line_node), (id), (argc), NULL, (VALUE)INT2FIX(VM_CALL_FCALL), NULL)

#define ADD_CALL_WITH_BLOCK(seq, line_node, id, argc, block) \
  ADD_SEND_R((seq), (line_node), (id), (argc), (block), (VALUE)INT2FIX(VM_CALL_FCALL), NULL)

#define ADD_SEND_R(seq, line_node, id, argc, block, flag, keywords) \
  ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_send(iseq, nd_line(line_node), nd_node_id(line_node), (id), (VALUE)(argc), (block), (VALUE)(flag), (keywords)))

/* Trace-event elements. */
#define ADD_TRACE(seq, event) \
  ADD_ELEM((seq), (LINK_ELEMENT *)new_trace_body(iseq, (event), 0))
#define ADD_TRACE_WITH_DATA(seq, event, data) \
  ADD_ELEM((seq), (LINK_ELEMENT *)new_trace_body(iseq, (event), (data)))
294static void iseq_add_getlocal(
rb_iseq_t *iseq,
LINK_ANCHOR *
const seq,
const NODE *
const line_node,
int idx,
int level);
295static void iseq_add_setlocal(
rb_iseq_t *iseq,
LINK_ANCHOR *
const seq,
const NODE *
const line_node,
int idx,
int level);
/* Local variable access: delegate to the iseq_add_{get,set}local helpers. */
#define ADD_GETLOCAL(seq, line_node, idx, level) iseq_add_getlocal(iseq, (seq), (line_node), (idx), (level))
#define ADD_SETLOCAL(seq, line_node, idx, level) iseq_add_setlocal(iseq, (seq), (line_node), (idx), (level))

/* Label and stack-adjust elements. */
#define ADD_LABEL(seq, label) \
  ADD_ELEM((seq), (LINK_ELEMENT *) (label))

#define APPEND_LABEL(seq, before, label) \
  APPEND_ELEM((seq), (before), (LINK_ELEMENT *) (label))

#define ADD_ADJUST(seq, line_node, label) \
  ADD_ELEM((seq), (LINK_ELEMENT *) new_adjust_body(iseq, (label), nd_line(line_node)))

/* line -1 marks an adjust that restores rather than reports a position. */
#define ADD_ADJUST_RESTORE(seq, label) \
  ADD_ELEM((seq), (LINK_ELEMENT *) new_adjust_body(iseq, (label), -1))

/* Pin a (possibly NULL) label: bump its refcount and forbid removal. */
#define LABEL_UNREMOVABLE(label) \
    ((label) ? (LABEL_REF(label), (label)->unremovable=1) : 0)
315#define ADD_CATCH_ENTRY(type, ls, le, iseqv, lc) do { \
316 VALUE _e = rb_ary_new3(5, (type), \
317 (VALUE)(ls) | 1, (VALUE)(le) | 1, \
318 (VALUE)(iseqv), (VALUE)(lc) | 1); \
319 LABEL_UNREMOVABLE(ls); \
322 if (NIL_P(ISEQ_COMPILE_DATA(iseq)->catch_table_ary)) \
323 RB_OBJ_WRITE(iseq, &ISEQ_COMPILE_DATA(iseq)->catch_table_ary, rb_ary_hidden_new(3)); \
324 rb_ary_push(ISEQ_COMPILE_DATA(iseq)->catch_table_ary, freeze_hide_obj(_e)); \
/* Compile a node into anchor, keeping its result on the stack. */
#define COMPILE(anchor, desc, node) \
  (debug_compile("== " desc "\n", \
                 iseq_compile_each(iseq, (anchor), (node), 0)))

/* Compile a node, discarding (popping) its result. */
#define COMPILE_POPPED(anchor, desc, node) \
  (debug_compile("== " desc "\n", \
                 iseq_compile_each(iseq, (anchor), (node), 1)))

/* Compile a node with an explicit popped flag. */
#define COMPILE_(anchor, desc, node, popped) \
  (debug_compile("== " desc "\n", \
                 iseq_compile_each(iseq, (anchor), (node), (popped))))

/* Compile a call receiver: an implicit self receiver becomes putself and
 * yields VM_CALL_FCALL; otherwise compile recv (0 on success, -1 on error). */
#define COMPILE_RECV(anchor, desc, node, recv) \
    (private_recv_p(node) ? \
     (ADD_INSN(anchor, node, putself), VM_CALL_FCALL) : \
     COMPILE(anchor, desc, recv) ? 0 : -1)
/* Accessors for INSN operands and id, plus element-type predicates. */
#define OPERAND_AT(insn, idx) \
  (((INSN*)(insn))->operands[(idx)])

#define INSN_OF(insn) \
  (((INSN*)(insn))->insn_id)

#define IS_INSN(link)   ((link)->type == ISEQ_ELEMENT_INSN)
#define IS_LABEL(link)  ((link)->type == ISEQ_ELEMENT_LABEL)
#define IS_ADJUST(link) ((link)->type == ISEQ_ELEMENT_ADJUST)
#define IS_TRACE(link)  ((link)->type == ISEQ_ELEMENT_TRACE)
#define IS_INSN_ID(iobj, insn) (INSN_OF(iobj) == BIN(insn))
#define IS_NEXT_INSN_ID(link, insn) \
    ((link)->next && IS_INSN((link)->next) && IS_INSN_ID((link)->next, insn))
367append_compile_error(const
rb_iseq_t *iseq,
int line, const
char *fmt, ...)
369 VALUE err_info = ISEQ_COMPILE_DATA(iseq)->err_info;
370 VALUE file = rb_iseq_path(iseq);
375 err = rb_syntax_error_append(err, file, line, -1, NULL, fmt, args);
377 if (
NIL_P(err_info)) {
378 RB_OBJ_WRITE(iseq, &ISEQ_COMPILE_DATA(iseq)->err_info, err);
381 else if (!err_info) {
392compile_bug(
rb_iseq_t *iseq,
int line,
const char *fmt, ...)
396 rb_report_bug_valist(rb_iseq_path(iseq), line, fmt, args);
/* Error reporting: COMPILE_ERROR(ERROR_ARGS "fmt", ...) expands to
 * append_compile_error(iseq, nd_line(node), "fmt", ...).  Note the
 * trailing comma in ERROR_ARGS_AT is intentional. */
#define COMPILE_ERROR append_compile_error
#define ERROR_ARGS_AT(n) iseq, nd_line(n),
#define ERROR_ARGS ERROR_ARGS_AT(node)
407#define EXPECT_NODE(prefix, node, ndtype, errval) \
409 const NODE *error_node = (node); \
410 enum node_type error_type = nd_type(error_node); \
411 if (error_type != (ndtype)) { \
412 COMPILE_ERROR(ERROR_ARGS_AT(error_node) \
413 prefix ": " #ndtype " is expected, but %s", \
414 ruby_node_name(error_type)); \
419#define EXPECT_NODE_NONULL(prefix, parent, ndtype, errval) \
421 COMPILE_ERROR(ERROR_ARGS_AT(parent) \
422 prefix ": must be " #ndtype ", but 0"); \
426#define UNKNOWN_NODE(prefix, node, errval) \
428 const NODE *error_node = (node); \
429 COMPILE_ERROR(ERROR_ARGS_AT(error_node) prefix ": unknown node (%s)", \
430 ruby_node_name(nd_type(error_node))); \
/* Bail out of the enclosing compile function when a sub-compile failed;
 * BEFORE_RETURN is a per-call-site cleanup hook. */
#define CHECK(sub) if (!(sub)) {BEFORE_RETURN;return COMPILE_NG;}
#define NO_CHECK(sub) (void)(sub)

/* Declare / (re)initialize an on-stack link-list anchor. */
#define DECL_ANCHOR(name) \
    LINK_ANCHOR name[1] = {{{ISEQ_ELEMENT_ANCHOR,},&name[0].anchor}}
#define INIT_ANCHOR(name) \
    ((name->last = &name->anchor)->next = NULL)
447freeze_hide_obj(
VALUE obj)
450 RBASIC_CLEAR_CLASS(obj);
454#include "optinsn.inc"
455#if OPT_INSTRUCTIONS_UNIFICATION
456#include "optunifs.inc"
461#define ISEQ_ARG iseq,
462#define ISEQ_ARG_DECLARE rb_iseq_t *iseq,
465#define ISEQ_ARG_DECLARE
/* Per-iseq debug nesting level, kept in the compile data. */
#define gl_node_level ISEQ_COMPILE_DATA(iseq)->node_level
475static int insn_data_length(
INSN *iobj);
476static int calc_sp_depth(
int depth,
INSN *iobj);
478static INSN *new_insn_body(
rb_iseq_t *iseq,
int line_no,
int node_id,
enum ruby_vminsn_type insn_id,
int argc, ...);
491static int iseq_set_exception_local_table(
rb_iseq_t *iseq);
495static int iseq_set_exception_table(
rb_iseq_t *iseq);
496static int iseq_set_optargs_table(
rb_iseq_t *iseq);
497static int iseq_set_parameters_lvar_state(
const rb_iseq_t *iseq);
500static int compile_hash(
rb_iseq_t *iseq,
LINK_ANCHOR *
const ret,
const NODE *node,
int method_call_keywords,
int popped);
507verify_list(ISEQ_ARG_DECLARE
const char *info,
LINK_ANCHOR *
const anchor)
513 if (!compile_debug)
return;
515 list = anchor->anchor.next;
516 plist = &anchor->anchor;
518 if (plist != list->prev) {
525 if (anchor->last != plist && anchor->last != 0) {
530 rb_bug(
"list verify error: %08x (%s)", flag, info);
535#define verify_list(info, anchor) verify_list(iseq, (info), (anchor))
542 VALUE *original = rb_iseq_original_iseq(iseq);
544 while (i < ISEQ_BODY(iseq)->iseq_size) {
545 VALUE insn = original[i];
546 const char *types = insn_op_types(insn);
548 for (
int j=0; types[j]; j++) {
549 if (types[j] == TS_CALLDATA) {
553 if (cc != vm_cc_empty()) {
555 rb_bug(
"call cache is not initialized by vm_cc_empty()");
562 for (
unsigned int i=0; i<ISEQ_BODY(iseq)->ci_size; i++) {
563 struct rb_call_data *cd = &ISEQ_BODY(iseq)->call_data[i];
566 if (cc != NULL && cc != vm_cc_empty()) {
568 rb_bug(
"call cache is not initialized by vm_cc_empty()");
580 elem->prev = anchor->last;
581 anchor->last->next = elem;
583 verify_list(
"add", anchor);
593 elem->next = before->next;
594 elem->next->prev = elem;
596 if (before == anchor->last) anchor->last = elem;
597 verify_list(
"add", anchor);
600#define ADD_ELEM(anchor, elem) ADD_ELEM(iseq, (anchor), (elem))
601#define APPEND_ELEM(anchor, before, elem) APPEND_ELEM(iseq, (anchor), (before), (elem))
605branch_coverage_valid_p(
rb_iseq_t *iseq,
int first_line)
607 if (!ISEQ_COVERAGE(iseq))
return 0;
608 if (!ISEQ_BRANCH_COVERAGE(iseq))
return 0;
609 if (first_line <= 0)
return 0;
616 const int first_lineno = loc->beg_pos.lineno, first_column = loc->beg_pos.column;
617 const int last_lineno = loc->end_pos.lineno, last_column = loc->end_pos.column;
620 rb_hash_aset(structure, key, branch);
632 if (!branch_coverage_valid_p(iseq, loc->beg_pos.lineno))
return Qundef;
643 VALUE branch_base = rb_hash_aref(structure, key);
646 if (
NIL_P(branch_base)) {
647 branch_base = setup_branch(loc,
type, structure, key);
648 branches = rb_hash_new();
660generate_dummy_line_node(
int lineno,
int node_id)
663 nd_set_line(&dummy, lineno);
664 nd_set_node_id(&dummy, node_id);
671 if (!branch_coverage_valid_p(iseq, loc->beg_pos.lineno))
return;
682 VALUE branch = rb_hash_aref(branches, key);
686 branch = setup_branch(loc,
type, branches, key);
696 ADD_TRACE_WITH_DATA(seq, RUBY_EVENT_COVERAGE_BRANCH, counter_idx);
697 ADD_SYNTHETIC_INSN(seq, loc->end_pos.lineno, node_id, nop);
/* Most recently recorded source line while compiling iseq. */
#define ISEQ_LAST_LINE(iseq) (ISEQ_COMPILE_DATA(iseq)->last_line)
703validate_label(st_data_t name, st_data_t label, st_data_t arg)
707 if (!lobj->link.next) {
709 COMPILE_ERROR(iseq, lobj->position,
710 "%"PRIsVALUE
": undefined label",
720 st_foreach(labels_table, validate_label, (st_data_t)iseq);
721 st_free_table(labels_table);
725get_nd_recv(
const NODE *node)
727 switch (nd_type(node)) {
729 return RNODE_CALL(node)->nd_recv;
731 return RNODE_OPCALL(node)->nd_recv;
735 return RNODE_QCALL(node)->nd_recv;
739 return RNODE_ATTRASGN(node)->nd_recv;
741 return RNODE_OP_ASGN1(node)->nd_recv;
743 return RNODE_OP_ASGN2(node)->nd_recv;
745 rb_bug(
"unexpected node: %s", ruby_node_name(nd_type(node)));
750get_node_call_nd_mid(
const NODE *node)
752 switch (nd_type(node)) {
754 return RNODE_CALL(node)->nd_mid;
756 return RNODE_OPCALL(node)->nd_mid;
758 return RNODE_FCALL(node)->nd_mid;
760 return RNODE_QCALL(node)->nd_mid;
762 return RNODE_VCALL(node)->nd_mid;
764 return RNODE_ATTRASGN(node)->nd_mid;
766 rb_bug(
"unexpected node: %s", ruby_node_name(nd_type(node)));
771get_nd_args(
const NODE *node)
773 switch (nd_type(node)) {
775 return RNODE_CALL(node)->nd_args;
777 return RNODE_OPCALL(node)->nd_args;
779 return RNODE_FCALL(node)->nd_args;
781 return RNODE_QCALL(node)->nd_args;
785 return RNODE_ATTRASGN(node)->nd_args;
787 rb_bug(
"unexpected node: %s", ruby_node_name(nd_type(node)));
792get_node_colon_nd_mid(
const NODE *node)
794 switch (nd_type(node)) {
796 return RNODE_COLON2(node)->nd_mid;
798 return RNODE_COLON3(node)->nd_mid;
800 rb_bug(
"unexpected node: %s", ruby_node_name(nd_type(node)));
805get_nd_vid(
const NODE *node)
807 switch (nd_type(node)) {
809 return RNODE_LASGN(node)->nd_vid;
811 return RNODE_DASGN(node)->nd_vid;
813 return RNODE_IASGN(node)->nd_vid;
815 return RNODE_CVASGN(node)->nd_vid;
817 rb_bug(
"unexpected node: %s", ruby_node_name(nd_type(node)));
822get_nd_value(
const NODE *node)
824 switch (nd_type(node)) {
826 return RNODE_LASGN(node)->nd_value;
828 return RNODE_DASGN(node)->nd_value;
830 rb_bug(
"unexpected node: %s", ruby_node_name(nd_type(node)));
835get_string_value(
const NODE *node)
837 switch (nd_type(node)) {
839 return RB_OBJ_SET_SHAREABLE(rb_node_str_string_val(node));
841 return RB_OBJ_SET_SHAREABLE(rb_node_file_path_val(node));
843 rb_bug(
"unexpected node: %s", ruby_node_name(nd_type(node)));
853 (*ifunc->func)(iseq, ret, ifunc->data);
855 ADD_SYNTHETIC_INSN(ret, ISEQ_COMPILE_DATA(iseq)->last_line, -1, leave);
857 CHECK(iseq_setup_insn(iseq, ret));
858 return iseq_setup(iseq, ret);
861static bool drop_unreachable_return(
LINK_ANCHOR *ret);
870 NO_CHECK(COMPILE(ret,
"nil", node));
871 iseq_set_local_table(iseq, 0, 0);
874 else if (nd_type_p(node, NODE_SCOPE)) {
876 iseq_set_local_table(iseq, RNODE_SCOPE(node)->nd_tbl, (
NODE *)RNODE_SCOPE(node)->nd_args);
877 iseq_set_arguments(iseq, ret, (
NODE *)RNODE_SCOPE(node)->nd_args);
878 iseq_set_parameters_lvar_state(iseq);
880 switch (ISEQ_BODY(iseq)->
type) {
881 case ISEQ_TYPE_BLOCK:
883 LABEL *start = ISEQ_COMPILE_DATA(iseq)->start_label = NEW_LABEL(0);
884 LABEL *end = ISEQ_COMPILE_DATA(iseq)->end_label = NEW_LABEL(0);
886 start->rescued = LABEL_RESCUE_BEG;
887 end->rescued = LABEL_RESCUE_END;
890 ADD_SYNTHETIC_INSN(ret, ISEQ_BODY(iseq)->location.first_lineno, -1, nop);
891 ADD_LABEL(ret, start);
892 CHECK(COMPILE(ret,
"block body", RNODE_SCOPE(node)->nd_body));
895 ISEQ_COMPILE_DATA(iseq)->last_line = ISEQ_BODY(iseq)->location.code_location.end_pos.lineno;
898 ADD_CATCH_ENTRY(CATCH_TYPE_REDO, start, end, NULL, start);
899 ADD_CATCH_ENTRY(CATCH_TYPE_NEXT, start, end, NULL, end);
902 case ISEQ_TYPE_CLASS:
905 CHECK(COMPILE(ret,
"scoped node", RNODE_SCOPE(node)->nd_body));
907 ISEQ_COMPILE_DATA(iseq)->last_line = nd_line(node);
910 case ISEQ_TYPE_METHOD:
912 ISEQ_COMPILE_DATA(iseq)->root_node = RNODE_SCOPE(node)->nd_body;
914 CHECK(COMPILE(ret,
"scoped node", RNODE_SCOPE(node)->nd_body));
915 ISEQ_COMPILE_DATA(iseq)->root_node = RNODE_SCOPE(node)->nd_body;
917 ISEQ_COMPILE_DATA(iseq)->last_line = nd_line(node);
921 CHECK(COMPILE(ret,
"scoped node", RNODE_SCOPE(node)->nd_body));
928#define INVALID_ISEQ_TYPE(type) \
929 ISEQ_TYPE_##type: m = #type; goto invalid_iseq_type
930 switch (ISEQ_BODY(iseq)->
type) {
931 case INVALID_ISEQ_TYPE(
METHOD);
932 case INVALID_ISEQ_TYPE(CLASS);
933 case INVALID_ISEQ_TYPE(BLOCK);
934 case INVALID_ISEQ_TYPE(EVAL);
935 case INVALID_ISEQ_TYPE(MAIN);
936 case INVALID_ISEQ_TYPE(TOP);
937#undef INVALID_ISEQ_TYPE
938 case ISEQ_TYPE_RESCUE:
939 iseq_set_exception_local_table(iseq);
940 CHECK(COMPILE(ret,
"rescue", node));
942 case ISEQ_TYPE_ENSURE:
943 iseq_set_exception_local_table(iseq);
944 CHECK(COMPILE_POPPED(ret,
"ensure", node));
946 case ISEQ_TYPE_PLAIN:
947 CHECK(COMPILE(ret,
"ensure", node));
950 COMPILE_ERROR(ERROR_ARGS
"unknown scope: %d", ISEQ_BODY(iseq)->
type);
953 COMPILE_ERROR(ERROR_ARGS
"compile/ISEQ_TYPE_%s should not be reached", m);
958 if (ISEQ_BODY(iseq)->
type == ISEQ_TYPE_RESCUE || ISEQ_BODY(iseq)->
type == ISEQ_TYPE_ENSURE) {
959 NODE dummy_line_node = generate_dummy_line_node(0, -1);
960 ADD_GETLOCAL(ret, &dummy_line_node, LVAR_ERRINFO, 0);
961 ADD_INSN1(ret, &dummy_line_node,
throw,
INT2FIX(0) );
963 else if (!drop_unreachable_return(ret)) {
964 ADD_SYNTHETIC_INSN(ret, ISEQ_COMPILE_DATA(iseq)->last_line, -1, leave);
968 if (ISEQ_COMPILE_DATA(iseq)->labels_table) {
969 st_table *labels_table = ISEQ_COMPILE_DATA(iseq)->labels_table;
970 ISEQ_COMPILE_DATA(iseq)->labels_table = 0;
971 validate_labels(iseq, labels_table);
974 CHECK(iseq_setup_insn(iseq, ret));
975 return iseq_setup(iseq, ret);
979rb_iseq_translate_threaded_code(
rb_iseq_t *iseq)
981#if OPT_DIRECT_THREADED_CODE || OPT_CALL_THREADED_CODE
982 const void *
const *table = rb_vm_get_insns_address_table();
984 VALUE *encoded = (
VALUE *)ISEQ_BODY(iseq)->iseq_encoded;
986 for (i = 0; i < ISEQ_BODY(iseq)->iseq_size; ) {
987 int insn = (int)ISEQ_BODY(iseq)->iseq_encoded[i];
988 int len = insn_len(insn);
989 encoded[i] = (
VALUE)table[insn];
996 rb_yjit_live_iseq_count++;
997 rb_yjit_iseq_alloc_count++;
1004rb_iseq_original_iseq(
const rb_iseq_t *iseq)
1006 VALUE *original_code;
1008 if (ISEQ_ORIGINAL_ISEQ(iseq))
return ISEQ_ORIGINAL_ISEQ(iseq);
1009 original_code = ISEQ_ORIGINAL_ISEQ_ALLOC(iseq, ISEQ_BODY(iseq)->iseq_size);
1010 MEMCPY(original_code, ISEQ_BODY(iseq)->iseq_encoded,
VALUE, ISEQ_BODY(iseq)->iseq_size);
1012#if OPT_DIRECT_THREADED_CODE || OPT_CALL_THREADED_CODE
1016 for (i = 0; i < ISEQ_BODY(iseq)->iseq_size; ) {
1017 const void *addr = (
const void *)original_code[i];
1018 const int insn = rb_vm_insn_addr2insn(addr);
1020 original_code[i] = insn;
1021 i += insn_len(insn);
1025 return original_code;
1032#if defined(HAVE_TRUE_LONG_LONG) && SIZEOF_LONG_LONG > SIZEOF_VALUE
1033# define ALIGNMENT_SIZE SIZEOF_LONG_LONG
1035# define ALIGNMENT_SIZE SIZEOF_VALUE
1037#define PADDING_SIZE_MAX ((size_t)((ALIGNMENT_SIZE) - 1))
1039#define ALIGNMENT_SIZE_OF(type) alignment_size_assert(RUBY_ALIGNOF(type), #type)
1042alignment_size_assert(
size_t align,
const char *
type)
1045 "ALIGNMENT_SIZE_OF(%s):%zd == (2 ** N) is expected",
type, align);
1051calc_padding(
void *ptr,
size_t align)
1056 mis = (size_t)ptr & (align - 1);
1058 padding = align - mis;
1069 size_t padding = calc_padding((
void *)&storage->buff[storage->pos], align);
1071 if (size >= INT_MAX - padding) rb_memerror();
1072 if (storage->pos + size + padding > storage->size) {
1073 unsigned int alloc_size = storage->size;
1075 while (alloc_size < size + PADDING_SIZE_MAX) {
1076 if (alloc_size >= INT_MAX / 2) rb_memerror();
1079 storage->next = (
void *)
ALLOC_N(
char, alloc_size +
1081 storage = *arena = storage->next;
1084 storage->size = alloc_size;
1085 padding = calc_padding((
void *)&storage->buff[storage->pos], align);
1088 storage->pos += (int)padding;
1090 ptr = (
void *)&storage->buff[storage->pos];
1091 storage->pos += (int)size;
1096compile_data_alloc(
rb_iseq_t *iseq,
size_t size,
size_t align)
1099 return compile_data_alloc_with_arena(arena, size, align);
1102#define compile_data_alloc_type(iseq, type) \
1103 (type *)compile_data_alloc(iseq, sizeof(type), ALIGNMENT_SIZE_OF(type))
1106compile_data_alloc2(
rb_iseq_t *iseq,
size_t elsize,
size_t num,
size_t align)
1109 return compile_data_alloc(iseq, size, align);
1112#define compile_data_alloc2_type(iseq, type, num) \
1113 (type *)compile_data_alloc2(iseq, sizeof(type), num, ALIGNMENT_SIZE_OF(type))
1116compile_data_calloc2(
rb_iseq_t *iseq,
size_t elsize,
size_t num,
size_t align)
1119 void *p = compile_data_alloc(iseq, size, align);
1124#define compile_data_calloc2_type(iseq, type, num) \
1125 (type *)compile_data_calloc2(iseq, sizeof(type), num, ALIGNMENT_SIZE_OF(type))
1131 return (
INSN *)compile_data_alloc_with_arena(arena,
sizeof(
INSN), ALIGNMENT_SIZE_OF(
INSN));
1135compile_data_alloc_label(
rb_iseq_t *iseq)
1137 return compile_data_alloc_type(iseq,
LABEL);
1141compile_data_alloc_adjust(
rb_iseq_t *iseq)
1143 return compile_data_alloc_type(iseq,
ADJUST);
1147compile_data_alloc_trace(
rb_iseq_t *iseq)
1149 return compile_data_alloc_type(iseq,
TRACE);
1158 elem2->next = elem1->next;
1159 elem2->prev = elem1;
1160 elem1->next = elem2;
1162 elem2->next->prev = elem2;
1172 elem2->prev = elem1->prev;
1173 elem2->next = elem1;
1174 elem1->prev = elem2;
1176 elem2->prev->next = elem2;
1186 elem2->prev = elem1->prev;
1187 elem2->next = elem1->next;
1189 elem1->prev->next = elem2;
1192 elem1->next->prev = elem2;
1199 elem->prev->next = elem->next;
1201 elem->next->prev = elem->prev;
1208 return anchor->anchor.next;
1214 return anchor->last;
1221 switch (elem->type) {
1222 case ISEQ_ELEMENT_INSN:
1223 case ISEQ_ELEMENT_ADJUST:
1233LIST_INSN_SIZE_ONE(
const LINK_ANCHOR *
const anchor)
1235 LINK_ELEMENT *first_insn = ELEM_FIRST_INSN(FIRST_ELEMENT(anchor));
1236 if (first_insn != NULL &&
1237 ELEM_FIRST_INSN(first_insn->next) == NULL) {
1246LIST_INSN_SIZE_ZERO(
const LINK_ANCHOR *
const anchor)
1248 if (ELEM_FIRST_INSN(FIRST_ELEMENT(anchor)) == NULL) {
1266 if (anc2->anchor.next) {
1269 anc1->last->next = anc2->anchor.next;
1270 anc2->anchor.next->prev = anc1->last;
1271 anc1->last = anc2->last;
1276 verify_list(
"append", anc1);
1279#define APPEND_LIST(anc1, anc2) APPEND_LIST(iseq, (anc1), (anc2))
1288 printf(
"anch: %p, frst: %p, last: %p\n", (
void *)&anchor->anchor,
1289 (
void *)anchor->anchor.next, (
void *)anchor->last);
1291 printf(
"curr: %p, next: %p, prev: %p, type: %d\n", (
void *)list, (
void *)list->next,
1292 (
void *)list->prev, (
int)list->type);
1297 dump_disasm_list_with_cursor(anchor->anchor.next, cur, 0);
1298 verify_list(
"debug list", anchor);
1301#define debug_list(anc, cur) debug_list(iseq, (anc), (cur))
1304#define debug_list(anc, cur) ((void)0)
1310 TRACE *trace = compile_data_alloc_trace(iseq);
1312 trace->link.type = ISEQ_ELEMENT_TRACE;
1313 trace->link.next = NULL;
1314 trace->event = event;
1321new_label_body(
rb_iseq_t *iseq,
long line)
1323 LABEL *labelobj = compile_data_alloc_label(iseq);
1325 labelobj->link.type = ISEQ_ELEMENT_LABEL;
1326 labelobj->link.next = 0;
1328 labelobj->label_no = ISEQ_COMPILE_DATA(iseq)->label_no++;
1329 labelobj->sc_state = 0;
1331 labelobj->refcnt = 0;
1333 labelobj->rescued = LABEL_RESCUE_NONE;
1334 labelobj->unremovable = 0;
1335 labelobj->position = -1;
1342 ADJUST *adjust = compile_data_alloc_adjust(iseq);
1343 adjust->link.type = ISEQ_ELEMENT_ADJUST;
1344 adjust->link.next = 0;
1345 adjust->label = label;
1346 adjust->line_no = line;
1347 LABEL_UNREMOVABLE(label);
1354 const char *types = insn_op_types(insn->insn_id);
1355 for (
int j = 0; types[j]; j++) {
1356 char type = types[j];
1363 func(&OPERAND_AT(insn, j), data);
1372iseq_insn_each_object_write_barrier(
VALUE * obj,
VALUE iseq)
1381new_insn_core(
rb_iseq_t *iseq,
int line_no,
int node_id,
int insn_id,
int argc,
VALUE *argv)
1383 INSN *iobj = compile_data_alloc_insn(iseq);
1387 iobj->link.type = ISEQ_ELEMENT_INSN;
1388 iobj->link.next = 0;
1389 iobj->insn_id = insn_id;
1390 iobj->insn_info.line_no = line_no;
1391 iobj->insn_info.node_id = node_id;
1392 iobj->insn_info.events = 0;
1393 iobj->operands = argv;
1394 iobj->operand_size = argc;
1397 iseq_insn_each_markable_object(iobj, iseq_insn_each_object_write_barrier, (
VALUE)iseq);
1403new_insn_body(
rb_iseq_t *iseq,
int line_no,
int node_id,
enum ruby_vminsn_type insn_id,
int argc, ...)
1405 VALUE *operands = 0;
1409 va_start(argv, argc);
1410 operands = compile_data_alloc2_type(iseq,
VALUE, argc);
1411 for (i = 0; i < argc; i++) {
1417 return new_insn_core(iseq, line_no, node_id, insn_id, argc, operands);
1421insn_replace_with_operands(
rb_iseq_t *iseq,
INSN *iobj,
enum ruby_vminsn_type insn_id,
int argc, ...)
1423 VALUE *operands = 0;
1427 va_start(argv, argc);
1428 operands = compile_data_alloc2_type(iseq,
VALUE, argc);
1429 for (i = 0; i < argc; i++) {
1436 iobj->insn_id = insn_id;
1437 iobj->operand_size = argc;
1438 iobj->operands = operands;
1439 iseq_insn_each_markable_object(iobj, iseq_insn_each_object_write_barrier, (
VALUE)iseq);
1447 VM_ASSERT(argc >= 0);
1450 flag |= VM_CALL_KWARG;
1451 argc += kw_arg->keyword_len;
1454 if (!(flag & (VM_CALL_ARGS_SPLAT | VM_CALL_ARGS_BLOCKARG | VM_CALL_KWARG | VM_CALL_KW_SPLAT | VM_CALL_FORWARDING))
1455 && !has_blockiseq) {
1456 flag |= VM_CALL_ARGS_SIMPLE;
1459 ISEQ_BODY(iseq)->ci_size++;
1460 const struct rb_callinfo *ci = vm_ci_new(mid, flag, argc, kw_arg);
1468 VALUE *operands = compile_data_calloc2_type(iseq,
VALUE, 2);
1471 operands[1] = (
VALUE)blockiseq;
1478 if (vm_ci_flag((
struct rb_callinfo *)ci) & VM_CALL_FORWARDING) {
1479 insn = new_insn_core(iseq, line_no, node_id, BIN(sendforward), 2, operands);
1482 insn = new_insn_core(iseq, line_no, node_id, BIN(send), 2, operands);
1495 VALUE ast_value = rb_ruby_ast_new(node);
1497 debugs(
"[new_child_iseq]> ---------------------------------------\n");
1498 int isolated_depth = ISEQ_COMPILE_DATA(iseq)->isolated_depth;
1499 ret_iseq = rb_iseq_new_with_opt(ast_value, name,
1500 rb_iseq_path(iseq), rb_iseq_realpath(iseq),
1502 isolated_depth ? isolated_depth + 1 : 0,
1503 type, ISEQ_COMPILE_DATA(iseq)->option,
1504 ISEQ_BODY(iseq)->variable.script_lines);
1505 debugs(
"[new_child_iseq]< ---------------------------------------\n");
1515 debugs(
"[new_child_iseq_with_callback]> ---------------------------------------\n");
1516 ret_iseq = rb_iseq_new_with_callback(ifunc, name,
1517 rb_iseq_path(iseq), rb_iseq_realpath(iseq),
1518 line_no, parent,
type, ISEQ_COMPILE_DATA(iseq)->option);
1519 debugs(
"[new_child_iseq_with_callback]< ---------------------------------------\n");
1527 ISEQ_COMPILE_DATA(iseq)->catch_except_p =
true;
1528 if (ISEQ_BODY(iseq)->parent_iseq != NULL) {
1529 if (ISEQ_COMPILE_DATA(ISEQ_BODY(iseq)->parent_iseq)) {
1530 set_catch_except_p((
rb_iseq_t *) ISEQ_BODY(iseq)->parent_iseq);
1553 while (pos < body->iseq_size) {
1554 insn = rb_vm_insn_decode(body->iseq_encoded[pos]);
1555 if (insn == BIN(
throw)) {
1556 set_catch_except_p(iseq);
1559 pos += insn_len(insn);
1565 for (i = 0; i < ct->size; i++) {
1567 UNALIGNED_MEMBER_PTR(ct, entries[i]);
1568 if (entry->type != CATCH_TYPE_BREAK
1569 && entry->type != CATCH_TYPE_NEXT
1570 && entry->type != CATCH_TYPE_REDO) {
1572 ISEQ_COMPILE_DATA(iseq)->catch_except_p =
true;
1579iseq_insert_nop_between_end_and_cont(
rb_iseq_t *iseq)
1581 VALUE catch_table_ary = ISEQ_COMPILE_DATA(iseq)->catch_table_ary;
1582 if (
NIL_P(catch_table_ary))
return;
1583 unsigned int i, tlen = (
unsigned int)
RARRAY_LEN(catch_table_ary);
1585 for (i = 0; i < tlen; i++) {
1591 enum rb_catch_type ct = (
enum rb_catch_type)(ptr[0] & 0xffff);
1593 if (ct != CATCH_TYPE_BREAK
1594 && ct != CATCH_TYPE_NEXT
1595 && ct != CATCH_TYPE_REDO) {
1597 for (e = end; e && (IS_LABEL(e) || IS_TRACE(e)); e = e->next) {
1599 INSN *nop = new_insn_core(iseq, 0, -1, BIN(nop), 0, 0);
1600 ELEM_INSERT_NEXT(end, &nop->link);
1613 if (
RTEST(ISEQ_COMPILE_DATA(iseq)->err_info))
1618 if (compile_debug > 5)
1619 dump_disasm_list(FIRST_ELEMENT(anchor));
1621 debugs(
"[compile step 3.1 (iseq_optimize)]\n");
1622 iseq_optimize(iseq, anchor);
1624 if (compile_debug > 5)
1625 dump_disasm_list(FIRST_ELEMENT(anchor));
1627 if (ISEQ_COMPILE_DATA(iseq)->option->instructions_unification) {
1628 debugs(
"[compile step 3.2 (iseq_insns_unification)]\n");
1629 iseq_insns_unification(iseq, anchor);
1630 if (compile_debug > 5)
1631 dump_disasm_list(FIRST_ELEMENT(anchor));
1634 debugs(
"[compile step 3.4 (iseq_insert_nop_between_end_and_cont)]\n");
1635 iseq_insert_nop_between_end_and_cont(iseq);
1636 if (compile_debug > 5)
1637 dump_disasm_list(FIRST_ELEMENT(anchor));
1645 if (
RTEST(ISEQ_COMPILE_DATA(iseq)->err_info))
1648 debugs(
"[compile step 4.1 (iseq_set_sequence)]\n");
1649 if (!iseq_set_sequence(iseq, anchor))
return COMPILE_NG;
1650 if (compile_debug > 5)
1651 dump_disasm_list(FIRST_ELEMENT(anchor));
1653 debugs(
"[compile step 4.2 (iseq_set_exception_table)]\n");
1654 if (!iseq_set_exception_table(iseq))
return COMPILE_NG;
1656 debugs(
"[compile step 4.3 (set_optargs_table)] \n");
1657 if (!iseq_set_optargs_table(iseq))
return COMPILE_NG;
1659 debugs(
"[compile step 5 (iseq_translate_threaded_code)] \n");
1660 if (!rb_iseq_translate_threaded_code(iseq))
return COMPILE_NG;
1662 debugs(
"[compile step 6 (update_catch_except_flags)] \n");
1664 update_catch_except_flags(iseq, ISEQ_BODY(iseq));
1666 debugs(
"[compile step 6.1 (remove unused catch tables)] \n");
1668 if (!ISEQ_COMPILE_DATA(iseq)->catch_except_p && ISEQ_BODY(iseq)->catch_table) {
1669 ruby_xfree_sized(ISEQ_BODY(iseq)->catch_table, iseq_catch_table_bytes(ISEQ_BODY(iseq)->catch_table->size));
1670 ISEQ_BODY(iseq)->catch_table = NULL;
1673#if VM_INSN_INFO_TABLE_IMPL == 2
1674 if (ISEQ_BODY(iseq)->insns_info.succ_index_table == NULL) {
1675 debugs(
"[compile step 7 (rb_iseq_insns_info_encode_positions)] \n");
1676 rb_iseq_insns_info_encode_positions(iseq);
1680 if (compile_debug > 1) {
1681 VALUE str = rb_iseq_disasm(iseq);
1684 verify_call_cache(iseq);
1685 debugs(
"[compile step: finish]\n");
1691iseq_set_exception_local_table(
rb_iseq_t *iseq)
1693 ISEQ_BODY(iseq)->local_table_size = numberof(rb_iseq_shared_exc_local_tbl);
1694 ISEQ_BODY(iseq)->local_table = rb_iseq_shared_exc_local_tbl;
1695 ISEQ_BODY(iseq)->lvar_states = NULL;
1703 while (iseq != ISEQ_BODY(iseq)->local_iseq) {
1705 iseq = ISEQ_BODY(iseq)->parent_iseq;
/* NOTE(review): elided fragment (original lines 1717-1725, 1727-1728 missing).
 * get_dyna_var_idx_at_raw: linear scan of one iseq's local table for `id`;
 * the visible loop returns-on-match pattern is cut off. The second fragment
 * (orig. 1726+) looks like the body of get_local_var_idx, which resolves `id`
 * against the owning local_iseq and raises a COMPILE_ERROR when the resulting
 * index is invalid — boundary between the two functions is lost here. */
1711get_dyna_var_idx_at_raw(
 const rb_iseq_t *iseq,
 ID id)
1715 for (i = 0; i < ISEQ_BODY(iseq)->local_table_size; i++) {
1716 if (ISEQ_BODY(iseq)->local_table[i] == id) {
/* fragment of a separate caller (get_local_var_idx?) — TODO confirm */
1726 int idx = get_dyna_var_idx_at_raw(ISEQ_BODY(iseq)->local_iseq,
 id);
1729 COMPILE_ERROR(iseq, ISEQ_LAST_LINE(iseq),
1730 "get_local_var_idx: %d", idx);
/* NOTE(review): elided fragment (orig. 1741-1742, 1744-1746, 1748-1751,
 * 1754-1756 missing — the search loop and its exit conditions are gone).
 * Visible intent: walk up parent_iseq links looking up `id` per level via
 * get_dyna_var_idx_at_raw; on failure report against the topmost iseq the
 * search started from; on success write the found iseq's local_table_size
 * into *ls (and presumably the level into *level — missing lines). */
1737get_dyna_var_idx(
 const rb_iseq_t *iseq,
 ID id,
 int *level,
 int *ls)
1739 int lv = 0, idx = -1;
1740 const rb_iseq_t *
 const topmost_iseq = iseq;
1743 idx = get_dyna_var_idx_at_raw(iseq,
 id);
1747 iseq = ISEQ_BODY(iseq)->parent_iseq;
/* reached when no enclosing scope defines `id` */
1752 COMPILE_ERROR(topmost_iseq, ISEQ_LAST_LINE(topmost_iseq),
1753 "get_dyna_var_idx: -1");
1757 *ls = ISEQ_BODY(iseq)->local_table_size;
/* NOTE(review): elided fragment. iseq_local_block_param_p: after ascending
 * `level` parents (loop header missing), tests whether (idx, level) names the
 * block parameter of the owning local_iseq — i.e. the slot at
 * local_table_size - block_start. The second fragment (orig. 1781+) is
 * iseq_block_param_id_p, which resolves `id` to (idx, level) first; its
 * out-parameter writes are among the missing lines. */
1762iseq_local_block_param_p(
 const rb_iseq_t *iseq,
 unsigned int idx,
 unsigned int level)
1766 iseq = ISEQ_BODY(iseq)->parent_iseq;
1769 body = ISEQ_BODY(iseq);
1770 if (body->local_iseq == iseq &&
1771 body->param.flags.has_block &&
1772 body->local_table_size - body->param.block_start == idx) {
/* second helper begins here (return type / braces lost in extraction) */
1781iseq_block_param_id_p(
 const rb_iseq_t *iseq,
 ID id,
 int *pidx,
 int *plevel)
1784 int idx = get_dyna_var_idx(iseq,
 id, &level, &ls);
1785 if (iseq_local_block_param_p(iseq, ls - idx, level)) {
/* NOTE(review): elided fragment of access_outer_variables. Visible intent:
 * when compiling inside an isolated Proc (isolated_depth set) any access at
 * or beyond that depth is a compile error, with a dedicated message for
 * `yield`. Otherwise, for each of the `level` enclosing iseqs, record `id`
 * in that iseq's outer_variables id-table (created lazily, initial size 8),
 * storing whether the access writes: an existing read-only entry is upgraded
 * to Qtrue on a write; a fresh entry stores RBOOL(write). Loop braces and
 * several statements are missing (orig. 1803-1804, 1806-1808, 1812-1813,
 * 1815-1816, 1820-1822, 1824-1825). */
1796access_outer_variables(
 const rb_iseq_t *iseq,
 int level,
 ID id,
 bool write)
1798 int isolated_depth = ISEQ_COMPILE_DATA(iseq)->isolated_depth;
1800 if (isolated_depth && level >= isolated_depth) {
1801 if (
 id == rb_intern(
 "yield")) {
1802 COMPILE_ERROR(iseq, ISEQ_LAST_LINE(iseq),
 "can not yield from isolated Proc");
1805 COMPILE_ERROR(iseq, ISEQ_LAST_LINE(iseq),
 "can not access variable '%s' from isolated Proc", rb_id2name(
 id));
/* record the access at every level between the use and the definition */
1809 for (
 int i=0; i<level; i++) {
1811 struct rb_id_table *ovs = ISEQ_BODY(iseq)->outer_variables;
1814 ovs = ISEQ_BODY(iseq)->outer_variables = rb_id_table_create(8);
1817 if (rb_id_table_lookup(ISEQ_BODY(iseq)->outer_variables,
 id, &val)) {
/* a later write upgrades a previously-recorded read-only access */
1818 if (write && !val) {
1819 rb_id_table_insert(ISEQ_BODY(iseq)->outer_variables,
 id,
 Qtrue);
1823 rb_id_table_insert(ISEQ_BODY(iseq)->outer_variables,
 id, RBOOL(write));
1826 iseq = ISEQ_BODY(iseq)->parent_iseq;
/* NOTE(review): two elided helpers. iseq_lvar_id: ascend `level` parents,
 * then map a dynamic-variable index back to its ID via
 * local_table[local_table_size - idx] (the return is among the missing
 * lines). update_lvar_state: same ascent, then advance the slot's
 * lvar_state machine: uninitialized -> initialized -> reassigned; a
 * `break` after each case is presumably lost in extraction — the visible
 * rb_bug("unreachable") suggests an exhaustive switch with a default arm. */
1831iseq_lvar_id(
 const rb_iseq_t *iseq,
 int idx,
 int level)
1833 for (
 int i=0; i<level; i++) {
1834 iseq = ISEQ_BODY(iseq)->parent_iseq;
1837 ID id = ISEQ_BODY(iseq)->local_table[ISEQ_BODY(iseq)->local_table_size - idx];
/* second helper: track first-assignment vs reassignment per local slot */
1843update_lvar_state(
 const rb_iseq_t *iseq,
 int level,
 int idx)
1845 for (
 int i=0; i<level; i++) {
1846 iseq = ISEQ_BODY(iseq)->parent_iseq;
1849 enum lvar_state *states = ISEQ_BODY(iseq)->lvar_states;
1850 int table_idx = ISEQ_BODY(iseq)->local_table_size - idx;
1851 switch (states[table_idx]) {
1852 case lvar_uninitialized:
1853 states[table_idx] = lvar_initialized;
1855 case lvar_initialized:
1856 states[table_idx] = lvar_reassigned;
1858 case lvar_reassigned:
1862 rb_bug(
 "unreachable");
/* NOTE(review): elided fragment. Marks every parameter slot as
 * lvar_initialized (parameters are always bound on entry), then re-marks the
 * optional-parameter slots (lead_num..lead_num+opt_num-1) as
 * lvar_uninitialized — presumably because optional defaults are assigned by
 * generated prologue code, making that assignment the "first" write. */
1867iseq_set_parameters_lvar_state(
 const rb_iseq_t *iseq)
1869 for (
 unsigned int i=0; i<ISEQ_BODY(iseq)->param.size; i++) {
1870 ISEQ_BODY(iseq)->lvar_states[i] = lvar_initialized;
1873 int lead_num = ISEQ_BODY(iseq)->param.lead_num;
1874 int opt_num = ISEQ_BODY(iseq)->param.opt_num;
1875 for (
 int i=0; i<opt_num; i++) {
1876 ISEQ_BODY(iseq)->lvar_states[lead_num + i] = lvar_uninitialized;
/* NOTE(review): bodies of two emitter helpers whose signatures were lost
 * (orig. ~1883 and ~1895 — presumably iseq_add_getlocal / iseq_add_setlocal;
 * TODO confirm against full source). Both pick the blockparam-specific
 * instruction when the slot is the local block parameter, else the plain
 * getlocal/setlocal, encoding the slot as idx + VM_ENV_DATA_SIZE - 1.
 * Cross-level accesses are recorded via access_outer_variables (Qfalse for
 * reads, Qtrue for writes); the setter also advances the lvar state. */
1885 if (iseq_local_block_param_p(iseq, idx, level)) {
1886 ADD_INSN2(seq, line_node, getblockparam,
 INT2FIX((idx) + VM_ENV_DATA_SIZE - 1),
 INT2FIX(level));
1889 ADD_INSN2(seq, line_node, getlocal,
 INT2FIX((idx) + VM_ENV_DATA_SIZE - 1),
 INT2FIX(level));
1891 if (level > 0) access_outer_variables(iseq, level, iseq_lvar_id(iseq, idx, level),
 Qfalse);
/* setter counterpart begins here */
1897 if (iseq_local_block_param_p(iseq, idx, level)) {
1898 ADD_INSN2(seq, line_node, setblockparam,
 INT2FIX((idx) + VM_ENV_DATA_SIZE - 1),
 INT2FIX(level));
1901 ADD_INSN2(seq, line_node, setlocal,
 INT2FIX((idx) + VM_ENV_DATA_SIZE - 1),
 INT2FIX(level));
1903 update_lvar_state(iseq, level, idx);
1904 if (level > 0) access_outer_variables(iseq, level, iseq_lvar_id(iseq, idx, level),
 Qtrue);
/* NOTE(review): body fragment of the parameter-size calculator (signature
 * lost, orig. ~1910). param.size is set from the highest-numbered parameter
 * group present, checked in declaration order from the end of the frame:
 * block > kwrest > kw > post > rest > opt; with none of those, size is just
 * the count of leading required parameters. */
1913 if (body->param.flags.has_opt ||
1914 body->param.flags.has_post ||
1915 body->param.flags.has_rest ||
1916 body->param.flags.has_block ||
1917 body->param.flags.has_kw ||
1918 body->param.flags.has_kwrest) {
1920 if (body->param.flags.has_block) {
1921 body->param.size = body->param.block_start + 1;
1923 else if (body->param.flags.has_kwrest) {
1924 body->param.size = body->param.keyword->rest_start + 1;
1926 else if (body->param.flags.has_kw) {
1927 body->param.size = body->param.keyword->bits_start + 1;
1929 else if (body->param.flags.has_post) {
1930 body->param.size = body->param.post_start + body->param.post_num;
1932 else if (body->param.flags.has_rest) {
1933 body->param.size = body->param.rest_start + 1;
1935 else if (body->param.flags.has_opt) {
1936 body->param.size = body->param.lead_num + body->param.opt_num;
1943 body->param.size = body->param.lead_num;
/* NOTE(review): heavily elided fragment of the keyword-parameter setup
 * (iseq_set_arguments_keywords, per the call at orig. 2153) plus, at the
 * end, a fragment of iseq_set_use_block. Visible intent: allocate the
 * rb_iseq_param_keyword struct, reserve the kw-bits slot, then walk
 * args->kw_args classifying each default value — NODE_SPECIAL_REQUIRED_KEYWORD
 * marks required keywords; simple literal nodes (sym/regx/line/int/float/
 * rational/imaginary/encoding) become stored default VALUEs; anything else is
 * compiled into `optargs` as popped code. The switch's case labels and breaks
 * are mostly missing. Afterwards: optional **kwrest slot (anonymous `**`
 * detected via idPow), required_num/table bookkeeping, and the
 * default_values array where `complex_mark` is a sentinel mapped back to
 * Qundef — TODO confirm sentinel semantics against full source. */
1953 struct rb_iseq_param_keyword *keyword;
1956 int kw = 0, rkw = 0, di = 0, i;
1958 body->param.flags.has_kw = TRUE;
1959 body->param.keyword = keyword =
 ZALLOC_N(
 struct rb_iseq_param_keyword, 1);
1963 node = node->nd_next;
1966 keyword->bits_start = arg_size++;
1968 node = args->kw_args;
/* classify each keyword argument's default-value node */
1970 const NODE *val_node = get_nd_value(node->nd_body);
1973 if (val_node == NODE_SPECIAL_REQUIRED_KEYWORD) {
1977 switch (nd_type(val_node)) {
1979 dv = rb_node_sym_string_val(val_node);
1982 dv = rb_node_regx_string_val(val_node);
1985 dv = rb_node_line_lineno_val(val_node);
1988 dv = rb_node_integer_literal_val(val_node);
1991 dv = rb_node_float_literal_val(val_node);
1994 dv = rb_node_rational_literal_val(val_node);
1996 case NODE_IMAGINARY:
1997 dv = rb_node_imaginary_literal_val(val_node);
2000 dv = rb_node_encoding_val(val_node);
/* non-literal default: emit code into the optargs prologue instead */
2012 NO_CHECK(COMPILE_POPPED(optargs,
 "kwarg", RNODE(node)));
2016 keyword->num = ++di;
2020 node = node->nd_next;
2025 if (RNODE_DVAR(args->kw_rest_arg)->nd_vid != 0) {
2026 ID kw_id = ISEQ_BODY(iseq)->local_table[arg_size];
2027 keyword->rest_start = arg_size++;
2028 body->param.flags.has_kwrest = TRUE;
2030 if (kw_id == idPow) body->param.flags.anon_kwrest = TRUE;
2032 keyword->required_num = rkw;
2033 keyword->table = &body->local_table[keyword->bits_start - keyword->num];
2038 for (i = 0; i <
 RARRAY_LEN(default_values); i++) {
2040 if (dv == complex_mark) dv =
 Qundef;
2045 keyword->default_values = dvs;
/* fragment of iseq_set_use_block: first use registers the iseq's label so
 * the VM can warn about unused block parameters — TODO confirm */
2054 if (!body->param.flags.use_block) {
2055 body->param.flags.use_block = 1;
2060 st_data_t key = (st_data_t)rb_intern_str(body->location.label);
2061 set_insert(&vm->unused_block_warning_table, key);
/* NOTE(review): elided fragment of iseq_set_arguments — the master routine
 * that fills body->param from a NODE_ARGS tree. Visible phases, in order:
 * lead (required) args; `...` forwarding optimization (skips opt handling
 * when there are no pre/opt args); optional args compiled into `optargs`
 * with one label per default plus a trailing label, collected into
 * opt_table; rest (`*`, anonymous detected by the ID being the literal '*'
 * char); post args (positioned after rest when present); keywords
 * (delegated to iseq_set_arguments_keywords) or bare **kwrest (anonymous
 * `**` interned lazily into a function-static — NOTE(review): a static
 * cache here is not obviously thread-safe, but matches upstream; do not
 * change without the full source); `**nil` / block / `...` flags; then
 * param.size via iseq_calc_param_size (immediately overwritten by arg_size
 * — the conditional that chose between them is missing); m/p-style
 * destructuring prologues; and the ambiguous_param0 flag for one-required-
 * arg blocks without a trailing comma. Many braces/else-arms are missing. */
2069 debugs(
 "iseq_set_arguments: %s\n", node_args ?
 "" :
 "0");
2073 const struct rb_args_info *
 const args = &RNODE_ARGS(node_args)->nd_ainfo;
2079 EXPECT_NODE(
 "iseq_set_arguments", node_args, NODE_ARGS, COMPILE_NG);
2081 body->param.lead_num = arg_size = (int)args->pre_args_num;
2082 if (body->param.lead_num > 0) body->param.flags.has_lead = TRUE;
2083 debugs(
 " - argc: %d\n", body->param.lead_num);
2085 rest_id = args->rest_arg;
2086 if (rest_id == NODE_SPECIAL_EXCESSIVE_COMMA) {
2090 block_id = args->block_arg;
/* `...` with nothing before it can forward argc/argv without unpacking */
2092 bool optimized_forward = (args->forwarding && args->pre_args_num == 0 && !args->opt_args);
2094 if (optimized_forward) {
2099 if (args->opt_args) {
/* one jump target per optional default, plus a final "all supplied" label */
2107 label = NEW_LABEL(nd_line(RNODE(node)));
2109 ADD_LABEL(optargs, label);
2110 NO_CHECK(COMPILE_POPPED(optargs,
 "optarg", node->nd_body));
2111 node = node->nd_next;
2116 label = NEW_LABEL(nd_line(node_args));
2118 ADD_LABEL(optargs, label);
2123 for (j = 0; j < i+1; j++) {
2128 body->param.flags.has_opt = TRUE;
2129 body->param.opt_num = i;
2130 body->param.opt_table = opt_table;
2135 body->param.rest_start = arg_size++;
2136 body->param.flags.has_rest = TRUE;
2137 if (rest_id ==
 '*') body->param.flags.anon_rest = TRUE;
2141 if (args->first_post_arg) {
2142 body->param.post_start = arg_size;
2143 body->param.post_num = args->post_args_num;
2144 body->param.flags.has_post = TRUE;
2145 arg_size += args->post_args_num;
2147 if (body->param.flags.has_rest) {
2148 body->param.post_start = body->param.rest_start + 1;
2152 if (args->kw_args) {
2153 arg_size = iseq_set_arguments_keywords(iseq, optargs, args, arg_size);
2155 else if (args->kw_rest_arg && !optimized_forward) {
2156 ID kw_id = ISEQ_BODY(iseq)->local_table[arg_size];
2157 struct rb_iseq_param_keyword *keyword =
 ZALLOC_N(
 struct rb_iseq_param_keyword, 1);
2158 keyword->rest_start = arg_size++;
2159 body->param.keyword = keyword;
2160 body->param.flags.has_kwrest = TRUE;
2162 static ID anon_kwrest = 0;
2163 if (!anon_kwrest) anon_kwrest = rb_intern(
 "**");
2164 if (kw_id == anon_kwrest) body->param.flags.anon_kwrest = TRUE;
2166 else if (args->no_kwarg) {
2167 body->param.flags.accepts_no_kwarg = TRUE;
2170 if (args->no_blockarg) {
2171 body->param.flags.accepts_no_block = TRUE;
2173 else if (block_id) {
2174 body->param.block_start = arg_size++;
2175 body->param.flags.has_block = TRUE;
2176 iseq_set_use_block(iseq);
2180 if (optimized_forward) {
2181 body->param.flags.use_block = 1;
2182 body->param.flags.forwardable = TRUE;
2186 iseq_calc_param_size(iseq);
2187 body->param.size = arg_size;
2189 if (args->pre_init) {
2190 NO_CHECK(COMPILE_POPPED(optargs,
 "init arguments (m)", args->pre_init));
2192 if (args->post_init) {
2193 NO_CHECK(COMPILE_POPPED(optargs,
 "init arguments (p)", args->post_init));
2196 if (body->type == ISEQ_TYPE_BLOCK) {
2197 if (body->param.flags.has_opt == FALSE &&
2198 body->param.flags.has_post == FALSE &&
2199 body->param.flags.has_rest == FALSE &&
2200 body->param.flags.has_kw == FALSE &&
2201 body->param.flags.has_kwrest == FALSE) {
/* {|a|} may auto-splat an array argument; flag for runtime handling */
2203 if (body->param.lead_num == 1 && last_comma == 0) {
2205 body->param.flags.ambiguous_param0 = TRUE;
/* NOTE(review): elided fragment of iseq_set_local_table. Copies the parser's
 * ID table (`tbl`, possibly NULL → size 0) into a freshly allocated array
 * owned by the iseq (allocation line missing), with an `offset` that the
 * forwarding-optimization branch presumably adjusts. Also allocates the
 * parallel lvar_states array, all slots starting lvar_uninitialized. */
2217 unsigned int size = tbl ? tbl->size : 0;
2218 unsigned int offset = 0;
2221 struct rb_args_info *args = &RNODE_ARGS(node_args)->nd_ainfo;
/* same `...`-forwarding predicate as iseq_set_arguments */
2226 if (args->forwarding && args->pre_args_num == 0 && !args->opt_args) {
2235 MEMCPY(ids, tbl->ids + offset,
 ID, size);
2236 ISEQ_BODY(iseq)->local_table = ids;
2238 enum lvar_state *states =
 ALLOC_N(
 enum lvar_state, size);
2240 for (
 unsigned int i=0; i<size; i++) {
2241 states[i] = lvar_uninitialized;
2244 ISEQ_BODY(iseq)->lvar_states = states;
2246 ISEQ_BODY(iseq)->local_table_size = size;
2248 debugs(
 "iseq_set_local_table: %u\n", ISEQ_BODY(iseq)->local_table_size);
/* NOTE(review): fragments of the case/when-dispatch hash (cdhash) comparator
 * and hasher. Comparator contract appears to be st-style: 0 for equal,
 * non-zero otherwise — rational/complex recurse componentwise, regexps use
 * rb_reg_equal. Missing lines include the equal-objects fast path and the
 * type-tag cases. Hasher: immediates hash to their own bits; rationals and
 * complex use their dedicated hash functions. */
2260 else if ((tlit = OBJ_BUILTIN_TYPE(lit)) == -1) {
2263 else if ((tval = OBJ_BUILTIN_TYPE(val)) == -1) {
2266 else if (tlit != tval) {
2276 long x =
 FIX2LONG(rb_big_cmp(lit, val));
2284 return rb_float_cmp(lit, val);
2287 const struct RRational *rat1 = RRATIONAL(val);
2288 const struct RRational *rat2 = RRATIONAL(lit);
2289 return rb_iseq_cdhash_cmp(rat1->num, rat2->num) || rb_iseq_cdhash_cmp(rat1->den, rat2->den);
2292 const struct RComplex *comp1 = RCOMPLEX(val);
2293 const struct RComplex *comp2 = RCOMPLEX(lit);
2294 return rb_iseq_cdhash_cmp(comp1->real, comp2->real) || rb_iseq_cdhash_cmp(comp1->imag, comp2->imag);
2297 return rb_reg_equal(val, lit) ? 0 : -1;
/* companion hash function for the cdhash st table */
2305rb_iseq_cdhash_hash(
 VALUE a)
2307 switch (OBJ_BUILTIN_TYPE(a)) {
2310 return (st_index_t)a;
2318 return rb_rational_hash(a);
2320 return rb_complex_hash(a);
/* NOTE(review): assorted fragments. Orig. 2330 is a slot of the cdhash
 * st_hash_type table (pairing rb_iseq_cdhash_hash with the comparator
 * above). Orig. 2344 converts a case/when label object to a relative jump
 * offset when finalizing a cdhash. Orig. 2352 / 2359-2370 are the inline-
 * cache index allocators: instance-variable caches get a fresh ivc index;
 * class-variable caches are deduplicated per-ID through a lazily created
 * ivar_cache_table. The BADINSN macros free the partially built sequence
 * and dump it with a cursor before reporting a compile error. */
2330 rb_iseq_cdhash_hash,
2344 rb_hash_aset(data->hash, key,
 INT2FIX(lobj->position - (data->pos+data->len)));
/* ivc allocation: one new slot per use site */
2352 return INT2FIX(ISEQ_BODY(iseq)->ivc_size++);
/* cvar cache: reuse an existing slot for the same ID when possible */
2359 struct rb_id_table *tbl = ISEQ_COMPILE_DATA(iseq)->ivar_cache_table;
2361 if (rb_id_table_lookup(tbl,
 id,&val)) {
2366 tbl = rb_id_table_create(1);
2367 ISEQ_COMPILE_DATA(iseq)->ivar_cache_table = tbl;
2369 val =
 INT2FIX(ISEQ_BODY(iseq)->icvarc_size++);
2370 rb_id_table_insert(tbl,
 id,val);
2374#define BADINSN_DUMP(anchor, list, dest) \
2375 dump_disasm_list_with_cursor(FIRST_ELEMENT(anchor), list, dest)
2377#define BADINSN_ERROR \
2378 (SIZED_FREE_N(generated_iseq, generated_iseq_size), \
2379 SIZED_FREE_N(insns_info, insns_info_size), \
2380 BADINSN_DUMP(anchor, list, NULL), \
/* NOTE(review): elided fragment of fix_sp_depth (named by the call at orig.
 * 2572). Two passes over the anchor list: the first visits labels (body
 * missing — presumably resets recorded sp); the second simulates stack
 * depth per element. For instructions: underflow (sp < 0) is a hard
 * compile error; stack_max tracks the high-water mark; operand counts are
 * validated against insn_len, and TS_OFFSET operands must reference known
 * labels whose recorded sp either gets set (-1 = unset) or, if it
 * disagrees with the computed sp, is logged and ignored. Labels get the
 * same set-or-warn sp treatment. ADJUST elements may only lower sp.
 * Returns stack_max on success, negative on error (return lines missing). */
2386 int stack_max = 0, sp = 0, line = 0;
2389 for (list = FIRST_ELEMENT(anchor); list; list = list->next) {
2390 if (IS_LABEL(list)) {
/* main verification pass */
2396 for (list = FIRST_ELEMENT(anchor); list; list = list->next) {
2397 switch (list->type) {
2398 case ISEQ_ELEMENT_INSN:
2406 sp = calc_sp_depth(sp, iobj);
2408 BADINSN_DUMP(anchor, list, NULL);
2409 COMPILE_ERROR(iseq, iobj->insn_info.line_no,
2410 "argument stack underflow (%d)", sp);
2413 if (sp > stack_max) {
2417 line = iobj->insn_info.line_no;
2419 operands = iobj->operands;
2420 insn = iobj->insn_id;
2421 types = insn_op_types(insn);
2422 len = insn_len(insn);
/* operand count must match the instruction's declared arity */
2425 if (iobj->operand_size !=
 len - 1) {
2427 BADINSN_DUMP(anchor, list, NULL);
2428 COMPILE_ERROR(iseq, iobj->insn_info.line_no,
2429 "operand size miss! (%d for %d)",
2430 iobj->operand_size,
 len - 1);
2434 for (j = 0; types[j]; j++) {
2435 if (types[j] == TS_OFFSET) {
2439 BADINSN_DUMP(anchor, list, NULL);
2440 COMPILE_ERROR(iseq, iobj->insn_info.line_no,
2441 "unknown label: "LABEL_FORMAT, lobj->label_no);
2444 if (lobj->sp == -1) {
2447 else if (lobj->sp != sp) {
2448 debugs(
 "%s:%d: sp inconsistency found but ignored (" LABEL_FORMAT
 " sp: %d, calculated sp: %d)\n",
2449 RSTRING_PTR(rb_iseq_path(iseq)), line,
2450 lobj->label_no, lobj->sp, sp);
2456 case ISEQ_ELEMENT_LABEL:
2459 if (lobj->sp == -1) {
2463 if (lobj->sp != sp) {
2464 debugs(
 "%s:%d: sp inconsistency found but ignored (" LABEL_FORMAT
 " sp: %d, calculated sp: %d)\n",
2465 RSTRING_PTR(rb_iseq_path(iseq)), line,
2466 lobj->label_no, lobj->sp, sp);
2472 case ISEQ_ELEMENT_TRACE:
2477 case ISEQ_ELEMENT_ADJUST:
2482 sp = adjust->label ? adjust->label->sp : 0;
2483 if (adjust->line_no != -1 && orig_sp - sp < 0) {
2484 BADINSN_DUMP(anchor, list, NULL);
2485 COMPILE_ERROR(iseq, adjust->line_no,
2486 "iseq_set_sequence: adjust bug %d < %d",
2493 BADINSN_DUMP(anchor, list, NULL);
2494 COMPILE_ERROR(iseq, line,
 "unknown list type: %d", list->type);
/* NOTE(review): fragments of the insns_info recorders and the ID-list
 * converters. add_insn_info appends a line/node/events record only when it
 * differs from the previous entry (run-length dedup of debug info; node_id
 * comparison is compiled in only under USE_ISEQ_NODE_ID — the matching
 * #endif lines are missing from the extraction). add_adjust_info always
 * records, with node_id -1 and no events. array_to_idlist / idlist_to_array
 * convert between a Ruby array of symbols and a NULL?-terminated ID vector
 * for constant-path inline caches — TODO confirm termination convention. */
2503 int insns_info_index,
 int code_index,
 const INSN *iobj)
2505 if (insns_info_index == 0 ||
2506 insns_info[insns_info_index-1].line_no != iobj->insn_info.line_no ||
2507#ifdef USE_ISEQ_NODE_ID
2508 insns_info[insns_info_index-1].node_id != iobj->insn_info.node_id ||
2510 insns_info[insns_info_index-1].events != iobj->insn_info.events) {
2511 insns_info[insns_info_index].line_no = iobj->insn_info.line_no;
2512#ifdef USE_ISEQ_NODE_ID
2513 insns_info[insns_info_index].node_id = iobj->insn_info.node_id;
2515 insns_info[insns_info_index].events = iobj->insn_info.events;
2516 positions[insns_info_index] = code_index;
/* adjust variant: synthetic entry for an adjuststack/pop site */
2524 int insns_info_index,
 int code_index,
 const ADJUST *adjust)
2526 insns_info[insns_info_index].line_no = adjust->line_no;
2527 insns_info[insns_info_index].node_id = -1;
2528 insns_info[insns_info_index].events = 0;
2529 positions[insns_info_index] = code_index;
2534array_to_idlist(
 VALUE arr)
2539 for (
 long i = 0; i < size; i++) {
2548idlist_to_array(
 const ID *ids)
/* NOTE(review): heavily elided fragment of iseq_set_sequence — the linked
 * anchor list of INSN/LABEL/TRACE/ADJUST elements is serialized into the
 * final VALUE-encoded bytecode array. Phase 1 verifies stack depth
 * (fix_sp_depth), then measures code size, counts insns, fixes label
 * positions, and propagates pending trace events onto the next insn while
 * wiring line/branch coverage bookkeeping. Phase 2 allocates the output
 * buffers and the GC mark bitmap (inline single word when one word
 * suffices), then emits each instruction and encodes its operands by type:
 * TS_OFFSET → relative jump distance; CDHASH → label positions baked in and
 * the hash frozen/hidden; TS_NUM/ID/VALUE written directly (VALUEs also
 * recorded in the mark bitmap); constant-path and instance/class-variable
 * inline caches materialized from per-iseq slot pools with overflow checks;
 * call sites get rb_call_data with an empty cc. ADJUST elements become
 * adjuststack/pop/nothing depending on the depth delta, with a negative
 * delta treated as a compiler bug (all temp buffers freed before erroring).
 * Finally the encoded sequence, sizes, stack_max, mark bits and (shrunken)
 * insns_info/positions arrays are committed to the body. Many case labels,
 * breaks, closing braces and the TS_* dispatch labels are missing — consult
 * the full source before changing anything here. */
2565 unsigned int *positions;
2567 VALUE *generated_iseq;
2571 int insn_num, code_index, insns_info_index, sp = 0;
2572 int stack_max = fix_sp_depth(iseq, anchor);
2574 if (stack_max < 0)
 return COMPILE_NG;
/* phase 1: measure, place labels, gather events/coverage */
2577 insn_num = code_index = 0;
2578 for (list = FIRST_ELEMENT(anchor); list; list = list->next) {
2579 switch (list->type) {
2580 case ISEQ_ELEMENT_INSN:
2584 sp = calc_sp_depth(sp, iobj);
2586 events = iobj->insn_info.events |= events;
2587 if (ISEQ_COVERAGE(iseq)) {
2588 if (ISEQ_LINE_COVERAGE(iseq) && (events & RUBY_EVENT_COVERAGE_LINE) &&
2589 !(rb_get_coverage_mode() & COVERAGE_TARGET_ONESHOT_LINES)) {
2590 int line = iobj->insn_info.line_no - 1;
2591 if (line >= 0 && line <
 RARRAY_LEN(ISEQ_LINE_COVERAGE(iseq))) {
2595 if (ISEQ_BRANCH_COVERAGE(iseq) && (events & RUBY_EVENT_COVERAGE_BRANCH)) {
2596 while (
 RARRAY_LEN(ISEQ_PC2BRANCHINDEX(iseq)) <= code_index) {
2602 code_index += insn_data_length(iobj);
2607 case ISEQ_ELEMENT_LABEL:
2610 lobj->position = code_index;
2611 if (lobj->sp != sp) {
2612 debugs(
 "%s: sp inconsistency found but ignored (" LABEL_FORMAT
 " sp: %d, calculated sp: %d)\n",
2613 RSTRING_PTR(rb_iseq_path(iseq)),
2614 lobj->label_no, lobj->sp, sp);
2619 case ISEQ_ELEMENT_TRACE:
2622 events |= trace->event;
2623 if (trace->event & RUBY_EVENT_COVERAGE_BRANCH) data = trace->data;
2626 case ISEQ_ELEMENT_ADJUST:
2629 if (adjust->line_no != -1) {
2631 sp = adjust->label ? adjust->label->sp : 0;
2632 if (orig_sp - sp > 0) {
2633 if (orig_sp - sp > 1) code_index++;
/* phase 2: allocate output buffers sized by the measurement pass */
2645 const int generated_iseq_size = code_index;
2648 const int insns_info_size = insn_num;
2651 const int positions_size = insn_num;
2652 positions =
 ALLOC_N(
 unsigned int, insn_num);
2653 if (ISEQ_IS_SIZE(body)) {
2657 body->is_entries = NULL;
2660 if (body->ci_size) {
2664 body->call_data = NULL;
2666 ISEQ_COMPILE_DATA(iseq)->ci_index = 0;
/* GC mark bitmap: one bit per VALUE slot that holds a markable object */
2673 iseq_bits_t * mark_offset_bits;
2674 int code_size = code_index;
2676 bool needs_bitmap =
 false;
2678 const size_t mark_offset_bits_size = ISEQ_MBITS_BUFLEN(code_index);
2679 if (mark_offset_bits_size == 1) {
2680 mark_offset_bits = &ISEQ_COMPILE_DATA(iseq)->mark_bits.single;
2681 ISEQ_COMPILE_DATA(iseq)->is_single_mark_bit =
 true;
2684 mark_offset_bits =
 ZALLOC_N(iseq_bits_t, mark_offset_bits_size);
2685 ISEQ_COMPILE_DATA(iseq)->mark_bits.list = mark_offset_bits;
2686 ISEQ_COMPILE_DATA(iseq)->is_single_mark_bit =
 false;
2689 ISEQ_COMPILE_DATA(iseq)->iseq_encoded = (
 void *)generated_iseq;
2690 ISEQ_COMPILE_DATA(iseq)->iseq_size = code_index;
/* phase 2 emission loop */
2692 list = FIRST_ELEMENT(anchor);
2693 insns_info_index = code_index = sp = 0;
2696 switch (list->type) {
2697 case ISEQ_ELEMENT_INSN:
2705 sp = calc_sp_depth(sp, iobj);
2707 operands = iobj->operands;
2708 insn = iobj->insn_id;
2709 generated_iseq[code_index] = insn;
2710 types = insn_op_types(insn);
2711 len = insn_len(insn);
2713 for (j = 0; types[j]; j++) {
2714 char type = types[j];
/* TS_OFFSET: branch target encoded relative to the following insn */
2722 generated_iseq[code_index + 1 + j] = lobj->position - (code_index +
 len);
/* CDHASH: resolve labels to offsets, then freeze/hide the hash */
2727 VALUE map = operands[j];
2730 data.pos = code_index;
2734 freeze_hide_obj(map);
2736 generated_iseq[code_index + 1 + j] = map;
2737 ISEQ_MBITS_SET(mark_offset_bits, code_index + 1 + j);
2739 needs_bitmap =
 true;
2744 generated_iseq[code_index + 1 + j] =
 FIX2INT(operands[j]);
2749 VALUE v = operands[j];
2750 generated_iseq[code_index + 1 + j] = v;
2754 ISEQ_MBITS_SET(mark_offset_bits, code_index + 1 + j);
2755 needs_bitmap =
 true;
/* constant-path inline cache (segments = ID list of the path) */
2762 unsigned int ic_index = ISEQ_COMPILE_DATA(iseq)->ic_index++;
2763 IC ic = &ISEQ_IS_ENTRY_START(body,
 type)[ic_index].ic_cache;
2764 if (UNLIKELY(ic_index >= body->ic_size)) {
2765 BADINSN_DUMP(anchor, &iobj->link, 0);
2766 COMPILE_ERROR(iseq, iobj->insn_info.line_no,
2767 "iseq_set_sequence: ic_index overflow: index: %d, size: %d",
2768 ic_index, ISEQ_IS_SIZE(body));
2771 ic->
segments = array_to_idlist(operands[j]);
2773 generated_iseq[code_index + 1 + j] = (
 VALUE)ic;
/* instance-variable cache: setters pre-seed the name, getters start empty */
2778 unsigned int ic_index =
 FIX2UINT(operands[j]);
2780 IVC cache = ((
 IVC)&body->is_entries[ic_index]);
2782 if (insn == BIN(setinstancevariable)) {
2783 cache->iv_set_name =
 SYM2ID(operands[j - 1]);
2784 cache->value = IVAR_CACHE_INIT;
2787 cache->iv_set_name = 0;
2788 cache->value = rb_getivar_cache_pack(ROOT_SHAPE_ID, ATTR_INDEX_NOT_SET);
2794 unsigned int ic_index =
 FIX2UINT(operands[j]);
2795 IC ic = &ISEQ_IS_ENTRY_START(body,
 type)[ic_index].ic_cache;
2796 if (UNLIKELY(ic_index >= ISEQ_IS_SIZE(body))) {
2797 BADINSN_DUMP(anchor, &iobj->link, 0);
2798 COMPILE_ERROR(iseq, iobj->insn_info.line_no,
2799 "iseq_set_sequence: ic_index overflow: index: %d, size: %d",
2800 ic_index, ISEQ_IS_SIZE(body));
2802 generated_iseq[code_index + 1 + j] = (
 VALUE)ic;
/* call site: allocate rb_call_data, cc starts as the shared empty cc */
2809 RUBY_ASSERT(ISEQ_COMPILE_DATA(iseq)->ci_index <= body->ci_size);
2810 struct rb_call_data *cd = &body->call_data[ISEQ_COMPILE_DATA(iseq)->ci_index++];
2812 cd->cc = vm_cc_empty();
2813 generated_iseq[code_index + 1 + j] = (
 VALUE)cd;
2817 generated_iseq[code_index + 1 + j] =
 SYM2ID(operands[j]);
2820 generated_iseq[code_index + 1 + j] = operands[j];
2823 generated_iseq[code_index + 1 + j] = operands[j];
2826 BADINSN_ERROR(iseq, iobj->insn_info.line_no,
2827 "unknown operand type: %c",
 type);
2831 if (add_insn_info(insns_info, positions, insns_info_index, code_index, iobj)) insns_info_index++;
2835 case ISEQ_ELEMENT_LABEL:
2838 if (lobj->sp != sp) {
2839 debugs(
 "%s: sp inconsistency found but ignored (" LABEL_FORMAT
 " sp: %d, calculated sp: %d)\n",
2840 RSTRING_PTR(rb_iseq_path(iseq)),
2841 lobj->label_no, lobj->sp, sp);
2846 case ISEQ_ELEMENT_ADJUST:
2851 if (adjust->label) {
2852 sp = adjust->label->sp;
2858 if (adjust->line_no != -1) {
2859 const int diff = orig_sp - sp;
2861 if (insns_info_index == 0) {
2862 COMPILE_ERROR(iseq, adjust->line_no,
2863 "iseq_set_sequence: adjust bug (ISEQ_ELEMENT_ADJUST must not be the first in iseq)");
2865 if (add_adjust_info(insns_info, positions, insns_info_index, code_index, adjust)) insns_info_index++;
2868 generated_iseq[code_index++] = BIN(adjuststack);
2869 generated_iseq[code_index++] = orig_sp - sp;
2871 else if (diff == 1) {
2872 generated_iseq[code_index++] = BIN(pop);
2874 else if (diff < 0) {
/* negative adjust means the compiler produced an inconsistent stack:
 * release every temp buffer, then report as a compiler bug */
2875 int label_no = adjust->label ? adjust->label->label_no : -1;
2876 SIZED_FREE_N(generated_iseq, generated_iseq_size);
2877 SIZED_FREE_N(insns_info, insns_info_size);
2878 SIZED_FREE_N(positions, positions_size);
2879 if (ISEQ_MBITS_BUFLEN(code_size) > 1) {
2880 SIZED_FREE_N(mark_offset_bits, ISEQ_MBITS_BUFLEN(code_index));
2882 debug_list(anchor, list);
2883 COMPILE_ERROR(iseq, adjust->line_no,
2884 "iseq_set_sequence: adjust bug to %d %d < %d",
2885 label_no, orig_sp, sp);
/* commit the finished sequence and metadata to the iseq body */
2898 body->iseq_encoded = (
 void *)generated_iseq;
2899 body->iseq_size = code_index;
2900 body->stack_max = stack_max;
2902 if (ISEQ_COMPILE_DATA(iseq)->is_single_mark_bit) {
2903 body->mark_bits.single = ISEQ_COMPILE_DATA(iseq)->mark_bits.single;
2907 body->mark_bits.list = mark_offset_bits;
2910 body->mark_bits.list = NULL;
2911 ISEQ_COMPILE_DATA(iseq)->mark_bits.list = NULL;
2912 SIZED_FREE_N(mark_offset_bits, mark_offset_bits_size);
2917 body->insns_info.body = insns_info;
2918 body->insns_info.positions = positions;
2921 body->insns_info.body = insns_info;
2922 SIZED_REALLOC_N(positions,
 unsigned int, insns_info_index, positions_size);
2923 body->insns_info.positions = positions;
2924 body->insns_info.size = insns_info_index;
/* NOTE(review): label accessors plus an elided fragment of
 * iseq_set_exception_table. The compile-data catch_table_ary (tuples of
 * type, start-label, end-label, cont-label, iseq, sp — exact layout partly
 * missing) is converted into the packed iseq_catch_table: labels become
 * encoded positions, type is unpacked from the low 16 bits of ptr[0], and
 * label pointers carry a tag bit that is stripped with `& ~1` — TODO confirm
 * the tag's meaning against the full source. rescue/break/next entries get
 * extra handling (body missing). The temp array is cleared at the end so GC
 * can reclaim it. */
2930label_get_position(
 LABEL *lobj)
2932 return lobj->position;
2936label_get_sp(
 LABEL *lobj)
2942iseq_set_exception_table(
 rb_iseq_t *iseq)
2944 const VALUE *tptr, *ptr;
2945 unsigned int tlen, i;
2948 ISEQ_BODY(iseq)->catch_table = NULL;
2950 VALUE catch_table_ary = ISEQ_COMPILE_DATA(iseq)->catch_table_ary;
2951 if (
 NIL_P(catch_table_ary))
 return COMPILE_OK;
2959 for (i = 0; i < table->size; i++) {
2962 entry = UNALIGNED_MEMBER_PTR(table, entries[i]);
2963 entry->type = (
 enum rb_catch_type)(ptr[0] & 0xffff);
2964 pos = label_get_position((
 LABEL *)(ptr[1] & ~1));
2966 entry->start = (
 unsigned int)pos;
2967 pos = label_get_position((
 LABEL *)(ptr[2] & ~1));
2969 entry->end = (
 unsigned int)pos;
2976 entry->cont = label_get_position(lobj);
2977 entry->sp = label_get_sp(lobj);
2980 if (entry->type == CATCH_TYPE_RESCUE ||
2981 entry->type == CATCH_TYPE_BREAK ||
2982 entry->type == CATCH_TYPE_NEXT) {
2991 ISEQ_BODY(iseq)->catch_table = table;
/* drop the temporary array so it can be GC'd */
2992 RB_OBJ_WRITE(iseq, &ISEQ_COMPILE_DATA(iseq)->catch_table_ary, 0);
/* NOTE(review): fragments of iseq_set_optargs_table (labels in opt_table
 * replaced by their final positions) and the peephole navigation helpers.
 * get_destination_insn: follow a jump operand's label forward, skipping
 * labels and accumulating TRACE events onto the found insn; get_next_insn /
 * get_prev_insn: nearest INSN-or-ADJUST neighbor; unref_destination: drop a
 * refcount on a jump target and unlink the label when it hits zero;
 * replace_destination: retarget dobj's jump to nobj's target unless they
 * already match, maintaining refcounts (increment lines missing);
 * find_destination: first TS_OFFSET operand's label, if any. */
3013 VALUE *opt_table = (
 VALUE *)ISEQ_BODY(iseq)->param.opt_table;
3015 if (ISEQ_BODY(iseq)->param.flags.has_opt) {
3016 for (i = 0; i < ISEQ_BODY(iseq)->param.opt_num + 1; i++) {
3017 opt_table[i] = label_get_position((
 LABEL *)opt_table[i]);
3024get_destination_insn(
 INSN *iobj)
3030 list = lobj->link.next;
3032 switch (list->type) {
3033 case ISEQ_ELEMENT_INSN:
3034 case ISEQ_ELEMENT_ADJUST:
3036 case ISEQ_ELEMENT_LABEL:
3039 case ISEQ_ELEMENT_TRACE:
3042 events |= trace->event;
/* trace events between label and insn must not be lost when skipping */
3050 if (list && IS_INSN(list)) {
3052 iobj->insn_info.events |= events;
3058get_next_insn(
 INSN *iobj)
3063 if (IS_INSN(list) || IS_ADJUST(list)) {
3072get_prev_insn(
 INSN *iobj)
3077 if (IS_INSN(list) || IS_ADJUST(list)) {
3086unref_destination(
 INSN *iobj,
 int pos)
3088 LABEL *lobj = (
 LABEL *)OPERAND_AT(iobj, pos);
3090 if (!lobj->refcnt) ELEM_REMOVE(&lobj->link);
3094replace_destination(
 INSN *dobj,
 INSN *nobj)
3096 VALUE n = OPERAND_AT(nobj, 0);
3099 if (dl == nl)
 return false;
3102 OPERAND_AT(dobj, 0) = n;
3103 if (!dl->refcnt) ELEM_REMOVE(&dl->link);
3108find_destination(
 INSN *i)
3110 int pos,
 len = insn_len(i->insn_id);
3111 for (pos = 0; pos <
 len; ++pos) {
3112 if (insn_op_types(i->insn_id)[pos] == TS_OFFSET) {
3113 return (
 LABEL *)OPERAND_AT(i, pos);
/* NOTE(review): fragments of remove_unreachable_chunk and two smaller
 * peephole helpers. First pass: count, per label, how many incoming
 * references originate inside the candidate dead region (stack-allocated
 * counts — NOTE(review): ALLOCA_N sized by label_no could be large; matches
 * upstream, don't change blindly). A label referenced from outside the
 * region (refcnt > local count), or marked unremovable, stops removal.
 * Second pass: unreference every jump target in the region before deleting
 * it. The orig-3182 switch looks like the putobject/pop style optimization
 * keyed on an operand value; orig-3196 is is_frozen_putstring, which
 * extracts the string operand from dupstring/dupchilledstring/putobject.
 * The last fragment (orig. 3214+) inspects the previous element's type —
 * presumably a has-label-before check used by the range optimization. */
3123 int *unref_counts = 0, nlabels = ISEQ_COMPILE_DATA(iseq)->label_no;
3126 unref_counts =
 ALLOCA_N(
 int, nlabels);
3127 MEMZERO(unref_counts,
 int, nlabels);
3132 if (IS_INSN_ID(i, leave)) {
3136 else if ((lab = find_destination((
 INSN *)i)) != 0) {
3137 unref_counts[lab->label_no]++;
3140 else if (IS_LABEL(i)) {
3142 if (lab->unremovable)
 return 0;
/* externally-referenced label: region is reachable, abort removal */
3143 if (lab->refcnt > unref_counts[lab->label_no]) {
3144 if (i == first)
 return 0;
3149 else if (IS_TRACE(i)) {
3152 else if (IS_ADJUST(i)) {
3156 }
 while ((i = i->next) != 0);
/* second pass: release jump-target references within the doomed span */
3161 VALUE insn = INSN_OF(i);
3162 int pos,
 len = insn_len(insn);
3163 for (pos = 0; pos <
 len; ++pos) {
3164 switch (insn_op_types(insn)[pos]) {
3166 unref_destination((
 INSN *)i, pos);
3175 }
 while ((i != end) && (i = i->next) != 0);
3182 switch (OPERAND_AT(iobj, 0)) {
3184 ELEM_REMOVE(&iobj->link);
3187 ELEM_REMOVE(&iobj->link);
3190 iobj->insn_id = BIN(adjuststack);
3196is_frozen_putstring(
 INSN *insn,
 VALUE *op)
3198 if (IS_INSN_ID(insn, dupstring) || IS_INSN_ID(insn, dupchilledstring)) {
3199 *op = OPERAND_AT(insn, 0);
3202 else if (IS_INSN_ID(insn, putobject)) {
3203 *op = OPERAND_AT(insn, 0);
/* previous-element inspection helper (name lost in extraction) */
3214 if (prev->type == ISEQ_ELEMENT_LABEL) {
3216 if (label->refcnt > 0) {
3220 else if (prev->type == ISEQ_ELEMENT_INSN) {
/* NOTE(review): elided fragment of optimize_checktype plus callinfo-flag
 * helper fragments. Pattern matched: a literal-producing insn (dupstring /
 * dupchilledstring / putobject) followed — possibly through a jump and an
 * optional dup — by checktype + branchif/branchunless. Since the literal's
 * type is known at compile time, the branch outcome is constant: a jump to
 * the taken destination is inserted after the literal (reusing the next
 * label or creating one), and when no dup intervened a pop replaces the
 * consumed test value. Returns FALSE when the shape doesn't match. The two
 * vm_ci_new fragments (orig. 3318/3329) rebuild a callinfo with flags
 * added/removed; vm_ci_simple tests VM_CALL_ARGS_SIMPLE. */
3253 INSN *niobj, *ciobj, *dup = 0;
3257 switch (INSN_OF(iobj)) {
3258 case BIN(dupstring):
3259 case BIN(dupchilledstring):
3265 case BIN(putobject):
3268 default:
 return FALSE;
3271 ciobj = (
 INSN *)get_next_insn(iobj);
3272 if (IS_INSN_ID(ciobj, jump)) {
3273 ciobj = (
 INSN *)get_next_insn((
 INSN*)OPERAND_AT(ciobj, 0));
3275 if (IS_INSN_ID(ciobj, dup)) {
3276 ciobj = (
 INSN *)get_next_insn(dup = ciobj);
3278 if (!ciobj || !IS_INSN_ID(ciobj, checktype))
 return FALSE;
3279 niobj = (
 INSN *)get_next_insn(ciobj);
/* branch direction decided statically from the literal's known type */
3284 switch (INSN_OF(niobj)) {
3286 if (OPERAND_AT(ciobj, 0) ==
 type) {
3287 dest = (
 LABEL *)OPERAND_AT(niobj, 0);
3290 case BIN(branchunless):
3291 if (OPERAND_AT(ciobj, 0) !=
 type) {
3292 dest = (
 LABEL *)OPERAND_AT(niobj, 0);
3298 line = ciobj->insn_info.line_no;
3299 node_id = ciobj->insn_info.node_id;
3301 if (niobj->link.next && IS_LABEL(niobj->link.next)) {
3302 dest = (
 LABEL *)niobj->link.next;
3305 dest = NEW_LABEL(line);
3306 ELEM_INSERT_NEXT(&niobj->link, &dest->link);
3309 INSERT_AFTER_INSN1(iobj, line, node_id, jump, dest);
3311 if (!dup) INSERT_AFTER_INSN(iobj, line, node_id, pop);
/* callinfo rebuild helpers (bodies mostly missing) */
3318 const struct rb_callinfo *nci = vm_ci_new(vm_ci_mid(ci),
3319 vm_ci_flag(ci) | add,
3329 const struct rb_callinfo *nci = vm_ci_new(vm_ci_mid(ci),
3337#define vm_ci_simple(ci) (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)
3345 optimize_checktype(iseq, iobj);
3347 if (IS_INSN_ID(iobj, jump)) {
3348 INSN *niobj, *diobj, *piobj;
3349 diobj = (
INSN *)get_destination_insn(iobj);
3350 niobj = (
INSN *)get_next_insn(iobj);
3352 if (diobj == niobj) {
3359 unref_destination(iobj, 0);
3360 ELEM_REMOVE(&iobj->link);
3363 else if (iobj != diobj && IS_INSN(&diobj->link) &&
3364 IS_INSN_ID(diobj, jump) &&
3365 OPERAND_AT(iobj, 0) != OPERAND_AT(diobj, 0) &&
3366 diobj->insn_info.events == 0) {
3377 if (replace_destination(iobj, diobj)) {
3378 remove_unreachable_chunk(iseq, iobj->link.next);
3382 else if (IS_INSN_ID(diobj, leave)) {
3395 unref_destination(iobj, 0);
3396 iobj->insn_id = BIN(leave);
3397 iobj->operand_size = 0;
3398 iobj->insn_info = diobj->insn_info;
3401 else if (IS_INSN(iobj->link.prev) &&
3402 (piobj = (
INSN *)iobj->link.prev) &&
3403 (IS_INSN_ID(piobj, branchif) ||
3404 IS_INSN_ID(piobj, branchunless))) {
3405 INSN *pdiobj = (
INSN *)get_destination_insn(piobj);
3406 if (niobj == pdiobj) {
3407 int refcnt = IS_LABEL(piobj->link.next) ?
3408 ((
LABEL *)piobj->link.next)->refcnt : 0;
3423 piobj->insn_id = (IS_INSN_ID(piobj, branchif))
3424 ? BIN(branchunless) : BIN(branchif);
3425 if (replace_destination(piobj, iobj) && refcnt <= 1) {
3426 ELEM_REMOVE(&iobj->link);
3433 else if (diobj == pdiobj) {
3447 INSN *popiobj = new_insn_core(iseq, iobj->insn_info.line_no, iobj->insn_info.node_id, BIN(pop), 0, 0);
3448 ELEM_REPLACE(&piobj->link, &popiobj->link);
3451 if (remove_unreachable_chunk(iseq, iobj->link.next)) {
 /* Constant-fold `putstring a; putstring b; newrange` into a single
  * `putobject` carrying a precomputed Range literal, but only when both
  * operands are frozen string literals and no label falls between them
  * (a label would mean the strings can be reached separately).
  * NOTE(review): fragmentary extraction — `lit_range` is built on lines
  * elided from this view. */
 3465 if (IS_INSN_ID(iobj, newrange)) {
 3466 INSN *
const range = iobj;
 3468 VALUE str_beg, str_end;
 3470 if ((end = (
INSN *)get_prev_insn(range)) != 0 &&
 3471 is_frozen_putstring(end, &str_end) &&
 3472 (beg = (
INSN *)get_prev_insn(end)) != 0 &&
 3473 is_frozen_putstring(beg, &str_beg) &&
 3474 !(insn_has_label_before(&beg->link) || insn_has_label_before(&end->link))) {
 /* operand 0 of newrange is the exclusive-end flag (Fixnum 0/1). */
 3475 int excl =
FIX2INT(OPERAND_AT(range, 0));
 /* drop the two putstrings and turn newrange into putobject(range). */
 3478 ELEM_REMOVE(&beg->link);
 3479 ELEM_REMOVE(&end->link);
 3480 range->insn_id = BIN(putobject);
 3481 OPERAND_AT(range, 0) = lit_range;
3486 if (IS_INSN_ID(iobj, leave)) {
3487 remove_unreachable_chunk(iseq, iobj->link.next);
3499 if (IS_INSN_ID(iobj, duparray)) {
3501 if (IS_INSN(next) && (IS_INSN_ID(next, concatarray) || IS_INSN_ID(next, concattoarray))) {
3502 iobj->insn_id = BIN(putobject);
3512 if (IS_INSN_ID(iobj, duparray)) {
3514 if (IS_INSN(next) && (IS_INSN_ID(next, send))) {
3518 if (vm_ci_simple(ci) && vm_ci_argc(ci) == 0 && blockiseq == NULL && vm_ci_mid(ci) == idFreeze) {
3519 VALUE ary = iobj->operands[0];
3522 insn_replace_with_operands(iseq, iobj, BIN(opt_ary_freeze), 2, ary, (
VALUE)ci);
3534 if (IS_INSN_ID(iobj, duphash)) {
3536 if (IS_INSN(next) && (IS_INSN_ID(next, send))) {
3540 if (vm_ci_simple(ci) && vm_ci_argc(ci) == 0 && blockiseq == NULL && vm_ci_mid(ci) == idFreeze) {
3541 VALUE hash = iobj->operands[0];
3543 RB_OBJ_SET_SHAREABLE(hash);
3545 insn_replace_with_operands(iseq, iobj, BIN(opt_hash_freeze), 2, hash, (
VALUE)ci);
3557 if (IS_INSN_ID(iobj, newarray) && iobj->operands[0] ==
INT2FIX(0)) {
3559 if (IS_INSN(next) && (IS_INSN_ID(next, send))) {
3563 if (vm_ci_simple(ci) && vm_ci_argc(ci) == 0 && blockiseq == NULL && vm_ci_mid(ci) == idFreeze) {
3564 insn_replace_with_operands(iseq, iobj, BIN(opt_ary_freeze), 2, rb_cArray_empty_frozen, (
VALUE)ci);
3576 if (IS_INSN_ID(iobj, newhash) && iobj->operands[0] ==
INT2FIX(0)) {
3578 if (IS_INSN(next) && (IS_INSN_ID(next, send))) {
3582 if (vm_ci_simple(ci) && vm_ci_argc(ci) == 0 && blockiseq == NULL && vm_ci_mid(ci) == idFreeze) {
3583 insn_replace_with_operands(iseq, iobj, BIN(opt_hash_freeze), 2, rb_cHash_empty_frozen, (
VALUE)ci);
3589 if (IS_INSN_ID(iobj, branchif) ||
3590 IS_INSN_ID(iobj, branchnil) ||
3591 IS_INSN_ID(iobj, branchunless)) {
3600 INSN *nobj = (
INSN *)get_destination_insn(iobj);
3622 int stop_optimization =
3623 ISEQ_COVERAGE(iseq) && ISEQ_LINE_COVERAGE(iseq) &&
3624 nobj->link.type == ISEQ_ELEMENT_INSN &&
3625 nobj->insn_info.events;
3626 if (!stop_optimization) {
3627 INSN *pobj = (
INSN *)iobj->link.prev;
3630 if (!IS_INSN(&pobj->link))
3632 else if (IS_INSN_ID(pobj, dup))
3637 if (IS_INSN(&nobj->link) && IS_INSN_ID(nobj, jump)) {
3638 if (!replace_destination(iobj, nobj))
break;
3640 else if (prev_dup && IS_INSN_ID(nobj, dup) &&
3641 !!(nobj = (
INSN *)nobj->link.next) &&
3643 nobj->insn_id == iobj->insn_id) {
3659 if (!replace_destination(iobj, nobj))
break;
3687 if (prev_dup && IS_INSN(pobj->link.prev)) {
3688 pobj = (
INSN *)pobj->link.prev;
3690 if (IS_INSN_ID(pobj, putobject)) {
3691 cond = (IS_INSN_ID(iobj, branchif) ?
3692 OPERAND_AT(pobj, 0) !=
Qfalse :
3693 IS_INSN_ID(iobj, branchunless) ?
3694 OPERAND_AT(pobj, 0) ==
Qfalse :
3697 else if (IS_INSN_ID(pobj, dupstring) ||
3698 IS_INSN_ID(pobj, duparray) ||
3699 IS_INSN_ID(pobj, newarray)) {
3700 cond = IS_INSN_ID(iobj, branchif);
3702 else if (IS_INSN_ID(pobj, putnil)) {
3703 cond = !IS_INSN_ID(iobj, branchif);
3706 if (prev_dup || !IS_INSN_ID(pobj, newarray)) {
3707 ELEM_REMOVE(iobj->link.prev);
3709 else if (!iseq_pop_newarray(iseq, pobj)) {
3710 pobj = new_insn_core(iseq, pobj->insn_info.line_no, pobj->insn_info.node_id, BIN(pop), 0, NULL);
3711 ELEM_INSERT_PREV(&iobj->link, &pobj->link);
3715 pobj = new_insn_core(iseq, pobj->insn_info.line_no, pobj->insn_info.node_id, BIN(putnil), 0, NULL);
3716 ELEM_INSERT_NEXT(&iobj->link, &pobj->link);
3718 iobj->insn_id = BIN(jump);
3722 unref_destination(iobj, 0);
3723 ELEM_REMOVE(&iobj->link);
3728 nobj = (
INSN *)get_destination_insn(nobj);
 /* Eliminate `pop` after instructions whose only effect is pushing a value:
  * push+pop pairs cancel, so both sides can be removed or rewritten.
  * NOTE(review): fragmentary extraction; `prev`/`piobj` are set up on lines
  * elided from this view. */
 3733 if (IS_INSN_ID(iobj, pop)) {
 3741 if (IS_INSN(prev)) {
 3742 enum ruby_vminsn_type previ = ((
INSN *)prev)->insn_id;
 /* side-effect-free pushes: the pop alone can simply be dropped
  * (the push is presumably removed elsewhere — elided here). */
 3743 if (previ == BIN(putobject) || previ == BIN(putnil) ||
 3744 previ == BIN(putself) || previ == BIN(dupstring) ||
 3745 previ == BIN(dupchilledstring) ||
 3746 previ == BIN(dup) ||
 3747 previ == BIN(getlocal) ||
 3748 previ == BIN(getblockparam) ||
 3749 previ == BIN(getblockparamproxy) ||
 3750 previ == BIN(getinstancevariable) ||
 3751 previ == BIN(duparray)) {
 3755 ELEM_REMOVE(&iobj->link);
 /* newarray(n); pop — iseq_pop_newarray rewrites the pair when possible. */
 3757 else if (previ == BIN(newarray) && iseq_pop_newarray(iseq, (
INSN*)prev)) {
 3758 ELEM_REMOVE(&iobj->link);
 /* concatarray; pop — keep splatarray's copy semantics but discard the
  * result: splatarray false + pop replaces the concatenation. */
 3760 else if (previ == BIN(concatarray)) {
 3762 INSERT_BEFORE_INSN1(piobj, piobj->insn_info.line_no, piobj->insn_info.node_id, splatarray,
Qfalse);
 3763 INSN_OF(piobj) = BIN(pop);
 /* concatstrings(1); pop — single-operand concat is a no-op copy;
  * rewrite using adjuststack (stack-depth fixup) instead. */
 3765 else if (previ == BIN(concatstrings)) {
 3766 if (OPERAND_AT(prev, 0) ==
INT2FIX(1)) {
 3770 ELEM_REMOVE(&iobj->link);
 3771 INSN_OF(prev) = BIN(adjuststack);
3777 if (IS_INSN_ID(iobj, newarray) ||
3778 IS_INSN_ID(iobj, duparray) ||
3779 IS_INSN_ID(iobj, concatarray) ||
3780 IS_INSN_ID(iobj, splatarray) ||
3790 if (IS_INSN(next) && IS_INSN_ID(next, splatarray)) {
3796 if (IS_INSN_ID(iobj, newarray)) {
3798 if (IS_INSN(next) && IS_INSN_ID(next, expandarray) &&
3799 OPERAND_AT(next, 1) ==
INT2FIX(0)) {
3801 op1 = OPERAND_AT(iobj, 0);
3802 op2 = OPERAND_AT(next, 0);
3813 INSN_OF(iobj) = BIN(swap);
3814 iobj->operand_size = 0;
3823 INSN_OF(iobj) = BIN(opt_reverse);
3828 INSN_OF(iobj) = BIN(opt_reverse);
3829 OPERAND_AT(iobj, 0) = OPERAND_AT(next, 0);
3839 for (; diff > 0; diff--) {
3840 INSERT_BEFORE_INSN(iobj, iobj->insn_info.line_no, iobj->insn_info.node_id, pop);
3851 for (; diff < 0; diff++) {
3852 INSERT_BEFORE_INSN(iobj, iobj->insn_info.line_no, iobj->insn_info.node_id, putnil);
3859 if (IS_INSN_ID(iobj, duparray)) {
3868 if (IS_INSN(next) && IS_INSN_ID(next, expandarray)) {
3869 INSN_OF(iobj) = BIN(putobject);
3873 if (IS_INSN_ID(iobj, anytostring)) {
3881 if (IS_INSN(next) && IS_INSN_ID(next, concatstrings) &&
3882 OPERAND_AT(next, 0) ==
INT2FIX(1)) {
3887 if (IS_INSN_ID(iobj, dupstring) || IS_INSN_ID(iobj, dupchilledstring) ||
3895 if (IS_NEXT_INSN_ID(&iobj->link, concatstrings) &&
3896 RSTRING_LEN(OPERAND_AT(iobj, 0)) == 0) {
3897 INSN *next = (
INSN *)iobj->link.next;
3898 if ((OPERAND_AT(next, 0) = FIXNUM_INC(OPERAND_AT(next, 0), -1)) ==
INT2FIX(1)) {
3899 ELEM_REMOVE(&next->link);
3901 ELEM_REMOVE(&iobj->link);
3903 if (IS_NEXT_INSN_ID(&iobj->link, toregexp)) {
3904 INSN *next = (
INSN *)iobj->link.next;
3905 if (OPERAND_AT(next, 1) ==
INT2FIX(1)) {
3906 VALUE src = OPERAND_AT(iobj, 0);
3907 int opt = (int)
FIX2LONG(OPERAND_AT(next, 0));
3908 VALUE path = rb_iseq_path(iseq);
3909 int line = iobj->insn_info.line_no;
3910 VALUE errinfo = rb_errinfo();
3911 VALUE re = rb_reg_compile(src, opt, RSTRING_PTR(path), line);
3913 VALUE message = rb_attr_get(rb_errinfo(), idMesg);
3914 rb_set_errinfo(errinfo);
3915 COMPILE_ERROR(iseq, line,
"%" PRIsVALUE, message);
3918 RB_OBJ_SET_SHAREABLE(re);
3921 ELEM_REMOVE(iobj->link.next);
3926 if (IS_INSN_ID(iobj, concatstrings)) {
3935 if (IS_INSN(next) && IS_INSN_ID(next, jump))
3936 next = get_destination_insn(jump = (
INSN *)next);
3937 if (IS_INSN(next) && IS_INSN_ID(next, concatstrings)) {
3938 int n =
FIX2INT(OPERAND_AT(iobj, 0)) +
FIX2INT(OPERAND_AT(next, 0)) - 1;
3939 OPERAND_AT(iobj, 0) =
INT2FIX(n);
3941 LABEL *label = ((
LABEL *)OPERAND_AT(jump, 0));
3942 if (!--label->refcnt) {
3943 ELEM_REMOVE(&label->link);
3946 label = NEW_LABEL(0);
3947 OPERAND_AT(jump, 0) = (
VALUE)label;
3950 ELEM_INSERT_NEXT(next, &label->link);
3951 CHECK(iseq_peephole_optimize(iseq, get_next_insn(jump), do_tailcallopt));
3959 if (do_tailcallopt &&
3960 (IS_INSN_ID(iobj, send) ||
3961 IS_INSN_ID(iobj, invokesuper))) {
3970 if (iobj->link.next) {
3973 if (!IS_INSN(next)) {
3977 switch (INSN_OF(next)) {
3986 next = get_destination_insn((
INSN *)next);
4000 if (IS_INSN_ID(piobj, send) ||
4001 IS_INSN_ID(piobj, invokesuper)) {
4002 if (OPERAND_AT(piobj, 1) == 0) {
4003 ci = ci_flag_set(iseq, ci, VM_CALL_TAILCALL);
4004 OPERAND_AT(piobj, 0) = (
VALUE)ci;
4009 ci = ci_flag_set(iseq, ci, VM_CALL_TAILCALL);
4010 OPERAND_AT(piobj, 0) = (
VALUE)ci;
4016 if (IS_INSN_ID(iobj, dup)) {
4017 if (IS_NEXT_INSN_ID(&iobj->link, setlocal)) {
4028 if (IS_NEXT_INSN_ID(set1, setlocal)) {
4030 if (OPERAND_AT(set1, 0) == OPERAND_AT(set2, 0) &&
4031 OPERAND_AT(set1, 1) == OPERAND_AT(set2, 1)) {
4033 ELEM_REMOVE(&iobj->link);
4046 else if (IS_NEXT_INSN_ID(set1, dup) &&
4047 IS_NEXT_INSN_ID(set1->next, setlocal)) {
4048 set2 = set1->next->next;
4049 if (OPERAND_AT(set1, 0) == OPERAND_AT(set2, 0) &&
4050 OPERAND_AT(set1, 1) == OPERAND_AT(set2, 1)) {
4051 ELEM_REMOVE(set1->next);
4065 if (IS_INSN_ID(iobj, getlocal)) {
4067 if (IS_NEXT_INSN_ID(niobj, dup)) {
4068 niobj = niobj->next;
4070 if (IS_NEXT_INSN_ID(niobj, setlocal)) {
4072 if (OPERAND_AT(iobj, 0) == OPERAND_AT(set1, 0) &&
4073 OPERAND_AT(iobj, 1) == OPERAND_AT(set1, 1)) {
4089 if (IS_INSN_ID(iobj, opt_invokebuiltin_delegate)) {
4090 if (IS_TRACE(iobj->link.next)) {
4091 if (IS_NEXT_INSN_ID(iobj->link.next, leave)) {
4092 iobj->insn_id = BIN(opt_invokebuiltin_delegate_leave);
4094 if (iobj == (
INSN *)list && bf->argc == 0 && (ISEQ_BODY(iseq)->builtin_attrs & BUILTIN_ATTR_LEAF)) {
4095 ISEQ_BODY(iseq)->builtin_attrs |= BUILTIN_ATTR_SINGLE_NOARG_LEAF;
4108 if (IS_INSN_ID(iobj, getblockparam)) {
4109 if (IS_NEXT_INSN_ID(&iobj->link, branchif) || IS_NEXT_INSN_ID(&iobj->link, branchunless)) {
4110 iobj->insn_id = BIN(getblockparamproxy);
4114 if (IS_INSN_ID(iobj, splatarray) && OPERAND_AT(iobj, 0) ==
false) {
4116 if (IS_NEXT_INSN_ID(niobj, duphash)) {
4117 niobj = niobj->next;
4119 unsigned int set_flags = 0, unset_flags = 0;
4132 if (IS_NEXT_INSN_ID(niobj, send)) {
4133 siobj = niobj->next;
4134 set_flags = VM_CALL_ARGS_SPLAT|VM_CALL_KW_SPLAT|VM_CALL_KW_SPLAT_MUT;
4135 unset_flags = VM_CALL_ARGS_BLOCKARG;
4150 else if ((IS_NEXT_INSN_ID(niobj, getlocal) || IS_NEXT_INSN_ID(niobj, getinstancevariable) ||
4151 IS_NEXT_INSN_ID(niobj, getblockparamproxy)) && (IS_NEXT_INSN_ID(niobj->next, send))) {
4152 siobj = niobj->next->next;
4153 set_flags = VM_CALL_ARGS_SPLAT|VM_CALL_KW_SPLAT|VM_CALL_KW_SPLAT_MUT|VM_CALL_ARGS_BLOCKARG;
4158 unsigned int flags = vm_ci_flag(ci);
4159 if ((flags & set_flags) == set_flags && !(flags & unset_flags)) {
4160 ((
INSN*)niobj)->insn_id = BIN(putobject);
4161 RB_OBJ_WRITE(iseq, &OPERAND_AT(niobj, 0), RB_OBJ_SET_SHAREABLE(rb_hash_freeze(rb_hash_resurrect(OPERAND_AT(niobj, 0)))));
4163 const struct rb_callinfo *nci = vm_ci_new(vm_ci_mid(ci),
4164 flags & ~VM_CALL_KW_SPLAT_MUT, vm_ci_argc(ci), vm_ci_kwarg(ci));
4166 OPERAND_AT(siobj, 0) = (
VALUE)nci;
 /* Rewrite a generic `send` INSN in place into the specialized instruction
  * `insn_id` (opt_plus, opt_aref, ...). NOTE(review): fragmentary
  * extraction — the `{`/`else` lines are elided from this view. */
 4176insn_set_specialized_instruction(
rb_iseq_t *iseq,
INSN *iobj,
int insn_id)
 /* opt_neq is special: it also needs a call-info for `==` (idEq) so the
  * VM can fall back to dispatching #== and negating the result. */
 4178 if (insn_id == BIN(opt_neq)) {
 4179 VALUE original_ci = iobj->operands[0];
 4180 VALUE new_ci = (
VALUE)new_callinfo(iseq, idEq, 1, 0, NULL, FALSE);
 4181 insn_replace_with_operands(iseq, iobj, insn_id, 2, new_ci, original_ci);
 /* generic case: swap the opcode and shrink the operand count to match
  * the new instruction's declared length. */
 4184 iobj->insn_id = insn_id;
 4185 iobj->operand_size = insn_len(insn_id) - 1;
4195 if (IS_INSN_ID(iobj, newarray) && iobj->link.next &&
4196 IS_INSN(iobj->link.next)) {
4200 INSN *niobj = (
INSN *)iobj->link.next;
4201 if (IS_INSN_ID(niobj, send)) {
4203 if (vm_ci_simple(ci) && vm_ci_argc(ci) == 0) {
4205 switch (vm_ci_mid(ci)) {
4207 method =
INT2FIX(VM_OPT_NEWARRAY_SEND_MAX);
4210 method =
INT2FIX(VM_OPT_NEWARRAY_SEND_MIN);
4213 method =
INT2FIX(VM_OPT_NEWARRAY_SEND_HASH);
4218 VALUE num = iobj->operands[0];
4219 insn_replace_with_operands(iseq, iobj, BIN(opt_newarray_send), 2, num, method);
4220 ELEM_REMOVE(&niobj->link);
4225 else if ((IS_INSN_ID(niobj, dupstring) || IS_INSN_ID(niobj, dupchilledstring) ||
4227 IS_NEXT_INSN_ID(&niobj->link, send)) {
4229 if (vm_ci_simple(ci) && vm_ci_argc(ci) == 1 && vm_ci_mid(ci) == idPack) {
4230 VALUE num = iobj->operands[0];
4231 insn_replace_with_operands(iseq, iobj, BIN(opt_newarray_send), 2, FIXNUM_INC(num, 1),
INT2FIX(VM_OPT_NEWARRAY_SEND_PACK));
4232 ELEM_REMOVE(&iobj->link);
4233 ELEM_REMOVE(niobj->link.next);
4234 ELEM_INSERT_NEXT(&niobj->link, &iobj->link);
4240 else if ((IS_INSN_ID(niobj, dupstring) || IS_INSN_ID(niobj, dupchilledstring) ||
4242 IS_NEXT_INSN_ID(&niobj->link, getlocal) &&
4243 (niobj->link.next && IS_NEXT_INSN_ID(niobj->link.next, send))) {
4246 if (vm_ci_mid(ci) == idPack && vm_ci_argc(ci) == 2 &&
4247 (kwarg && kwarg->keyword_len == 1 && kwarg->keywords[0] ==
rb_id2sym(idBuffer))) {
4248 VALUE num = iobj->operands[0];
4249 insn_replace_with_operands(iseq, iobj, BIN(opt_newarray_send), 2, FIXNUM_INC(num, 2),
INT2FIX(VM_OPT_NEWARRAY_SEND_PACK_BUFFER));
4251 ELEM_REMOVE((niobj->link.next)->next);
4253 ELEM_REMOVE(&iobj->link);
4255 ELEM_INSERT_NEXT(niobj->link.next, &iobj->link);
4263 if ((IS_INSN_ID(niobj, dupstring) || IS_INSN_ID(niobj, dupchilledstring) ||
4264 IS_INSN_ID(niobj, putobject) ||
4265 IS_INSN_ID(niobj, putself) ||
4266 IS_INSN_ID(niobj, getlocal) ||
4267 IS_INSN_ID(niobj, getinstancevariable)) &&
4268 IS_NEXT_INSN_ID(&niobj->link, send)) {
4275 sendobj = sendobj->next;
4276 ci = (
struct rb_callinfo *)OPERAND_AT(sendobj, 0);
4277 }
while (vm_ci_simple(ci) && vm_ci_argc(ci) == 0 && IS_NEXT_INSN_ID(sendobj, send));
4280 if (vm_ci_simple(ci) && vm_ci_argc(ci) == 1 && vm_ci_mid(ci) == idIncludeP) {
4281 VALUE num = iobj->operands[0];
4283 insn_replace_with_operands(iseq, sendins, BIN(opt_newarray_send), 2, FIXNUM_INC(num, 1),
INT2FIX(VM_OPT_NEWARRAY_SEND_INCLUDE_P));
4285 ELEM_REMOVE(&iobj->link);
4299 if (IS_INSN_ID(iobj, duparray) && iobj->link.next && IS_INSN(iobj->link.next)) {
4300 INSN *niobj = (
INSN *)iobj->link.next;
4301 if ((IS_INSN_ID(niobj, getlocal) ||
4302 IS_INSN_ID(niobj, getinstancevariable) ||
4303 IS_INSN_ID(niobj, putself)) &&
4304 IS_NEXT_INSN_ID(&niobj->link, send)) {
4311 sendobj = sendobj->next;
4312 ci = (
struct rb_callinfo *)OPERAND_AT(sendobj, 0);
4313 }
while (vm_ci_simple(ci) && vm_ci_argc(ci) == 0 && IS_NEXT_INSN_ID(sendobj, send));
4315 if (vm_ci_simple(ci) && vm_ci_argc(ci) == 1 && vm_ci_mid(ci) == idIncludeP) {
4317 VALUE ary = iobj->operands[0];
4321 insn_replace_with_operands(iseq, sendins, BIN(opt_duparray_send), 3, ary,
rb_id2sym(idIncludeP),
INT2FIX(1));
4324 ELEM_REMOVE(&iobj->link);
4331 if (IS_INSN_ID(iobj, send)) {
4335#define SP_INSN(opt) insn_set_specialized_instruction(iseq, iobj, BIN(opt_##opt))
4336 if (vm_ci_simple(ci)) {
4337 switch (vm_ci_argc(ci)) {
4339 switch (vm_ci_mid(ci)) {
4340 case idLength: SP_INSN(length);
return COMPILE_OK;
4341 case idSize: SP_INSN(size);
return COMPILE_OK;
4342 case idEmptyP: SP_INSN(empty_p);
return COMPILE_OK;
4343 case idNilP: SP_INSN(nil_p);
return COMPILE_OK;
4344 case idSucc: SP_INSN(succ);
return COMPILE_OK;
4345 case idNot: SP_INSN(not);
return COMPILE_OK;
4349 switch (vm_ci_mid(ci)) {
4350 case idPLUS: SP_INSN(plus);
return COMPILE_OK;
4351 case idMINUS: SP_INSN(minus);
return COMPILE_OK;
4352 case idMULT: SP_INSN(mult);
return COMPILE_OK;
4353 case idDIV: SP_INSN(div);
return COMPILE_OK;
4354 case idMOD: SP_INSN(mod);
return COMPILE_OK;
4355 case idEq: SP_INSN(eq);
return COMPILE_OK;
4356 case idNeq: SP_INSN(neq);
return COMPILE_OK;
4357 case idEqTilde:SP_INSN(regexpmatch2);
return COMPILE_OK;
4358 case idLT: SP_INSN(lt);
return COMPILE_OK;
4359 case idLE: SP_INSN(le);
return COMPILE_OK;
4360 case idGT: SP_INSN(gt);
return COMPILE_OK;
4361 case idGE: SP_INSN(ge);
return COMPILE_OK;
4362 case idLTLT: SP_INSN(ltlt);
return COMPILE_OK;
4363 case idAREF: SP_INSN(aref);
return COMPILE_OK;
4364 case idAnd: SP_INSN(and);
return COMPILE_OK;
4365 case idOr: SP_INSN(or);
return COMPILE_OK;
4369 switch (vm_ci_mid(ci)) {
4370 case idASET: SP_INSN(aset);
return COMPILE_OK;
4376 if ((vm_ci_flag(ci) & (VM_CALL_ARGS_BLOCKARG | VM_CALL_FORWARDING)) == 0 && blockiseq == NULL) {
4377 iobj->insn_id = BIN(opt_send_without_block);
4378 iobj->operand_size = insn_len(iobj->insn_id) - 1;
4389 switch (ISEQ_BODY(iseq)->
type) {
4391 case ISEQ_TYPE_EVAL:
4392 case ISEQ_TYPE_MAIN:
4394 case ISEQ_TYPE_RESCUE:
4395 case ISEQ_TYPE_ENSURE:
4407 const int do_peepholeopt = ISEQ_COMPILE_DATA(iseq)->option->peephole_optimization;
4408 const int do_tailcallopt = tailcallable_p(iseq) &&
4409 ISEQ_COMPILE_DATA(iseq)->option->tailcall_optimization;
4410 const int do_si = ISEQ_COMPILE_DATA(iseq)->option->specialized_instruction;
4411 const int do_ou = ISEQ_COMPILE_DATA(iseq)->option->operands_unification;
4412 const int do_without_ints = ISEQ_BODY(iseq)->builtin_attrs & BUILTIN_ATTR_WITHOUT_INTERRUPTS;
4413 int rescue_level = 0;
4414 int tailcallopt = do_tailcallopt;
4416 list = FIRST_ELEMENT(anchor);
4418 int do_block_optimization = 0;
4419 LABEL * block_loop_label = NULL;
4422 if (ISEQ_BODY(iseq)->
type == ISEQ_TYPE_BLOCK) {
4423 do_block_optimization = 1;
4428 if (IS_INSN(le) && IS_INSN_ID((
INSN *)le, nop) && IS_LABEL(le->next)) {
4429 block_loop_label = (
LABEL *)le->next;
4434 if (IS_INSN(list)) {
4435 if (do_peepholeopt) {
4436 iseq_peephole_optimize(iseq, list, tailcallopt);
4439 iseq_specialized_instruction(iseq, (
INSN *)list);
4442 insn_operands_unification((
INSN *)list);
4445 if (do_without_ints) {
4447 if (IS_INSN_ID(item, jump)) {
4448 item->insn_id = BIN(jump_without_ints);
4450 else if (IS_INSN_ID(item, branchif)) {
4451 item->insn_id = BIN(branchif_without_ints);
4453 else if (IS_INSN_ID(item, branchunless)) {
4454 item->insn_id = BIN(branchunless_without_ints);
4456 else if (IS_INSN_ID(item, branchnil)) {
4457 item->insn_id = BIN(branchnil_without_ints);
4461 if (do_block_optimization) {
4464 if (IS_INSN_ID(item,
throw)) {
4465 do_block_optimization = 0;
4470 const char *types = insn_op_types(item->insn_id);
4471 for (
int j = 0; types[j]; j++) {
4472 if (types[j] == TS_OFFSET) {
4477 LABEL * target = (
LABEL *)OPERAND_AT(item, j);
4478 if (target == block_loop_label) {
4479 do_block_optimization = 0;
4486 if (IS_LABEL(list)) {
4487 switch (((
LABEL *)list)->rescued) {
4488 case LABEL_RESCUE_BEG:
4490 tailcallopt = FALSE;
4492 case LABEL_RESCUE_END:
4493 if (!--rescue_level) tailcallopt = do_tailcallopt;
4500 if (do_block_optimization) {
4502 if (IS_INSN(le) && IS_INSN_ID((
INSN *)le, nop)) {
4509#if OPT_INSTRUCTIONS_UNIFICATION
4517 VALUE *operands = 0, *ptr = 0;
4521 for (i = 0; i < size; i++) {
4522 iobj = (
INSN *)list;
4523 argc += iobj->operand_size;
4528 ptr = operands = compile_data_alloc2_type(iseq,
VALUE, argc);
4533 for (i = 0; i < size; i++) {
4534 iobj = (
INSN *)list;
4535 MEMCPY(ptr, iobj->operands,
VALUE, iobj->operand_size);
4536 ptr += iobj->operand_size;
4540 return new_insn_core(iseq, iobj->insn_info.line_no, iobj->insn_info.node_id, insn_id, argc, operands);
4552#if OPT_INSTRUCTIONS_UNIFICATION
4558 list = FIRST_ELEMENT(anchor);
4560 if (IS_INSN(list)) {
4561 iobj = (
INSN *)list;
4563 if (unified_insns_data[
id] != 0) {
4564 const int *
const *entry = unified_insns_data[id];
4565 for (j = 1; j < (intptr_t)entry[0]; j++) {
4566 const int *unified = entry[j];
4568 for (k = 2; k < unified[1]; k++) {
4570 ((
INSN *)li)->insn_id != unified[k]) {
4577 new_unified_insn(iseq, unified[0], unified[1] - 1,
4582 niobj->link.next = li;
 /* Predicate: does every execution path through `node` yield a string?
  * Used (presumably by dstr compilation — elided callers) to decide when
  * an interpolation fragment needs no coercion. Recurses into both arms
  * of if/unless and and/or nodes; a missing arm means a nil result is
  * possible, so it answers FALSE. NOTE(review): fragmentary extraction —
  * the TRUE-returning case bodies are elided from this view. */
 4601all_string_result_p(
const NODE *node)
 4603 if (!node)
return FALSE;
 4604 switch (nd_type(node)) {
 /* literal string-producing nodes */
 4605 case NODE_STR:
case NODE_DSTR:
case NODE_FILE:
 /* conditionals: both branches must exist and be all-string */
 4607 case NODE_IF:
case NODE_UNLESS:
 4608 if (!RNODE_IF(node)->nd_body || !RNODE_IF(node)->nd_else)
return FALSE;
 4609 if (all_string_result_p(RNODE_IF(node)->nd_body))
 4610 return all_string_result_p(RNODE_IF(node)->nd_else);
 /* and/or: result may come from either operand, so check both */
 4612 case NODE_AND:
case NODE_OR:
 4613 if (!RNODE_AND(node)->nd_2nd)
 4614 return all_string_result_p(RNODE_AND(node)->nd_1st);
 4615 if (!all_string_result_p(RNODE_AND(node)->nd_1st))
 4617 return all_string_result_p(RNODE_AND(node)->nd_2nd);
4627 const NODE *lit_node;
4635 VALUE s = rb_str_new_mutable_parser_string(str);
4637 VALUE error = rb_reg_check_preprocess(s);
4638 if (!
NIL_P(error)) {
4639 COMPILE_ERROR(args->iseq, nd_line(node),
"%" PRIsVALUE, error);
4643 if (
NIL_P(args->lit)) {
4645 args->lit_node = node;
 /* Emit the literal string accumulated in args->lit (if any) as a single
  * `putobject` of an fstring, attributing it to the node where the literal
  * run started. NOTE(review): fragmentary extraction — the reset of
  * args->lit after flushing is elided from this view. */
 4654flush_dstr_fragment(
struct dstr_ctxt *args)
 4656 if (!
NIL_P(args->lit)) {
 4658 VALUE lit = args->lit;
 /* deduplicated frozen string so repeated literals share one object */
 4660 lit = rb_fstring(lit);
 4661 ADD_INSN1(args->ret, args->lit_node, putobject, lit);
 /* Walk a dstr (interpolated string) node list: adjacent literal STR
  * fragments are merged via append_dstr_fragment, nested DSTRs recurse,
  * and any other expression flushes the pending literal run and compiles
  * normally. NOTE(review): fragmentary extraction — the loop structure
  * and list advancement are elided from this view. */
 4668compile_dstr_fragments_0(
struct dstr_ctxt *args,
const NODE *
const node)
 4670 const struct RNode_LIST *list = RNODE_DSTR(node)->nd_next;
 /* the dstr's leading literal portion */
 4674 CHECK(append_dstr_fragment(args, node, str));
 4678 const NODE *
const head = list->nd_head;
 /* plain string fragment: fold into the accumulated literal */
 4679 if (nd_type_p(head, NODE_STR)) {
 4680 CHECK(append_dstr_fragment(args, node, RNODE_STR(head)->
string));
 /* nested interpolation: recurse so its fragments merge too */
 4682 else if (nd_type_p(head, NODE_DSTR)) {
 4683 CHECK(compile_dstr_fragments_0(args, head));
 /* dynamic expression: emit pending literal, then compile the expr */
 4686 flush_dstr_fragment(args);
 4688 CHECK(COMPILE(args->ret,
"each string", head));
4700 .iseq = iseq, .ret = ret,
4701 .lit =
Qnil, .lit_node = NULL,
4702 .cnt = 0, .dregx = dregx,
4704 CHECK(compile_dstr_fragments_0(&args, node));
4705 flush_dstr_fragment(&args);
4715 while (node && nd_type_p(node, NODE_BLOCK)) {
4716 CHECK(COMPILE_(ret,
"BLOCK body", RNODE_BLOCK(node)->nd_head,
4717 (RNODE_BLOCK(node)->nd_next ? 1 : popped)));
4718 node = RNODE_BLOCK(node)->nd_next;
4721 CHECK(COMPILE_(ret,
"BLOCK next", RNODE_BLOCK(node)->nd_next, popped));
4730 if (!RNODE_DSTR(node)->nd_next) {
4731 VALUE lit = rb_node_dstr_string_val(node);
4732 ADD_INSN1(ret, node, dupstring, lit);
4733 RB_OBJ_SET_SHAREABLE(lit);
4737 CHECK(compile_dstr_fragments(iseq, ret, node, &cnt, FALSE));
4738 ADD_INSN1(ret, node, concatstrings,
INT2FIX(cnt));
4747 int cflag = (int)RNODE_DREGX(node)->as.nd_cflag;
4749 if (!RNODE_DREGX(node)->nd_next) {
4751 VALUE src = rb_node_dregx_string_val(node);
4752 VALUE match = rb_reg_compile(src, cflag, NULL, 0);
4753 RB_OBJ_SET_SHAREABLE(match);
4754 ADD_INSN1(ret, node, putobject, match);
4760 CHECK(compile_dstr_fragments(iseq, ret, node, &cnt, TRUE));
4764 ADD_INSN(ret, node, pop);
4774 const int line = nd_line(node);
4775 LABEL *lend = NEW_LABEL(line);
4776 rb_num_t cnt = ISEQ_FLIP_CNT_INCREMENT(ISEQ_BODY(iseq)->local_iseq)
4777 + VM_SVAR_FLIPFLOP_START;
4780 ADD_INSN2(ret, node, getspecial, key,
INT2FIX(0));
4781 ADD_INSNL(ret, node, branchif, lend);
4784 CHECK(COMPILE(ret,
"flip2 beg", RNODE_FLIP2(node)->nd_beg));
4785 ADD_INSNL(ret, node, branchunless, else_label);
4786 ADD_INSN1(ret, node, putobject,
Qtrue);
4787 ADD_INSN1(ret, node, setspecial, key);
4789 ADD_INSNL(ret, node, jump, then_label);
4793 ADD_LABEL(ret, lend);
4794 CHECK(COMPILE(ret,
"flip2 end", RNODE_FLIP2(node)->nd_end));
4795 ADD_INSNL(ret, node, branchunless, then_label);
4796 ADD_INSN1(ret, node, putobject,
Qfalse);
4797 ADD_INSN1(ret, node, setspecial, key);
4798 ADD_INSNL(ret, node, jump, then_label);
4807#define COMPILE_SINGLE 2
4814 LABEL *label = NEW_LABEL(nd_line(cond));
4815 if (!then_label) then_label = label;
4816 else if (!else_label) else_label = label;
4818 CHECK(compile_branch_condition(iseq, seq, cond, then_label, else_label));
4820 if (LIST_INSN_SIZE_ONE(seq)) {
4821 INSN *insn = (
INSN *)ELEM_FIRST_INSN(FIRST_ELEMENT(seq));
4822 if (insn->insn_id == BIN(jump) && (
LABEL *)(insn->operands[0]) == label)
4825 if (!label->refcnt) {
4826 return COMPILE_SINGLE;
4828 ADD_LABEL(seq, label);
4838 DECL_ANCHOR(ignore);
4841 switch (nd_type(cond)) {
4843 CHECK(ok = compile_logical(iseq, ret, RNODE_AND(cond)->nd_1st, NULL, else_label));
4844 cond = RNODE_AND(cond)->nd_2nd;
4845 if (ok == COMPILE_SINGLE) {
4846 ADD_INSNL(ret, cond, jump, else_label);
4847 INIT_ANCHOR(ignore);
4849 then_label = NEW_LABEL(nd_line(cond));
4853 CHECK(ok = compile_logical(iseq, ret, RNODE_OR(cond)->nd_1st, then_label, NULL));
4854 cond = RNODE_OR(cond)->nd_2nd;
4855 if (ok == COMPILE_SINGLE) {
4856 ADD_INSNL(ret, cond, jump, then_label);
4857 INIT_ANCHOR(ignore);
4859 else_label = NEW_LABEL(nd_line(cond));
4869 case NODE_IMAGINARY:
4876 ADD_INSNL(ret, cond, jump, then_label);
4881 ADD_INSNL(ret, cond, jump, else_label);
4887 CHECK(COMPILE_POPPED(ret,
"branch condition", cond));
4888 ADD_INSNL(ret, cond, jump, then_label);
4891 CHECK(compile_flip_flop(iseq, ret, cond, TRUE, then_label, else_label));
4894 CHECK(compile_flip_flop(iseq, ret, cond, FALSE, then_label, else_label));
4897 CHECK(compile_defined_expr(iseq, ret, cond,
Qfalse, ret == ignore));
4901 DECL_ANCHOR(cond_seq);
4902 INIT_ANCHOR(cond_seq);
4904 CHECK(COMPILE(cond_seq,
"branch condition", cond));
4906 if (LIST_INSN_SIZE_ONE(cond_seq)) {
4907 INSN *insn = (
INSN *)ELEM_FIRST_INSN(FIRST_ELEMENT(cond_seq));
4908 if (insn->insn_id == BIN(putobject)) {
4909 if (
RTEST(insn->operands[0])) {
4910 ADD_INSNL(ret, cond, jump, then_label);
4915 ADD_INSNL(ret, cond, jump, else_label);
4920 ADD_SEQ(ret, cond_seq);
4925 ADD_INSNL(ret, cond, branchunless, else_label);
4926 ADD_INSNL(ret, cond, jump, then_label);
 /* True iff `node` is a HASH node usable as implicit keyword arguments:
  * a hash literal written without braces (`f(k: v)`), as opposed to a
  * braced hash (`f({k: v})`) which is a positional argument. */
 4933keyword_node_p(
const NODE *
const node)
 4935 return nd_type_p(node, NODE_HASH) && (RNODE_HASH(node)->nd_brace & HASH_BRACE) != HASH_BRACE;
4941 switch (nd_type(node)) {
4943 return rb_node_sym_string_val(node);
4945 UNKNOWN_NODE(
"get_symbol_value", node,
Qnil);
4952 NODE *node = node_hash->nd_head;
4953 VALUE hash = rb_hash_new();
4956 for (
int i = 0; node != NULL; i++, node = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next) {
4957 VALUE key = get_symbol_value(iseq, RNODE_LIST(node)->nd_head);
4958 VALUE idx = rb_hash_aref(hash, key);
4963 rb_hash_aset(hash, key,
INT2FIX(i));
4973 const NODE *
const root_node,
4981 if (RNODE_HASH(root_node)->nd_head && nd_type_p(RNODE_HASH(root_node)->nd_head, NODE_LIST)) {
4982 const NODE *node = RNODE_HASH(root_node)->nd_head;
4986 const NODE *key_node = RNODE_LIST(node)->nd_head;
4990 if (key_node && nd_type_p(key_node, NODE_SYM)) {
4995 *flag |= VM_CALL_KW_SPLAT;
4996 if (seen_nodes > 1 || RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next) {
5001 *flag |= VM_CALL_KW_SPLAT_MUT;
5006 node = RNODE_LIST(node)->nd_next;
5007 node = RNODE_LIST(node)->nd_next;
5011 node = RNODE_HASH(root_node)->nd_head;
5014 VALUE key_index = node_hash_unique_key_index(iseq, RNODE_HASH(root_node), &
len);
5017 VALUE *keywords = kw_arg->keywords;
5020 kw_arg->references = 0;
5021 kw_arg->keyword_len =
len;
5023 *kw_arg_ptr = kw_arg;
5025 for (i=0; node != NULL; i++, node = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next) {
5026 const NODE *key_node = RNODE_LIST(node)->nd_head;
5027 const NODE *val_node = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_head;
5030 keywords[j] = get_symbol_value(iseq, key_node);
5034 NO_CHECK(COMPILE_(ret,
"keyword values", val_node, popped));
5048 for (; node;
len++, node = RNODE_LIST(node)->nd_next) {
5050 EXPECT_NODE(
"compile_args", node, NODE_LIST, -1);
5053 if (RNODE_LIST(node)->nd_next == NULL && keyword_node_p(RNODE_LIST(node)->nd_head)) {
5054 *kwnode_ptr = RNODE_LIST(node)->nd_head;
5057 RUBY_ASSERT(!keyword_node_p(RNODE_LIST(node)->nd_head));
5058 NO_CHECK(COMPILE_(ret,
"array element", RNODE_LIST(node)->nd_head, FALSE));
 /* True iff this iseq is being compiled with frozen-string-literal
  * enabled (compile option > 0; 0/negative means off or default-off). */
 5066frozen_string_literal_p(
const rb_iseq_t *iseq)
 5068 return ISEQ_COMPILE_DATA(iseq)->option->frozen_string_literal > 0;
 /* Can `node` be materialized at compile time as a static literal value?
  * NOTE(review): fragmentary extraction — most case labels are elided;
  * the visible tail shows that a string node counts as static when used
  * as a hash key or when frozen-string-literal is enabled. */
 5072static_literal_node_p(
const NODE *node,
const rb_iseq_t *iseq,
bool hash_key)
 5074 switch (nd_type(node)) {
 5082 case NODE_IMAGINARY:
 5089 return hash_key || frozen_string_literal_p(iseq);
5098 switch (nd_type(node)) {
5101 VALUE lit = rb_node_integer_literal_val(node);
5107 VALUE lit = rb_node_float_literal_val(node);
5113 case NODE_IMAGINARY:
5122 return rb_node_sym_string_val(node);
5124 return RB_OBJ_SET_SHAREABLE(rb_node_regx_string_val(node));
5126 return rb_node_line_lineno_val(node);
5128 return rb_node_encoding_val(node);
5131 if (ISEQ_COMPILE_DATA(iseq)->option->debug_frozen_string_literal ||
RTEST(
ruby_debug)) {
5132 VALUE lit = get_string_value(node);
5133 VALUE str = rb_str_with_debug_created_info(lit, rb_iseq_path(iseq), (
int)nd_line(node));
5134 RB_OBJ_SET_SHAREABLE(str);
5138 return get_string_value(node);
5141 rb_bug(
"unexpected node: %s", ruby_node_name(nd_type(node)));
5148 const NODE *line_node = node;
5150 if (nd_type_p(node, NODE_ZLIST)) {
5152 ADD_INSN1(ret, line_node, newarray,
INT2FIX(0));
5157 EXPECT_NODE(
"compile_array", node, NODE_LIST, -1);
5160 for (; node; node = RNODE_LIST(node)->nd_next) {
5161 NO_CHECK(COMPILE_(ret,
"array element", RNODE_LIST(node)->nd_head, popped));
5203 const int max_stack_len = 0x100;
5204 const int min_tmp_ary_len = 0x40;
5208#define FLUSH_CHUNK \
5210 if (first_chunk) ADD_INSN1(ret, line_node, newarray, INT2FIX(stack_len)); \
5211 else ADD_INSN1(ret, line_node, pushtoarray, INT2FIX(stack_len)); \
5212 first_chunk = FALSE; \
5220 if (static_literal_node_p(RNODE_LIST(node)->nd_head, iseq,
false)) {
5222 const NODE *node_tmp = RNODE_LIST(node)->nd_next;
5223 for (; node_tmp && static_literal_node_p(RNODE_LIST(node_tmp)->nd_head, iseq,
false); node_tmp = RNODE_LIST(node_tmp)->nd_next)
5226 if ((first_chunk && stack_len == 0 && !node_tmp) || count >= min_tmp_ary_len) {
5231 for (; count; count--, node = RNODE_LIST(node)->nd_next)
5232 rb_ary_push(ary, static_literal_value(RNODE_LIST(node)->nd_head, iseq));
5233 RB_OBJ_SET_FROZEN_SHAREABLE(ary);
5238 ADD_INSN1(ret, line_node, duparray, ary);
5239 first_chunk = FALSE;
5242 ADD_INSN1(ret, line_node, putobject, ary);
5243 ADD_INSN(ret, line_node, concattoarray);
5245 RB_OBJ_SET_SHAREABLE(ary);
5251 for (; count; count--, node = RNODE_LIST(node)->nd_next) {
5253 EXPECT_NODE(
"compile_array", node, NODE_LIST, -1);
5256 if (!RNODE_LIST(node)->nd_next && keyword_node_p(RNODE_LIST(node)->nd_head)) {
5258 if (stack_len == 0 && first_chunk) {
5259 ADD_INSN1(ret, line_node, newarray,
INT2FIX(0));
5264 NO_CHECK(COMPILE_(ret,
"array element", RNODE_LIST(node)->nd_head, 0));
5265 ADD_INSN(ret, line_node, pushtoarraykwsplat);
5269 NO_CHECK(COMPILE_(ret,
"array element", RNODE_LIST(node)->nd_head, 0));
5274 if (stack_len >= max_stack_len) FLUSH_CHUNK;
 /* For a hash LIST entry (key at nd_head, value at nd_next->nd_head):
  * true iff both key and value are static literals, so the pair can be
  * folded into a precomputed hash. The key is checked with hash_key=true
  * (strings allowed as keys regardless of frozen-string-literal). */
 5284static_literal_node_pair_p(
const NODE *node,
const rb_iseq_t *iseq)
 5286 return RNODE_LIST(node)->nd_head && static_literal_node_p(RNODE_LIST(node)->nd_head, iseq,
true) && static_literal_node_p(RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_head, iseq,
false);
5292 const NODE *line_node = node;
5294 node = RNODE_HASH(node)->nd_head;
5296 if (!node || nd_type_p(node, NODE_ZLIST)) {
5298 ADD_INSN1(ret, line_node, newhash,
INT2FIX(0));
5303 EXPECT_NODE(
"compile_hash", node, NODE_LIST, -1);
5306 for (; node; node = RNODE_LIST(node)->nd_next) {
5307 NO_CHECK(COMPILE_(ret,
"hash element", RNODE_LIST(node)->nd_head, popped));
5330 const int max_stack_len = 0x100;
5331 const int min_tmp_hash_len = 0x800;
5333 int first_chunk = 1;
5334 DECL_ANCHOR(anchor);
5335 INIT_ANCHOR(anchor);
5338#define FLUSH_CHUNK() \
5340 if (first_chunk) { \
5341 APPEND_LIST(ret, anchor); \
5342 ADD_INSN1(ret, line_node, newhash, INT2FIX(stack_len)); \
5345 ADD_INSN1(ret, line_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE)); \
5346 ADD_INSN(ret, line_node, swap); \
5347 APPEND_LIST(ret, anchor); \
5348 ADD_SEND(ret, line_node, id_core_hash_merge_ptr, INT2FIX(stack_len + 1)); \
5350 INIT_ANCHOR(anchor); \
5351 first_chunk = stack_len = 0; \
5358 if (static_literal_node_pair_p(node, iseq)) {
5360 const NODE *node_tmp = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next;
5361 for (; node_tmp && static_literal_node_pair_p(node_tmp, iseq); node_tmp = RNODE_LIST(RNODE_LIST(node_tmp)->nd_next)->nd_next)
5364 if ((first_chunk && stack_len == 0 && !node_tmp) || count >= min_tmp_hash_len) {
5369 for (; count; count--, node = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next) {
5371 elem[0] = static_literal_value(RNODE_LIST(node)->nd_head, iseq);
5373 elem[1] = static_literal_value(RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_head, iseq);
5380 hash = RB_OBJ_SET_FROZEN_SHAREABLE(hash);
5385 ADD_INSN1(ret, line_node, duphash, hash);
5389 ADD_INSN1(ret, line_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
5390 ADD_INSN(ret, line_node, swap);
5392 ADD_INSN1(ret, line_node, putobject, hash);
5394 ADD_SEND(ret, line_node, id_core_hash_merge_kwd,
INT2FIX(2));
5401 for (; count; count--, node = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next) {
5404 EXPECT_NODE(
"compile_hash", node, NODE_LIST, -1);
5407 if (RNODE_LIST(node)->nd_head) {
5409 NO_CHECK(COMPILE_(anchor,
"hash key element", RNODE_LIST(node)->nd_head, 0));
5410 NO_CHECK(COMPILE_(anchor,
"hash value element", RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_head, 0));
5414 if (stack_len >= max_stack_len) FLUSH_CHUNK();
5420 const NODE *kw = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_head;
5421 int empty_kw = nd_type_p(kw, NODE_HASH) && (!RNODE_HASH(kw)->nd_head);
5422 int first_kw = first_chunk && stack_len == 0;
5423 int last_kw = !RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next;
5424 int only_kw = last_kw && first_kw;
5426 empty_kw = empty_kw || nd_type_p(kw, NODE_NIL);
5428 if (only_kw && method_call_keywords) {
5436 NO_CHECK(COMPILE(ret,
"keyword splat", kw));
5438 else if (first_kw) {
5442 ADD_INSN1(ret, line_node, newhash,
INT2FIX(0));
5449 if (only_kw && method_call_keywords) {
5455 NO_CHECK(COMPILE(ret,
"keyword splat", kw));
5462 ADD_INSN1(ret, line_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
5463 if (first_kw) ADD_INSN1(ret, line_node, newhash,
INT2FIX(0));
5464 else ADD_INSN(ret, line_node, swap);
5466 NO_CHECK(COMPILE(ret,
"keyword splat", kw));
5468 ADD_SEND(ret, line_node, id_core_hash_merge_kwd,
INT2FIX(2));
 /* Extract the literal VALUE from a `when` clause node when the clause is
  * eligible for the case/when dispatch-table optimization; the elided
  * default path presumably returns Qundef for non-optimizable nodes
  * (TODO confirm against full source). NOTE(review): fragmentary
  * extraction — several case labels and guards are elided from this view. */
 5483rb_node_case_when_optimizable_literal(
const NODE *
const node)
 5485 switch (nd_type(node)) {
 5487 return rb_node_integer_literal_val(node);
 5489 VALUE v = rb_node_float_literal_val(node);
 5498 case NODE_IMAGINARY:
 5507 return rb_node_sym_string_val(node);
 5509 return rb_node_line_lineno_val(node);
 5511 return rb_node_str_string_val(node);
 5513 return rb_node_file_path_val(node);
/* NOTE(review): fragment of when_vals — the function head and several interior
 * lines are missing (original numbering jumps).  Seems to emit the condition
 * sequence for a `when` clause value list: collect optimizable literals into
 * the `literals` CDHASH (tagged label | 1) and otherwise emit topn/=== /
 * branchif per value.  TODO confirm against upstream; code untouched. */
5520 LABEL *l1,
int only_special_literals,
VALUE literals)
5523 const NODE *val = RNODE_LIST(vals)->nd_head;
5524 VALUE lit = rb_node_case_when_optimizable_literal(val);
5527 only_special_literals = 0;
5529 else if (
NIL_P(rb_hash_lookup(literals, lit))) {
5530 rb_hash_aset(literals, lit, (
VALUE)(l1) | 1);
5533 if (nd_type_p(val, NODE_STR) || nd_type_p(val, NODE_FILE)) {
5534 debugp_param(
"nd_lit", get_string_value(val));
5535 lit = get_string_value(val);
5536 ADD_INSN1(cond_seq, val, putobject, lit);
5540 if (!COMPILE(cond_seq,
"when cond", val))
return -1;
5544 ADD_INSN1(cond_seq, vals, topn,
INT2FIX(1));
5545 ADD_CALL(cond_seq, vals, idEqq,
INT2FIX(1));
5546 ADD_INSNL(cond_seq, val, branchif, l1);
5547 vals = RNODE_LIST(vals)->nd_next;
5549 return only_special_literals;
/* NOTE(review): fragment of when_splat_vals — handles splatted `when` values
 * (NODE_SPLAT / NODE_ARGSCAT / NODE_ARGSPUSH) by recursing over sub-lists and
 * emitting dup + checkmatch(CASE[,ARRAY]) + branchif.  Function head, case
 * labels and closing brace are missing; code left byte-identical. */
5554 LABEL *l1,
int only_special_literals,
VALUE literals)
5556 const NODE *line_node = vals;
5558 switch (nd_type(vals)) {
5560 if (when_vals(iseq, cond_seq, vals, l1, only_special_literals, literals) < 0)
5564 ADD_INSN (cond_seq, line_node, dup);
5565 CHECK(COMPILE(cond_seq,
"when splat", RNODE_SPLAT(vals)->nd_head));
5566 ADD_INSN1(cond_seq, line_node, splatarray,
Qfalse);
5567 ADD_INSN1(cond_seq, line_node, checkmatch,
INT2FIX(VM_CHECKMATCH_TYPE_CASE | VM_CHECKMATCH_ARRAY));
5568 ADD_INSNL(cond_seq, line_node, branchif, l1);
5571 CHECK(when_splat_vals(iseq, cond_seq, RNODE_ARGSCAT(vals)->nd_head, l1, only_special_literals, literals));
5572 CHECK(when_splat_vals(iseq, cond_seq, RNODE_ARGSCAT(vals)->nd_body, l1, only_special_literals, literals));
5575 CHECK(when_splat_vals(iseq, cond_seq, RNODE_ARGSPUSH(vals)->nd_head, l1, only_special_literals, literals));
5576 ADD_INSN (cond_seq, line_node, dup);
5577 CHECK(COMPILE(cond_seq,
"when argspush body", RNODE_ARGSPUSH(vals)->nd_body));
5578 ADD_INSN1(cond_seq, line_node, checkmatch,
INT2FIX(VM_CHECKMATCH_TYPE_CASE));
5579 ADD_INSNL(cond_seq, line_node, branchif, l1);
5582 ADD_INSN (cond_seq, line_node, dup);
5583 CHECK(COMPILE(cond_seq,
"when val", vals));
5584 ADD_INSN1(cond_seq, line_node, splatarray,
Qfalse);
5585 ADD_INSN1(cond_seq, line_node, checkmatch,
INT2FIX(VM_CHECKMATCH_TYPE_CASE | VM_CHECKMATCH_ARRAY));
5586 ADD_INSNL(cond_seq, line_node, branchif, l1);
/* NOTE(review): fragment — a stray struct-member line followed by
 * add_masgn_lhs_node, which appends a memo (insn position, line node, arg
 * count, lhs position) to the masgn_state singly-linked list and bumps
 * state->num_args.  Allocation of `memo` and the return are among the
 * missing lines; code left byte-identical. */
5679 const NODE *line_node;
5694add_masgn_lhs_node(
struct masgn_state *state,
int lhs_pos,
const NODE *line_node,
int argc,
INSN *before_insn)
5697 rb_bug(
"no masgn_state");
5706 memo->before_insn = before_insn;
5707 memo->line_node = line_node;
5708 memo->argn = state->num_args + 1;
5709 memo->num_args = argc;
5710 state->num_args += argc;
5711 memo->lhs_pos = lhs_pos;
5713 if (!state->first_memo) {
5714 state->first_memo = memo;
5717 state->last_memo->next = memo;
5719 state->last_memo = memo;
/* NOTE(review): large fragment of the multiple-assignment LHS compiler
 * (compile_massign_lhs in upstream — confirm).  Visible branches:
 *   - NODE_ATTRASGN: rewrites the compiled attrasgn send (argc+1, splat
 *     mutation handling via splatarray true / pushtoarray) and records the
 *     insn via add_masgn_lhs_node;
 *   - a nested-masgn branch saving/restoring state->lhs_level / nested and
 *     recursing into compile_massign0;
 *   - NODE_CDECL and a generic fallthrough that compiles the lhs popped and
 *     drops the leading instruction.
 * Many case labels, braces and intervening statements are missing (original
 * numbering jumps 5744→5746, 5772→5779, …).  Code left byte-identical. */
5729 switch (nd_type(node)) {
5730 case NODE_ATTRASGN: {
5732 const NODE *line_node = node;
5734 CHECK(COMPILE_POPPED(pre,
"masgn lhs (NODE_ATTRASGN)", node));
5736 bool safenav_call =
false;
5738 iobj = (
INSN *)get_prev_insn((
INSN *)insn_element);
5740 ELEM_REMOVE(insn_element);
5741 if (!IS_INSN_ID(iobj, send)) {
5742 safenav_call =
true;
5743 iobj = (
INSN *)get_prev_insn(iobj);
5744 ELEM_INSERT_NEXT(&iobj->link, insn_element);
5746 (pre->last = iobj->link.prev)->next = 0;
5749 int argc = vm_ci_argc(ci) + 1;
5750 ci = ci_argc_set(iseq, ci, argc);
5751 OPERAND_AT(iobj, 0) = (
VALUE)ci;
5755 ADD_INSN(lhs, line_node, swap);
5758 ADD_INSN1(lhs, line_node, topn,
INT2FIX(argc));
5761 if (!add_masgn_lhs_node(state, lhs_pos, line_node, argc, (
INSN *)LAST_ELEMENT(lhs))) {
5765 iobj->link.prev = lhs->last;
5766 lhs->last->next = &iobj->link;
5767 for (lhs->last = &iobj->link; lhs->last->next; lhs->last = lhs->last->next);
5768 if (vm_ci_flag(ci) & VM_CALL_ARGS_SPLAT) {
5769 int argc = vm_ci_argc(ci);
5770 bool dupsplat =
false;
5771 ci = ci_argc_set(iseq, ci, argc - 1);
5772 if (!(vm_ci_flag(ci) & VM_CALL_ARGS_SPLAT_MUT)) {
5779 ci = ci_flag_set(iseq, ci, VM_CALL_ARGS_SPLAT_MUT);
5781 OPERAND_AT(iobj, 0) = (
VALUE)ci;
5790 int line_no = nd_line(line_node);
5791 int node_id = nd_node_id(line_node);
5794 INSERT_BEFORE_INSN(iobj, line_no, node_id, swap);
5795 INSERT_BEFORE_INSN1(iobj, line_no, node_id, splatarray,
Qtrue);
5796 INSERT_BEFORE_INSN(iobj, line_no, node_id, swap);
5798 INSERT_BEFORE_INSN1(iobj, line_no, node_id, pushtoarray,
INT2FIX(1));
5800 if (!safenav_call) {
5801 ADD_INSN(lhs, line_node, pop);
5803 ADD_INSN(lhs, line_node, pop);
5806 for (
int i=0; i < argc; i++) {
5807 ADD_INSN(post, line_node, pop);
5812 DECL_ANCHOR(nest_rhs);
5813 INIT_ANCHOR(nest_rhs);
5814 DECL_ANCHOR(nest_lhs);
5815 INIT_ANCHOR(nest_lhs);
5817 int prev_level = state->lhs_level;
5818 bool prev_nested = state->nested;
5820 state->lhs_level = lhs_pos - 1;
5821 CHECK(compile_massign0(iseq, pre, nest_rhs, nest_lhs, post, node, state, 1));
5822 state->lhs_level = prev_level;
5823 state->nested = prev_nested;
5825 ADD_SEQ(lhs, nest_rhs);
5826 ADD_SEQ(lhs, nest_lhs);
5830 if (!RNODE_CDECL(node)->nd_vid) {
5834 CHECK(COMPILE_POPPED(pre,
"masgn lhs (NODE_CDECL)", node));
5837 iobj = (
INSN *)insn_element;
5840 ELEM_REMOVE(insn_element);
5841 pre->last = iobj->link.prev;
5844 if (!add_masgn_lhs_node(state, lhs_pos, node, 1, (
INSN *)LAST_ELEMENT(lhs))) {
5848 ADD_INSN(post, node, pop);
5853 DECL_ANCHOR(anchor);
5854 INIT_ANCHOR(anchor);
5855 CHECK(COMPILE_POPPED(anchor,
"masgn lhs", node));
5856 ELEM_REMOVE(FIRST_ELEMENT(anchor));
5857 ADD_SEQ(lhs, anchor);
/* NOTE(review): fragment of the optimized-masgn LHS walker — recurses down
 * the list tail first, then compiles each head via compile_massign_lhs with
 * no masgn_state (NULL).  Surrounding function head/tail lines missing. */
5868 CHECK(compile_massign_opt_lhs(iseq, ret, RNODE_LIST(lhsn)->nd_next));
5869 CHECK(compile_massign_lhs(iseq, ret, ret, ret, ret, RNODE_LIST(lhsn)->nd_head, NULL, 0));
/* NOTE(review): fragment of compile_massign_opt — the fast path for simple
 * parallel assignment.  The MEMORY macro bails out (return 0) when the same
 * variable id appears twice or the memo table overflows; rhs values are
 * compiled in order (popped when beyond llen), missing values padded with
 * putnil, then the lhs list is assigned via compile_massign_opt_lhs.
 * Several lines (counters, returns, #undef) are missing; code untouched. */
5876 const NODE *rhsn,
const NODE *orig_lhsn)
5879 const int memsize = numberof(mem);
5881 int llen = 0, rlen = 0;
5883 const NODE *lhsn = orig_lhsn;
5885#define MEMORY(v) { \
5887 if (memindex == memsize) return 0; \
5888 for (i=0; i<memindex; i++) { \
5889 if (mem[i] == (v)) return 0; \
5891 mem[memindex++] = (v); \
5894 if (rhsn == 0 || !nd_type_p(rhsn, NODE_LIST)) {
5899 const NODE *ln = RNODE_LIST(lhsn)->nd_head;
5900 switch (nd_type(ln)) {
5905 MEMORY(get_nd_vid(ln));
5910 lhsn = RNODE_LIST(lhsn)->nd_next;
5916 NO_CHECK(COMPILE_POPPED(ret,
"masgn val (popped)", RNODE_LIST(rhsn)->nd_head));
5919 NO_CHECK(COMPILE(ret,
"masgn val", RNODE_LIST(rhsn)->nd_head));
5921 rhsn = RNODE_LIST(rhsn)->nd_next;
5926 for (i=0; i<llen-rlen; i++) {
5927 ADD_INSN(ret, orig_lhsn, putnil);
5931 compile_massign_opt_lhs(iseq, ret, orig_lhsn);
/* NOTE(review): fragment of compile_massign0 — general masgn lowering.
 * Counts lhs entries, compiles each via compile_massign_lhs with a stack
 * position, handles NODE_POSTARG (post args + optional named rest via
 * expandarray flags 0x02|0x01), a plain named splat, and finally compiles
 * the rhs (dup + expandarray when not nested).  Interior lines missing;
 * code left byte-identical. */
5938 const NODE *rhsn = RNODE_MASGN(node)->nd_value;
5939 const NODE *splatn = RNODE_MASGN(node)->nd_args;
5940 const NODE *lhsn = RNODE_MASGN(node)->nd_head;
5941 const NODE *lhsn_count = lhsn;
5942 int lhs_splat = (splatn && NODE_NAMED_REST_P(splatn)) ? 1 : 0;
5947 while (lhsn_count) {
5949 lhsn_count = RNODE_LIST(lhsn_count)->nd_next;
5952 CHECK(compile_massign_lhs(iseq, pre, rhs, lhs, post, RNODE_LIST(lhsn)->nd_head, state, (llen - lpos) + lhs_splat + state->lhs_level));
5954 lhsn = RNODE_LIST(lhsn)->nd_next;
5958 if (nd_type_p(splatn, NODE_POSTARG)) {
5960 const NODE *postn = RNODE_POSTARG(splatn)->nd_2nd;
5961 const NODE *restn = RNODE_POSTARG(splatn)->nd_1st;
5962 int plen = (int)RNODE_LIST(postn)->as.nd_alen;
5964 int flag = 0x02 | (NODE_NAMED_REST_P(restn) ? 0x01 : 0x00);
5966 ADD_INSN2(lhs, splatn, expandarray,
INT2FIX(plen),
INT2FIX(flag));
5968 if (NODE_NAMED_REST_P(restn)) {
5969 CHECK(compile_massign_lhs(iseq, pre, rhs, lhs, post, restn, state, 1 + plen + state->lhs_level));
5972 CHECK(compile_massign_lhs(iseq, pre, rhs, lhs, post, RNODE_LIST(postn)->nd_head, state, (plen - ppos) + state->lhs_level));
5974 postn = RNODE_LIST(postn)->nd_next;
5979 CHECK(compile_massign_lhs(iseq, pre, rhs, lhs, post, splatn, state, 1 + state->lhs_level));
5983 if (!state->nested) {
5984 NO_CHECK(COMPILE(rhs,
"normal masgn rhs", rhsn));
5988 ADD_INSN(rhs, node, dup);
5990 ADD_INSN2(rhs, node, expandarray,
INT2FIX(llen),
INT2FIX(lhs_splat));
/* NOTE(review): fragment of compile_massign — falls back to the full path
 * when popped masgn optimization does not apply: runs compile_massign0,
 * then walks the recorded memos inserting topn instructions before each
 * lhs send, and setn's the rhs for the expression value when not popped.
 * Memo allocation/free lines and anchor setup are missing; code untouched. */
5997 if (!popped || RNODE_MASGN(node)->nd_args || !compile_massign_opt(iseq, ret, RNODE_MASGN(node)->nd_value, RNODE_MASGN(node)->nd_head)) {
5999 state.lhs_level = popped ? 0 : 1;
6002 state.first_memo = NULL;
6003 state.last_memo = NULL;
6013 int ok = compile_massign0(iseq, pre, rhs, lhs, post, node, &state, popped);
6017 VALUE topn_arg =
INT2FIX((state.num_args - memo->argn) + memo->lhs_pos);
6018 for (
int i = 0; i < memo->num_args; i++) {
6019 INSERT_BEFORE_INSN1(memo->before_insn, nd_line(memo->line_node), nd_node_id(memo->line_node), topn, topn_arg);
6021 tmp_memo = memo->next;
6030 if (!popped && state.num_args >= 1) {
6032 ADD_INSN1(ret, node, setn,
INT2FIX(state.num_args));
/* NOTE(review): fragment — looks like the constant-path segment collector
 * (builds a frozen/shareable array of symbols by unshifting nd_vid/nd_mid
 * while walking COLON2 heads; COLON3 contributes idNULL as the root marker).
 * Function head, loop and returns are missing — name inferred, confirm
 * against upstream.  Code left byte-identical. */
6044 switch (nd_type(node)) {
6046 rb_ary_unshift(arr,
ID2SYM(RNODE_CONST(node)->nd_vid));
6047 RB_OBJ_SET_SHAREABLE(arr);
6050 rb_ary_unshift(arr,
ID2SYM(RNODE_COLON3(node)->nd_mid));
6051 rb_ary_unshift(arr,
ID2SYM(idNULL));
6052 RB_OBJ_SET_SHAREABLE(arr);
6055 rb_ary_unshift(arr,
ID2SYM(RNODE_COLON2(node)->nd_mid));
6056 node = RNODE_COLON2(node)->nd_head;
/* NOTE(review): fragment of compile_const_prefix — emits getconstant chains
 * for a constant path prefix: NODE_CONST (allow-nil Qtrue), NODE_COLON3
 * (pop; rb_cObject base), NODE_COLON2 (recurse on head, then Qfalse +
 * getconstant), otherwise compile the prefix expression.  Case labels,
 * braces and the return are missing; code left byte-identical. */
6065compile_const_prefix(
rb_iseq_t *iseq,
const NODE *
const node,
6068 switch (nd_type(node)) {
6070 debugi(
"compile_const_prefix - colon", RNODE_CONST(node)->nd_vid);
6071 ADD_INSN1(body, node, putobject,
Qtrue);
6072 ADD_INSN1(body, node, getconstant,
ID2SYM(RNODE_CONST(node)->nd_vid));
6075 debugi(
"compile_const_prefix - colon3", RNODE_COLON3(node)->nd_mid);
6076 ADD_INSN(body, node, pop);
6077 ADD_INSN1(body, node, putobject,
rb_cObject);
6078 ADD_INSN1(body, node, putobject,
Qtrue);
6079 ADD_INSN1(body, node, getconstant,
ID2SYM(RNODE_COLON3(node)->nd_mid));
6082 CHECK(compile_const_prefix(iseq, RNODE_COLON2(node)->nd_head, pref, body));
6083 debugi(
"compile_const_prefix - colon2", RNODE_COLON2(node)->nd_mid);
6084 ADD_INSN1(body, node, putobject,
Qfalse);
6085 ADD_INSN1(body, node, getconstant,
ID2SYM(RNODE_COLON2(node)->nd_mid));
6088 CHECK(COMPILE(pref,
"const colon2 prefix", node));
/* NOTE(review): fragments of cpath_const_p (is a class-path purely constant,
 * recursing through COLON2 heads) and the class-path compiler that pushes
 * the definee base: rb_cObject for ::X (SCOPED), the compiled head for A::X
 * (SCOPED, +DYNAMIC_CREF when the head is not constant-only), else the
 * CONST_BASE special object.  Heads/returns between the two functions are
 * missing; code left byte-identical. */
6095cpath_const_p(
const NODE *node)
6097 switch (nd_type(node)) {
6102 if (RNODE_COLON2(node)->nd_head) {
6103 return cpath_const_p(RNODE_COLON2(node)->nd_head);
6114 if (nd_type_p(cpath, NODE_COLON3)) {
6116 ADD_INSN1(ret, cpath, putobject,
rb_cObject);
6117 return VM_DEFINECLASS_FLAG_SCOPED;
6119 else if (nd_type_p(cpath, NODE_COLON2) && RNODE_COLON2(cpath)->nd_head) {
6121 NO_CHECK(COMPILE(ret,
"nd_else->nd_head", RNODE_COLON2(cpath)->nd_head));
6122 int flags = VM_DEFINECLASS_FLAG_SCOPED;
6123 if (!cpath_const_p(RNODE_COLON2(cpath)->nd_head)) {
6124 flags |= VM_DEFINECLASS_FLAG_DYNAMIC_CREF;
6130 ADD_INSN1(ret, cpath, putspecialobject,
6131 INT2FIX(VM_SPECIAL_OBJECT_CONST_BASE));
/* NOTE(review): private_recv_p — true when the receiver is an implicit
 * `self` (nd_state != 0 presumably marks the implicit form — confirm).
 * Return type line and the fallback return are missing; code untouched. */
6137private_recv_p(
const NODE *node)
6139 NODE *recv = get_nd_recv(node);
6140 if (recv && nd_type_p(recv, NODE_SELF)) {
6141 return RNODE_SELF(recv)->nd_state != 0;
/* NOTE(review): forward declarations (tails) for defined_expr0 and
 * compile_call, needed because defined_expr0 and compile_call are mutually
 * referenced below.  The leading `static … (` lines are missing. */
6148 const NODE *
const node,
LABEL **lfinish,
VALUE needstr,
bool ignore);
6151compile_call(
rb_iseq_t *iseq,
LINK_ANCHOR *
const ret,
const NODE *
const node,
const enum node_type
type,
const NODE *
const line_node,
int popped,
bool assume_receiver);
/* NOTE(review): large fragment of defined_expr0 — the `defined?(expr)`
 * compiler.  Dispatches on node type: trivial answers (nil/self/true/false),
 * recursive checks for array/hash elements, variable kinds via the
 * `defined`/`definedivar` instructions, COLON2/COLON3 constant-vs-method
 * probing, method calls (argument and receiver definedness first, with
 * lfinish[1]/lfinish[2] as bail-out labels), yield, back-/nth-ref, zsuper,
 * and assignment nodes.  PUSH_VAL chooses Qtrue vs the human-readable
 * string depending on needstr.  Dozens of case labels and `break`s are
 * missing (numbering jumps); code left byte-identical. */
6158 enum defined_type expr_type = DEFINED_NOT_DEFINED;
6159 enum node_type
type;
6160 const int line = nd_line(node);
6161 const NODE *line_node = node;
6163 switch (
type = nd_type(node)) {
6167 expr_type = DEFINED_NIL;
6170 expr_type = DEFINED_SELF;
6173 expr_type = DEFINED_TRUE;
6176 expr_type = DEFINED_FALSE;
6181 const NODE *vals = (nd_type(node) == NODE_HASH) ? RNODE_HASH(node)->nd_head : node;
6185 if (RNODE_LIST(vals)->nd_head) {
6186 defined_expr0(iseq, ret, RNODE_LIST(vals)->nd_head, lfinish,
Qfalse,
false);
6189 lfinish[1] = NEW_LABEL(line);
6191 ADD_INSNL(ret, line_node, branchunless, lfinish[1]);
6193 }
while ((vals = RNODE_LIST(vals)->nd_next) != NULL);
6206 case NODE_IMAGINARY:
6211 expr_type = DEFINED_EXPR;
6215 defined_expr0(iseq, ret, RNODE_LIST(node)->nd_head, lfinish,
Qfalse,
false);
6217 lfinish[1] = NEW_LABEL(line);
6219 ADD_INSNL(ret, line_node, branchunless, lfinish[1]);
6220 expr_type = DEFINED_EXPR;
6226 expr_type = DEFINED_LVAR;
6229#define PUSH_VAL(type) (needstr == Qfalse ? Qtrue : rb_iseq_defined_string(type))
6231 ADD_INSN3(ret, line_node, definedivar,
6232 ID2SYM(RNODE_IVAR(node)->nd_vid), get_ivar_ic_value(iseq,RNODE_IVAR(node)->nd_vid), PUSH_VAL(DEFINED_IVAR));
6236 ADD_INSN(ret, line_node, putnil);
6237 ADD_INSN3(ret, line_node, defined,
INT2FIX(DEFINED_GVAR),
6238 ID2SYM(RNODE_GVAR(node)->nd_vid), PUSH_VAL(DEFINED_GVAR));
6242 ADD_INSN(ret, line_node, putnil);
6243 ADD_INSN3(ret, line_node, defined,
INT2FIX(DEFINED_CVAR),
6244 ID2SYM(RNODE_CVAR(node)->nd_vid), PUSH_VAL(DEFINED_CVAR));
6248 ADD_INSN(ret, line_node, putnil);
6249 ADD_INSN3(ret, line_node, defined,
INT2FIX(DEFINED_CONST),
6250 ID2SYM(RNODE_CONST(node)->nd_vid), PUSH_VAL(DEFINED_CONST));
6254 lfinish[1] = NEW_LABEL(line);
6256 defined_expr0(iseq, ret, RNODE_COLON2(node)->nd_head, lfinish,
Qfalse,
false);
6257 ADD_INSNL(ret, line_node, branchunless, lfinish[1]);
6258 NO_CHECK(COMPILE(ret,
"defined/colon2#nd_head", RNODE_COLON2(node)->nd_head));
6261 ADD_INSN3(ret, line_node, defined,
INT2FIX(DEFINED_CONST_FROM),
6262 ID2SYM(RNODE_COLON2(node)->nd_mid), PUSH_VAL(DEFINED_CONST));
6265 ADD_INSN3(ret, line_node, defined,
INT2FIX(DEFINED_METHOD),
6266 ID2SYM(RNODE_COLON2(node)->nd_mid), PUSH_VAL(DEFINED_METHOD));
6270 ADD_INSN1(ret, line_node, putobject,
rb_cObject);
6271 ADD_INSN3(ret, line_node, defined,
6272 INT2FIX(DEFINED_CONST_FROM),
ID2SYM(RNODE_COLON3(node)->nd_mid), PUSH_VAL(DEFINED_CONST));
6280 case NODE_ATTRASGN:{
6281 const int explicit_receiver =
6282 (
type == NODE_CALL ||
type == NODE_OPCALL ||
6283 (
type == NODE_ATTRASGN && !private_recv_p(node)));
6285 if (get_nd_args(node) || explicit_receiver) {
6287 lfinish[1] = NEW_LABEL(line);
6290 lfinish[2] = NEW_LABEL(line);
6293 if (get_nd_args(node)) {
6294 defined_expr0(iseq, ret, get_nd_args(node), lfinish,
Qfalse,
false);
6295 ADD_INSNL(ret, line_node, branchunless, lfinish[1]);
6297 if (explicit_receiver) {
6298 defined_expr0(iseq, ret, get_nd_recv(node), lfinish,
Qfalse,
true);
6299 switch (nd_type(get_nd_recv(node))) {
6305 ADD_INSNL(ret, line_node, branchunless, lfinish[2]);
6306 compile_call(iseq, ret, get_nd_recv(node), nd_type(get_nd_recv(node)), line_node, 0,
true);
6309 ADD_INSNL(ret, line_node, branchunless, lfinish[1]);
6310 NO_CHECK(COMPILE(ret,
"defined/recv", get_nd_recv(node)));
6314 ADD_INSN(ret, line_node, dup);
6316 ADD_INSN3(ret, line_node, defined,
INT2FIX(DEFINED_METHOD),
6317 ID2SYM(get_node_call_nd_mid(node)), PUSH_VAL(DEFINED_METHOD));
6320 ADD_INSN(ret, line_node, putself);
6322 ADD_INSN(ret, line_node, dup);
6324 ADD_INSN3(ret, line_node, defined,
INT2FIX(DEFINED_FUNC),
6325 ID2SYM(get_node_call_nd_mid(node)), PUSH_VAL(DEFINED_METHOD));
6331 ADD_INSN(ret, line_node, putnil);
6332 ADD_INSN3(ret, line_node, defined,
INT2FIX(DEFINED_YIELD), 0,
6333 PUSH_VAL(DEFINED_YIELD));
6334 iseq_set_use_block(ISEQ_BODY(iseq)->local_iseq);
6339 ADD_INSN(ret, line_node, putnil);
6340 ADD_INSN3(ret, line_node, defined,
INT2FIX(DEFINED_REF),
6341 INT2FIX((RNODE_BACK_REF(node)->nd_nth << 1) | (
type == NODE_BACK_REF)),
6342 PUSH_VAL(DEFINED_GVAR));
6347 ADD_INSN(ret, line_node, putnil);
6348 ADD_INSN3(ret, line_node, defined,
INT2FIX(DEFINED_ZSUPER), 0,
6349 PUSH_VAL(DEFINED_ZSUPER));
6355 case NODE_OP_ASGN_OR:
6356 case NODE_OP_ASGN_AND:
6365 expr_type = DEFINED_ASGN;
6372 VALUE str = rb_iseq_defined_string(expr_type);
6373 ADD_INSN1(ret, line_node, putobject, str);
6376 ADD_INSN1(ret, line_node, putobject,
Qtrue);
/* NOTE(review): tail of the defined?-rescue iseq builder (putnil + shared
 * exception local table) followed by a fragment of defined_expr, which
 * wraps defined_expr0 in a rescue region (lstart/lend, callback-built
 * rescue iseq) so exceptions during evaluation yield nil.  Function heads
 * and several statements missing; code left byte-identical. */
6383 ADD_SYNTHETIC_INSN(ret, 0, -1, putnil);
6384 iseq_set_exception_local_table(iseq);
6389 const NODE *
const node,
LABEL **lfinish,
VALUE needstr,
bool ignore)
6392 defined_expr0(iseq, ret, node, lfinish, needstr,
false);
6394 int line = nd_line(node);
6395 LABEL *lstart = NEW_LABEL(line);
6396 LABEL *lend = NEW_LABEL(line);
6399 rb_iseq_new_with_callback_new_callback(build_defined_rescue_iseq, NULL);
6400 rescue = NEW_CHILD_ISEQ_WITH_CALLBACK(ifunc,
6402 ISEQ_BODY(iseq)->location.label),
6403 ISEQ_TYPE_RESCUE, 0);
6404 lstart->rescued = LABEL_RESCUE_BEG;
6405 lend->rescued = LABEL_RESCUE_END;
6406 APPEND_LABEL(ret, lcur, lstart);
6407 ADD_LABEL(ret, lend);
6409 ADD_CATCH_ENTRY(CATCH_TYPE_RESCUE, lstart, lend, rescue, lfinish[1]);
/* NOTE(review): fragment of compile_defined_expr — `defined?` with no
 * operand yields the "expression" string; otherwise sets up lfinish labels,
 * runs defined_expr, and patches in the putnil/swap/pop label epilogue.
 * Function head and conditionals around the labels are missing. */
6417 const int line = nd_line(node);
6418 const NODE *line_node = node;
6419 if (!RNODE_DEFINED(node)->nd_head) {
6420 VALUE str = rb_iseq_defined_string(DEFINED_NIL);
6421 ADD_INSN1(ret, line_node, putobject, str);
6426 lfinish[0] = NEW_LABEL(line);
6429 defined_expr(iseq, ret, RNODE_DEFINED(node)->nd_head, lfinish, needstr, ignore);
6431 ELEM_INSERT_NEXT(last, &new_insn_body(iseq, nd_line(line_node), nd_node_id(line_node), BIN(putnil), 0)->link);
6432 ADD_INSN(ret, line_node, swap);
6434 ADD_LABEL(ret, lfinish[2]);
6436 ADD_INSN(ret, line_node, pop);
6437 ADD_LABEL(ret, lfinish[1]);
6439 ADD_LABEL(ret, lfinish[0]);
/* NOTE(review): fragment of make_name_for_block — walks parent iseqs
 * counting block nesting to build "block in NAME" or
 * "block (N levels) in NAME".  Level counter and return type are among the
 * missing lines; code left byte-identical. */
6445make_name_for_block(
const rb_iseq_t *orig_iseq)
6450 if (ISEQ_BODY(orig_iseq)->parent_iseq != 0) {
6451 while (ISEQ_BODY(orig_iseq)->local_iseq != iseq) {
6452 if (ISEQ_BODY(iseq)->
type == ISEQ_TYPE_BLOCK) {
6455 iseq = ISEQ_BODY(iseq)->parent_iseq;
6460 return rb_sprintf(
"block in %"PRIsVALUE, ISEQ_BODY(iseq)->location.label);
6463 return rb_sprintf(
"block (%d levels) in %"PRIsVALUE, level, ISEQ_BODY(iseq)->location.label);
/* NOTE(review): fragments of the ensure-clause machinery: push an
 * ensure_node entry onto the compile-data stack; append an ensure range
 * (erange list) for catch-table purposes; can_add_ensure_iseq refuses when
 * inside a rescue with a pending ensure node; add_ensure_iseq replays the
 * stacked ensure bodies (popped) into `ret`, temporarily unwinding the
 * stack so nested ensures do not re-trigger.  Loop heads and several lines
 * are missing; code left byte-identical. */
6472 enl->ensure_node = node;
6473 enl->prev = ISEQ_COMPILE_DATA(iseq)->ensure_node_stack;
6475 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = enl;
6485 while (erange->next != 0) {
6486 erange = erange->next;
6490 ne->end = erange->end;
6491 erange->end = lstart;
6497can_add_ensure_iseq(
const rb_iseq_t *iseq)
6500 if (ISEQ_COMPILE_DATA(iseq)->in_rescue && (e = ISEQ_COMPILE_DATA(iseq)->ensure_node_stack) != NULL) {
6502 if (e->ensure_node)
return false;
6515 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack;
6517 DECL_ANCHOR(ensure);
6519 INIT_ANCHOR(ensure);
6521 if (enlp->erange != NULL) {
6522 DECL_ANCHOR(ensure_part);
6523 LABEL *lstart = NEW_LABEL(0);
6524 LABEL *lend = NEW_LABEL(0);
6525 INIT_ANCHOR(ensure_part);
6527 add_ensure_range(iseq, enlp->erange, lstart, lend);
6529 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = enlp->prev;
6530 ADD_LABEL(ensure_part, lstart);
6531 NO_CHECK(COMPILE_POPPED(ensure_part,
"ensure part", enlp->ensure_node));
6532 ADD_LABEL(ensure_part, lend);
6533 ADD_SEQ(ensure, ensure_part);
6542 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = prev_enlp;
6543 ADD_SEQ(ret, ensure);
/* NOTE(review): keyword-argument helpers (fragments): check_keyword walks to
 * the last list element and tests keyword_node_p; keyword_node_single_splat_p
 * detects a bare `**h` hash; compile_single_keyword_splat_mutable emits the
 * VMCORE core#hash_merge_kwd dance to duplicate a single kw-splat so callee
 * mutation is safe.  Return types and some returns are missing. */
6548check_keyword(
const NODE *node)
6552 if (nd_type_p(node, NODE_LIST)) {
6553 while (RNODE_LIST(node)->nd_next) {
6554 node = RNODE_LIST(node)->nd_next;
6556 node = RNODE_LIST(node)->nd_head;
6559 return keyword_node_p(node);
6564keyword_node_single_splat_p(
NODE *kwnode)
6568 NODE *node = RNODE_HASH(kwnode)->nd_head;
6569 return RNODE_LIST(node)->nd_head == NULL &&
6570 RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next == NULL;
6575 NODE *kwnode,
unsigned int *flag_ptr)
6577 *flag_ptr |= VM_CALL_KW_SPLAT_MUT;
6578 ADD_INSN1(args, argn, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
6579 ADD_INSN1(args, argn, newhash,
INT2FIX(0));
6580 compile_hash(iseq, args, kwnode, TRUE, FALSE);
6581 ADD_SEND(args, argn, id_core_hash_merge_kwd,
INT2FIX(2));
/* NOTE(review): SPLATARRAY/DUP flag bits plus a large fragment of
 * setup_args_core — compiles an argument list node into `args`, returning
 * argc and accumulating VM_CALL_* flags: plain lists (with trailing kwargs
 * via compile_keyword_arg or compile_hash), NODE_SPLAT (splatarray with
 * one-shot dup_rest), NODE_ARGSCAT / NODE_ARGSPUSH (recursive head +
 * pushtoarray/concattoarray tail, kw-splat mutation handling).  Many case
 * labels, returns and braces are missing; code left byte-identical. */
6584#define SPLATARRAY_FALSE 0
6585#define SPLATARRAY_TRUE 1
6586#define DUP_SINGLE_KW_SPLAT 2
6590 unsigned int *dup_rest,
unsigned int *flag_ptr,
struct rb_callinfo_kwarg **kwarg_ptr)
6592 if (!argn)
return 0;
6594 NODE *kwnode = NULL;
6596 switch (nd_type(argn)) {
6599 int len = compile_args(iseq, args, argn, &kwnode);
6600 RUBY_ASSERT(flag_ptr == NULL || (*flag_ptr & VM_CALL_ARGS_SPLAT) == 0);
6603 if (compile_keyword_arg(iseq, args, kwnode, kwarg_ptr, flag_ptr)) {
6607 if (keyword_node_single_splat_p(kwnode) && (*dup_rest & DUP_SINGLE_KW_SPLAT)) {
6608 compile_single_keyword_splat_mutable(iseq, args, argn, kwnode, flag_ptr);
6611 compile_hash(iseq, args, kwnode, TRUE, FALSE);
6620 NO_CHECK(COMPILE(args,
"args (splat)", RNODE_SPLAT(argn)->nd_head));
6621 ADD_INSN1(args, argn, splatarray, RBOOL(*dup_rest & SPLATARRAY_TRUE));
6622 if (*dup_rest & SPLATARRAY_TRUE) *dup_rest &= ~SPLATARRAY_TRUE;
6623 if (flag_ptr) *flag_ptr |= VM_CALL_ARGS_SPLAT;
6624 RUBY_ASSERT(flag_ptr == NULL || (*flag_ptr & VM_CALL_KW_SPLAT) == 0);
6627 case NODE_ARGSCAT: {
6628 if (flag_ptr) *flag_ptr |= VM_CALL_ARGS_SPLAT;
6629 int argc = setup_args_core(iseq, args, RNODE_ARGSCAT(argn)->nd_head, dup_rest, NULL, NULL);
6630 bool args_pushed =
false;
6632 if (nd_type_p(RNODE_ARGSCAT(argn)->nd_body, NODE_LIST)) {
6633 int rest_len = compile_args(iseq, args, RNODE_ARGSCAT(argn)->nd_body, &kwnode);
6634 if (kwnode) rest_len--;
6635 ADD_INSN1(args, argn, pushtoarray,
INT2FIX(rest_len));
6639 RUBY_ASSERT(!check_keyword(RNODE_ARGSCAT(argn)->nd_body));
6640 NO_CHECK(COMPILE(args,
"args (cat: splat)", RNODE_ARGSCAT(argn)->nd_body));
6643 if (nd_type_p(RNODE_ARGSCAT(argn)->nd_head, NODE_LIST)) {
6644 ADD_INSN1(args, argn, splatarray, RBOOL(*dup_rest & SPLATARRAY_TRUE));
6645 if (*dup_rest & SPLATARRAY_TRUE) *dup_rest &= ~SPLATARRAY_TRUE;
6648 else if (!args_pushed) {
6649 ADD_INSN(args, argn, concattoarray);
6655 *flag_ptr |= VM_CALL_KW_SPLAT;
6656 compile_hash(iseq, args, kwnode, TRUE, FALSE);
6662 case NODE_ARGSPUSH: {
6663 if (flag_ptr) *flag_ptr |= VM_CALL_ARGS_SPLAT;
6664 int argc = setup_args_core(iseq, args, RNODE_ARGSPUSH(argn)->nd_head, dup_rest, NULL, NULL);
6666 if (nd_type_p(RNODE_ARGSPUSH(argn)->nd_body, NODE_LIST)) {
6667 int rest_len = compile_args(iseq, args, RNODE_ARGSPUSH(argn)->nd_body, &kwnode);
6668 if (kwnode) rest_len--;
6669 ADD_INSN1(args, argn, newarray,
INT2FIX(rest_len));
6670 ADD_INSN1(args, argn, pushtoarray,
INT2FIX(1));
6673 if (keyword_node_p(RNODE_ARGSPUSH(argn)->nd_body)) {
6674 kwnode = RNODE_ARGSPUSH(argn)->nd_body;
6677 NO_CHECK(COMPILE(args,
"args (cat: splat)", RNODE_ARGSPUSH(argn)->nd_body));
6678 ADD_INSN1(args, argn, pushtoarray,
INT2FIX(1));
6684 *flag_ptr |= VM_CALL_KW_SPLAT;
6685 if (!keyword_node_single_splat_p(kwnode)) {
6686 *flag_ptr |= VM_CALL_KW_SPLAT_MUT;
6687 compile_hash(iseq, args, kwnode, TRUE, FALSE);
6689 else if (*dup_rest & DUP_SINGLE_KW_SPLAT) {
6690 compile_single_keyword_splat_mutable(iseq, args, argn, kwnode, flag_ptr);
6693 compile_hash(iseq, args, kwnode, TRUE, FALSE);
6701 UNKNOWN_NODE(
"setup_arg", argn,
Qnil);
/* NOTE(review): fragments — setup_args_splat_mut marks the call
 * ARGS_SPLAT_MUT when the dup_rest flag was consumed during compilation;
 * setup_args_dup_rest_p conservatively reports whether evaluating an
 * argument node could run arbitrary code (so the splat must be dup'ed).
 * Most case labels / returns are missing; code left byte-identical. */
6707setup_args_splat_mut(
unsigned int *flag,
int dup_rest,
int initial_dup_rest)
6709 if ((*flag & VM_CALL_ARGS_SPLAT) && dup_rest != initial_dup_rest) {
6710 *flag |= VM_CALL_ARGS_SPLAT_MUT;
6715setup_args_dup_rest_p(
const NODE *argn)
6717 switch(nd_type(argn)) {
6728 case NODE_IMAGINARY:
6741 return setup_args_dup_rest_p(RNODE_COLON2(argn)->nd_head);
6744 if (setup_args_dup_rest_p(RNODE_LIST(argn)->nd_head)) {
6747 argn = RNODE_LIST(argn)->nd_next;
/* NOTE(review): fragment of setup_args — the call-site argument driver.
 * Decides the initial dup_rest policy from the argument node shape (pure
 * list → no dup; argscat/argspush → dup unless the head is a literal list
 * feeding a non-brace hash whose keys/values cannot run code), handles
 * block-pass (`&blk`, including `...` forwarding through the local table
 * and getblockparam → getblockparamproxy rewriting), then delegates to
 * setup_args_core and finalizes splat-mutability.  Numerous lines missing;
 * code left byte-identical. */
6760 unsigned int dup_rest = SPLATARRAY_TRUE, initial_dup_rest;
6763 const NODE *check_arg = nd_type_p(argn, NODE_BLOCK_PASS) ?
6764 RNODE_BLOCK_PASS(argn)->nd_head : argn;
6767 switch(nd_type(check_arg)) {
6770 dup_rest = SPLATARRAY_FALSE;
6774 dup_rest = !nd_type_p(RNODE_ARGSCAT(check_arg)->nd_head, NODE_LIST);
6776 case(NODE_ARGSPUSH):
6778 dup_rest = !((nd_type_p(RNODE_ARGSPUSH(check_arg)->nd_head, NODE_SPLAT) ||
6779 (nd_type_p(RNODE_ARGSPUSH(check_arg)->nd_head, NODE_ARGSCAT) &&
6780 nd_type_p(RNODE_ARGSCAT(RNODE_ARGSPUSH(check_arg)->nd_head)->nd_head, NODE_LIST))) &&
6781 nd_type_p(RNODE_ARGSPUSH(check_arg)->nd_body, NODE_HASH) &&
6782 !RNODE_HASH(RNODE_ARGSPUSH(check_arg)->nd_body)->nd_brace);
6784 if (dup_rest == SPLATARRAY_FALSE) {
6786 NODE *node = RNODE_HASH(RNODE_ARGSPUSH(check_arg)->nd_body)->nd_head;
6788 NODE *key_node = RNODE_LIST(node)->nd_head;
6789 if (key_node && setup_args_dup_rest_p(key_node)) {
6790 dup_rest = SPLATARRAY_TRUE;
6794 node = RNODE_LIST(node)->nd_next;
6795 NODE *value_node = RNODE_LIST(node)->nd_head;
6796 if (setup_args_dup_rest_p(value_node)) {
6797 dup_rest = SPLATARRAY_TRUE;
6801 node = RNODE_LIST(node)->nd_next;
6810 if (check_arg != argn && setup_args_dup_rest_p(RNODE_BLOCK_PASS(argn)->nd_body)) {
6812 dup_rest = SPLATARRAY_TRUE | DUP_SINGLE_KW_SPLAT;
6815 initial_dup_rest = dup_rest;
6817 if (argn && nd_type_p(argn, NODE_BLOCK_PASS)) {
6818 DECL_ANCHOR(arg_block);
6819 INIT_ANCHOR(arg_block);
6821 if (RNODE_BLOCK_PASS(argn)->forwarding && ISEQ_BODY(ISEQ_BODY(iseq)->local_iseq)->param.flags.forwardable) {
6822 int idx = ISEQ_BODY(ISEQ_BODY(iseq)->local_iseq)->local_table_size;
6824 RUBY_ASSERT(nd_type_p(RNODE_BLOCK_PASS(argn)->nd_head, NODE_ARGSPUSH));
6825 const NODE * arg_node =
6826 RNODE_ARGSPUSH(RNODE_BLOCK_PASS(argn)->nd_head)->nd_head;
6833 if (nd_type_p(arg_node, NODE_ARGSCAT)) {
6834 argc += setup_args_core(iseq, args, RNODE_ARGSCAT(arg_node)->nd_head, &dup_rest, flag, keywords);
6837 *flag |= VM_CALL_FORWARDING;
6839 ADD_GETLOCAL(args, argn, idx, get_lvar_level(iseq));
6840 setup_args_splat_mut(flag, dup_rest, initial_dup_rest);
6844 *flag |= VM_CALL_ARGS_BLOCKARG;
6846 NO_CHECK(COMPILE(arg_block,
"block", RNODE_BLOCK_PASS(argn)->nd_body));
6849 if (LIST_INSN_SIZE_ONE(arg_block)) {
6851 if (IS_INSN(elem)) {
6853 if (iobj->insn_id == BIN(getblockparam)) {
6854 iobj->insn_id = BIN(getblockparamproxy);
6858 ret =
INT2FIX(setup_args_core(iseq, args, RNODE_BLOCK_PASS(argn)->nd_head, &dup_rest, flag, keywords));
6859 ADD_SEQ(args, arg_block);
6862 ret =
INT2FIX(setup_args_core(iseq, args, argn, &dup_rest, flag, keywords));
6864 setup_args_splat_mut(flag, dup_rest, initial_dup_rest);
/* NOTE(review): fragment of the END{} (postexe) builder — wraps the body in
 * a block child iseq and emits VMCORE core#set_postexe with that block.
 * Function head and return are missing; code left byte-identical. */
6871 const NODE *body = ptr;
6872 int line = nd_line(body);
6874 const rb_iseq_t *block = NEW_CHILD_ISEQ(body, make_name_for_block(ISEQ_BODY(iseq)->parent_iseq), ISEQ_TYPE_BLOCK, line);
6876 ADD_INSN1(ret, body, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
6877 ADD_CALL_WITH_BLOCK(ret, body, id_core_set_postexe, argc, block);
6879 iseq_set_local_table(iseq, 0, 0);
/* NOTE(review): fragment of compile_named_capture_assign — assigns regexp
 * named captures after a match: loads $~ (idBACKREF), branches to the fail
 * path when nil, and for each capture variable compiles `md[:name]` via an
 * idAREF send; the single-capture fast path splices a jump/fail/nil
 * sequence around the send, and the fail path rewrites each compiled
 * assignment's value insn to putnil.  Several lines missing; code
 * untouched. */
6887 int line = nd_line(node);
6888 const NODE *line_node = node;
6889 LABEL *fail_label = NEW_LABEL(line), *end_label = NEW_LABEL(line);
6891#if !(defined(NAMED_CAPTURE_BY_SVAR) && NAMED_CAPTURE_BY_SVAR-0)
6892 ADD_INSN1(ret, line_node, getglobal,
ID2SYM(idBACKREF));
6896 ADD_INSN(ret, line_node, dup);
6897 ADD_INSNL(ret, line_node, branchunless, fail_label);
6899 for (vars = node; vars; vars = RNODE_BLOCK(vars)->nd_next) {
6901 if (RNODE_BLOCK(vars)->nd_next) {
6902 ADD_INSN(ret, line_node, dup);
6905 NO_CHECK(COMPILE_POPPED(ret,
"capture", RNODE_BLOCK(vars)->nd_head));
6907 cap = new_insn_send(iseq, nd_line(line_node), nd_node_id(line_node), idAREF,
INT2FIX(1),
6910#if !defined(NAMED_CAPTURE_SINGLE_OPT) || NAMED_CAPTURE_SINGLE_OPT-0
6911 if (!RNODE_BLOCK(vars)->nd_next && vars == node) {
6916 ADD_INSNL(nom, line_node, jump, end_label);
6917 ADD_LABEL(nom, fail_label);
6919 ADD_INSN(nom, line_node, pop);
6920 ADD_INSN(nom, line_node, putnil);
6922 ADD_LABEL(nom, end_label);
6923 (nom->last->next = cap->link.next)->prev = nom->last;
6924 (cap->link.next = nom->anchor.next)->prev = &cap->link;
6929 ADD_INSNL(ret, line_node, jump, end_label);
6930 ADD_LABEL(ret, fail_label);
6931 ADD_INSN(ret, line_node, pop);
6932 for (vars = node; vars; vars = RNODE_BLOCK(vars)->nd_next) {
6934 NO_CHECK(COMPILE_POPPED(ret,
"capture", RNODE_BLOCK(vars)->nd_head));
6936 ((
INSN*)last)->insn_id = BIN(putnil);
6937 ((
INSN*)last)->operand_size = 0;
6939 ADD_LABEL(ret, end_label);
/* NOTE(review): range-literal helpers (fragments): optimizable_range_item_p
 * tests whether a range endpoint is a compile-time literal (case labels
 * missing), optimized_range_item converts it to its VALUE (__LINE__,
 * integer, float, rational, imaginary, …) and rb_bug's on anything else. */
6943optimizable_range_item_p(
const NODE *n)
6945 if (!n)
return FALSE;
6946 switch (nd_type(n)) {
6959optimized_range_item(
const NODE *n)
6961 switch (nd_type(n)) {
6963 return rb_node_line_lineno_val(n);
6965 return rb_node_integer_literal_val(n);
6967 return rb_node_float_literal_val(n);
6969 return rb_node_rational_literal_val(n);
6970 case NODE_IMAGINARY:
6971 return rb_node_imaginary_literal_val(n);
6975 rb_bug(
"unexpected node: %s", ruby_node_name(nd_type(n)));
/* NOTE(review): fragment of compile_if — handles both NODE_IF and
 * NODE_UNLESS by swapping body/else.  Compiles the branch condition into
 * then/else labels, records branch coverage for each reachable arm, emits
 * the then sequence (jump to end, or pop when popped), then the else
 * sequence, and the end label.  Some label-refcount conditionals and the
 * return are missing; code left byte-identical. */
6982 const NODE *
const node_body =
type == NODE_IF ? RNODE_IF(node)->nd_body : RNODE_UNLESS(node)->nd_else;
6983 const NODE *
const node_else =
type == NODE_IF ? RNODE_IF(node)->nd_else : RNODE_UNLESS(node)->nd_body;
6985 const int line = nd_line(node);
6986 const NODE *line_node = node;
6987 DECL_ANCHOR(cond_seq);
6988 LABEL *then_label, *else_label, *end_label;
6991 INIT_ANCHOR(cond_seq);
6992 then_label = NEW_LABEL(line);
6993 else_label = NEW_LABEL(line);
6996 NODE *cond = RNODE_IF(node)->nd_cond;
6997 if (nd_type(cond) == NODE_BLOCK) {
6998 cond = RNODE_BLOCK(cond)->nd_head;
7001 CHECK(compile_branch_condition(iseq, cond_seq, cond, then_label, else_label));
7002 ADD_SEQ(ret, cond_seq);
7004 if (then_label->refcnt && else_label->refcnt) {
7005 branches = decl_branch_base(iseq, PTR2NUM(node), nd_code_loc(node),
type == NODE_IF ?
"if" :
"unless");
7008 if (then_label->refcnt) {
7009 ADD_LABEL(ret, then_label);
7011 DECL_ANCHOR(then_seq);
7012 INIT_ANCHOR(then_seq);
7013 CHECK(COMPILE_(then_seq,
"then", node_body, popped));
7015 if (else_label->refcnt) {
7016 const NODE *
const coverage_node = node_body ? node_body : node;
7017 add_trace_branch_coverage(
7020 nd_code_loc(coverage_node),
7021 nd_node_id(coverage_node),
7023 type == NODE_IF ?
"then" :
"else",
7025 end_label = NEW_LABEL(line);
7026 ADD_INSNL(then_seq, line_node, jump, end_label);
7028 ADD_INSN(then_seq, line_node, pop);
7031 ADD_SEQ(ret, then_seq);
7034 if (else_label->refcnt) {
7035 ADD_LABEL(ret, else_label);
7037 DECL_ANCHOR(else_seq);
7038 INIT_ANCHOR(else_seq);
7039 CHECK(COMPILE_(else_seq,
"else", node_else, popped));
7041 if (then_label->refcnt) {
7042 const NODE *
const coverage_node = node_else ? node_else : node;
7043 add_trace_branch_coverage(
7046 nd_code_loc(coverage_node),
7047 nd_node_id(coverage_node),
7049 type == NODE_IF ?
"else" :
"then",
7052 ADD_SEQ(ret, else_seq);
7056 ADD_LABEL(ret, end_label);
/* NOTE(review): fragment of compile_case — `case expr; when …` with a head
 * expression.  Builds parallel cond/body sequences per `when` clause,
 * collecting optimizable literals into a CDHASH so the whole dispatch can
 * become a single opt_case_dispatch when every clause is special-literal;
 * otherwise falls through to the sequential ===/branchif conditions, then
 * the else arm (explicit or implicit nil).  Loop header lines, branch_id
 * bookkeeping and the return are missing; code left byte-identical. */
7066 const NODE *node = orig_node;
7067 LABEL *endlabel, *elselabel;
7069 DECL_ANCHOR(body_seq);
7070 DECL_ANCHOR(cond_seq);
7071 int only_special_literals = 1;
7072 VALUE literals = rb_hash_new_with_size_and_type(0, 0, &cdhash_type);
7074 enum node_type
type;
7075 const NODE *line_node;
7080 INIT_ANCHOR(body_seq);
7081 INIT_ANCHOR(cond_seq);
7083 CHECK(COMPILE(head,
"case base", RNODE_CASE(node)->nd_head));
7085 branches = decl_branch_base(iseq, PTR2NUM(node), nd_code_loc(node),
"case");
7087 node = RNODE_CASE(node)->nd_body;
7088 EXPECT_NODE(
"NODE_CASE", node, NODE_WHEN, COMPILE_NG);
7089 type = nd_type(node);
7090 line = nd_line(node);
7093 endlabel = NEW_LABEL(line);
7094 elselabel = NEW_LABEL(line);
7098 while (
type == NODE_WHEN) {
7101 l1 = NEW_LABEL(line);
7102 ADD_LABEL(body_seq, l1);
7103 ADD_INSN(body_seq, line_node, pop);
7105 const NODE *
const coverage_node = RNODE_WHEN(node)->nd_body ? RNODE_WHEN(node)->nd_body : node;
7106 add_trace_branch_coverage(
7109 nd_code_loc(coverage_node),
7110 nd_node_id(coverage_node),
7115 CHECK(COMPILE_(body_seq,
"when body", RNODE_WHEN(node)->nd_body, popped));
7116 ADD_INSNL(body_seq, line_node, jump, endlabel);
7118 vals = RNODE_WHEN(node)->nd_head;
7120 switch (nd_type(vals)) {
7122 only_special_literals = when_vals(iseq, cond_seq, vals, l1, only_special_literals, literals);
7123 if (only_special_literals < 0)
return COMPILE_NG;
7128 only_special_literals = 0;
7129 CHECK(when_splat_vals(iseq, cond_seq, vals, l1, only_special_literals, literals));
7132 UNKNOWN_NODE(
"NODE_CASE", vals, COMPILE_NG);
7136 EXPECT_NODE_NONULL(
"NODE_CASE", node, NODE_LIST, COMPILE_NG);
7139 node = RNODE_WHEN(node)->nd_next;
7143 type = nd_type(node);
7144 line = nd_line(node);
7149 ADD_LABEL(cond_seq, elselabel);
7150 ADD_INSN(cond_seq, line_node, pop);
7151 add_trace_branch_coverage(iseq, cond_seq, nd_code_loc(node), nd_node_id(node), branch_id,
"else", branches);
7152 CHECK(COMPILE_(cond_seq,
"else", node, popped));
7153 ADD_INSNL(cond_seq, line_node, jump, endlabel);
7156 debugs(
"== else (implicit)\n");
7157 ADD_LABEL(cond_seq, elselabel);
7158 ADD_INSN(cond_seq, orig_node, pop);
7159 add_trace_branch_coverage(iseq, cond_seq, nd_code_loc(orig_node), nd_node_id(orig_node), branch_id,
"else", branches);
7161 ADD_INSN(cond_seq, orig_node, putnil);
7163 ADD_INSNL(cond_seq, orig_node, jump, endlabel);
7166 if (only_special_literals && ISEQ_COMPILE_DATA(iseq)->option->specialized_instruction) {
7167 ADD_INSN(ret, orig_node, dup);
7168 ADD_INSN2(ret, orig_node, opt_case_dispatch, literals, elselabel);
7170 LABEL_REF(elselabel);
7173 ADD_SEQ(ret, cond_seq);
7174 ADD_SEQ(ret, body_seq);
7175 ADD_LABEL(ret, endlabel);
/* NOTE(review): fragment of compile_case2 — headless `case; when cond …`.
 * Each `when` compiles its value list as plain branch conditions (or a
 * splat via checkmatch WHEN|ARRAY), body sequences jump to a shared end
 * label, and the trailing else (or implicit nil) is compiled inline.
 * Loop/branch bookkeeping lines and the return are missing; code left
 * byte-identical. */
7184 const NODE *node = RNODE_CASE2(orig_node)->nd_body;
7186 DECL_ANCHOR(body_seq);
7190 branches = decl_branch_base(iseq, PTR2NUM(orig_node), nd_code_loc(orig_node),
"case");
7192 INIT_ANCHOR(body_seq);
7193 endlabel = NEW_LABEL(nd_line(node));
7195 while (node && nd_type_p(node, NODE_WHEN)) {
7196 const int line = nd_line(node);
7197 LABEL *l1 = NEW_LABEL(line);
7198 ADD_LABEL(body_seq, l1);
7200 const NODE *
const coverage_node = RNODE_WHEN(node)->nd_body ? RNODE_WHEN(node)->nd_body : node;
7201 add_trace_branch_coverage(
7204 nd_code_loc(coverage_node),
7205 nd_node_id(coverage_node),
7210 CHECK(COMPILE_(body_seq,
"when", RNODE_WHEN(node)->nd_body, popped));
7211 ADD_INSNL(body_seq, node, jump, endlabel);
7213 vals = RNODE_WHEN(node)->nd_head;
7215 EXPECT_NODE_NONULL(
"NODE_WHEN", node, NODE_LIST, COMPILE_NG);
7217 switch (nd_type(vals)) {
7221 val = RNODE_LIST(vals)->nd_head;
7222 lnext = NEW_LABEL(nd_line(val));
7223 debug_compile(
"== when2\n", (
void)0);
7224 CHECK(compile_branch_condition(iseq, ret, val, l1, lnext));
7225 ADD_LABEL(ret, lnext);
7226 vals = RNODE_LIST(vals)->nd_next;
7232 ADD_INSN(ret, vals, putnil);
7233 CHECK(COMPILE(ret,
"when2/cond splat", vals));
7234 ADD_INSN1(ret, vals, checkmatch,
INT2FIX(VM_CHECKMATCH_TYPE_WHEN | VM_CHECKMATCH_ARRAY));
7235 ADD_INSNL(ret, vals, branchif, l1);
7238 UNKNOWN_NODE(
"NODE_WHEN", vals, COMPILE_NG);
7240 node = RNODE_WHEN(node)->nd_next;
7243 const NODE *
const coverage_node = node ? node : orig_node;
7244 add_trace_branch_coverage(
7247 nd_code_loc(coverage_node),
7248 nd_node_id(coverage_node),
7252 CHECK(COMPILE_(ret,
"else", node, popped));
7253 ADD_INSNL(ret, orig_node, jump, endlabel);
7255 ADD_SEQ(ret, body_seq);
7256 ADD_LABEL(ret, endlabel);
/*
 * Forward declarations for the helper routines used by the case/in
 * (pattern matching) compiler, iseq_compile_pattern_each() below.
 * Shared parameter conventions, as used at the call sites in this file:
 *  - ret: instruction anchor that generated bytecode is appended to
 *  - node: the pattern AST node being compiled
 *  - base_index: current extra stack depth; the CASE3_BI_OFFSET_* slots
 *    defined below are addressed relative to it via topn/setn
 *  - in_single_pattern: when true, extra instructions are emitted to
 *    record a human-readable failure reason (see the *_errmsg helpers)
 */
/* Compile `node` so execution falls through on a match and jumps to
 * `unmatched` otherwise (wrapper over iseq_compile_pattern_each; the
 * definition appears later in this file). */
7260static int iseq_compile_pattern_match(
 rb_iseq_t *iseq,
 LINK_ANCHOR *
 const ret,
 const NODE *
 const node,
 LABEL *unmatched,
 bool in_single_pattern,
 bool in_alt_pattern,
 int base_index,
 bool use_deconstructed_cache);
/* Emit the `Const === matchee` check for patterns that carry a constant
 * (nd_pconst); jumps to `match_failed` when the checkmatch fails. */
7262static int iseq_compile_pattern_constant(
 rb_iseq_t *iseq,
 LINK_ANCHOR *
 const ret,
 const NODE *
 const node,
 LABEL *match_failed,
 bool in_single_pattern,
 int base_index);
/* Emit the respond_to?(:deconstruct) probe and the #deconstruct call
 * for array/find patterns, optionally caching the result in the
 * DECONSTRUCTED_CACHE stack slot (use_deconstructed_cache). */
7263static int iseq_compile_array_deconstruct(
 rb_iseq_t *iseq,
 LINK_ANCHOR *
 const ret,
 const NODE *
 const node,
 LABEL *deconstruct,
 LABEL *deconstructed,
 LABEL *match_failed,
 LABEL *type_error,
 bool in_single_pattern,
 int base_index,
 bool use_deconstructed_cache);
/* Store a sprintf-formatted failure message (errmsg applied to the
 * matchee) into the ERROR_STRING slot for single-pattern error
 * reporting. */
7264static int iseq_compile_pattern_set_general_errmsg(
 rb_iseq_t *iseq,
 LINK_ANCHOR *
 const ret,
 const NODE *
 const node,
 VALUE errmsg,
 int base_index);
/* Like the general variant, but the message also interpolates the
 * matchee's actual length and the expected pattern_length. */
7265static int iseq_compile_pattern_set_length_errmsg(
 rb_iseq_t *iseq,
 LINK_ANCHOR *
 const ret,
 const NODE *
 const node,
 VALUE errmsg,
 VALUE pattern_length,
 int base_index);
/* Store the "%p === %p does not return true" failure message after a
 * checkmatch-based test. */
7266static int iseq_compile_pattern_set_eqq_errmsg(
 rb_iseq_t *iseq,
 LINK_ANCHOR *
 const ret,
 const NODE *
 const node,
 int base_index);
/*
 * Stack-slot offsets (relative to base_index) of the bookkeeping values
 * kept on the VM stack while compiling a case/in statement:
 * the cached #deconstruct result, the formatted error string, and the
 * key-error flag/matchee/key used to raise NoMatchingPatternKeyError.
 * #undef'd again at the end of the case/in section.
 */
7268#define CASE3_BI_OFFSET_DECONSTRUCTED_CACHE 0
7269#define CASE3_BI_OFFSET_ERROR_STRING 1
7270#define CASE3_BI_OFFSET_KEY_ERROR_P 2
7271#define CASE3_BI_OFFSET_KEY_ERROR_MATCHEE 3
7272#define CASE3_BI_OFFSET_KEY_ERROR_KEY 4
7275iseq_compile_pattern_each(
rb_iseq_t *iseq,
LINK_ANCHOR *
const ret,
const NODE *
const node,
LABEL *matched,
LABEL *unmatched,
bool in_single_pattern,
bool in_alt_pattern,
int base_index,
bool use_deconstructed_cache)
7277 const int line = nd_line(node);
7278 const NODE *line_node = node;
7280 switch (nd_type(node)) {
7334 const NODE *args = RNODE_ARYPTN(node)->pre_args;
7335 const int pre_args_num = RNODE_ARYPTN(node)->pre_args ?
rb_long2int(RNODE_LIST(RNODE_ARYPTN(node)->pre_args)->as.nd_alen) : 0;
7336 const int post_args_num = RNODE_ARYPTN(node)->post_args ?
rb_long2int(RNODE_LIST(RNODE_ARYPTN(node)->post_args)->as.nd_alen) : 0;
7338 const int min_argc = pre_args_num + post_args_num;
7339 const int use_rest_num = RNODE_ARYPTN(node)->rest_arg && (NODE_NAMED_REST_P(RNODE_ARYPTN(node)->rest_arg) ||
7340 (!NODE_NAMED_REST_P(RNODE_ARYPTN(node)->rest_arg) && post_args_num > 0));
7342 LABEL *match_failed, *type_error, *deconstruct, *deconstructed;
7344 match_failed = NEW_LABEL(line);
7345 type_error = NEW_LABEL(line);
7346 deconstruct = NEW_LABEL(line);
7347 deconstructed = NEW_LABEL(line);
7350 ADD_INSN1(ret, line_node, putobject,
INT2FIX(0));
7351 ADD_INSN(ret, line_node, swap);
7357 CHECK(iseq_compile_pattern_constant(iseq, ret, node, match_failed, in_single_pattern, base_index));
7359 CHECK(iseq_compile_array_deconstruct(iseq, ret, node, deconstruct, deconstructed, match_failed, type_error, in_single_pattern, base_index, use_deconstructed_cache));
7361 ADD_INSN(ret, line_node, dup);
7362 ADD_SEND(ret, line_node, idLength,
INT2FIX(0));
7363 ADD_INSN1(ret, line_node, putobject,
INT2FIX(min_argc));
7364 ADD_SEND(ret, line_node, RNODE_ARYPTN(node)->rest_arg ? idGE : idEq,
INT2FIX(1));
7365 if (in_single_pattern) {
7366 CHECK(iseq_compile_pattern_set_length_errmsg(iseq, ret, node,
7367 RNODE_ARYPTN(node)->rest_arg ? rb_fstring_lit(
"%p length mismatch (given %p, expected %p+)") :
7368 rb_fstring_lit(
"%p length mismatch (given %p, expected %p)"),
7369 INT2FIX(min_argc), base_index + 1 ));
7371 ADD_INSNL(ret, line_node, branchunless, match_failed);
7373 for (i = 0; i < pre_args_num; i++) {
7374 ADD_INSN(ret, line_node, dup);
7375 ADD_INSN1(ret, line_node, putobject,
INT2FIX(i));
7376 ADD_SEND(ret, line_node, idAREF,
INT2FIX(1));
7377 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_LIST(args)->nd_head, match_failed, in_single_pattern, in_alt_pattern, base_index + 1 ,
false));
7378 args = RNODE_LIST(args)->nd_next;
7381 if (RNODE_ARYPTN(node)->rest_arg) {
7382 if (NODE_NAMED_REST_P(RNODE_ARYPTN(node)->rest_arg)) {
7383 ADD_INSN(ret, line_node, dup);
7384 ADD_INSN1(ret, line_node, putobject,
INT2FIX(pre_args_num));
7385 ADD_INSN1(ret, line_node, topn,
INT2FIX(1));
7386 ADD_SEND(ret, line_node, idLength,
INT2FIX(0));
7387 ADD_INSN1(ret, line_node, putobject,
INT2FIX(min_argc));
7388 ADD_SEND(ret, line_node, idMINUS,
INT2FIX(1));
7389 ADD_INSN1(ret, line_node, setn,
INT2FIX(4));
7390 ADD_SEND(ret, line_node, idAREF,
INT2FIX(2));
7392 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_ARYPTN(node)->rest_arg, match_failed, in_single_pattern, in_alt_pattern, base_index + 1 ,
false));
7395 if (post_args_num > 0) {
7396 ADD_INSN(ret, line_node, dup);
7397 ADD_SEND(ret, line_node, idLength,
INT2FIX(0));
7398 ADD_INSN1(ret, line_node, putobject,
INT2FIX(min_argc));
7399 ADD_SEND(ret, line_node, idMINUS,
INT2FIX(1));
7400 ADD_INSN1(ret, line_node, setn,
INT2FIX(2));
7401 ADD_INSN(ret, line_node, pop);
7406 args = RNODE_ARYPTN(node)->post_args;
7407 for (i = 0; i < post_args_num; i++) {
7408 ADD_INSN(ret, line_node, dup);
7410 ADD_INSN1(ret, line_node, putobject,
INT2FIX(pre_args_num + i));
7411 ADD_INSN1(ret, line_node, topn,
INT2FIX(3));
7412 ADD_SEND(ret, line_node, idPLUS,
INT2FIX(1));
7414 ADD_SEND(ret, line_node, idAREF,
INT2FIX(1));
7415 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_LIST(args)->nd_head, match_failed, in_single_pattern, in_alt_pattern, base_index + 1 ,
false));
7416 args = RNODE_LIST(args)->nd_next;
7419 ADD_INSN(ret, line_node, pop);
7421 ADD_INSN(ret, line_node, pop);
7423 ADD_INSNL(ret, line_node, jump, matched);
7424 ADD_INSN(ret, line_node, putnil);
7426 ADD_INSN(ret, line_node, putnil);
7429 ADD_LABEL(ret, type_error);
7430 ADD_INSN1(ret, line_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
7432 ADD_INSN1(ret, line_node, putobject, rb_fstring_lit(
"deconstruct must return Array"));
7433 ADD_SEND(ret, line_node, id_core_raise,
INT2FIX(2));
7434 ADD_INSN(ret, line_node, pop);
7436 ADD_LABEL(ret, match_failed);
7437 ADD_INSN(ret, line_node, pop);
7439 ADD_INSN(ret, line_node, pop);
7441 ADD_INSNL(ret, line_node, jump, unmatched);
7494 const NODE *args = RNODE_FNDPTN(node)->args;
7495 const int args_num = RNODE_FNDPTN(node)->args ?
rb_long2int(RNODE_LIST(RNODE_FNDPTN(node)->args)->as.nd_alen) : 0;
7497 LABEL *match_failed, *type_error, *deconstruct, *deconstructed;
7498 match_failed = NEW_LABEL(line);
7499 type_error = NEW_LABEL(line);
7500 deconstruct = NEW_LABEL(line);
7501 deconstructed = NEW_LABEL(line);
7503 CHECK(iseq_compile_pattern_constant(iseq, ret, node, match_failed, in_single_pattern, base_index));
7505 CHECK(iseq_compile_array_deconstruct(iseq, ret, node, deconstruct, deconstructed, match_failed, type_error, in_single_pattern, base_index, use_deconstructed_cache));
7507 ADD_INSN(ret, line_node, dup);
7508 ADD_SEND(ret, line_node, idLength,
INT2FIX(0));
7509 ADD_INSN1(ret, line_node, putobject,
INT2FIX(args_num));
7510 ADD_SEND(ret, line_node, idGE,
INT2FIX(1));
7511 if (in_single_pattern) {
7512 CHECK(iseq_compile_pattern_set_length_errmsg(iseq, ret, node, rb_fstring_lit(
"%p length mismatch (given %p, expected %p+)"),
INT2FIX(args_num), base_index + 1 ));
7514 ADD_INSNL(ret, line_node, branchunless, match_failed);
7517 LABEL *while_begin = NEW_LABEL(nd_line(node));
7518 LABEL *next_loop = NEW_LABEL(nd_line(node));
7519 LABEL *find_succeeded = NEW_LABEL(line);
7520 LABEL *find_failed = NEW_LABEL(nd_line(node));
7523 ADD_INSN(ret, line_node, dup);
7524 ADD_SEND(ret, line_node, idLength,
INT2FIX(0));
7526 ADD_INSN(ret, line_node, dup);
7527 ADD_INSN1(ret, line_node, putobject,
INT2FIX(args_num));
7528 ADD_SEND(ret, line_node, idMINUS,
INT2FIX(1));
7530 ADD_INSN1(ret, line_node, putobject,
INT2FIX(0));
7532 ADD_LABEL(ret, while_begin);
7534 ADD_INSN(ret, line_node, dup);
7535 ADD_INSN1(ret, line_node, topn,
INT2FIX(2));
7536 ADD_SEND(ret, line_node, idLE,
INT2FIX(1));
7537 ADD_INSNL(ret, line_node, branchunless, find_failed);
7539 for (j = 0; j < args_num; j++) {
7540 ADD_INSN1(ret, line_node, topn,
INT2FIX(3));
7541 ADD_INSN1(ret, line_node, topn,
INT2FIX(1));
7543 ADD_INSN1(ret, line_node, putobject,
INT2FIX(j));
7544 ADD_SEND(ret, line_node, idPLUS,
INT2FIX(1));
7546 ADD_SEND(ret, line_node, idAREF,
INT2FIX(1));
7548 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_LIST(args)->nd_head, next_loop, in_single_pattern, in_alt_pattern, base_index + 4 ,
false));
7549 args = RNODE_LIST(args)->nd_next;
7552 if (NODE_NAMED_REST_P(RNODE_FNDPTN(node)->pre_rest_arg)) {
7553 ADD_INSN1(ret, line_node, topn,
INT2FIX(3));
7554 ADD_INSN1(ret, line_node, putobject,
INT2FIX(0));
7555 ADD_INSN1(ret, line_node, topn,
INT2FIX(2));
7556 ADD_SEND(ret, line_node, idAREF,
INT2FIX(2));
7557 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_FNDPTN(node)->pre_rest_arg, find_failed, in_single_pattern, in_alt_pattern, base_index + 4 ,
false));
7559 if (NODE_NAMED_REST_P(RNODE_FNDPTN(node)->post_rest_arg)) {
7560 ADD_INSN1(ret, line_node, topn,
INT2FIX(3));
7561 ADD_INSN1(ret, line_node, topn,
INT2FIX(1));
7562 ADD_INSN1(ret, line_node, putobject,
INT2FIX(args_num));
7563 ADD_SEND(ret, line_node, idPLUS,
INT2FIX(1));
7564 ADD_INSN1(ret, line_node, topn,
INT2FIX(3));
7565 ADD_SEND(ret, line_node, idAREF,
INT2FIX(2));
7566 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_FNDPTN(node)->post_rest_arg, find_failed, in_single_pattern, in_alt_pattern, base_index + 4 ,
false));
7568 ADD_INSNL(ret, line_node, jump, find_succeeded);
7570 ADD_LABEL(ret, next_loop);
7571 ADD_INSN1(ret, line_node, putobject,
INT2FIX(1));
7572 ADD_SEND(ret, line_node, idPLUS,
INT2FIX(1));
7573 ADD_INSNL(ret, line_node, jump, while_begin);
7575 ADD_LABEL(ret, find_failed);
7576 ADD_INSN1(ret, line_node, adjuststack,
INT2FIX(3));
7577 if (in_single_pattern) {
7578 ADD_INSN1(ret, line_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
7579 ADD_INSN1(ret, line_node, putobject, rb_fstring_lit(
"%p does not match to find pattern"));
7580 ADD_INSN1(ret, line_node, topn,
INT2FIX(2));
7581 ADD_SEND(ret, line_node, id_core_sprintf,
INT2FIX(2));
7582 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_ERROR_STRING + 1 ));
7584 ADD_INSN1(ret, line_node, putobject,
Qfalse);
7585 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_P + 2 ));
7587 ADD_INSN(ret, line_node, pop);
7588 ADD_INSN(ret, line_node, pop);
7590 ADD_INSNL(ret, line_node, jump, match_failed);
7591 ADD_INSN1(ret, line_node, dupn,
INT2FIX(3));
7593 ADD_LABEL(ret, find_succeeded);
7594 ADD_INSN1(ret, line_node, adjuststack,
INT2FIX(3));
7597 ADD_INSN(ret, line_node, pop);
7598 ADD_INSNL(ret, line_node, jump, matched);
7599 ADD_INSN(ret, line_node, putnil);
7601 ADD_LABEL(ret, type_error);
7602 ADD_INSN1(ret, line_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
7604 ADD_INSN1(ret, line_node, putobject, rb_fstring_lit(
"deconstruct must return Array"));
7605 ADD_SEND(ret, line_node, id_core_raise,
INT2FIX(2));
7606 ADD_INSN(ret, line_node, pop);
7608 ADD_LABEL(ret, match_failed);
7609 ADD_INSN(ret, line_node, pop);
7610 ADD_INSNL(ret, line_node, jump, unmatched);
7674 LABEL *match_failed, *type_error;
7677 match_failed = NEW_LABEL(line);
7678 type_error = NEW_LABEL(line);
7680 if (RNODE_HSHPTN(node)->nd_pkwargs && !RNODE_HSHPTN(node)->nd_pkwrestarg) {
7681 const NODE *kw_args = RNODE_HASH(RNODE_HSHPTN(node)->nd_pkwargs)->nd_head;
7682 keys =
rb_ary_new_capa(kw_args ? RNODE_LIST(kw_args)->as.nd_alen/2 : 0);
7684 rb_ary_push(keys, get_symbol_value(iseq, RNODE_LIST(kw_args)->nd_head));
7685 kw_args = RNODE_LIST(RNODE_LIST(kw_args)->nd_next)->nd_next;
7689 CHECK(iseq_compile_pattern_constant(iseq, ret, node, match_failed, in_single_pattern, base_index));
7691 ADD_INSN(ret, line_node, dup);
7692 ADD_INSN1(ret, line_node, putobject,
ID2SYM(rb_intern(
"deconstruct_keys")));
7693 ADD_SEND(ret, line_node, idRespond_to,
INT2FIX(1));
7694 if (in_single_pattern) {
7695 CHECK(iseq_compile_pattern_set_general_errmsg(iseq, ret, node, rb_fstring_lit(
"%p does not respond to #deconstruct_keys"), base_index + 1 ));
7697 ADD_INSNL(ret, line_node, branchunless, match_failed);
7700 ADD_INSN(ret, line_node, putnil);
7703 RB_OBJ_SET_FROZEN_SHAREABLE(keys);
7704 ADD_INSN1(ret, line_node, duparray, keys);
7707 ADD_SEND(ret, line_node, rb_intern(
"deconstruct_keys"),
INT2FIX(1));
7709 ADD_INSN(ret, line_node, dup);
7711 ADD_INSNL(ret, line_node, branchunless, type_error);
7713 if (RNODE_HSHPTN(node)->nd_pkwrestarg) {
7714 ADD_SEND(ret, line_node, rb_intern(
"dup"),
INT2FIX(0));
7717 if (RNODE_HSHPTN(node)->nd_pkwargs) {
7721 args = RNODE_HASH(RNODE_HSHPTN(node)->nd_pkwargs)->nd_head;
7723 DECL_ANCHOR(match_values);
7724 INIT_ANCHOR(match_values);
7725 keys_num =
rb_long2int(RNODE_LIST(args)->as.nd_alen) / 2;
7726 for (i = 0; i < keys_num; i++) {
7727 NODE *key_node = RNODE_LIST(args)->nd_head;
7728 NODE *value_node = RNODE_LIST(RNODE_LIST(args)->nd_next)->nd_head;
7729 VALUE key = get_symbol_value(iseq, key_node);
7731 ADD_INSN(ret, line_node, dup);
7732 ADD_INSN1(ret, line_node, putobject, key);
7733 ADD_SEND(ret, line_node, rb_intern(
"key?"),
INT2FIX(1));
7734 if (in_single_pattern) {
7735 LABEL *match_succeeded;
7736 match_succeeded = NEW_LABEL(line);
7738 ADD_INSN(ret, line_node, dup);
7739 ADD_INSNL(ret, line_node, branchif, match_succeeded);
7742 ADD_INSN1(ret, line_node, putobject, RB_OBJ_SET_SHAREABLE(str));
7743 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_ERROR_STRING + 2 ));
7744 ADD_INSN1(ret, line_node, putobject,
Qtrue);
7745 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_P + 3 ));
7746 ADD_INSN1(ret, line_node, topn,
INT2FIX(3));
7747 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_MATCHEE + 4 ));
7748 ADD_INSN1(ret, line_node, putobject, key);
7749 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_KEY + 5 ));
7751 ADD_INSN1(ret, line_node, adjuststack,
INT2FIX(4));
7753 ADD_LABEL(ret, match_succeeded);
7755 ADD_INSNL(ret, line_node, branchunless, match_failed);
7757 ADD_INSN(match_values, line_node, dup);
7758 ADD_INSN1(match_values, line_node, putobject, key);
7759 ADD_SEND(match_values, line_node, RNODE_HSHPTN(node)->nd_pkwrestarg ? rb_intern(
"delete") : idAREF,
INT2FIX(1));
7760 CHECK(iseq_compile_pattern_match(iseq, match_values, value_node, match_failed, in_single_pattern, in_alt_pattern, base_index + 1 ,
false));
7761 args = RNODE_LIST(RNODE_LIST(args)->nd_next)->nd_next;
7763 ADD_SEQ(ret, match_values);
7767 ADD_INSN(ret, line_node, dup);
7768 ADD_SEND(ret, line_node, idEmptyP,
INT2FIX(0));
7769 if (in_single_pattern) {
7770 CHECK(iseq_compile_pattern_set_general_errmsg(iseq, ret, node, rb_fstring_lit(
"%p is not empty"), base_index + 1 ));
7772 ADD_INSNL(ret, line_node, branchunless, match_failed);
7775 if (RNODE_HSHPTN(node)->nd_pkwrestarg) {
7776 if (RNODE_HSHPTN(node)->nd_pkwrestarg == NODE_SPECIAL_NO_REST_KEYWORD) {
7777 ADD_INSN(ret, line_node, dup);
7778 ADD_SEND(ret, line_node, idEmptyP,
INT2FIX(0));
7779 if (in_single_pattern) {
7780 CHECK(iseq_compile_pattern_set_general_errmsg(iseq, ret, node, rb_fstring_lit(
"rest of %p is not empty"), base_index + 1 ));
7782 ADD_INSNL(ret, line_node, branchunless, match_failed);
7785 ADD_INSN(ret, line_node, dup);
7786 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_HSHPTN(node)->nd_pkwrestarg, match_failed, in_single_pattern, in_alt_pattern, base_index + 1 ,
false));
7790 ADD_INSN(ret, line_node, pop);
7791 ADD_INSNL(ret, line_node, jump, matched);
7792 ADD_INSN(ret, line_node, putnil);
7794 ADD_LABEL(ret, type_error);
7795 ADD_INSN1(ret, line_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
7797 ADD_INSN1(ret, line_node, putobject, rb_fstring_lit(
"deconstruct_keys must return Hash"));
7798 ADD_SEND(ret, line_node, id_core_raise,
INT2FIX(2));
7799 ADD_INSN(ret, line_node, pop);
7801 ADD_LABEL(ret, match_failed);
7802 ADD_INSN(ret, line_node, pop);
7803 ADD_INSNL(ret, line_node, jump, unmatched);
7812 case NODE_IMAGINARY:
7840 CHECK(COMPILE(ret,
"case in literal", node));
7841 if (in_single_pattern) {
7842 ADD_INSN1(ret, line_node, dupn,
INT2FIX(2));
7844 ADD_INSN1(ret, line_node, checkmatch,
INT2FIX(VM_CHECKMATCH_TYPE_CASE));
7845 if (in_single_pattern) {
7846 CHECK(iseq_compile_pattern_set_eqq_errmsg(iseq, ret, node, base_index + 2 ));
7848 ADD_INSNL(ret, line_node, branchif, matched);
7849 ADD_INSNL(ret, line_node, jump, unmatched);
7853 ID id = RNODE_LASGN(node)->nd_vid;
7854 int idx = ISEQ_BODY(body->local_iseq)->local_table_size - get_local_var_idx(iseq,
id);
7856 if (in_alt_pattern) {
7857 const char *name = rb_id2name(
id);
7858 if (name && strlen(name) > 0 && name[0] !=
'_') {
7859 COMPILE_ERROR(ERROR_ARGS
"illegal variable in alternative pattern (%"PRIsVALUE
")",
7865 ADD_SETLOCAL(ret, line_node, idx, get_lvar_level(iseq));
7866 ADD_INSNL(ret, line_node, jump, matched);
7871 ID id = RNODE_DASGN(node)->nd_vid;
7873 idx = get_dyna_var_idx(iseq,
id, &lv, &ls);
7875 if (in_alt_pattern) {
7876 const char *name = rb_id2name(
id);
7877 if (name && strlen(name) > 0 && name[0] !=
'_') {
7878 COMPILE_ERROR(ERROR_ARGS
"illegal variable in alternative pattern (%"PRIsVALUE
")",
7885 COMPILE_ERROR(ERROR_ARGS
"NODE_DASGN: unknown id (%"PRIsVALUE
")",
7889 ADD_SETLOCAL(ret, line_node, ls - idx, lv);
7890 ADD_INSNL(ret, line_node, jump, matched);
7895 LABEL *match_failed;
7896 match_failed = unmatched;
7897 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_IF(node)->nd_body, unmatched, in_single_pattern, in_alt_pattern, base_index, use_deconstructed_cache));
7898 CHECK(COMPILE(ret,
"case in if", RNODE_IF(node)->nd_cond));
7899 if (in_single_pattern) {
7900 LABEL *match_succeeded;
7901 match_succeeded = NEW_LABEL(line);
7903 ADD_INSN(ret, line_node, dup);
7904 if (nd_type_p(node, NODE_IF)) {
7905 ADD_INSNL(ret, line_node, branchif, match_succeeded);
7908 ADD_INSNL(ret, line_node, branchunless, match_succeeded);
7911 ADD_INSN1(ret, line_node, putobject, rb_fstring_lit(
"guard clause does not return true"));
7912 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_ERROR_STRING + 1 ));
7913 ADD_INSN1(ret, line_node, putobject,
Qfalse);
7914 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_P + 2 ));
7916 ADD_INSN(ret, line_node, pop);
7917 ADD_INSN(ret, line_node, pop);
7919 ADD_LABEL(ret, match_succeeded);
7921 if (nd_type_p(node, NODE_IF)) {
7922 ADD_INSNL(ret, line_node, branchunless, match_failed);
7925 ADD_INSNL(ret, line_node, branchif, match_failed);
7927 ADD_INSNL(ret, line_node, jump, matched);
7932 LABEL *match_failed;
7933 match_failed = NEW_LABEL(line);
7935 n = RNODE_HASH(node)->nd_head;
7936 if (! (nd_type_p(n, NODE_LIST) && RNODE_LIST(n)->as.nd_alen == 2)) {
7937 COMPILE_ERROR(ERROR_ARGS
"unexpected node");
7941 ADD_INSN(ret, line_node, dup);
7942 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_LIST(n)->nd_head, match_failed, in_single_pattern, in_alt_pattern, base_index + 1 , use_deconstructed_cache));
7943 CHECK(iseq_compile_pattern_each(iseq, ret, RNODE_LIST(RNODE_LIST(n)->nd_next)->nd_head, matched, match_failed, in_single_pattern, in_alt_pattern, base_index,
false));
7944 ADD_INSN(ret, line_node, putnil);
7946 ADD_LABEL(ret, match_failed);
7947 ADD_INSN(ret, line_node, pop);
7948 ADD_INSNL(ret, line_node, jump, unmatched);
7952 LABEL *match_succeeded, *fin;
7953 match_succeeded = NEW_LABEL(line);
7954 fin = NEW_LABEL(line);
7956 ADD_INSN(ret, line_node, dup);
7957 CHECK(iseq_compile_pattern_each(iseq, ret, RNODE_OR(node)->nd_1st, match_succeeded, fin, in_single_pattern,
true, base_index + 1 , use_deconstructed_cache));
7958 ADD_LABEL(ret, match_succeeded);
7959 ADD_INSN(ret, line_node, pop);
7960 ADD_INSNL(ret, line_node, jump, matched);
7961 ADD_INSN(ret, line_node, putnil);
7962 ADD_LABEL(ret, fin);
7963 CHECK(iseq_compile_pattern_each(iseq, ret, RNODE_OR(node)->nd_2nd, matched, unmatched, in_single_pattern,
true, base_index, use_deconstructed_cache));
7967 UNKNOWN_NODE(
"NODE_IN", node, COMPILE_NG);
/*
 * Compile `node` as a pattern so that execution continues at the next
 * instruction when the pattern matches and jumps to `unmatched` when it
 * does not.  Thin wrapper: delegates to iseq_compile_pattern_each()
 * with a freshly created fall-through label as the "matched" target.
 * NOTE(review): extraction dropped the `static int` line, braces and
 * the return statement here — code lines kept verbatim.
 */
7973iseq_compile_pattern_match(
 rb_iseq_t *iseq,
 LINK_ANCHOR *
 const ret,
 const NODE *
 const node,
 LABEL *unmatched,
 bool in_single_pattern,
 bool in_alt_pattern,
 int base_index,
 bool use_deconstructed_cache)
 /* "fin" is where a successful match lands, i.e. straight-line flow. */
 7975 LABEL *fin = NEW_LABEL(nd_line(node));
 7976 CHECK(iseq_compile_pattern_each(iseq, ret, node, fin, unmatched, in_single_pattern, in_alt_pattern, base_index, use_deconstructed_cache));
 7977 ADD_LABEL(ret, fin);
/*
 * If the pattern carries a constant (e.g. `in Point[x, y]`), emit
 * `Const === matchee` via the checkmatch instruction and jump to
 * `match_failed` when it returns false.  When compiling a single
 * pattern, also record the "=== does not return true" error message
 * (base_index + 3 accounts for the values pushed just above).
 * NOTE(review): extraction dropped braces around the if-bodies here —
 * code lines kept verbatim.
 */
7982iseq_compile_pattern_constant(
 rb_iseq_t *iseq,
 LINK_ANCHOR *
 const ret,
 const NODE *
 const node,
 LABEL *match_failed,
 bool in_single_pattern,
 int base_index)
 7984 const NODE *line_node = node;
 7986 if (RNODE_ARYPTN(node)->nd_pconst) {
 /* stack: ..., matchee -> ..., matchee, matchee, Const */
 7987 ADD_INSN(ret, line_node, dup);
 7988 CHECK(COMPILE(ret,
"constant", RNODE_ARYPTN(node)->nd_pconst));
 7989 if (in_single_pattern) {
 /* keep copies of Const and matchee for the error message below */
 7990 ADD_INSN1(ret, line_node, dupn,
INT2FIX(2));
 7992 ADD_INSN1(ret, line_node, checkmatch,
INT2FIX(VM_CHECKMATCH_TYPE_CASE));
 7993 if (in_single_pattern) {
 7994 CHECK(iseq_compile_pattern_set_eqq_errmsg(iseq, ret, node, base_index + 3 ));
 7996 ADD_INSNL(ret, line_node, branchunless, match_failed);
/*
 * Convert the matchee into its #deconstruct array form for array/find
 * patterns.  When use_deconstructed_cache is set, a dedicated stack
 * slot (CASE3_BI_OFFSET_DECONSTRUCTED_CACHE) memoizes the result so a
 * matchee is deconstructed at most once across alternative patterns:
 * nil in the slot means "not tried yet", false means "does not respond
 * to #deconstruct", anything else is the cached array.
 * Jumps: match_failed when #deconstruct is absent, type_error when it
 * returns a non-array, deconstructed once the array is on the stack.
 * NOTE(review): extraction dropped braces/else lines here — code lines
 * kept verbatim.
 */
8003iseq_compile_array_deconstruct(
 rb_iseq_t *iseq,
 LINK_ANCHOR *
 const ret,
 const NODE *
 const node,
 LABEL *deconstruct,
 LABEL *deconstructed,
 LABEL *match_failed,
 LABEL *type_error,
 bool in_single_pattern,
 int base_index,
 bool use_deconstructed_cache)
 8005 const NODE *line_node = node;
 8009 if (use_deconstructed_cache) {
 /* nil cache slot -> not yet deconstructed; go do it */
 8011 ADD_INSN1(ret, line_node, topn,
INT2FIX(base_index + CASE3_BI_OFFSET_DECONSTRUCTED_CACHE));
 8012 ADD_INSNL(ret, line_node, branchnil, deconstruct);
 /* false cache slot -> known not to respond to #deconstruct */
 8015 ADD_INSN1(ret, line_node, topn,
INT2FIX(base_index + CASE3_BI_OFFSET_DECONSTRUCTED_CACHE));
 8016 ADD_INSNL(ret, line_node, branchunless, match_failed);
 /* otherwise reuse the cached deconstructed array */
 8019 ADD_INSN(ret, line_node, pop);
 8020 ADD_INSN1(ret, line_node, topn,
INT2FIX(base_index + CASE3_BI_OFFSET_DECONSTRUCTED_CACHE - 1 ));
 8021 ADD_INSNL(ret, line_node, jump, deconstructed);
 8024 ADD_INSNL(ret, line_node, jump, deconstruct);
 8027 ADD_LABEL(ret, deconstruct);
 8028 ADD_INSN(ret, line_node, dup);
 8029 ADD_INSN1(ret, line_node, putobject,
ID2SYM(rb_intern(
"deconstruct")));
 8030 ADD_SEND(ret, line_node, idRespond_to,
INT2FIX(1));
 8033 if (use_deconstructed_cache) {
 /* remember the respond_to? verdict (true/false) in the cache slot */
 8034 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_DECONSTRUCTED_CACHE + 1 ));
 8037 if (in_single_pattern) {
 8038 CHECK(iseq_compile_pattern_set_general_errmsg(iseq, ret, node, rb_fstring_lit(
"%p does not respond to #deconstruct"), base_index + 1 ));
 8041 ADD_INSNL(ret, line_node, branchunless, match_failed);
 8043 ADD_SEND(ret, line_node, rb_intern(
"deconstruct"),
INT2FIX(0));
 8046 if (use_deconstructed_cache) {
 /* cache the actual deconstructed value for later alternatives */
 8047 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_DECONSTRUCTED_CACHE));
 8050 ADD_INSN(ret, line_node, dup);
 /* non-array result from #deconstruct -> TypeError path */
 8052 ADD_INSNL(ret, line_node, branchunless, type_error);
 8054 ADD_LABEL(ret, deconstructed);
8070 const int line = nd_line(node);
8071 const NODE *line_node = node;
8072 LABEL *match_succeeded = NEW_LABEL(line);
8074 ADD_INSN(ret, line_node, dup);
8075 ADD_INSNL(ret, line_node, branchif, match_succeeded);
8077 ADD_INSN1(ret, line_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
8078 ADD_INSN1(ret, line_node, putobject, errmsg);
8079 ADD_INSN1(ret, line_node, topn,
INT2FIX(3));
8080 ADD_SEND(ret, line_node, id_core_sprintf,
INT2FIX(2));
8081 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_ERROR_STRING + 1 ));
8083 ADD_INSN1(ret, line_node, putobject,
Qfalse);
8084 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_P + 2 ));
8086 ADD_INSN(ret, line_node, pop);
8087 ADD_INSN(ret, line_node, pop);
8088 ADD_LABEL(ret, match_succeeded);
8104 const int line = nd_line(node);
8105 const NODE *line_node = node;
8106 LABEL *match_succeeded = NEW_LABEL(line);
8108 ADD_INSN(ret, line_node, dup);
8109 ADD_INSNL(ret, line_node, branchif, match_succeeded);
8111 ADD_INSN1(ret, line_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
8112 ADD_INSN1(ret, line_node, putobject, errmsg);
8113 ADD_INSN1(ret, line_node, topn,
INT2FIX(3));
8114 ADD_INSN(ret, line_node, dup);
8115 ADD_SEND(ret, line_node, idLength,
INT2FIX(0));
8116 ADD_INSN1(ret, line_node, putobject, pattern_length);
8117 ADD_SEND(ret, line_node, id_core_sprintf,
INT2FIX(4));
8118 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_ERROR_STRING + 1 ));
8120 ADD_INSN1(ret, line_node, putobject,
Qfalse);
8121 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_P + 2));
8123 ADD_INSN(ret, line_node, pop);
8124 ADD_INSN(ret, line_node, pop);
8125 ADD_LABEL(ret, match_succeeded);
/*
 * After a checkmatch-based test in single-pattern mode, format the
 * failure message "%p === %p does not return true" (with the pattern
 * and the matchee, fetched from known stack offsets) into the
 * ERROR_STRING slot and clear the KEY_ERROR_P flag, but only when the
 * test result on top of the stack is falsy; a truthy result skips
 * straight to match_succeeded.  The trailing setn/pop/pop restores the
 * stack shape expected by the caller.
 * NOTE(review): extraction dropped the `static int` line, braces and
 * return here — code lines kept verbatim.
 */
8131iseq_compile_pattern_set_eqq_errmsg(
 rb_iseq_t *iseq,
 LINK_ANCHOR *
 const ret,
 const NODE *
 const node,
 int base_index)
 8141 const int line = nd_line(node);
 8142 const NODE *line_node = node;
 8143 LABEL *match_succeeded = NEW_LABEL(line);
 8145 ADD_INSN(ret, line_node, dup);
 8146 ADD_INSNL(ret, line_node, branchif, match_succeeded);
 8148 ADD_INSN1(ret, line_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
 8149 ADD_INSN1(ret, line_node, putobject, rb_fstring_lit(
"%p === %p does not return true"));
 /* fetch the two operands of the failed === for the sprintf */
 8150 ADD_INSN1(ret, line_node, topn,
INT2FIX(3));
 8151 ADD_INSN1(ret, line_node, topn,
INT2FIX(5));
 8152 ADD_SEND(ret, line_node, id_core_sprintf,
INT2FIX(3));
 8153 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_ERROR_STRING + 1 ));
 /* this failure is not a key error */
 8155 ADD_INSN1(ret, line_node, putobject,
Qfalse);
 8156 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_P + 2 ));
 8158 ADD_INSN(ret, line_node, pop);
 8159 ADD_INSN(ret, line_node, pop);
 8161 ADD_LABEL(ret, match_succeeded);
 8162 ADD_INSN1(ret, line_node, setn,
INT2FIX(2));
 8163 ADD_INSN(ret, line_node, pop);
 8164 ADD_INSN(ret, line_node, pop);
8172 const NODE *pattern;
8173 const NODE *node = orig_node;
8174 LABEL *endlabel, *elselabel;
8176 DECL_ANCHOR(body_seq);
8177 DECL_ANCHOR(cond_seq);
8179 enum node_type
type;
8180 const NODE *line_node;
8183 bool single_pattern;
8186 INIT_ANCHOR(body_seq);
8187 INIT_ANCHOR(cond_seq);
8189 branches = decl_branch_base(iseq, PTR2NUM(node), nd_code_loc(node),
"case");
8191 node = RNODE_CASE3(node)->nd_body;
8192 EXPECT_NODE(
"NODE_CASE3", node, NODE_IN, COMPILE_NG);
8193 type = nd_type(node);
8194 line = nd_line(node);
8196 single_pattern = !RNODE_IN(node)->nd_next;
8198 endlabel = NEW_LABEL(line);
8199 elselabel = NEW_LABEL(line);
8201 if (single_pattern) {
8203 ADD_INSN(head, line_node, putnil);
8204 ADD_INSN(head, line_node, putnil);
8205 ADD_INSN1(head, line_node, putobject,
Qfalse);
8206 ADD_INSN(head, line_node, putnil);
8208 ADD_INSN(head, line_node, putnil);
8210 CHECK(COMPILE(head,
"case base", RNODE_CASE3(orig_node)->nd_head));
8214 while (
type == NODE_IN) {
8218 ADD_INSN(body_seq, line_node, putnil);
8220 l1 = NEW_LABEL(line);
8221 ADD_LABEL(body_seq, l1);
8222 ADD_INSN1(body_seq, line_node, adjuststack,
INT2FIX(single_pattern ? 6 : 2));
8224 const NODE *
const coverage_node = RNODE_IN(node)->nd_body ? RNODE_IN(node)->nd_body : node;
8225 add_trace_branch_coverage(
8228 nd_code_loc(coverage_node),
8229 nd_node_id(coverage_node),
8234 CHECK(COMPILE_(body_seq,
"in body", RNODE_IN(node)->nd_body, popped));
8235 ADD_INSNL(body_seq, line_node, jump, endlabel);
8237 pattern = RNODE_IN(node)->nd_head;
8239 int pat_line = nd_line(pattern);
8240 LABEL *next_pat = NEW_LABEL(pat_line);
8241 ADD_INSN (cond_seq, pattern, dup);
8243 CHECK(iseq_compile_pattern_each(iseq, cond_seq, pattern, l1, next_pat, single_pattern,
false, 2,
true));
8244 ADD_LABEL(cond_seq, next_pat);
8245 LABEL_UNREMOVABLE(next_pat);
8248 COMPILE_ERROR(ERROR_ARGS
"unexpected node");
8252 node = RNODE_IN(node)->nd_next;
8256 type = nd_type(node);
8257 line = nd_line(node);
8262 ADD_LABEL(cond_seq, elselabel);
8263 ADD_INSN(cond_seq, line_node, pop);
8264 ADD_INSN(cond_seq, line_node, pop);
8265 add_trace_branch_coverage(iseq, cond_seq, nd_code_loc(node), nd_node_id(node), branch_id,
"else", branches);
8266 CHECK(COMPILE_(cond_seq,
"else", node, popped));
8267 ADD_INSNL(cond_seq, line_node, jump, endlabel);
8268 ADD_INSN(cond_seq, line_node, putnil);
8270 ADD_INSN(cond_seq, line_node, putnil);
8274 debugs(
"== else (implicit)\n");
8275 ADD_LABEL(cond_seq, elselabel);
8276 add_trace_branch_coverage(iseq, cond_seq, nd_code_loc(orig_node), nd_node_id(orig_node), branch_id,
"else", branches);
8277 ADD_INSN1(cond_seq, orig_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
8279 if (single_pattern) {
8287 LABEL *key_error, *fin;
8290 key_error = NEW_LABEL(line);
8291 fin = NEW_LABEL(line);
8294 kw_arg->references = 0;
8295 kw_arg->keyword_len = 2;
8296 kw_arg->keywords[0] =
ID2SYM(rb_intern(
"matchee"));
8297 kw_arg->keywords[1] =
ID2SYM(rb_intern(
"key"));
8299 ADD_INSN1(cond_seq, orig_node, topn,
INT2FIX(CASE3_BI_OFFSET_KEY_ERROR_P + 2));
8300 ADD_INSNL(cond_seq, orig_node, branchif, key_error);
8302 ADD_INSN1(cond_seq, orig_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
8303 ADD_INSN1(cond_seq, orig_node, putobject, rb_fstring_lit(
"%p: %s"));
8304 ADD_INSN1(cond_seq, orig_node, topn,
INT2FIX(4));
8305 ADD_INSN1(cond_seq, orig_node, topn,
INT2FIX(CASE3_BI_OFFSET_ERROR_STRING + 6));
8306 ADD_SEND(cond_seq, orig_node, id_core_sprintf,
INT2FIX(3));
8307 ADD_SEND(cond_seq, orig_node, id_core_raise,
INT2FIX(2));
8308 ADD_INSNL(cond_seq, orig_node, jump, fin);
8310 ADD_LABEL(cond_seq, key_error);
8312 ADD_INSN1(cond_seq, orig_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
8313 ADD_INSN1(cond_seq, orig_node, putobject, rb_fstring_lit(
"%p: %s"));
8314 ADD_INSN1(cond_seq, orig_node, topn,
INT2FIX(4));
8315 ADD_INSN1(cond_seq, orig_node, topn,
INT2FIX(CASE3_BI_OFFSET_ERROR_STRING + 6));
8316 ADD_SEND(cond_seq, orig_node, id_core_sprintf,
INT2FIX(3));
8317 ADD_INSN1(cond_seq, orig_node, topn,
INT2FIX(CASE3_BI_OFFSET_KEY_ERROR_MATCHEE + 4));
8318 ADD_INSN1(cond_seq, orig_node, topn,
INT2FIX(CASE3_BI_OFFSET_KEY_ERROR_KEY + 5));
8319 ADD_SEND_R(cond_seq, orig_node, rb_intern(
"new"),
INT2FIX(1), NULL,
INT2FIX(VM_CALL_KWARG), kw_arg);
8320 ADD_SEND(cond_seq, orig_node, id_core_raise,
INT2FIX(1));
8322 ADD_LABEL(cond_seq, fin);
8326 ADD_INSN1(cond_seq, orig_node, topn,
INT2FIX(2));
8327 ADD_SEND(cond_seq, orig_node, id_core_raise,
INT2FIX(2));
8329 ADD_INSN1(cond_seq, orig_node, adjuststack,
INT2FIX(single_pattern ? 7 : 3));
8331 ADD_INSN(cond_seq, orig_node, putnil);
8333 ADD_INSNL(cond_seq, orig_node, jump, endlabel);
8334 ADD_INSN1(cond_seq, orig_node, dupn,
INT2FIX(single_pattern ? 5 : 1));
8336 ADD_INSN(cond_seq, line_node, putnil);
8340 ADD_SEQ(ret, cond_seq);
8341 ADD_SEQ(ret, body_seq);
8342 ADD_LABEL(ret, endlabel);
/* The CASE3_BI_OFFSET_* stack-slot macros are local to the case/in
 * compilation section above; retire them here. */
8346#undef CASE3_BI_OFFSET_DECONSTRUCTED_CACHE
8347#undef CASE3_BI_OFFSET_ERROR_STRING
8348#undef CASE3_BI_OFFSET_KEY_ERROR_P
8349#undef CASE3_BI_OFFSET_KEY_ERROR_MATCHEE
8350#undef CASE3_BI_OFFSET_KEY_ERROR_KEY
/*
 * NOTE(review): fragment of the while/until loop compiler (signature and
 * several intermediate lines are missing from this extract).  It saves the
 * enclosing loop's start/end/redo labels and loopval_popped, installs fresh
 * labels for this loop, emits body + condition, registers break/next/redo
 * catch-table entries, and restores the saved state on exit.
 */
8355 const int line = (int)nd_line(node);
8356 const NODE *line_node = node;
/* Save outer-loop label state so nested loops restore correctly below. */
8358 LABEL *prev_start_label = ISEQ_COMPILE_DATA(iseq)->start_label;
8359 LABEL *prev_end_label = ISEQ_COMPILE_DATA(iseq)->end_label;
8360 LABEL *prev_redo_label = ISEQ_COMPILE_DATA(iseq)->redo_label;
8361 int prev_loopval_popped = ISEQ_COMPILE_DATA(iseq)->loopval_popped;
8366 LABEL *next_label = ISEQ_COMPILE_DATA(iseq)->start_label = NEW_LABEL(line);
8367 LABEL *redo_label = ISEQ_COMPILE_DATA(iseq)->redo_label = NEW_LABEL(line);
8368 LABEL *break_label = ISEQ_COMPILE_DATA(iseq)->end_label = NEW_LABEL(line);
8369 LABEL *end_label = NEW_LABEL(line);
8370 LABEL *adjust_label = NEW_LABEL(line);
8372 LABEL *next_catch_label = NEW_LABEL(line);
8373 LABEL *tmp_label = NULL;
8375 ISEQ_COMPILE_DATA(iseq)->loopval_popped = 0;
8376 push_ensure_entry(iseq, &enl, NULL, NULL);
/* nd_state == 1: condition is tested before the first iteration
 * (plain while/until, not the do/while-style post-test form). */
8378 if (RNODE_WHILE(node)->nd_state == 1) {
8379 ADD_INSNL(ret, line_node, jump, next_label);
8382 tmp_label = NEW_LABEL(line);
8383 ADD_INSNL(ret, line_node, jump, tmp_label);
8385 ADD_LABEL(ret, adjust_label);
8386 ADD_INSN(ret, line_node, putnil);
8387 ADD_LABEL(ret, next_catch_label);
8388 ADD_INSN(ret, line_node, pop);
8389 ADD_INSNL(ret, line_node, jump, next_label);
8390 if (tmp_label) ADD_LABEL(ret, tmp_label);
8392 ADD_LABEL(ret, redo_label);
8393 branches = decl_branch_base(iseq, PTR2NUM(node), nd_code_loc(node),
type == NODE_WHILE ?
"while" :
"until");
/* Branch coverage attributes the body to nd_body when present,
 * otherwise to the loop node itself. */
8395 const NODE *
const coverage_node = RNODE_WHILE(node)->nd_body ? RNODE_WHILE(node)->nd_body : node;
8396 add_trace_branch_coverage(
8399 nd_code_loc(coverage_node),
8400 nd_node_id(coverage_node),
8405 CHECK(COMPILE_POPPED(ret,
"while body", RNODE_WHILE(node)->nd_body));
8406 ADD_LABEL(ret, next_label);
/* while jumps back on true, until jumps back on false — same helper,
 * swapped target labels. */
8408 if (
type == NODE_WHILE) {
8409 CHECK(compile_branch_condition(iseq, ret, RNODE_WHILE(node)->nd_cond,
8410 redo_label, end_label));
8414 CHECK(compile_branch_condition(iseq, ret, RNODE_WHILE(node)->nd_cond,
8415 end_label, redo_label));
8418 ADD_LABEL(ret, end_label);
8419 ADD_ADJUST_RESTORE(ret, adjust_label);
8421 if (UNDEF_P(RNODE_WHILE(node)->nd_state)) {
8423 COMPILE_ERROR(ERROR_ARGS
"unsupported: putundef");
8427 ADD_INSN(ret, line_node, putnil);
8430 ADD_LABEL(ret, break_label);
8433 ADD_INSN(ret, line_node, pop);
/* Catch-table entries make break/next/redo inside the loop non-escaping. */
8436 ADD_CATCH_ENTRY(CATCH_TYPE_BREAK, redo_label, break_label, NULL,
8438 ADD_CATCH_ENTRY(CATCH_TYPE_NEXT, redo_label, break_label, NULL,
8440 ADD_CATCH_ENTRY(CATCH_TYPE_REDO, redo_label, break_label, NULL,
8441 ISEQ_COMPILE_DATA(iseq)->redo_label);
/* Restore the enclosing loop's compile-time state. */
8443 ISEQ_COMPILE_DATA(iseq)->start_label = prev_start_label;
8444 ISEQ_COMPILE_DATA(iseq)->end_label = prev_end_label;
8445 ISEQ_COMPILE_DATA(iseq)->redo_label = prev_redo_label;
8446 ISEQ_COMPILE_DATA(iseq)->loopval_popped = prev_loopval_popped;
8447 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = ISEQ_COMPILE_DATA(iseq)->ensure_node_stack->prev;
/*
 * NOTE(review): fragment of the block/iterator-call compiler (NODE_ITER /
 * NODE_FOR).  Builds a child ISEQ_TYPE_BLOCK iseq for the block body,
 * compiles the receiver call with that block attached, then inserts
 * retry_end_l right after the send so CATCH_TYPE_BREAK spans exactly the
 * call instruction.
 */
8454 const int line = nd_line(node);
8455 const NODE *line_node = node;
8456 const rb_iseq_t *prevblock = ISEQ_COMPILE_DATA(iseq)->current_block;
8457 LABEL *retry_label = NEW_LABEL(line);
8458 LABEL *retry_end_l = NEW_LABEL(line);
8461 ADD_LABEL(ret, retry_label);
8462 if (nd_type_p(node, NODE_FOR)) {
/* `for` desugars to receiver.each { ... } — hence idEach below. */
8463 CHECK(COMPILE(ret,
"iter caller (for)", RNODE_FOR(node)->nd_iter));
8465 ISEQ_COMPILE_DATA(iseq)->current_block = child_iseq =
8466 NEW_CHILD_ISEQ(RNODE_FOR(node)->nd_body, make_name_for_block(iseq),
8467 ISEQ_TYPE_BLOCK, line);
8468 ADD_SEND_WITH_BLOCK(ret, line_node, idEach,
INT2FIX(0), child_iseq);
8471 ISEQ_COMPILE_DATA(iseq)->current_block = child_iseq =
8472 NEW_CHILD_ISEQ(RNODE_ITER(node)->nd_body, make_name_for_block(iseq),
8473 ISEQ_TYPE_BLOCK, line);
8474 CHECK(COMPILE(ret,
"iter caller", RNODE_ITER(node)->nd_iter));
/* Walk back from the last element to the send-family instruction the
 * call compiled to, so retry_end_l lands immediately after it. */
8488 iobj = IS_INSN(last_elem) ? (
INSN*) last_elem : (
INSN*) get_prev_insn((
INSN*) last_elem);
8489 while (!IS_INSN_ID(iobj, send) && !IS_INSN_ID(iobj, invokesuper) && !IS_INSN_ID(iobj, sendforward) && !IS_INSN_ID(iobj, invokesuperforward)) {
8490 iobj = (
INSN*) get_prev_insn(iobj);
8492 ELEM_INSERT_NEXT(&iobj->link, (
LINK_ELEMENT*) retry_end_l);
8496 if (&iobj->link == LAST_ELEMENT(ret)) {
8502 ADD_INSN(ret, line_node, pop);
8505 ISEQ_COMPILE_DATA(iseq)->current_block = prevblock;
8507 ADD_CATCH_ENTRY(CATCH_TYPE_BREAK, retry_label, retry_end_l, child_iseq, retry_end_l);
/*
 * NOTE(review): fragment of the `for` multiple-assignment value compiler.
 * Emits bytecode that, at runtime, checks whether the single yielded value
 * is a one-element array and, if so, attempts Array.try_convert on its
 * sole element so `for a, b in ...` destructures like block parameters.
 */
8517 const NODE *line_node = node;
8518 const NODE *var = RNODE_FOR_MASGN(node)->nd_var;
8519 LABEL *not_single = NEW_LABEL(nd_line(var));
8520 LABEL *not_ary = NEW_LABEL(nd_line(var));
8521 CHECK(COMPILE(ret,
"for var", var));
/* runtime: value.length == 1 ? */
8522 ADD_INSN(ret, line_node, dup);
8523 ADD_CALL(ret, line_node, idLength,
INT2FIX(0));
8524 ADD_INSN1(ret, line_node, putobject,
INT2FIX(1));
8525 ADD_CALL(ret, line_node, idEq,
INT2FIX(1));
8526 ADD_INSNL(ret, line_node, branchunless, not_single);
/* runtime: Array.try_convert(value[0]); keep it only if conversion
 * returned non-nil, otherwise fall through to the original value. */
8527 ADD_INSN(ret, line_node, dup);
8528 ADD_INSN1(ret, line_node, putobject,
INT2FIX(0));
8529 ADD_CALL(ret, line_node, idAREF,
INT2FIX(1));
8530 ADD_INSN1(ret, line_node, putobject,
rb_cArray);
8531 ADD_INSN(ret, line_node, swap);
8532 ADD_CALL(ret, line_node, rb_intern(
"try_convert"),
INT2FIX(1));
8533 ADD_INSN(ret, line_node, dup);
8534 ADD_INSNL(ret, line_node, branchunless, not_ary);
8535 ADD_INSN(ret, line_node, swap);
8536 ADD_LABEL(ret, not_ary);
8537 ADD_INSN(ret, line_node, pop);
8538 ADD_LABEL(ret, not_single);
/*
 * NOTE(review): fragment of the `break` compiler.  Fast path: inside a
 * while/until loop in the same iseq, jump directly to end_label.  Slow
 * path: walk parent iseqs to validate the break target, then emit a
 * `throw TAG_BREAK` (with VM_THROW_NO_ESCAPE_FLAG when the target is a
 * surrounding loop in a parent frame).
 */
8545 const NODE *line_node = node;
8546 unsigned long throw_flag = 0;
8548 if (ISEQ_COMPILE_DATA(iseq)->redo_label != 0 && can_add_ensure_iseq(iseq)) {
/* In-loop break: adjust the stack, run ensures inline, jump out. */
8550 LABEL *splabel = NEW_LABEL(0);
8551 ADD_LABEL(ret, splabel);
8552 ADD_ADJUST(ret, line_node, ISEQ_COMPILE_DATA(iseq)->redo_label);
8553 CHECK(COMPILE_(ret,
"break val (while/until)", RNODE_BREAK(node)->nd_stts,
8554 ISEQ_COMPILE_DATA(iseq)->loopval_popped));
8555 add_ensure_iseq(ret, iseq, 0);
8556 ADD_INSNL(ret, line_node, jump, ISEQ_COMPILE_DATA(iseq)->end_label);
8557 ADD_ADJUST_RESTORE(ret, splabel);
8560 ADD_INSN(ret, line_node, putnil);
/* Otherwise search enclosing iseqs for a legal break target. */
8567 if (!ISEQ_COMPILE_DATA(ip)) {
8572 if (ISEQ_COMPILE_DATA(ip)->redo_label != 0) {
8573 throw_flag = VM_THROW_NO_ESCAPE_FLAG;
8575 else if (ISEQ_BODY(ip)->
type == ISEQ_TYPE_BLOCK) {
8578 else if (ISEQ_BODY(ip)->
type == ISEQ_TYPE_EVAL) {
8579 COMPILE_ERROR(ERROR_ARGS
"Can't escape from eval with break");
8583 ip = ISEQ_BODY(ip)->parent_iseq;
8588 CHECK(COMPILE(ret,
"break val (block)", RNODE_BREAK(node)->nd_stts));
8589 ADD_INSN1(ret, line_node,
throw,
INT2FIX(throw_flag | TAG_BREAK));
8591 ADD_INSN(ret, line_node, pop);
8595 COMPILE_ERROR(ERROR_ARGS
"Invalid break");
/*
 * NOTE(review): fragment of the `next` compiler.  Mirrors compile_break:
 * direct jump to start_label (while loop) or end_label (block), falling
 * back to `throw TAG_NEXT` when no lexical target exists in this iseq.
 * Note the while-loop path runs ensures BEFORE the stack adjust, while
 * the block path adjusts first — order is load-bearing here.
 */
8604 const NODE *line_node = node;
8605 unsigned long throw_flag = 0;
8607 if (ISEQ_COMPILE_DATA(iseq)->redo_label != 0 && can_add_ensure_iseq(iseq)) {
8608 LABEL *splabel = NEW_LABEL(0);
8609 debugs(
"next in while loop\n");
8610 ADD_LABEL(ret, splabel);
8611 CHECK(COMPILE(ret,
"next val/valid syntax?", RNODE_NEXT(node)->nd_stts));
8612 add_ensure_iseq(ret, iseq, 0);
8613 ADD_ADJUST(ret, line_node, ISEQ_COMPILE_DATA(iseq)->redo_label);
8614 ADD_INSNL(ret, line_node, jump, ISEQ_COMPILE_DATA(iseq)->start_label);
8615 ADD_ADJUST_RESTORE(ret, splabel);
8617 ADD_INSN(ret, line_node, putnil);
8620 else if (ISEQ_COMPILE_DATA(iseq)->end_label && can_add_ensure_iseq(iseq)) {
8621 LABEL *splabel = NEW_LABEL(0);
8622 debugs(
"next in block\n");
8623 ADD_LABEL(ret, splabel);
8624 ADD_ADJUST(ret, line_node, ISEQ_COMPILE_DATA(iseq)->start_label);
8625 CHECK(COMPILE(ret,
"next val", RNODE_NEXT(node)->nd_stts));
8626 add_ensure_iseq(ret, iseq, 0);
8627 ADD_INSNL(ret, line_node, jump, ISEQ_COMPILE_DATA(iseq)->end_label);
8628 ADD_ADJUST_RESTORE(ret, splabel);
8631 ADD_INSN(ret, line_node, putnil);
/* Slow path: validate against enclosing iseqs, then throw TAG_NEXT. */
8638 if (!ISEQ_COMPILE_DATA(ip)) {
8643 throw_flag = VM_THROW_NO_ESCAPE_FLAG;
8644 if (ISEQ_COMPILE_DATA(ip)->redo_label != 0) {
8648 else if (ISEQ_BODY(ip)->
type == ISEQ_TYPE_BLOCK) {
8651 else if (ISEQ_BODY(ip)->
type == ISEQ_TYPE_EVAL) {
8652 COMPILE_ERROR(ERROR_ARGS
"Can't escape from eval with next");
8656 ip = ISEQ_BODY(ip)->parent_iseq;
8659 CHECK(COMPILE(ret,
"next val", RNODE_NEXT(node)->nd_stts));
8660 ADD_INSN1(ret, line_node,
throw,
INT2FIX(throw_flag | TAG_NEXT));
8663 ADD_INSN(ret, line_node, pop);
8667 COMPILE_ERROR(ERROR_ARGS
"Invalid next");
/*
 * NOTE(review): fragment of the `redo` compiler.  Same three-tier shape as
 * break/next: jump to redo_label inside a while/until loop, jump to
 * start_label inside a block, otherwise validate the enclosing iseq chain
 * and emit `throw TAG_REDO` (always non-escaping).
 */
8677 const NODE *line_node = node;
8679 if (ISEQ_COMPILE_DATA(iseq)->redo_label && can_add_ensure_iseq(iseq)) {
8680 LABEL *splabel = NEW_LABEL(0);
8681 debugs(
"redo in while");
8682 ADD_LABEL(ret, splabel);
8683 ADD_ADJUST(ret, line_node, ISEQ_COMPILE_DATA(iseq)->redo_label);
8684 add_ensure_iseq(ret, iseq, 0);
8685 ADD_INSNL(ret, line_node, jump, ISEQ_COMPILE_DATA(iseq)->redo_label);
8686 ADD_ADJUST_RESTORE(ret, splabel);
8688 ADD_INSN(ret, line_node, putnil);
/* Blocks can redo; eval frames cannot (checked here and again below). */
8691 else if (ISEQ_BODY(iseq)->
type != ISEQ_TYPE_EVAL && ISEQ_COMPILE_DATA(iseq)->start_label && can_add_ensure_iseq(iseq)) {
8692 LABEL *splabel = NEW_LABEL(0);
8694 debugs(
"redo in block");
8695 ADD_LABEL(ret, splabel);
8696 add_ensure_iseq(ret, iseq, 0);
8697 ADD_ADJUST(ret, line_node, ISEQ_COMPILE_DATA(iseq)->start_label);
8698 ADD_INSNL(ret, line_node, jump, ISEQ_COMPILE_DATA(iseq)->start_label);
8699 ADD_ADJUST_RESTORE(ret, splabel);
8702 ADD_INSN(ret, line_node, putnil);
8709 if (!ISEQ_COMPILE_DATA(ip)) {
8714 if (ISEQ_COMPILE_DATA(ip)->redo_label != 0) {
8717 else if (ISEQ_BODY(ip)->
type == ISEQ_TYPE_BLOCK) {
8720 else if (ISEQ_BODY(ip)->
type == ISEQ_TYPE_EVAL) {
8721 COMPILE_ERROR(ERROR_ARGS
"Can't escape from eval with redo");
8725 ip = ISEQ_BODY(ip)->parent_iseq;
8728 ADD_INSN(ret, line_node, putnil);
8729 ADD_INSN1(ret, line_node,
throw,
INT2FIX(VM_THROW_NO_ESCAPE_FLAG | TAG_REDO));
8732 ADD_INSN(ret, line_node, pop);
8736 COMPILE_ERROR(ERROR_ARGS
"Invalid redo");
/*
 * NOTE(review): fragment of the `retry` compiler.  `retry` is only legal
 * directly inside a rescue iseq; it compiles to `throw TAG_RETRY`, and is
 * a compile error anywhere else.
 */
8746 const NODE *line_node = node;
8748 if (ISEQ_BODY(iseq)->
type == ISEQ_TYPE_RESCUE) {
8749 ADD_INSN(ret, line_node, putnil);
8750 ADD_INSN1(ret, line_node,
throw,
INT2FIX(TAG_RETRY));
8753 ADD_INSN(ret, line_node, pop);
8757 COMPILE_ERROR(ERROR_ARGS
"Invalid retry");
/*
 * NOTE(review): fragment of the begin/rescue compiler.  Builds a child
 * ISEQ_TYPE_RESCUE iseq for the rescue clauses, brackets the protected
 * body with lstart/lend (marked as the rescued region for the optimizer),
 * and registers RESCUE and RETRY catch-table entries.
 */
8766 const int line = nd_line(node);
8767 const NODE *line_node = node;
8768 LABEL *lstart = NEW_LABEL(line);
8769 LABEL *lend = NEW_LABEL(line);
8770 LABEL *lcont = NEW_LABEL(line);
8771 const rb_iseq_t *rescue = NEW_CHILD_ISEQ(RNODE_RESCUE(node)->nd_resq,
8773 ISEQ_BODY(iseq)->location.label),
8774 ISEQ_TYPE_RESCUE, line);
8776 lstart->rescued = LABEL_RESCUE_BEG;
8777 lend->rescued = LABEL_RESCUE_END;
8778 ADD_LABEL(ret, lstart);
/* in_rescue is flagged only while compiling the protected head. */
8780 bool prev_in_rescue = ISEQ_COMPILE_DATA(iseq)->in_rescue;
8781 ISEQ_COMPILE_DATA(iseq)->in_rescue =
true;
8783 CHECK(COMPILE(ret,
"rescue head", RNODE_RESCUE(node)->nd_head));
8785 ISEQ_COMPILE_DATA(iseq)->in_rescue = prev_in_rescue;
8787 ADD_LABEL(ret, lend);
8788 if (RNODE_RESCUE(node)->nd_else) {
/* else clause replaces the head's value on the stack. */
8789 ADD_INSN(ret, line_node, pop);
8790 CHECK(COMPILE(ret,
"rescue else", RNODE_RESCUE(node)->nd_else));
8792 ADD_INSN(ret, line_node, nop);
8793 ADD_LABEL(ret, lcont);
8796 ADD_INSN(ret, line_node, pop);
/* RETRY entry covers lend..lcont so `retry` re-enters at lstart. */
8800 ADD_CATCH_ENTRY(CATCH_TYPE_RESCUE, lstart, lend, rescue, lcont);
8801 ADD_CATCH_ENTRY(CATCH_TYPE_RETRY, lend, lcont, NULL, lstart);
/*
 * NOTE(review): fragment of the rescue-clause (resbody) compiler.  For
 * each clause: emit checkmatch tests of $! (LVAR_ERRINFO) against each
 * exception class (or a splat), bind the exception variable if any,
 * compile the handler body, then fall through to the next clause via
 * label_miss.  The enclosing loop over `resq` clauses is partly outside
 * this extract.
 */
8808 const int line = nd_line(node);
8809 const NODE *line_node = node;
8810 const NODE *resq = node;
8812 LABEL *label_miss, *label_hit;
8815 label_miss = NEW_LABEL(line);
8816 label_hit = NEW_LABEL(line);
8818 narg = RNODE_RESBODY(resq)->nd_args;
8820 switch (nd_type(narg)) {
/* NODE_LIST case (label stripped in this extract): test each class. */
8823 ADD_GETLOCAL(ret, line_node, LVAR_ERRINFO, 0);
8824 CHECK(COMPILE(ret,
"rescue arg", RNODE_LIST(narg)->nd_head));
8825 ADD_INSN1(ret, line_node, checkmatch,
INT2FIX(VM_CHECKMATCH_TYPE_RESCUE));
8826 ADD_INSNL(ret, line_node, branchif, label_hit);
8827 narg = RNODE_LIST(narg)->nd_next;
/* splat case: match the whole array of classes at once. */
8833 ADD_GETLOCAL(ret, line_node, LVAR_ERRINFO, 0);
8834 CHECK(COMPILE(ret,
"rescue/cond splat", narg));
8835 ADD_INSN1(ret, line_node, checkmatch,
INT2FIX(VM_CHECKMATCH_TYPE_RESCUE | VM_CHECKMATCH_ARRAY));
8836 ADD_INSNL(ret, line_node, branchif, label_hit);
8839 UNKNOWN_NODE(
"NODE_RESBODY", narg, COMPILE_NG);
/* No explicit class list: bare rescue matches StandardError. */
8843 ADD_GETLOCAL(ret, line_node, LVAR_ERRINFO, 0);
8845 ADD_INSN1(ret, line_node, checkmatch,
INT2FIX(VM_CHECKMATCH_TYPE_RESCUE));
8846 ADD_INSNL(ret, line_node, branchif, label_hit);
8848 ADD_INSNL(ret, line_node, jump, label_miss);
8849 ADD_LABEL(ret, label_hit);
8852 if (RNODE_RESBODY(resq)->nd_exc_var) {
8853 CHECK(COMPILE_POPPED(ret,
"resbody exc_var", RNODE_RESBODY(resq)->nd_exc_var));
/* Empty handler body (rescue => e; end with nothing): just putnil. */
8856 if (nd_type(RNODE_RESBODY(resq)->nd_body) == NODE_BEGIN && RNODE_BEGIN(RNODE_RESBODY(resq)->nd_body)->nd_body == NULL && !RNODE_RESBODY(resq)->nd_exc_var) {
8858 ADD_SYNTHETIC_INSN(ret, nd_line(RNODE_RESBODY(resq)->nd_body), -1, putnil);
8861 CHECK(COMPILE(ret,
"resbody body", RNODE_RESBODY(resq)->nd_body));
/* nop blocks tailcall optimization from eliding the leave boundary. */
8864 if (ISEQ_COMPILE_DATA(iseq)->option->tailcall_optimization) {
8865 ADD_INSN(ret, line_node, nop);
8867 ADD_INSN(ret, line_node, leave);
8868 ADD_LABEL(ret, label_miss);
8869 resq = RNODE_RESBODY(resq)->nd_next;
/*
 * NOTE(review): fragment of the begin/ensure compiler.  Compiles the
 * ensure clause into a child ISEQ_TYPE_ENSURE iseq (for the exceptional
 * path) AND records it on the ensure stack (for inline emission on normal
 * exits), then registers CATCH_TYPE_ENSURE entries over each protected
 * range.
 */
8877 const int line = nd_line(RNODE_ENSURE(node)->nd_ensr);
8878 const NODE *line_node = node;
8880 const rb_iseq_t *ensure = NEW_CHILD_ISEQ(RNODE_ENSURE(node)->nd_ensr,
8882 ISEQ_TYPE_ENSURE, line);
8883 LABEL *lstart = NEW_LABEL(line);
8884 LABEL *lend = NEW_LABEL(line);
8885 LABEL *lcont = NEW_LABEL(line);
8893 CHECK(COMPILE_POPPED(ensr,
"ensure ensr", RNODE_ENSURE(node)->nd_ensr));
/* If the ensure body ends in `leave`, the value handling below differs. */
8895 last_leave = last && IS_INSN(last) && IS_INSN_ID(last, leave);
8900 push_ensure_entry(iseq, &enl, &er, RNODE_ENSURE(node)->nd_ensr);
8902 ADD_LABEL(ret, lstart);
8903 CHECK(COMPILE_(ret,
"ensure head", RNODE_ENSURE(node)->nd_head, (popped | last_leave)));
8904 ADD_LABEL(ret, lend);
8906 if (!popped && last_leave) ADD_INSN(ret, line_node, putnil);
8907 ADD_LABEL(ret, lcont);
8908 if (last_leave) ADD_INSN(ret, line_node, pop);
/* One catch entry per recorded protected range; skip if head was empty. */
8910 erange = ISEQ_COMPILE_DATA(iseq)->ensure_node_stack->erange;
8911 if (lstart->link.next != &lend->link) {
8913 ADD_CATCH_ENTRY(CATCH_TYPE_ENSURE, erange->begin, erange->end,
8915 erange = erange->next;
8919 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = enl.prev;
/*
 * NOTE(review): fragment of the `return` compiler.  Resolves the
 * effective iseq type by skipping rescue/ensure wrappers, warns on
 * top-level return with an argument, then emits either a plain
 * adjust+ensure+leave (method body) or `throw TAG_RETURN` (block/other).
 */
8926 const NODE *line_node = node;
8929 enum rb_iseq_type
type = ISEQ_BODY(iseq)->type;
8931 enum rb_iseq_type t =
type;
8932 const NODE *retval = RNODE_RETURN(node)->nd_stts;
/* rescue/ensure iseqs inherit return semantics from their parent. */
8935 while (t == ISEQ_TYPE_RESCUE || t == ISEQ_TYPE_ENSURE) {
8936 if (!(is = ISEQ_BODY(is)->parent_iseq))
break;
8937 t = ISEQ_BODY(is)->type;
8941 case ISEQ_TYPE_MAIN:
8943 rb_warn(
"argument of top-level return is ignored");
/* top-level return behaves like a method return. */
8947 type = ISEQ_TYPE_METHOD;
8954 if (
type == ISEQ_TYPE_METHOD) {
8955 splabel = NEW_LABEL(0);
8956 ADD_LABEL(ret, splabel);
8957 ADD_ADJUST(ret, line_node, 0);
8960 CHECK(COMPILE(ret,
"return nd_stts (return val)", retval));
8962 if (
type == ISEQ_TYPE_METHOD && can_add_ensure_iseq(iseq)) {
8963 add_ensure_iseq(ret, iseq, 1);
8965 ADD_INSN(ret, line_node, leave);
8966 ADD_ADJUST_RESTORE(ret, splabel);
8969 ADD_INSN(ret, line_node, putnil);
8973 ADD_INSN1(ret, line_node,
throw,
INT2FIX(TAG_RETURN));
8975 ADD_INSN(ret, line_node, pop);
/*
 * NOTE(review): fragment that inspects the tail of the instruction list —
 * presumably detecting/dropping a redundant trailing putnil before a
 * return-like instruction (signature and switch arms not visible; confirm
 * against the full file).
 */
8986 if (!i)
return false;
8987 if (IS_TRACE(i)) i = i->prev;
8988 if (!IS_INSN(i) || !IS_INSN_ID(i, putnil))
return false;
8990 if (IS_ADJUST(i)) i = i->prev;
8991 if (!IS_INSN(i))
return false;
8992 switch (INSN_OF(i)) {
/* unlink the trailing element from the anchor list. */
8999 (ret->last = last->prev)->next = NULL;
/*
 * NOTE(review): fragment of string-interpolation (#{}) compilation.
 * Compiles the embedded expression; when the result is not statically
 * known to be a String, emits dup + objtostring + anytostring to coerce
 * it (objtostring carries an FCALL callinfo for to_s).
 */
9006 CHECK(COMPILE_(ret,
"nd_body", node, popped));
9008 if (!popped && !all_string_result_p(node)) {
9009 const NODE *line_node = node;
9010 const unsigned int flag = VM_CALL_FCALL;
9014 ADD_INSN(ret, line_node, dup);
9015 ADD_INSN1(ret, line_node, objtostring, new_callinfo(iseq, idTo_s, 0, flag, NULL, FALSE));
9016 ADD_INSN(ret, line_node, anytostring);
/*
 * NOTE(review): local-variable read fragment — converts a local-table
 * index to a stack-frame slot (table_size - position) and emits getlocal
 * at the variable's lexical level.
 */
9024 int idx = ISEQ_BODY(ISEQ_BODY(iseq)->local_iseq)->local_table_size - get_local_var_idx(iseq,
id);
9026 debugs(
"id: %s idx: %d\n", rb_id2name(
id), idx);
9027 ADD_GETLOCAL(ret, line_node, idx, get_lvar_level(iseq));
/*
 * NOTE(review): fragment of the safe-navigation (&.) branch opener:
 * dup the receiver, branchnil to else_label, and record "then" branch
 * coverage.  The matching close lives in the next fragment.
 */
9033 LABEL *else_label = NEW_LABEL(nd_line(line_node));
9036 br = decl_branch_base(iseq, PTR2NUM(node), nd_code_loc(node),
"&.");
9038 ADD_INSN(recv, line_node, dup);
9039 ADD_INSNL(recv, line_node, branchnil, else_label);
9040 add_trace_branch_coverage(iseq, recv, nd_code_loc(node), nd_node_id(node), 0,
"then", br);
/*
 * NOTE(review): fragment of the safe-navigation (&.) branch closer: no-op
 * when no else_label was opened; otherwise jump past the nil path, place
 * else_label, record "else" branch coverage, and place the join label.
 */
9048 if (!else_label)
return;
9049 end_label = NEW_LABEL(nd_line(line_node));
9050 ADD_INSNL(ret, line_node, jump, end_label);
9051 ADD_LABEL(ret, else_label);
9052 add_trace_branch_coverage(iseq, ret, nd_code_loc(node), nd_node_id(node), 1,
"else", branches);
9053 ADD_LABEL(ret, end_label);
/*
 * NOTE(review): specialized-instruction fast path for
 * "literal string".freeze / -"literal string": when the receiver is a
 * string literal, the call is freeze/-@ with no args and no block, and
 * specialized instructions are enabled, emit opt_str_freeze /
 * opt_str_uminus instead of a generic send.
 */
9062 if (get_nd_recv(node) &&
9063 (nd_type_p(get_nd_recv(node), NODE_STR) || nd_type_p(get_nd_recv(node), NODE_FILE)) &&
9064 (get_node_call_nd_mid(node) == idFreeze || get_node_call_nd_mid(node) == idUMinus) &&
9065 get_nd_args(node) == NULL &&
9066 ISEQ_COMPILE_DATA(iseq)->current_block == NULL &&
9067 ISEQ_COMPILE_DATA(iseq)->option->specialized_instruction) {
9068 VALUE str = get_string_value(get_nd_recv(node));
9069 if (get_node_call_nd_mid(node) == idUMinus) {
9070 ADD_INSN2(ret, line_node, opt_str_uminus, str,
9071 new_callinfo(iseq, idUMinus, 0, 0, NULL, FALSE));
9074 ADD_INSN2(ret, line_node, opt_str_freeze, str,
9075 new_callinfo(iseq, idFreeze, 0, 0, NULL, FALSE));
9079 ADD_INSN(ret, line_node, pop);
/* True when this iseq is being compiled with a __builtin function table
 * registered (i.e. core library compilation). */
9087iseq_has_builtin_function_table(
const rb_iseq_t *iseq)
9089 return ISEQ_COMPILE_DATA(iseq)->builtin_function_table != NULL;
/* Linear search of the registered builtin-function table by name.
 * The table is terminated by an entry with index == -1. */
9093iseq_builtin_function_lookup(
const rb_iseq_t *iseq,
const char *name)
9096 const struct rb_builtin_function *table = ISEQ_COMPILE_DATA(iseq)->builtin_function_table;
9097 for (i=0; table[i].index != -1; i++) {
9098 if (strcmp(table[i].name, name) == 0) {
/*
 * NOTE(review): maps a call AST to a builtin-function name, or NULL.
 * Recognized spellings: `__builtin.name`, `Primitive.name`, and the bare
 * `__builtin_name` prefix form (case labels for the recv node types are
 * stripped from this extract).
 */
9106iseq_builtin_function_name(
const enum node_type
type,
const NODE *recv,
ID mid)
9108 const char *name = rb_id2name(mid);
9109 static const char prefix[] =
"__builtin_";
9110 const size_t prefix_len =
sizeof(prefix) - 1;
9115 switch (nd_type(recv)) {
9117 if (RNODE_VCALL(recv)->nd_mid == rb_intern(
"__builtin")) {
9122 if (RNODE_CONST(recv)->nd_vid == rb_intern(
"Primitive")) {
/* no receiver: accept the __builtin_ prefix and strip it. */
9132 if (UNLIKELY(strncmp(prefix, name, prefix_len) == 0)) {
9133 return &name[prefix_len];
/*
 * NOTE(review): decides whether a builtin call's arguments are exactly a
 * contiguous run of level-0 locals, so opt_invokebuiltin_delegate can
 * pass them straight from the frame instead of pushing them.  On success
 * stores the first local's index in *pstart_index.  Several loop/return
 * lines are missing from this extract.
 */
9142delegate_call_p(
const rb_iseq_t *iseq,
unsigned int argc,
const LINK_ANCHOR *args,
unsigned int *pstart_index)
9149 else if (argc <= ISEQ_BODY(iseq)->local_table_size) {
9150 unsigned int start=0;
9155 argc + start <= ISEQ_BODY(iseq)->local_table_size;
9159 for (
unsigned int i=start; i-start<argc; i++) {
9160 if (IS_INSN(elem) &&
9161 INSN_OF(elem) == BIN(getlocal)) {
9162 int local_index =
FIX2INT(OPERAND_AT(elem, 0));
9163 int local_level =
FIX2INT(OPERAND_AT(elem, 1));
9165 if (local_level == 0) {
/* getlocal operands are frame-relative; convert back to table index. */
9166 unsigned int index = ISEQ_BODY(iseq)->local_table_size - (local_index - VM_ENV_DATA_SIZE + 1);
9168 fprintf(stderr,
"lvar:%s (%d), id:%s (%d) local_index:%d, local_size:%d\n",
9169 rb_id2name(ISEQ_BODY(iseq)->local_table[i]), i,
9170 rb_id2name(ISEQ_BODY(iseq)->local_table[index]), index,
9171 local_index, (
int)ISEQ_BODY(iseq)->local_table_size);
9195 *pstart_index = start;
/*
 * NOTE(review): handler for `Primitive.attr! :leaf, ...` — parses the
 * symbol argument list and sets the corresponding BUILTIN_ATTR_* bits on
 * the iseq (or calls iseq_set_use_block).  Error labels (no_arg /
 * non_symbol_arg / bad_arg) are referenced by gotos whose label lines are
 * stripped from this extract.
 */
9209 if (!node)
goto no_arg;
9211 if (!nd_type_p(node, NODE_LIST))
goto bad_arg;
9212 const NODE *next = RNODE_LIST(node)->nd_next;
9214 node = RNODE_LIST(node)->nd_head;
9215 if (!node)
goto no_arg;
9216 switch (nd_type(node)) {
9218 symbol = rb_node_sym_string_val(node);
9224 if (!
SYMBOL_P(symbol))
goto non_symbol_arg;
/* Dispatch on the attribute name; unknown names fall through to the
 * "unknown argument" COMPILE_ERROR below. */
9227 if (strcmp(RSTRING_PTR(
string),
"leaf") == 0) {
9228 ISEQ_BODY(iseq)->builtin_attrs |= BUILTIN_ATTR_LEAF;
9230 else if (strcmp(RSTRING_PTR(
string),
"inline_block") == 0) {
9231 ISEQ_BODY(iseq)->builtin_attrs |= BUILTIN_ATTR_INLINE_BLOCK;
9233 else if (strcmp(RSTRING_PTR(
string),
"use_block") == 0) {
9234 iseq_set_use_block(iseq);
9236 else if (strcmp(RSTRING_PTR(
string),
"c_trace") == 0) {
9238 ISEQ_BODY(iseq)->builtin_attrs |= BUILTIN_ATTR_C_TRACE;
9240 else if (strcmp(RSTRING_PTR(
string),
"without_interrupts") == 0) {
9241 ISEQ_BODY(iseq)->builtin_attrs |= BUILTIN_ATTR_WITHOUT_INTERRUPTS;
9250 COMPILE_ERROR(ERROR_ARGS
"attr!: no argument");
9253 COMPILE_ERROR(ERROR_ARGS
"non symbol argument to attr!: %s", rb_builtin_class_name(symbol));
9256 COMPILE_ERROR(ERROR_ARGS
"unknown argument to attr!: %s", RSTRING_PTR(
string));
9259 UNKNOWN_NODE(
"attr!", node, COMPILE_NG);
/*
 * NOTE(review): handler for `Primitive.arg! :name` — validates exactly one
 * symbol argument and compiles a read of the named local variable.
 * Error labels are referenced by gotos whose label lines are stripped
 * from this extract.
 */
9267 if (!node)
goto no_arg;
9268 if (!nd_type_p(node, NODE_LIST))
goto bad_arg;
9269 if (RNODE_LIST(node)->nd_next)
goto too_many_arg;
9270 node = RNODE_LIST(node)->nd_head;
9271 if (!node)
goto no_arg;
9272 switch (nd_type(node)) {
9274 name = rb_node_sym_string_val(node);
9279 if (!
SYMBOL_P(name))
goto non_symbol_arg;
9281 compile_lvar(iseq, ret, line_node,
SYM2ID(name));
9285 COMPILE_ERROR(ERROR_ARGS
"arg!: no argument");
9288 COMPILE_ERROR(ERROR_ARGS
"arg!: too many argument");
9291 COMPILE_ERROR(ERROR_ARGS
"non symbol argument to arg!: %s",
9292 rb_builtin_class_name(name));
9295 UNKNOWN_NODE(
"arg!", node, COMPILE_NG);
/*
 * NOTE(review): locates the then-branch of the `if Primitive.mandatory_only?`
 * guard in the method's root AST; rb_bug if the expected shape is absent.
 * (Traversal lines between the root check and the bug call appear stripped.)
 */
9301 const NODE *node = ISEQ_COMPILE_DATA(iseq)->root_node;
9302 if (nd_type(node) == NODE_IF && RNODE_IF(node)->nd_cond == cond_node) {
9303 return RNODE_IF(node)->nd_body;
9306 rb_bug(
"mandatory_node: can't find mandatory node");
/*
 * NOTE(review): builds a secondary "mandatory-only" iseq for a core
 * method guarded by `Primitive.mandatory_only?`: synthesizes NODE_ARGS /
 * NODE_SCOPE nodes on the stack with only the leading (mandatory)
 * parameters, copies the relevant slice of the local table, compiles the
 * mandatory branch as a fresh method iseq, and stores it in
 * mandatory_only_iseq via a write barrier.
 */
9311compile_builtin_mandatory_only_method(
rb_iseq_t *iseq,
const NODE *node,
const NODE *line_node)
9315 .pre_args_num = ISEQ_BODY(iseq)->param.lead_num,
9318 rb_node_init(RNODE(&args_node), NODE_ARGS);
9319 args_node.nd_ainfo = args;
/* Local table = mandatory params followed by non-parameter locals
 * (optional/rest/etc. param slots are skipped). */
9322 const int skip_local_size = ISEQ_BODY(iseq)->param.size - ISEQ_BODY(iseq)->param.lead_num;
9323 const int table_size = ISEQ_BODY(iseq)->local_table_size - skip_local_size;
9327 tbl->size = table_size;
9332 for (i=0; i<ISEQ_BODY(iseq)->param.lead_num; i++) {
9333 tbl->ids[i] = ISEQ_BODY(iseq)->local_table[i];
9336 for (; i<table_size; i++) {
9337 tbl->ids[i] = ISEQ_BODY(iseq)->local_table[i + skip_local_size];
9341 rb_node_init(RNODE(&scope_node), NODE_SCOPE);
9342 scope_node.nd_tbl = tbl;
9343 scope_node.nd_body = mandatory_node(iseq, node);
9344 scope_node.nd_parent = NULL;
9345 scope_node.nd_args = &args_node;
9347 VALUE ast_value = rb_ruby_ast_new(RNODE(&scope_node));
9350 rb_iseq_new_with_opt(ast_value, rb_iseq_base_label(iseq),
9351 rb_iseq_path(iseq), rb_iseq_realpath(iseq),
9352 nd_line(line_node), NULL, 0,
9353 ISEQ_TYPE_METHOD, ISEQ_COMPILE_DATA(iseq)->option,
9354 ISEQ_BODY(iseq)->variable.script_lines);
9355 RB_OBJ_WRITE(iseq, &ISEQ_BODY(iseq)->mandatory_only_iseq, (
VALUE)mandatory_only_iseq);
/*
 * NOTE(review): fragment of the __builtin/Primitive call compiler.
 * Dispatches on the builtin-function name: cstmt!/cexpr! map to inline C
 * (renamed to a _bi<line> symbol), cconst! is evaluated at compile time
 * and emitted as putobject, cinit!/attr!/arg!/mandatory_only? get special
 * handling, and everything else becomes invokebuiltin (or the
 * opt_invokebuiltin_delegate form when args are a plain run of locals).
 */
9365 NODE *args_node = get_nd_args(node);
9367 if (parent_block != NULL) {
9368 COMPILE_ERROR(ERROR_ARGS_AT(line_node)
"should not call builtins here.");
9372# define BUILTIN_INLINE_PREFIX "_bi"
9373 char inline_func[
sizeof(BUILTIN_INLINE_PREFIX) +
DECIMAL_SIZE_OF(
int)];
9374 bool cconst =
false;
9379 if (strcmp(
"cstmt!", builtin_func) == 0 ||
9380 strcmp(
"cexpr!", builtin_func) == 0) {
9383 else if (strcmp(
"cconst!", builtin_func) == 0) {
9386 else if (strcmp(
"cinit!", builtin_func) == 0) {
9390 else if (strcmp(
"attr!", builtin_func) == 0) {
9391 return compile_builtin_attr(iseq, args_node);
9393 else if (strcmp(
"arg!", builtin_func) == 0) {
9394 return compile_builtin_arg(iseq, ret, args_node, line_node, popped);
9396 else if (strcmp(
"mandatory_only?", builtin_func) == 0) {
/* mandatory_only? must be the very first expression of an if cond. */
9398 rb_bug(
"mandatory_only? should be in if condition");
9400 else if (!LIST_INSN_SIZE_ZERO(ret)) {
9401 rb_bug(
"mandatory_only? should be put on top");
/* The guard itself always compiles to false; the true branch is
 * captured separately as the mandatory-only iseq. */
9404 ADD_INSN1(ret, line_node, putobject,
Qfalse);
9405 return compile_builtin_mandatory_only_method(iseq, node, line_node);
9408 rb_bug(
"can't find builtin function:%s", builtin_func);
9411 COMPILE_ERROR(ERROR_ARGS
"can't find builtin function:%s", builtin_func);
/* inline C fragments are registered under _bi<line-number>. */
9415 int inline_index = nd_line(node);
9416 snprintf(inline_func,
sizeof(inline_func), BUILTIN_INLINE_PREFIX
"%d", inline_index);
9417 builtin_func = inline_func;
/* cconst!: call the C function now, at compile time, and embed the
 * result as a literal. */
9423 typedef VALUE(*builtin_func0)(
void *,
VALUE);
9424 VALUE const_val = (*(builtin_func0)(uintptr_t)bf->func_ptr)(NULL,
Qnil);
9425 ADD_INSN1(ret, line_node, putobject, const_val);
9431 unsigned int flag = 0;
9433 VALUE argc = setup_args(iseq, args, args_node, &flag, &keywords);
9435 if (
FIX2INT(argc) != bf->argc) {
9436 COMPILE_ERROR(ERROR_ARGS
"argc is not match for builtin function:%s (expect %d but %d)",
9437 builtin_func, bf->argc,
FIX2INT(argc));
9441 unsigned int start_index;
9442 if (delegate_call_p(iseq,
FIX2INT(argc), args, &start_index)) {
9443 ADD_INSN2(ret, line_node, opt_invokebuiltin_delegate, bf,
INT2FIX(start_index));
9447 ADD_INSN1(ret, line_node, invokebuiltin, bf);
9450 if (popped) ADD_INSN(ret, line_node, pop);
/*
 * NOTE(review): fragment of the general method-call compiler
 * (NODE_CALL/FCALL/VCALL/OPCALL/QCALL).  Handles, in order: debug opcodes
 * (bitblt/answer), goto/label support, builtin-function interception,
 * receiver compilation (including block-parameter proxy and &. branch),
 * argument setup, the opt_new specialization for `Klass.new`, and the
 * final send with VCALL/FCALL flags.  Many closing braces and some
 * intermediate lines are stripped from this extract.
 */
9456compile_call(
rb_iseq_t *iseq,
LINK_ANCHOR *
const ret,
const NODE *
const node,
const enum node_type
type,
const NODE *
const line_node,
int popped,
bool assume_receiver)
9464 ID mid = get_node_call_nd_mid(node);
9466 unsigned int flag = 0;
9468 const rb_iseq_t *parent_block = ISEQ_COMPILE_DATA(iseq)->current_block;
9469 LABEL *else_label = NULL;
/* The block belongs to THIS call; clear it so nested compiles of the
 * receiver/args don't consume it. */
9472 ISEQ_COMPILE_DATA(iseq)->current_block = NULL;
/* Easter-egg/debug instructions, compiled only for bare vcalls. */
9478 if (nd_type_p(node, NODE_VCALL)) {
9483 CONST_ID(id_answer,
"the_answer_to_life_the_universe_and_everything");
9485 if (mid == id_bitblt) {
9486 ADD_INSN(ret, line_node, bitblt);
9489 else if (mid == id_answer) {
9490 ADD_INSN(ret, line_node, answer);
/* Optional goto/label support (support_joke): fcalls named goto/label
 * become jumps/labels resolved through a per-iseq st_table. */
9502 if (nd_type_p(node, NODE_FCALL) &&
9503 (mid == goto_id || mid == label_id)) {
9506 st_table *labels_table = ISEQ_COMPILE_DATA(iseq)->labels_table;
9509 if (!labels_table) {
9510 labels_table = st_init_numtable();
9511 ISEQ_COMPILE_DATA(iseq)->labels_table = labels_table;
9514 COMPILE_ERROR(ERROR_ARGS
"invalid goto/label format");
9518 if (mid == goto_id) {
9519 ADD_INSNL(ret, line_node, jump, label);
9522 ADD_LABEL(ret, label);
/* __builtin / Primitive calls bypass normal dispatch entirely. */
9529 const char *builtin_func;
9530 if (UNLIKELY(iseq_has_builtin_function_table(iseq)) &&
9531 (builtin_func = iseq_builtin_function_name(
type, get_nd_recv(node), mid)) != NULL) {
9532 return compile_builtin_function_call(iseq, ret, node, line_node, popped, parent_block, args, builtin_func);
9536 if (!assume_receiver) {
9537 if (
type == NODE_CALL ||
type == NODE_OPCALL ||
type == NODE_QCALL) {
/* block_param.call → getblockparamproxy fast path. */
9540 if (mid == idCall &&
9541 nd_type_p(get_nd_recv(node), NODE_LVAR) &&
9542 iseq_block_param_id_p(iseq, RNODE_LVAR(get_nd_recv(node))->nd_vid, &idx, &level)) {
9543 ADD_INSN2(recv, get_nd_recv(node), getblockparamproxy,
INT2FIX(idx + VM_ENV_DATA_SIZE - 1),
INT2FIX(level));
9545 else if (private_recv_p(node)) {
9546 ADD_INSN(recv, node, putself);
9547 flag |= VM_CALL_FCALL;
9550 CHECK(COMPILE(recv,
"recv", get_nd_recv(node)));
9553 if (
type == NODE_QCALL) {
9554 else_label = qcall_branch_start(iseq, recv, &branches, node, line_node);
9557 else if (
type == NODE_FCALL ||
type == NODE_VCALL) {
9558 ADD_CALL_RECEIVER(recv, line_node);
9563 if (
type != NODE_VCALL) {
9564 argc = setup_args(iseq, args, get_nd_args(node), &flag, &keywords);
9565 CHECK(!
NIL_P(argc));
/* `Klass.new` without a literal block arg may be inlined via opt_new. */
9573 bool inline_new = ISEQ_COMPILE_DATA(iseq)->option->specialized_instruction &&
9574 mid == rb_intern(
"new") &&
9575 parent_block == NULL &&
9576 !(flag & VM_CALL_ARGS_BLOCKARG);
9579 ADD_INSN(ret, node, putnil);
9580 ADD_INSN(ret, node, swap);
9585 debugp_param(
"call args argc", argc);
9586 debugp_param(
"call method",
ID2SYM(mid));
9588 switch ((
int)
type) {
9590 flag |= VM_CALL_VCALL;
/* VCALL falls through: it is also an FCALL. */
9593 flag |= VM_CALL_FCALL;
9596 if ((flag & VM_CALL_ARGS_BLOCKARG) && (flag & VM_CALL_KW_SPLAT) && !(flag & VM_CALL_KW_SPLAT_MUT)) {
9597 ADD_INSN(ret, line_node, splatkw);
/* opt_new path: try the fast allocate+initialize; fall back to a
 * plain `new` send at not_basic_new when the class overrides it. */
9600 LABEL *not_basic_new = NEW_LABEL(nd_line(node));
9601 LABEL *not_basic_new_finish = NEW_LABEL(nd_line(node));
9606 if (flag & VM_CALL_FORWARDING) {
9607 ci = (
VALUE)new_callinfo(iseq, mid,
NUM2INT(argc) + 1, flag, keywords, 0);
9610 ci = (
VALUE)new_callinfo(iseq, mid,
NUM2INT(argc), flag, keywords, 0);
9612 ADD_INSN2(ret, node, opt_new, ci, not_basic_new);
9613 LABEL_REF(not_basic_new);
9616 ADD_SEND_R(ret, line_node, rb_intern(
"initialize"), argc, parent_block,
INT2FIX(flag | VM_CALL_FCALL), keywords);
9617 ADD_INSNL(ret, line_node, jump, not_basic_new_finish);
9619 ADD_LABEL(ret, not_basic_new);
9621 ADD_SEND_R(ret, line_node, mid, argc, parent_block,
INT2FIX(flag), keywords);
9622 ADD_INSN(ret, line_node, swap);
9624 ADD_LABEL(ret, not_basic_new_finish);
9625 ADD_INSN(ret, line_node, pop);
9628 ADD_SEND_R(ret, line_node, mid, argc, parent_block,
INT2FIX(flag), keywords);
9631 qcall_branch_end(iseq, ret, else_label, branches, node, line_node);
9633 ADD_INSN(ret, line_node, pop);
/*
 * NOTE(review): fragment of the `recv[idx] op= value` compiler
 * (NODE_OP_ASGN1).  Dups receiver+index, reads via idAREF, then either
 * short-circuits (||=/&&=) or applies the operator, and writes back via
 * idASET — with careful splat/kw-splat mutability handling so the
 * argument array can be appended to in place.  Stack bookkeeping here is
 * exact; statement order must not change.
 */
9641 const int line = nd_line(node);
9643 unsigned int flag = 0;
9645 ID id = RNODE_OP_ASGN1(node)->nd_mid;
9671 ADD_INSN(ret, node, putnil);
9673 asgnflag = COMPILE_RECV(ret,
"NODE_OP_ASGN1 recv", node, RNODE_OP_ASGN1(node)->nd_recv);
9674 CHECK(asgnflag != -1);
9675 switch (nd_type(RNODE_OP_ASGN1(node)->nd_index)) {
9680 argc = setup_args(iseq, ret, RNODE_OP_ASGN1(node)->nd_index, &flag, NULL);
9681 CHECK(!
NIL_P(argc));
/* Keep receiver+args on the stack for the later []= write-back. */
9683 int dup_argn =
FIX2INT(argc) + 1;
9684 ADD_INSN1(ret, node, dupn,
INT2FIX(dup_argn));
9686 ADD_SEND_R(ret, node, idAREF, argc, NULL,
INT2FIX(flag & ~VM_CALL_ARGS_SPLAT_MUT), NULL);
9688 if (
id == idOROP ||
id == idANDOP) {
/* a[i] ||= x  /  a[i] &&= x : test the read value, maybe skip write. */
9697 LABEL *label = NEW_LABEL(line);
9698 LABEL *lfin = NEW_LABEL(line);
9700 ADD_INSN(ret, node, dup);
9702 ADD_INSNL(ret, node, branchif, label);
9705 ADD_INSNL(ret, node, branchunless, label);
9707 ADD_INSN(ret, node, pop);
9709 CHECK(COMPILE(ret,
"NODE_OP_ASGN1 nd_rvalue: ", RNODE_OP_ASGN1(node)->nd_rvalue));
9711 ADD_INSN1(ret, node, setn,
INT2FIX(dup_argn+1));
/* Splat args must be made mutable before pushing the new value. */
9713 if (flag & VM_CALL_ARGS_SPLAT) {
9714 if (!(flag & VM_CALL_ARGS_SPLAT_MUT)) {
9715 ADD_INSN(ret, node, swap);
9716 ADD_INSN1(ret, node, splatarray,
Qtrue);
9717 ADD_INSN(ret, node, swap);
9718 flag |= VM_CALL_ARGS_SPLAT_MUT;
9720 ADD_INSN1(ret, node, pushtoarray,
INT2FIX(1));
9721 ADD_SEND_R(ret, node, idASET, argc, NULL,
INT2FIX(flag), NULL);
9724 ADD_SEND_R(ret, node, idASET, FIXNUM_INC(argc, 1), NULL,
INT2FIX(flag), NULL);
9726 ADD_INSN(ret, node, pop);
9727 ADD_INSNL(ret, node, jump, lfin);
9728 ADD_LABEL(ret, label);
9730 ADD_INSN1(ret, node, setn,
INT2FIX(dup_argn+1));
9732 ADD_INSN1(ret, node, adjuststack,
INT2FIX(dup_argn+1));
9733 ADD_LABEL(ret, lfin);
/* plain operator case: a[i] = a[i] op x. */
9736 CHECK(COMPILE(ret,
"NODE_OP_ASGN1 nd_rvalue: ", RNODE_OP_ASGN1(node)->nd_rvalue));
9737 ADD_SEND(ret, node,
id,
INT2FIX(1));
9739 ADD_INSN1(ret, node, setn,
INT2FIX(dup_argn+1));
9741 if (flag & VM_CALL_ARGS_SPLAT) {
9742 if (flag & VM_CALL_KW_SPLAT) {
9743 ADD_INSN1(ret, node, topn,
INT2FIX(2));
9744 if (!(flag & VM_CALL_ARGS_SPLAT_MUT)) {
9745 ADD_INSN1(ret, node, splatarray,
Qtrue);
9746 flag |= VM_CALL_ARGS_SPLAT_MUT;
9748 ADD_INSN(ret, node, swap);
9749 ADD_INSN1(ret, node, pushtoarray,
INT2FIX(1));
9750 ADD_INSN1(ret, node, setn,
INT2FIX(2));
9751 ADD_INSN(ret, node, pop);
9754 if (!(flag & VM_CALL_ARGS_SPLAT_MUT)) {
9755 ADD_INSN(ret, node, swap);
9756 ADD_INSN1(ret, node, splatarray,
Qtrue);
9757 ADD_INSN(ret, node, swap);
9758 flag |= VM_CALL_ARGS_SPLAT_MUT;
9760 ADD_INSN1(ret, node, pushtoarray,
INT2FIX(1));
9762 ADD_SEND_R(ret, node, idASET, argc, NULL,
INT2FIX(flag), NULL);
9765 ADD_SEND_R(ret, node, idASET, FIXNUM_INC(argc, 1), NULL,
INT2FIX(flag), NULL);
9767 ADD_INSN(ret, node, pop);
/*
 * NOTE(review): fragment of the `recv.attr op= value` compiler
 * (NODE_OP_ASGN2).  Reads via the getter `vid`, applies the operator (or
 * short-circuits for ||=/&&=), and writes back through the attrset id
 * `aid`; nd_aid marks the safe-navigation form (&.), which branchnils
 * around the whole thing via lskip.
 */
9775 const int line = nd_line(node);
9776 ID atype = RNODE_OP_ASGN2(node)->nd_mid;
9777 ID vid = RNODE_OP_ASGN2(node)->nd_vid, aid = rb_id_attrset(vid);
9779 LABEL *lfin = NEW_LABEL(line);
9780 LABEL *lcfin = NEW_LABEL(line);
9835 asgnflag = COMPILE_RECV(ret,
"NODE_OP_ASGN2#recv", node, RNODE_OP_ASGN2(node)->nd_recv);
9836 CHECK(asgnflag != -1);
9837 if (RNODE_OP_ASGN2(node)->nd_aid) {
/* safe navigation: skip everything when the receiver is nil. */
9838 lskip = NEW_LABEL(line);
9839 ADD_INSN(ret, node, dup);
9840 ADD_INSNL(ret, node, branchnil, lskip);
9842 ADD_INSN(ret, node, dup);
9843 ADD_SEND_WITH_FLAG(ret, node, vid,
INT2FIX(0),
INT2FIX(asgnflag));
9845 if (atype == idOROP || atype == idANDOP) {
9847 ADD_INSN(ret, node, dup);
9849 if (atype == idOROP) {
9850 ADD_INSNL(ret, node, branchif, lcfin);
9853 ADD_INSNL(ret, node, branchunless, lcfin);
9856 ADD_INSN(ret, node, pop);
9858 CHECK(COMPILE(ret,
"NODE_OP_ASGN2 val", RNODE_OP_ASGN2(node)->nd_value));
9860 ADD_INSN(ret, node, swap);
9861 ADD_INSN1(ret, node, topn,
INT2FIX(1));
9863 ADD_SEND_WITH_FLAG(ret, node, aid,
INT2FIX(1),
INT2FIX(asgnflag));
9864 ADD_INSNL(ret, node, jump, lfin);
/* short-circuit path: keep the read value as the expression result. */
9866 ADD_LABEL(ret, lcfin);
9868 ADD_INSN(ret, node, swap);
9871 ADD_LABEL(ret, lfin);
9874 CHECK(COMPILE(ret,
"NODE_OP_ASGN2 val", RNODE_OP_ASGN2(node)->nd_value));
9875 ADD_SEND(ret, node, atype,
INT2FIX(1));
9877 ADD_INSN(ret, node, swap);
9878 ADD_INSN1(ret, node, topn,
INT2FIX(1));
9880 ADD_SEND_WITH_FLAG(ret, node, aid,
INT2FIX(1),
INT2FIX(asgnflag));
/* lskip placement differs by popped-ness to keep the stack balanced. */
9882 if (lskip && popped) {
9883 ADD_LABEL(ret, lskip);
9885 ADD_INSN(ret, node, pop);
9886 if (lskip && !popped) {
9887 ADD_LABEL(ret, lskip);
/* Forward declaration: compiles `value` for assignment to constant `lhs`
 * under the given Ractor-shareability mode (used by op-cdecl below before
 * the definition appears). */
9892static int compile_shareable_constant_value(
rb_iseq_t *iseq,
LINK_ANCHOR *ret,
enum rb_parser_shareability shareable,
const NODE *lhs,
const NODE *value);
9897 const int line = nd_line(node);
9902 switch (nd_type(RNODE_OP_CDECL(node)->nd_head)) {
9907 CHECK(COMPILE(ret,
"NODE_OP_CDECL/colon2#nd_head", RNODE_COLON2(RNODE_OP_CDECL(node)->nd_head)->nd_head));
9910 COMPILE_ERROR(ERROR_ARGS
"%s: invalid node in NODE_OP_CDECL",
9911 ruby_node_name(nd_type(RNODE_OP_CDECL(node)->nd_head)));
9914 mid = get_node_colon_nd_mid(RNODE_OP_CDECL(node)->nd_head);
9916 if (RNODE_OP_CDECL(node)->nd_aid == idOROP) {
9917 lassign = NEW_LABEL(line);
9918 ADD_INSN(ret, node, dup);
9919 ADD_INSN3(ret, node, defined,
INT2FIX(DEFINED_CONST_FROM),
9921 ADD_INSNL(ret, node, branchunless, lassign);
9923 ADD_INSN(ret, node, dup);
9924 ADD_INSN1(ret, node, putobject,
Qtrue);
9925 ADD_INSN1(ret, node, getconstant,
ID2SYM(mid));
9927 if (RNODE_OP_CDECL(node)->nd_aid == idOROP || RNODE_OP_CDECL(node)->nd_aid == idANDOP) {
9928 lfin = NEW_LABEL(line);
9929 if (!popped) ADD_INSN(ret, node, dup);
9930 if (RNODE_OP_CDECL(node)->nd_aid == idOROP)
9931 ADD_INSNL(ret, node, branchif, lfin);
9933 ADD_INSNL(ret, node, branchunless, lfin);
9935 if (!popped) ADD_INSN(ret, node, pop);
9936 if (lassign) ADD_LABEL(ret, lassign);
9937 CHECK(compile_shareable_constant_value(iseq, ret, RNODE_OP_CDECL(node)->shareability, RNODE_OP_CDECL(node)->nd_head, RNODE_OP_CDECL(node)->nd_value));
9940 ADD_INSN1(ret, node, topn,
INT2FIX(1));
9942 ADD_INSN1(ret, node, dupn,
INT2FIX(2));
9943 ADD_INSN(ret, node, swap);
9945 ADD_INSN1(ret, node, setconstant,
ID2SYM(mid));
9946 ADD_LABEL(ret, lfin);
9947 if (!popped) ADD_INSN(ret, node, swap);
9948 ADD_INSN(ret, node, pop);
9951 CHECK(compile_shareable_constant_value(iseq, ret, RNODE_OP_CDECL(node)->shareability, RNODE_OP_CDECL(node)->nd_head, RNODE_OP_CDECL(node)->nd_value));
9953 ADD_CALL(ret, node, RNODE_OP_CDECL(node)->nd_aid,
INT2FIX(1));
9955 ADD_INSN(ret, node, swap);
9957 ADD_INSN1(ret, node, topn,
INT2FIX(1));
9958 ADD_INSN(ret, node, swap);
9960 ADD_INSN1(ret, node, setconstant,
ID2SYM(mid));
9968 const int line = nd_line(node);
9969 LABEL *lfin = NEW_LABEL(line);
9972 if (
type == NODE_OP_ASGN_OR && !nd_type_p(RNODE_OP_ASGN_OR(node)->nd_head, NODE_IVAR)) {
9976 defined_expr(iseq, ret, RNODE_OP_ASGN_OR(node)->nd_head, lfinish,
Qfalse,
false);
9977 lassign = lfinish[1];
9979 lassign = NEW_LABEL(line);
9981 ADD_INSNL(ret, node, branchunless, lassign);
9984 lassign = NEW_LABEL(line);
9987 CHECK(COMPILE(ret,
"NODE_OP_ASGN_AND/OR#nd_head", RNODE_OP_ASGN_OR(node)->nd_head));
9990 ADD_INSN(ret, node, dup);
9993 if (
type == NODE_OP_ASGN_AND) {
9994 ADD_INSNL(ret, node, branchunless, lfin);
9997 ADD_INSNL(ret, node, branchif, lfin);
10001 ADD_INSN(ret, node, pop);
10004 ADD_LABEL(ret, lassign);
10005 CHECK(COMPILE_(ret,
"NODE_OP_ASGN_AND/OR#nd_value", RNODE_OP_ASGN_OR(node)->nd_value, popped));
10006 ADD_LABEL(ret, lfin);
10016 unsigned int flag = 0;
10018 const rb_iseq_t *parent_block = ISEQ_COMPILE_DATA(iseq)->current_block;
10022 ISEQ_COMPILE_DATA(iseq)->current_block = NULL;
10024 if (
type == NODE_SUPER) {
10025 VALUE vargc = setup_args(iseq, args, RNODE_SUPER(node)->nd_args, &flag, &keywords);
10026 CHECK(!
NIL_P(vargc));
10028 if ((flag & VM_CALL_ARGS_BLOCKARG) && (flag & VM_CALL_KW_SPLAT) && !(flag & VM_CALL_KW_SPLAT_MUT)) {
10029 ADD_INSN(args, node, splatkw);
10032 if (flag & VM_CALL_ARGS_BLOCKARG) {
10039 const rb_iseq_t *liseq = body->local_iseq;
10041 const struct rb_iseq_param_keyword *
const local_kwd = local_body->param.keyword;
10042 int lvar_level = get_lvar_level(iseq);
10044 argc = local_body->param.lead_num;
10047 for (i = 0; i < local_body->param.lead_num; i++) {
10048 int idx = local_body->local_table_size - i;
10049 ADD_GETLOCAL(args, node, idx, lvar_level);
10053 if (local_body->param.flags.forwardable) {
10054 flag |= VM_CALL_FORWARDING;
10055 int idx = local_body->local_table_size - get_local_var_idx(liseq, idDot3);
10056 ADD_GETLOCAL(args, node, idx, lvar_level);
10059 if (local_body->param.flags.has_opt) {
10062 for (j = 0; j < local_body->param.opt_num; j++) {
10063 int idx = local_body->local_table_size - (i + j);
10064 ADD_GETLOCAL(args, node, idx, lvar_level);
10069 if (local_body->param.flags.has_rest) {
10071 int idx = local_body->local_table_size - local_body->param.rest_start;
10072 ADD_GETLOCAL(args, node, idx, lvar_level);
10073 ADD_INSN1(args, node, splatarray, RBOOL(local_body->param.flags.has_post));
10075 argc = local_body->param.rest_start + 1;
10076 flag |= VM_CALL_ARGS_SPLAT;
10078 if (local_body->param.flags.has_post) {
10080 int post_len = local_body->param.post_num;
10081 int post_start = local_body->param.post_start;
10083 if (local_body->param.flags.has_rest) {
10085 for (j=0; j<post_len; j++) {
10086 int idx = local_body->local_table_size - (post_start + j);
10087 ADD_GETLOCAL(args, node, idx, lvar_level);
10089 ADD_INSN1(args, node, pushtoarray,
INT2FIX(j));
10090 flag |= VM_CALL_ARGS_SPLAT_MUT;
10095 for (j=0; j<post_len; j++) {
10096 int idx = local_body->local_table_size - (post_start + j);
10097 ADD_GETLOCAL(args, node, idx, lvar_level);
10099 argc = post_len + post_start;
10103 if (local_body->param.flags.has_kw) {
10104 int local_size = local_body->local_table_size;
10107 ADD_INSN1(args, node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
10109 if (local_body->param.flags.has_kwrest) {
10110 int idx = local_body->local_table_size - local_kwd->rest_start;
10111 ADD_GETLOCAL(args, node, idx, lvar_level);
10113 ADD_SEND (args, node, rb_intern(
"dup"),
INT2FIX(0));
10116 ADD_INSN1(args, node, newhash,
INT2FIX(0));
10118 for (i = 0; i < local_kwd->num; ++i) {
10119 ID id = local_kwd->table[i];
10120 int idx = local_size - get_local_var_idx(liseq,
id);
10121 ADD_INSN1(args, node, putobject,
ID2SYM(
id));
10122 ADD_GETLOCAL(args, node, idx, lvar_level);
10124 ADD_SEND(args, node, id_core_hash_merge_ptr,
INT2FIX(i * 2 + 1));
10125 flag |= VM_CALL_KW_SPLAT| VM_CALL_KW_SPLAT_MUT;
10127 else if (local_body->param.flags.has_kwrest) {
10128 int idx = local_body->local_table_size - local_kwd->rest_start;
10129 ADD_GETLOCAL(args, node, idx, lvar_level);
10131 flag |= VM_CALL_KW_SPLAT;
10135 if (use_block && parent_block == NULL) {
10136 iseq_set_use_block(ISEQ_BODY(iseq)->local_iseq);
10139 flag |= VM_CALL_SUPER | VM_CALL_FCALL;
10140 if (
type == NODE_ZSUPER) flag |= VM_CALL_ZSUPER;
10141 ADD_INSN(ret, node, putself);
10142 ADD_SEQ(ret, args);
10144 const struct rb_callinfo * ci = new_callinfo(iseq, 0, argc, flag, keywords, parent_block != NULL);
10146 if (vm_ci_flag(ci) & VM_CALL_FORWARDING) {
10147 ADD_INSN2(ret, node, invokesuperforward, ci, parent_block);
10150 ADD_INSN2(ret, node, invokesuper, ci, parent_block);
10154 ADD_INSN(ret, node, pop);
10164 unsigned int flag = 0;
10169 switch (ISEQ_BODY(ISEQ_BODY(iseq)->local_iseq)->
type) {
10170 case ISEQ_TYPE_TOP:
10171 case ISEQ_TYPE_MAIN:
10172 case ISEQ_TYPE_CLASS:
10173 COMPILE_ERROR(ERROR_ARGS
"Invalid yield");
10178 if (RNODE_YIELD(node)->nd_head) {
10179 argc = setup_args(iseq, args, RNODE_YIELD(node)->nd_head, &flag, &keywords);
10180 CHECK(!
NIL_P(argc));
10186 ADD_SEQ(ret, args);
10187 ADD_INSN1(ret, node, invokeblock, new_callinfo(iseq, 0,
FIX2INT(argc), flag, keywords, FALSE));
10188 iseq_set_use_block(ISEQ_BODY(iseq)->local_iseq);
10191 ADD_INSN(ret, node, pop);
10196 for (; tmp_iseq != ISEQ_BODY(iseq)->local_iseq; level++ ) {
10197 tmp_iseq = ISEQ_BODY(tmp_iseq)->parent_iseq;
10199 if (level > 0) access_outer_variables(iseq, level, rb_intern(
"yield"),
true);
10212 switch ((
int)
type) {
10215 VALUE re = rb_node_regx_string_val(node);
10216 RB_OBJ_SET_FROZEN_SHAREABLE(re);
10217 ADD_INSN1(recv, node, putobject, re);
10218 ADD_INSN2(val, node, getspecial,
INT2FIX(0),
10223 CHECK(COMPILE(recv,
"receiver", RNODE_MATCH2(node)->nd_recv));
10224 CHECK(COMPILE(val,
"value", RNODE_MATCH2(node)->nd_value));
10227 CHECK(COMPILE(recv,
"receiver", RNODE_MATCH3(node)->nd_value));
10228 CHECK(COMPILE(val,
"value", RNODE_MATCH3(node)->nd_recv));
10232 ADD_SEQ(ret, recv);
10234 ADD_SEND(ret, node, idEqTilde,
INT2FIX(1));
10236 if (nd_type_p(node, NODE_MATCH2) && RNODE_MATCH2(node)->nd_args) {
10237 compile_named_capture_assign(iseq, ret, RNODE_MATCH2(node)->nd_args);
10241 ADD_INSN(ret, node, pop);
10252 if (ISEQ_COMPILE_DATA(iseq)->option->inline_const_cache &&
10253 (segments = collect_const_segments(iseq, node))) {
10254 ISEQ_BODY(iseq)->ic_size++;
10255 ADD_INSN1(ret, node, opt_getconstant_path, segments);
10265 CHECK(compile_const_prefix(iseq, node, pref, body));
10266 if (LIST_INSN_SIZE_ZERO(pref)) {
10267 ADD_INSN(ret, node, putnil);
10268 ADD_SEQ(ret, body);
10271 ADD_SEQ(ret, pref);
10272 ADD_SEQ(ret, body);
10278 ADD_CALL_RECEIVER(ret, node);
10279 CHECK(COMPILE(ret,
"colon2#nd_head", RNODE_COLON2(node)->nd_head));
10280 ADD_CALL(ret, node, RNODE_COLON2(node)->nd_mid,
INT2FIX(1));
10283 ADD_INSN(ret, node, pop);
10291 debugi(
"colon3#nd_mid", RNODE_COLON3(node)->nd_mid);
10294 if (ISEQ_COMPILE_DATA(iseq)->option->inline_const_cache) {
10295 ISEQ_BODY(iseq)->ic_size++;
10296 VALUE segments = rb_ary_new_from_args(2,
ID2SYM(idNULL),
ID2SYM(RNODE_COLON3(node)->nd_mid));
10297 RB_OBJ_SET_FROZEN_SHAREABLE(segments);
10298 ADD_INSN1(ret, node, opt_getconstant_path, segments);
10302 ADD_INSN1(ret, node, putobject,
rb_cObject);
10303 ADD_INSN1(ret, node, putobject,
Qtrue);
10304 ADD_INSN1(ret, node, getconstant,
ID2SYM(RNODE_COLON3(node)->nd_mid));
10308 ADD_INSN(ret, node, pop);
10317 const NODE *b = RNODE_DOT2(node)->nd_beg;
10318 const NODE *e = RNODE_DOT2(node)->nd_end;
10320 if (optimizable_range_item_p(b) && optimizable_range_item_p(e)) {
10322 VALUE bv = optimized_range_item(b);
10323 VALUE ev = optimized_range_item(e);
10326 ADD_INSN1(ret, node, putobject, val);
10331 CHECK(COMPILE_(ret,
"min", b, popped));
10332 CHECK(COMPILE_(ret,
"max", e, popped));
10334 ADD_INSN1(ret, node, newrange, flag);
10344 if (ISEQ_BODY(iseq)->
type == ISEQ_TYPE_RESCUE) {
10345 ADD_GETLOCAL(ret, node, LVAR_ERRINFO, 0);
10351 if (ISEQ_BODY(ip)->
type == ISEQ_TYPE_RESCUE) {
10354 ip = ISEQ_BODY(ip)->parent_iseq;
10358 ADD_GETLOCAL(ret, node, LVAR_ERRINFO, level);
10361 ADD_INSN(ret, node, putnil);
10372 LABEL *end_label = NEW_LABEL(nd_line(node));
10373 const NODE *default_value = get_nd_value(RNODE_KW_ARG(node)->nd_body);
10375 if (default_value == NODE_SPECIAL_REQUIRED_KEYWORD) {
10377 COMPILE_ERROR(ERROR_ARGS
"unreachable");
10380 else if (nd_type_p(default_value, NODE_SYM) ||
10381 nd_type_p(default_value, NODE_REGX) ||
10382 nd_type_p(default_value, NODE_LINE) ||
10383 nd_type_p(default_value, NODE_INTEGER) ||
10384 nd_type_p(default_value, NODE_FLOAT) ||
10385 nd_type_p(default_value, NODE_RATIONAL) ||
10386 nd_type_p(default_value, NODE_IMAGINARY) ||
10387 nd_type_p(default_value, NODE_NIL) ||
10388 nd_type_p(default_value, NODE_TRUE) ||
10389 nd_type_p(default_value, NODE_FALSE)) {
10390 COMPILE_ERROR(ERROR_ARGS
"unreachable");
10398 int kw_bits_idx = body->local_table_size - body->param.keyword->bits_start;
10399 int keyword_idx = body->param.keyword->num;
10401 ADD_INSN2(ret, node, checkkeyword,
INT2FIX(kw_bits_idx + VM_ENV_DATA_SIZE - 1),
INT2FIX(keyword_idx));
10402 ADD_INSNL(ret, node, branchif, end_label);
10403 CHECK(COMPILE_POPPED(ret,
"keyword default argument", RNODE_KW_ARG(node)->nd_body));
10404 ADD_LABEL(ret, end_label);
10414 unsigned int flag = 0;
10415 ID mid = RNODE_ATTRASGN(node)->nd_mid;
10417 LABEL *else_label = NULL;
10422 argc = setup_args(iseq, args, RNODE_ATTRASGN(node)->nd_args, &flag, NULL);
10423 CHECK(!
NIL_P(argc));
10425 int asgnflag = COMPILE_RECV(recv,
"recv", node, RNODE_ATTRASGN(node)->nd_recv);
10426 CHECK(asgnflag != -1);
10427 flag |= (
unsigned int)asgnflag;
10429 debugp_param(
"argc", argc);
10430 debugp_param(
"nd_mid",
ID2SYM(mid));
10434 mid = rb_id_attrset(mid);
10435 else_label = qcall_branch_start(iseq, recv, &branches, node, node);
10438 ADD_INSN(ret, node, putnil);
10439 ADD_SEQ(ret, recv);
10440 ADD_SEQ(ret, args);
10442 if (flag & VM_CALL_ARGS_SPLAT) {
10443 ADD_INSN(ret, node, dup);
10444 ADD_INSN1(ret, node, putobject,
INT2FIX(-1));
10445 ADD_SEND_WITH_FLAG(ret, node, idAREF,
INT2FIX(1),
INT2FIX(asgnflag));
10446 ADD_INSN1(ret, node, setn, FIXNUM_INC(argc, 2));
10447 ADD_INSN (ret, node, pop);
10450 ADD_INSN1(ret, node, setn, FIXNUM_INC(argc, 1));
10454 ADD_SEQ(ret, recv);
10455 ADD_SEQ(ret, args);
10457 ADD_SEND_WITH_FLAG(ret, node, mid, argc,
INT2FIX(flag));
10458 qcall_branch_end(iseq, ret, else_label, branches, node, node);
10459 ADD_INSN(ret, node, pop);
10466 ADD_INSN1(ret, value, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
10474 ADD_SEND_WITH_FLAG(ret, value, rb_intern(
"make_shareable_copy"),
INT2FIX(1),
INT2FIX(VM_CALL_ARGS_SIMPLE));
10481 ADD_SEND_WITH_FLAG(ret, value, rb_intern(
"make_shareable"),
INT2FIX(1),
INT2FIX(VM_CALL_ARGS_SIMPLE));
10488node_const_decl_val(
const NODE *node)
10491 switch (nd_type(node)) {
10493 if (RNODE_CDECL(node)->nd_vid) {
10494 path = rb_id2str(RNODE_CDECL(node)->nd_vid);
10498 node = RNODE_CDECL(node)->nd_else;
10506 rb_str_append(path, rb_id2str(RNODE_COLON3(node)->nd_mid));
10509 rb_bug(
"unexpected node: %s", ruby_node_name(nd_type(node)));
10515 for (; node && nd_type_p(node, NODE_COLON2); node = RNODE_COLON2(node)->nd_head) {
10516 rb_ary_push(path, rb_id2str(RNODE_COLON2(node)->nd_mid));
10518 if (node && nd_type_p(node, NODE_CONST)) {
10520 rb_ary_push(path, rb_id2str(RNODE_CONST(node)->nd_vid));
10522 else if (node && nd_type_p(node, NODE_COLON3)) {
10524 rb_ary_push(path, rb_id2str(RNODE_COLON3(node)->nd_mid));
10534 path = rb_fstring(path);
10539const_decl_path(
NODE *dest)
10542 if (!nd_type_p(dest, NODE_CALL)) {
10543 path = node_const_decl_val(dest);
10554 VALUE path = const_decl_path(dest);
10555 ADD_INSN1(ret, value, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
10556 CHECK(COMPILE(ret,
"compile_ensure_shareable_node", value));
10557 ADD_INSN1(ret, value, putobject, path);
10559 ADD_SEND_WITH_FLAG(ret, value, rb_intern(
"ensure_shareable"),
INT2FIX(2),
INT2FIX(VM_CALL_ARGS_SIMPLE));
/* When nonzero, bare (level == 0) expressions are also routed through the
 * ensure-shareable path in compile_shareable_literal_constant (see the
 * `SHAREABLE_BARE_EXPRESSION || level > 0` test below).  The #ifndef guard
 * allows the default to be overridden at build time. */
10564#ifndef SHAREABLE_BARE_EXPRESSION
10565#define SHAREABLE_BARE_EXPRESSION 1
10569compile_shareable_literal_constant(
rb_iseq_t *iseq,
LINK_ANCHOR *ret,
enum rb_parser_shareability shareable,
NODE *dest,
const NODE *node,
size_t level,
VALUE *value_p,
int *shareable_literal_p)
10571# define compile_shareable_literal_constant_next(node, anchor, value_p, shareable_literal_p) \
10572 compile_shareable_literal_constant(iseq, anchor, shareable, dest, node, level+1, value_p, shareable_literal_p)
10574 DECL_ANCHOR(anchor);
10576 enum node_type
type = node ? nd_type(node) : NODE_NIL;
10588 *value_p = rb_node_sym_string_val(node);
10591 *value_p = rb_node_regx_string_val(node);
10594 *value_p = rb_node_line_lineno_val(node);
10597 *value_p = rb_node_integer_literal_val(node);
10600 *value_p = rb_node_float_literal_val(node);
10602 case NODE_RATIONAL:
10603 *value_p = rb_node_rational_literal_val(node);
10605 case NODE_IMAGINARY:
10606 *value_p = rb_node_imaginary_literal_val(node);
10608 case NODE_ENCODING:
10609 *value_p = rb_node_encoding_val(node);
10612 CHECK(COMPILE(ret,
"shareable_literal_constant", node));
10613 *shareable_literal_p = 1;
10617 CHECK(COMPILE(ret,
"shareable_literal_constant", node));
10618 if (shareable == rb_parser_shareable_literal) {
10624 ADD_SEND_WITH_FLAG(ret, node, idUMinus,
INT2FIX(0),
INT2FIX(VM_CALL_ARGS_SIMPLE));
10627 *shareable_literal_p = 1;
10631 VALUE lit = rb_node_str_string_val(node);
10632 ADD_INSN1(ret, node, putobject, lit);
10635 *shareable_literal_p = 1;
10641 VALUE lit = rb_node_file_path_val(node);
10642 ADD_INSN1(ret, node, putobject, lit);
10645 *shareable_literal_p = 1;
10653 ADD_INSN1(ret, node, putobject, lit);
10656 *shareable_literal_p = 1;
10662 INIT_ANCHOR(anchor);
10664 for (
NODE *n = (
NODE *)node; n; n = RNODE_LIST(n)->nd_next) {
10666 int shareable_literal_p2;
10667 NODE *elt = RNODE_LIST(n)->nd_head;
10669 CHECK(compile_shareable_literal_constant_next(elt, anchor, &val, &shareable_literal_p2));
10670 if (shareable_literal_p2) {
10673 else if (
RTEST(lit)) {
10679 if (!UNDEF_P(val)) {
10691 if (!RNODE_HASH(node)->nd_brace) {
10693 *shareable_literal_p = 0;
10696 for (
NODE *n = RNODE_HASH(node)->nd_head; n; n = RNODE_LIST(RNODE_LIST(n)->nd_next)->nd_next) {
10697 if (!RNODE_LIST(n)->nd_head) {
10699 goto compile_shareable;
10703 INIT_ANCHOR(anchor);
10704 lit = rb_hash_new();
10705 for (
NODE *n = RNODE_HASH(node)->nd_head; n; n = RNODE_LIST(RNODE_LIST(n)->nd_next)->nd_next) {
10707 VALUE value_val = 0;
10708 int shareable_literal_p2;
10709 NODE *key = RNODE_LIST(n)->nd_head;
10710 NODE *val = RNODE_LIST(RNODE_LIST(n)->nd_next)->nd_head;
10711 CHECK(compile_shareable_literal_constant_next(key, anchor, &key_val, &shareable_literal_p2));
10712 if (shareable_literal_p2) {
10715 else if (
RTEST(lit)) {
10716 rb_hash_clear(lit);
10719 CHECK(compile_shareable_literal_constant_next(val, anchor, &value_val, &shareable_literal_p2));
10720 if (shareable_literal_p2) {
10723 else if (
RTEST(lit)) {
10724 rb_hash_clear(lit);
10728 if (!UNDEF_P(key_val) && !UNDEF_P(value_val)) {
10729 rb_hash_aset(lit, key_val, value_val);
10732 rb_hash_clear(lit);
10743 if (shareable == rb_parser_shareable_literal &&
10744 (SHAREABLE_BARE_EXPRESSION || level > 0)) {
10745 CHECK(compile_ensure_shareable_node(iseq, ret, dest, node));
10747 *shareable_literal_p = 1;
10750 CHECK(COMPILE(ret,
"shareable_literal_constant", node));
10752 *shareable_literal_p = 0;
10758 if (nd_type(node) == NODE_LIST) {
10759 ADD_INSN1(anchor, node, newarray,
INT2FIX(RNODE_LIST(node)->as.nd_alen));
10761 else if (nd_type(node) == NODE_HASH) {
10762 long len = RNODE_LIST(RNODE_HASH(node)->nd_head)->as.nd_alen;
10768 *shareable_literal_p = 0;
10769 ADD_SEQ(ret, anchor);
10775 if (nd_type(node) == NODE_LIST) {
10776 ADD_INSN1(anchor, node, newarray,
INT2FIX(RNODE_LIST(node)->as.nd_alen));
10778 else if (nd_type(node) == NODE_HASH) {
10779 long len = RNODE_LIST(RNODE_HASH(node)->nd_head)->as.nd_alen;
10784 CHECK(compile_make_shareable_node(iseq, ret, anchor, node,
false));
10786 *shareable_literal_p = 1;
10790 ADD_INSN1(ret, node, putobject, val);
10793 *shareable_literal_p = 1;
10800compile_shareable_constant_value(
rb_iseq_t *iseq,
LINK_ANCHOR *ret,
enum rb_parser_shareability shareable,
const NODE *lhs,
const NODE *value)
10804 DECL_ANCHOR(anchor);
10805 INIT_ANCHOR(anchor);
10807 switch (shareable) {
10808 case rb_parser_shareable_none:
10809 CHECK(COMPILE(ret,
"compile_shareable_constant_value", value));
10812 case rb_parser_shareable_literal:
10813 CHECK(compile_shareable_literal_constant(iseq, anchor, shareable, (
NODE *)lhs, value, 0, &val, &literal_p));
10814 ADD_SEQ(ret, anchor);
10817 case rb_parser_shareable_copy:
10818 case rb_parser_shareable_everything:
10819 CHECK(compile_shareable_literal_constant(iseq, anchor, shareable, (
NODE *)lhs, value, 0, &val, &literal_p));
10821 CHECK(compile_make_shareable_node(iseq, ret, anchor, value, shareable == rb_parser_shareable_copy));
10824 ADD_SEQ(ret, anchor);
10828 rb_bug(
"unexpected rb_parser_shareability: %d", shareable);
10845 int lineno = ISEQ_COMPILE_DATA(iseq)->last_line;
10846 if (lineno == 0) lineno =
FIX2INT(rb_iseq_first_lineno(iseq));
10847 debugs(
"node: NODE_NIL(implicit)\n");
10848 ADD_SYNTHETIC_INSN(ret, lineno, -1, putnil);
10852 return iseq_compile_each0(iseq, ret, node, popped);
10858 const int line = (int)nd_line(node);
10859 const enum node_type
type = nd_type(node);
10862 if (ISEQ_COMPILE_DATA(iseq)->last_line == line) {
10866 if (nd_fl_newline(node)) {
10868 ISEQ_COMPILE_DATA(iseq)->last_line = line;
10869 if (line > 0 && ISEQ_COVERAGE(iseq) && ISEQ_LINE_COVERAGE(iseq)) {
10870 event |= RUBY_EVENT_COVERAGE_LINE;
10872 ADD_TRACE(ret, event);
10876 debug_node_start(node);
10877#undef BEFORE_RETURN
10878#define BEFORE_RETURN debug_node_end()
10882 CHECK(compile_block(iseq, ret, node, popped));
10886 CHECK(compile_if(iseq, ret, node, popped,
type));
10889 CHECK(compile_case(iseq, ret, node, popped));
10892 CHECK(compile_case2(iseq, ret, node, popped));
10895 CHECK(compile_case3(iseq, ret, node, popped));
10899 CHECK(compile_loop(iseq, ret, node, popped,
type));
10903 CHECK(compile_iter(iseq, ret, node, popped));
10905 case NODE_FOR_MASGN:
10906 CHECK(compile_for_masgn(iseq, ret, node, popped));
10909 CHECK(compile_break(iseq, ret, node, popped));
10912 CHECK(compile_next(iseq, ret, node, popped));
10915 CHECK(compile_redo(iseq, ret, node, popped));
10918 CHECK(compile_retry(iseq, ret, node, popped));
10921 CHECK(COMPILE_(ret,
"NODE_BEGIN", RNODE_BEGIN(node)->nd_body, popped));
10925 CHECK(compile_rescue(iseq, ret, node, popped));
10928 CHECK(compile_resbody(iseq, ret, node, popped));
10931 CHECK(compile_ensure(iseq, ret, node, popped));
10936 LABEL *end_label = NEW_LABEL(line);
10937 CHECK(COMPILE(ret,
"nd_1st", RNODE_OR(node)->nd_1st));
10939 ADD_INSN(ret, node, dup);
10941 if (
type == NODE_AND) {
10942 ADD_INSNL(ret, node, branchunless, end_label);
10945 ADD_INSNL(ret, node, branchif, end_label);
10948 ADD_INSN(ret, node, pop);
10950 CHECK(COMPILE_(ret,
"nd_2nd", RNODE_OR(node)->nd_2nd, popped));
10951 ADD_LABEL(ret, end_label);
10956 compile_massign(iseq, ret, node, popped);
10961 ID id = RNODE_LASGN(node)->nd_vid;
10962 int idx = ISEQ_BODY(body->local_iseq)->local_table_size - get_local_var_idx(iseq,
id);
10964 debugs(
"lvar: %s idx: %d\n", rb_id2name(
id), idx);
10965 CHECK(COMPILE(ret,
"rvalue", RNODE_LASGN(node)->nd_value));
10968 ADD_INSN(ret, node, dup);
10970 ADD_SETLOCAL(ret, node, idx, get_lvar_level(iseq));
10975 ID id = RNODE_DASGN(node)->nd_vid;
10976 CHECK(COMPILE(ret,
"dvalue", RNODE_DASGN(node)->nd_value));
10977 debugi(
"dassn id", rb_id2str(
id) ?
id :
'*');
10980 ADD_INSN(ret, node, dup);
10983 idx = get_dyna_var_idx(iseq,
id, &lv, &ls);
10986 COMPILE_ERROR(ERROR_ARGS
"NODE_DASGN: unknown id (%"PRIsVALUE
")",
10990 ADD_SETLOCAL(ret, node, ls - idx, lv);
10994 CHECK(COMPILE(ret,
"lvalue", RNODE_GASGN(node)->nd_value));
10997 ADD_INSN(ret, node, dup);
10999 ADD_INSN1(ret, node, setglobal,
ID2SYM(RNODE_GASGN(node)->nd_vid));
11003 CHECK(COMPILE(ret,
"lvalue", RNODE_IASGN(node)->nd_value));
11005 ADD_INSN(ret, node, dup);
11007 ADD_INSN2(ret, node, setinstancevariable,
11008 ID2SYM(RNODE_IASGN(node)->nd_vid),
11009 get_ivar_ic_value(iseq,RNODE_IASGN(node)->nd_vid));
11013 if (RNODE_CDECL(node)->nd_vid) {
11014 CHECK(compile_shareable_constant_value(iseq, ret, RNODE_CDECL(node)->shareability, node, RNODE_CDECL(node)->nd_value));
11017 ADD_INSN(ret, node, dup);
11020 ADD_INSN1(ret, node, putspecialobject,
11021 INT2FIX(VM_SPECIAL_OBJECT_CONST_BASE));
11022 ADD_INSN1(ret, node, setconstant,
ID2SYM(RNODE_CDECL(node)->nd_vid));
11025 compile_cpath(ret, iseq, RNODE_CDECL(node)->nd_else);
11026 CHECK(compile_shareable_constant_value(iseq, ret, RNODE_CDECL(node)->shareability, node, RNODE_CDECL(node)->nd_value));
11027 ADD_INSN(ret, node, swap);
11030 ADD_INSN1(ret, node, topn,
INT2FIX(1));
11031 ADD_INSN(ret, node, swap);
11034 ADD_INSN1(ret, node, setconstant,
ID2SYM(get_node_colon_nd_mid(RNODE_CDECL(node)->nd_else)));
11039 CHECK(COMPILE(ret,
"cvasgn val", RNODE_CVASGN(node)->nd_value));
11041 ADD_INSN(ret, node, dup);
11043 ADD_INSN2(ret, node, setclassvariable,
11044 ID2SYM(RNODE_CVASGN(node)->nd_vid),
11045 get_cvar_ic_value(iseq, RNODE_CVASGN(node)->nd_vid));
11048 case NODE_OP_ASGN1:
11049 CHECK(compile_op_asgn1(iseq, ret, node, popped));
11051 case NODE_OP_ASGN2:
11052 CHECK(compile_op_asgn2(iseq, ret, node, popped));
11054 case NODE_OP_CDECL:
11055 CHECK(compile_op_cdecl(iseq, ret, node, popped));
11057 case NODE_OP_ASGN_AND:
11058 case NODE_OP_ASGN_OR:
11059 CHECK(compile_op_log(iseq, ret, node, popped,
type));
11063 if (compile_call_precheck_freeze(iseq, ret, node, node, popped) == TRUE) {
11069 if (compile_call(iseq, ret, node,
type, node, popped,
false) == COMPILE_NG) {
11075 CHECK(compile_super(iseq, ret, node, popped,
type));
11078 CHECK(compile_array(iseq, ret, node, popped, TRUE) >= 0);
11083 ADD_INSN1(ret, node, newarray,
INT2FIX(0));
11088 CHECK(compile_hash(iseq, ret, node, FALSE, popped) >= 0);
11091 CHECK(compile_return(iseq, ret, node, popped));
11094 CHECK(compile_yield(iseq, ret, node, popped));
11098 compile_lvar(iseq, ret, node, RNODE_LVAR(node)->nd_vid);
11104 debugi(
"nd_vid", RNODE_DVAR(node)->nd_vid);
11106 idx = get_dyna_var_idx(iseq, RNODE_DVAR(node)->nd_vid, &lv, &ls);
11108 COMPILE_ERROR(ERROR_ARGS
"unknown dvar (%"PRIsVALUE
")",
11109 rb_id2str(RNODE_DVAR(node)->nd_vid));
11112 ADD_GETLOCAL(ret, node, ls - idx, lv);
11117 ADD_INSN1(ret, node, getglobal,
ID2SYM(RNODE_GVAR(node)->nd_vid));
11119 ADD_INSN(ret, node, pop);
11124 debugi(
"nd_vid", RNODE_IVAR(node)->nd_vid);
11126 ADD_INSN2(ret, node, getinstancevariable,
11127 ID2SYM(RNODE_IVAR(node)->nd_vid),
11128 get_ivar_ic_value(iseq, RNODE_IVAR(node)->nd_vid));
11133 debugi(
"nd_vid", RNODE_CONST(node)->nd_vid);
11135 if (ISEQ_COMPILE_DATA(iseq)->option->inline_const_cache) {
11137 VALUE segments = rb_ary_new_from_args(1,
ID2SYM(RNODE_CONST(node)->nd_vid));
11138 RB_OBJ_SET_FROZEN_SHAREABLE(segments);
11139 ADD_INSN1(ret, node, opt_getconstant_path, segments);
11143 ADD_INSN(ret, node, putnil);
11144 ADD_INSN1(ret, node, putobject,
Qtrue);
11145 ADD_INSN1(ret, node, getconstant,
ID2SYM(RNODE_CONST(node)->nd_vid));
11149 ADD_INSN(ret, node, pop);
11155 ADD_INSN2(ret, node, getclassvariable,
11156 ID2SYM(RNODE_CVAR(node)->nd_vid),
11157 get_cvar_ic_value(iseq, RNODE_CVAR(node)->nd_vid));
11161 case NODE_NTH_REF:{
11163 if (!RNODE_NTH_REF(node)->nd_nth) {
11164 ADD_INSN(ret, node, putnil);
11167 ADD_INSN2(ret, node, getspecial,
INT2FIX(1) ,
11168 INT2FIX(RNODE_NTH_REF(node)->nd_nth << 1));
11172 case NODE_BACK_REF:{
11174 ADD_INSN2(ret, node, getspecial,
INT2FIX(1) ,
11175 INT2FIX(0x01 | (RNODE_BACK_REF(node)->nd_nth << 1)));
11182 CHECK(compile_match(iseq, ret, node, popped,
type));
11186 ADD_INSN1(ret, node, putobject, rb_node_sym_string_val(node));
11192 ADD_INSN1(ret, node, putobject, rb_node_line_lineno_val(node));
11196 case NODE_ENCODING:{
11198 ADD_INSN1(ret, node, putobject, rb_node_encoding_val(node));
11202 case NODE_INTEGER:{
11203 VALUE lit = rb_node_integer_literal_val(node);
11205 debugp_param(
"integer", lit);
11207 ADD_INSN1(ret, node, putobject, lit);
11213 VALUE lit = rb_node_float_literal_val(node);
11215 debugp_param(
"float", lit);
11217 ADD_INSN1(ret, node, putobject, lit);
11222 case NODE_RATIONAL:{
11223 VALUE lit = rb_node_rational_literal_val(node);
11225 debugp_param(
"rational", lit);
11227 ADD_INSN1(ret, node, putobject, lit);
11232 case NODE_IMAGINARY:{
11233 VALUE lit = rb_node_imaginary_literal_val(node);
11235 debugp_param(
"imaginary", lit);
11237 ADD_INSN1(ret, node, putobject, lit);
11244 debugp_param(
"nd_lit", get_string_value(node));
11246 VALUE lit = get_string_value(node);
11249 option->frozen_string_literal != ISEQ_FROZEN_STRING_LITERAL_DISABLED) {
11250 lit = rb_str_with_debug_created_info(lit, rb_iseq_path(iseq), line);
11251 RB_OBJ_SET_SHAREABLE(lit);
11253 switch (option->frozen_string_literal) {
11254 case ISEQ_FROZEN_STRING_LITERAL_UNSET:
11255 ADD_INSN1(ret, node, dupchilledstring, lit);
11257 case ISEQ_FROZEN_STRING_LITERAL_DISABLED:
11258 ADD_INSN1(ret, node, dupstring, lit);
11260 case ISEQ_FROZEN_STRING_LITERAL_ENABLED:
11261 ADD_INSN1(ret, node, putobject, lit);
11264 rb_bug(
"invalid frozen_string_literal");
11271 compile_dstr(iseq, ret, node);
11274 ADD_INSN(ret, node, pop);
11279 ADD_CALL_RECEIVER(ret, node);
11280 VALUE str = rb_node_str_string_val(node);
11281 ADD_INSN1(ret, node, putobject, str);
11283 ADD_CALL(ret, node, idBackquote,
INT2FIX(1));
11286 ADD_INSN(ret, node, pop);
11291 ADD_CALL_RECEIVER(ret, node);
11292 compile_dstr(iseq, ret, node);
11293 ADD_CALL(ret, node, idBackquote,
INT2FIX(1));
11296 ADD_INSN(ret, node, pop);
11301 CHECK(compile_evstr(iseq, ret, RNODE_EVSTR(node)->nd_body, popped));
11305 VALUE lit = rb_node_regx_string_val(node);
11306 RB_OBJ_SET_SHAREABLE(lit);
11307 ADD_INSN1(ret, node, putobject, lit);
11313 compile_dregx(iseq, ret, node, popped);
11316 int ic_index = body->ise_size++;
11318 block_iseq = NEW_CHILD_ISEQ(RNODE_ONCE(node)->nd_body, make_name_for_block(iseq), ISEQ_TYPE_PLAIN, line);
11320 ADD_INSN2(ret, node, once, block_iseq,
INT2FIX(ic_index));
11324 ADD_INSN(ret, node, pop);
11328 case NODE_ARGSCAT:{
11330 CHECK(COMPILE(ret,
"argscat head", RNODE_ARGSCAT(node)->nd_head));
11331 ADD_INSN1(ret, node, splatarray,
Qfalse);
11332 ADD_INSN(ret, node, pop);
11333 CHECK(COMPILE(ret,
"argscat body", RNODE_ARGSCAT(node)->nd_body));
11334 ADD_INSN1(ret, node, splatarray,
Qfalse);
11335 ADD_INSN(ret, node, pop);
11338 CHECK(COMPILE(ret,
"argscat head", RNODE_ARGSCAT(node)->nd_head));
11339 const NODE *body_node = RNODE_ARGSCAT(node)->nd_body;
11340 if (nd_type_p(body_node, NODE_LIST)) {
11341 CHECK(compile_array(iseq, ret, body_node, popped, FALSE) >= 0);
11344 CHECK(COMPILE(ret,
"argscat body", body_node));
11345 ADD_INSN(ret, node, concattoarray);
11350 case NODE_ARGSPUSH:{
11352 CHECK(COMPILE(ret,
"argspush head", RNODE_ARGSPUSH(node)->nd_head));
11353 ADD_INSN1(ret, node, splatarray,
Qfalse);
11354 ADD_INSN(ret, node, pop);
11355 CHECK(COMPILE_(ret,
"argspush body", RNODE_ARGSPUSH(node)->nd_body, popped));
11358 CHECK(COMPILE(ret,
"argspush head", RNODE_ARGSPUSH(node)->nd_head));
11359 const NODE *body_node = RNODE_ARGSPUSH(node)->nd_body;
11360 if (keyword_node_p(body_node)) {
11361 CHECK(COMPILE_(ret,
"array element", body_node, FALSE));
11362 ADD_INSN(ret, node, pushtoarraykwsplat);
11364 else if (static_literal_node_p(body_node, iseq,
false)) {
11365 ADD_INSN1(ret, body_node, putobject, static_literal_value(body_node, iseq));
11366 ADD_INSN1(ret, node, pushtoarray,
INT2FIX(1));
11369 CHECK(COMPILE_(ret,
"array element", body_node, FALSE));
11370 ADD_INSN1(ret, node, pushtoarray,
INT2FIX(1));
11376 CHECK(COMPILE(ret,
"splat", RNODE_SPLAT(node)->nd_head));
11377 ADD_INSN1(ret, node, splatarray,
Qtrue);
11380 ADD_INSN(ret, node, pop);
11385 ID mid = RNODE_DEFN(node)->nd_mid;
11386 const rb_iseq_t *method_iseq = NEW_ISEQ(RNODE_DEFN(node)->nd_defn,
11388 ISEQ_TYPE_METHOD, line);
11390 debugp_param(
"defn/iseq", rb_iseqw_new(method_iseq));
11391 ADD_INSN2(ret, node, definemethod,
ID2SYM(mid), method_iseq);
11395 ADD_INSN1(ret, node, putobject,
ID2SYM(mid));
11401 ID mid = RNODE_DEFS(node)->nd_mid;
11402 const rb_iseq_t * singleton_method_iseq = NEW_ISEQ(RNODE_DEFS(node)->nd_defn,
11404 ISEQ_TYPE_METHOD, line);
11406 debugp_param(
"defs/iseq", rb_iseqw_new(singleton_method_iseq));
11407 CHECK(COMPILE(ret,
"defs: recv", RNODE_DEFS(node)->nd_recv));
11408 ADD_INSN2(ret, node, definesmethod,
ID2SYM(mid), singleton_method_iseq);
11412 ADD_INSN1(ret, node, putobject,
ID2SYM(mid));
11417 ADD_INSN1(ret, node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
11418 ADD_INSN1(ret, node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_CBASE));
11419 CHECK(COMPILE(ret,
"alias arg1", RNODE_ALIAS(node)->nd_1st));
11420 CHECK(COMPILE(ret,
"alias arg2", RNODE_ALIAS(node)->nd_2nd));
11421 ADD_SEND(ret, node, id_core_set_method_alias,
INT2FIX(3));
11424 ADD_INSN(ret, node, pop);
11429 ADD_INSN1(ret, node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
11430 ADD_INSN1(ret, node, putobject,
ID2SYM(RNODE_VALIAS(node)->nd_alias));
11431 ADD_INSN1(ret, node, putobject,
ID2SYM(RNODE_VALIAS(node)->nd_orig));
11432 ADD_SEND(ret, node, id_core_set_variable_alias,
INT2FIX(2));
11435 ADD_INSN(ret, node, pop);
11442 for (
long i = 0; i < ary->len; i++) {
11443 ADD_INSN1(ret, node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
11444 ADD_INSN1(ret, node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_CBASE));
11445 CHECK(COMPILE(ret,
"undef arg", ary->data[i]));
11446 ADD_SEND(ret, node, id_core_undef_method,
INT2FIX(2));
11448 if (i < ary->
len - 1) {
11449 ADD_INSN(ret, node, pop);
11454 ADD_INSN(ret, node, pop);
11459 const rb_iseq_t *class_iseq = NEW_CHILD_ISEQ(RNODE_CLASS(node)->nd_body,
11460 rb_str_freeze(rb_sprintf(
"<class:%"PRIsVALUE
">", rb_id2str(get_node_colon_nd_mid(RNODE_CLASS(node)->nd_cpath)))),
11461 ISEQ_TYPE_CLASS, line);
11462 const int flags = VM_DEFINECLASS_TYPE_CLASS |
11463 (RNODE_CLASS(node)->nd_super ? VM_DEFINECLASS_FLAG_HAS_SUPERCLASS : 0) |
11464 compile_cpath(ret, iseq, RNODE_CLASS(node)->nd_cpath);
11466 CHECK(COMPILE(ret,
"super", RNODE_CLASS(node)->nd_super));
11467 ADD_INSN3(ret, node, defineclass,
ID2SYM(get_node_colon_nd_mid(RNODE_CLASS(node)->nd_cpath)), class_iseq,
INT2FIX(flags));
11471 ADD_INSN(ret, node, pop);
11476 const rb_iseq_t *module_iseq = NEW_CHILD_ISEQ(RNODE_MODULE(node)->nd_body,
11477 rb_str_freeze(rb_sprintf(
"<module:%"PRIsVALUE
">", rb_id2str(get_node_colon_nd_mid(RNODE_MODULE(node)->nd_cpath)))),
11478 ISEQ_TYPE_CLASS, line);
11479 const int flags = VM_DEFINECLASS_TYPE_MODULE |
11480 compile_cpath(ret, iseq, RNODE_MODULE(node)->nd_cpath);
11482 ADD_INSN (ret, node, putnil);
11483 ADD_INSN3(ret, node, defineclass,
ID2SYM(get_node_colon_nd_mid(RNODE_MODULE(node)->nd_cpath)), module_iseq,
INT2FIX(flags));
11487 ADD_INSN(ret, node, pop);
11493 const rb_iseq_t *singleton_class = NEW_ISEQ(RNODE_SCLASS(node)->nd_body, rb_fstring_lit(
"singleton class"),
11494 ISEQ_TYPE_CLASS, line);
11496 CHECK(COMPILE(ret,
"sclass#recv", RNODE_SCLASS(node)->nd_recv));
11497 ADD_INSN (ret, node, putnil);
11498 CONST_ID(singletonclass,
"singletonclass");
11502 int sclass_flags = VM_DEFINECLASS_TYPE_SINGLETON_CLASS;
11503 const NODE *recv = RNODE_SCLASS(node)->nd_recv;
11504 if (!(nd_type_p(recv, NODE_SELF) &&
11505 ISEQ_BODY(iseq)->
type == ISEQ_TYPE_CLASS) &&
11506 !cpath_const_p(recv)) {
11507 sclass_flags |= VM_DEFINECLASS_FLAG_DYNAMIC_CREF;
11510 ADD_INSN3(ret, node, defineclass,
11511 ID2SYM(singletonclass), singleton_class,
11516 ADD_INSN(ret, node, pop);
11521 CHECK(compile_colon2(iseq, ret, node, popped));
11524 CHECK(compile_colon3(iseq, ret, node, popped));
11527 CHECK(compile_dots(iseq, ret, node, popped, FALSE));
11530 CHECK(compile_dots(iseq, ret, node, popped, TRUE));
11534 LABEL *lend = NEW_LABEL(line);
11535 LABEL *ltrue = NEW_LABEL(line);
11536 LABEL *lfalse = NEW_LABEL(line);
11537 CHECK(compile_flip_flop(iseq, ret, node,
type == NODE_FLIP2,
11539 ADD_LABEL(ret, ltrue);
11540 ADD_INSN1(ret, node, putobject,
Qtrue);
11541 ADD_INSNL(ret, node, jump, lend);
11542 ADD_LABEL(ret, lfalse);
11543 ADD_INSN1(ret, node, putobject,
Qfalse);
11544 ADD_LABEL(ret, lend);
11549 ADD_INSN(ret, node, putself);
11555 ADD_INSN(ret, node, putnil);
11561 ADD_INSN1(ret, node, putobject,
Qtrue);
11567 ADD_INSN1(ret, node, putobject,
Qfalse);
11572 CHECK(compile_errinfo(iseq, ret, node, popped));
11576 CHECK(compile_defined_expr(iseq, ret, node,
Qtrue,
false));
11579 case NODE_POSTEXE:{
11583 int is_index = body->ise_size++;
11585 rb_iseq_new_with_callback_new_callback(build_postexe_iseq, RNODE_POSTEXE(node)->nd_body);
11587 NEW_CHILD_ISEQ_WITH_CALLBACK(ifunc, rb_fstring(make_name_for_block(iseq)), ISEQ_TYPE_BLOCK, line);
11589 ADD_INSN2(ret, node, once, once_iseq,
INT2FIX(is_index));
11593 ADD_INSN(ret, node, pop);
11598 CHECK(compile_kw_arg(iseq, ret, node, popped));
11601 compile_dstr(iseq, ret, node);
11603 ADD_INSN(ret, node, intern);
11606 ADD_INSN(ret, node, pop);
11610 case NODE_ATTRASGN:
11611 CHECK(compile_attrasgn(iseq, ret, node, popped));
11615 const rb_iseq_t *block = NEW_CHILD_ISEQ(RNODE_LAMBDA(node)->nd_body, make_name_for_block(iseq), ISEQ_TYPE_BLOCK, line);
11618 ADD_INSN1(ret, node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
11619 ADD_CALL_WITH_BLOCK(ret, node, idLambda, argc, block);
11623 ADD_INSN(ret, node, pop);
11628 UNKNOWN_NODE(
"iseq_compile_each", node, COMPILE_NG);
/* insn_data_length: byte-length (in VALUE words) of the instruction held by
 * iobj, via the generated insn_len() table.
 * NOTE(review): this chunk is a lossy extraction -- return types, braces and
 * some lines between the embedded original line numbers are missing. */
11643insn_data_length(
INSN *iobj)
11645 return insn_len(iobj->insn_id);
/* calc_sp_depth: stack-pointer depth after executing insn at the given depth,
 * computed from the instruction id and its operands. */
11649calc_sp_depth(
int depth,
INSN *insn)
11651 return comptime_insn_stack_increase(depth, insn->insn_id, insn->operands);
/* opobj_inspect: operand-object inspection helper; its body was lost in the
 * extraction (original lines 11656..11674 are absent). */
11655opobj_inspect(
VALUE obj)
/* insn_data_to_s_detail: render one INSN and its operands as a human-readable
 * debug string ("name  op1, op2, ...").  The switch over operand types is only
 * partially visible here; the visible cases cover labels, values, inline
 * caches (ivc/icvarc/ise), call-info and C function pointers. */
11675insn_data_to_s_detail(
INSN *iobj)
11677 VALUE str = rb_sprintf(
"%-20s ", insn_name(iobj->insn_id));
11679 if (iobj->operands) {
11680 const char *types = insn_op_types(iobj->insn_id);
11683 for (j = 0; types[j]; j++) {
11684 char type = types[j];
/* label operand: printed by number */
11690 rb_str_catf(str, LABEL_FORMAT, lobj->label_no);
11708 VALUE v = OPERAND_AT(iobj, j);
/* inline-cache operands: printed as typed index placeholders */
11723 rb_str_catf(str,
"<ivc:%d>",
FIX2INT(OPERAND_AT(iobj, j)));
11726 rb_str_catf(str,
"<icvarc:%d>",
FIX2INT(OPERAND_AT(iobj, j)));
11729 rb_str_catf(str,
"<ise:%d>",
FIX2INT(OPERAND_AT(iobj, j)));
/* call-info operand: method id (if any) and argc */
11735 if (vm_ci_mid(ci)) rb_str_catf(str,
"%"PRIsVALUE, rb_id2str(vm_ci_mid(ci)));
11736 rb_str_catf(str,
", %d>", vm_ci_argc(ci));
/* function-pointer operand: symbolized via dladdr when possible */
11744 void *func = (
void *)OPERAND_AT(iobj, j);
11747 if (dladdr(func, &info) && info.dli_sname) {
11752 rb_str_catf(str,
"<%p>", func);
11762 if (types[j + 1]) {
/* Raw disassembly dump of a linked list of ISEQ_ELEMENTs to stdout, with an
 * optional cursor ("*") and destination-label marker (" <---").  Debug-only.
 * NOTE(review): lossy extraction -- entry declarations and several statements
 * between the embedded original line numbers are missing. */
11773 dump_disasm_list_with_cursor(link, NULL, NULL);
11784 printf(
"-- raw disasm--------\n");
11787 if (curr) printf(curr == link ?
"*" :
" ");
11788 switch (link->type) {
11789 case ISEQ_ELEMENT_INSN:
11791 iobj = (
INSN *)link;
11792 str = insn_data_to_s_detail(iobj);
11793 printf(
" %04d %-65s(%4u)\n", pos,
StringValueCStr(str), iobj->insn_info.line_no);
/* advance the byte position by the instruction's encoded length */
11794 pos += insn_data_length(iobj);
11797 case ISEQ_ELEMENT_LABEL:
11799 lobj = (
LABEL *)link;
11800 printf(LABEL_FORMAT
" [sp: %d, unremovable: %d, refcnt: %d]%s\n", lobj->label_no, lobj->sp, lobj->unremovable, lobj->refcnt,
11801 dest == lobj ?
" <---" :
"");
11804 case ISEQ_ELEMENT_TRACE:
11807 printf(
" trace: %0x\n", trace->event);
11810 case ISEQ_ELEMENT_ADJUST:
11813 printf(
" adjust: [label: %d]\n", adjust->label ? adjust->label->label_no : -1);
/* unknown element type: hard error */
11818 rb_raise(
rb_eSyntaxError,
"dump_disasm_list error: %d\n", (
int)link->type);
11822 printf(
"---------------------\n");
/* rb_insn_len: public wrapper over the generated insn_len() table. */
11827rb_insn_len(
VALUE insn)
11829 return insn_len(insn);
/* rb_insns_name: public wrapper over the generated insn_name() table. */
11833rb_insns_name(
int i)
11835 return insn_name(i);
/* rb_insns_name_array: builds an array of all VM instruction names
 * (body mostly lost in extraction). */
11839rb_insns_name_array(
void)
11843 for (i = 0; i < VM_INSTRUCTION_SIZE; i++) {
/* NOTE(review): the lines below belong to a separate label-registration
 * helper (its defining line is missing from this extraction): it interns the
 * key as a Symbol and memoizes a LABEL per symbol in labels_table. */
11854 obj = rb_to_symbol_type(obj);
11856 if (st_lookup(labels_table, obj, &tmp) == 0) {
11857 label = NEW_LABEL(0);
11858 st_insert(labels_table, obj, (st_data_t)label);
11861 label = (
LABEL *)tmp;
/* get_exception_sym2type: map a Symbol (:rescue, :ensure, :retry, :break,
 * :redo, :next) to the corresponding CATCH_TYPE_* constant; raises
 * SyntaxError for anything else.  The symbols are cached in statics on first
 * use (the initialization block at original lines 11874..11881 is missing
 * from this extraction). */
11868get_exception_sym2type(
VALUE sym)
11870 static VALUE symRescue, symEnsure, symRetry;
11871 static VALUE symBreak, symRedo, symNext;
11873 if (symRescue == 0) {
11882 if (sym == symRescue)
return CATCH_TYPE_RESCUE;
11883 if (sym == symEnsure)
return CATCH_TYPE_ENSURE;
11884 if (sym == symRetry)
return CATCH_TYPE_RETRY;
11885 if (sym == symBreak)
return CATCH_TYPE_BREAK;
11886 if (sym == symRedo)
return CATCH_TYPE_REDO;
11887 if (sym == symNext)
return CATCH_TYPE_NEXT;
11888 rb_raise(
rb_eSyntaxError,
"invalid exception symbol: %+"PRIsVALUE, sym);
/* Fragments of the Array->ISeq loaders.  NOTE(review): lossy extraction --
 * function headers and many interior lines are missing throughout. */
/* iseq_build_from_ary_exception: rebuild a catch-table entry from an array
 * row [type, iseq, start, end, cont, sp?]; labels are resolved through
 * labels_table. */
11901 LABEL *lstart, *lend, *lcont;
11916 lstart = register_label(iseq, labels_table,
RARRAY_AREF(v, 2));
11917 lend = register_label(iseq, labels_table,
RARRAY_AREF(v, 3));
11918 lcont = register_label(iseq, labels_table,
RARRAY_AREF(v, 4));
/* rescue/break/next entries get extra treatment (lost here) */
11922 if (
type == CATCH_TYPE_RESCUE ||
11923 type == CATCH_TYPE_BREAK ||
11924 type == CATCH_TYPE_NEXT) {
11930 ADD_CATCH_ENTRY(
type, lstart, lend, eiseq, lcont);
/* insn_make_insn_table: name->insn-id lookup table for all VM instructions. */
11938insn_make_insn_table(
void)
11942 table = st_init_numtable_with_size(VM_INSTRUCTION_SIZE);
11944 for (i=0; i<VM_INSTRUCTION_SIZE; i++) {
/* iseq_build_load_iseq: turn an Array (via rb_iseq_load) or an ISeq wrapper
 * into an rb_iseq_t. */
11958 iseqw = rb_iseq_load(op, (
VALUE)iseq,
Qnil);
11960 else if (
CLASS_OF(op) == rb_cISeq) {
11967 loaded_iseq = rb_iseqw_to_iseq(iseqw);
11968 return loaded_iseq;
/* iseq_build_callinfo_from_hash: rebuild a rb_callinfo from a Hash with
 * optional :orig_argc and :kw_arg entries. */
11976 unsigned int flag = 0;
11987 if (!
NIL_P(vorig_argc)) orig_argc =
FIX2INT(vorig_argc);
11989 if (!
NIL_P(vkw_arg)) {
11992 size_t n = rb_callinfo_kwarg_bytes(
len);
11995 kw_arg->references = 0;
11996 kw_arg->keyword_len =
len;
11997 for (i = 0; i <
len; i++) {
12000 kw_arg->keywords[i] = kw;
12005 const struct rb_callinfo *ci = new_callinfo(iseq, mid, orig_argc, flag, kw_arg, (flag & VM_CALL_ARGS_SIMPLE) == 0);
/* event_name_to_flag: map an event Symbol to its RUBY_EVENT_* flag. */
12011event_name_to_flag(
VALUE sym)
12013#define CHECK_EVENT(ev) if (sym == ID2SYM(rb_intern_const(#ev))) return ev;
/* iseq_build_from_ary_body: rebuild the instruction anchor list from the
 * Array form produced by ISeq#to_a.  Each element is either an event symbol,
 * a label symbol, a line number, or an [insn, operands...] array whose
 * operands are decoded per the instruction's operand-type string.
 * NOTE(review): lossy extraction -- case labels and many statements between
 * the embedded original line numbers are missing. */
12034 int line_no = 0, node_id = -1, insn_idx = 0;
12035 int ret = COMPILE_OK;
/* name->insn-id table, built lazily once per process */
12040 static struct st_table *insn_table;
12042 if (insn_table == 0) {
12043 insn_table = insn_make_insn_table();
12046 for (i=0; i<
len; i++) {
12052 ADD_TRACE(anchor, event);
12055 LABEL *label = register_label(iseq, labels_table, obj);
12056 ADD_LABEL(anchor, label);
12073 if (st_lookup(insn_table, (st_data_t)insn, &insn_id) == 0) {
12075 COMPILE_ERROR(iseq, line_no,
12076 "unknown instruction: %+"PRIsVALUE, insn);
/* operand count must match the instruction's arity */
12081 if (argc != insn_len((
VALUE)insn_id)-1) {
12082 COMPILE_ERROR(iseq, line_no,
12083 "operand size mismatch");
12089 argv = compile_data_calloc2_type(iseq,
VALUE, argc);
12094 (
enum ruby_vminsn_type)insn_id, argc, argv));
/* decode each operand according to its TS_* operand type */
12096 for (j=0; j<argc; j++) {
12098 switch (insn_op_type((
VALUE)insn_id, j)) {
12100 LABEL *label = register_label(iseq, labels_table, op);
12101 argv[j] = (
VALUE)label;
12116 VALUE v = (
VALUE)iseq_build_load_iseq(iseq, op);
/* inline-storage indices grow the per-iseq counters as needed */
12127 if (
NUM2UINT(op) >= ISEQ_BODY(iseq)->ise_size) {
12128 ISEQ_BODY(iseq)->ise_size =
NUM2INT(op) + 1;
12134 op = rb_to_array_type(op);
12138 sym = rb_to_symbol_type(sym);
12143 argv[j] = segments;
12145 ISEQ_BODY(iseq)->ic_size++;
12150 if (
NUM2UINT(op) >= ISEQ_BODY(iseq)->ivc_size) {
12151 ISEQ_BODY(iseq)->ivc_size =
NUM2INT(op) + 1;
12156 if (
NUM2UINT(op) >= ISEQ_BODY(iseq)->icvarc_size) {
12157 ISEQ_BODY(iseq)->icvarc_size =
NUM2INT(op) + 1;
12161 argv[j] = iseq_build_callinfo_from_hash(iseq, op);
12164 argv[j] = rb_to_symbol_type(op);
/* case/when dispatch hash: values are labels tagged with the low bit */
12169 VALUE map = rb_hash_new_with_size_and_type(0,
RARRAY_LEN(op)/2, &cdhash_type);
12171 op = rb_to_array_type(op);
12176 register_label(iseq, labels_table, sym);
12177 rb_hash_aset(map, key, (
VALUE)label | 1);
12180 RB_OBJ_SET_SHAREABLE(map);
12187#if SIZEOF_VALUE <= SIZEOF_LONG
12192 argv[j] = (
VALUE)funcptr;
12203 (
enum ruby_vminsn_type)insn_id, argc, NULL));
12207 rb_raise(
rb_eTypeError,
"unexpected object for instruction");
12212 validate_labels(iseq, labels_table);
12213 if (!ret)
return ret;
12214 return iseq_setup(iseq, anchor);
/* Coercion helpers used by the Array->ISeq parameter loaders. */
12217#define CHECK_ARRAY(v) rb_to_array_type(v)
12218#define CHECK_SYMBOL(v) rb_to_symbol_type(v)
/* int_param (fragment): fetch params[sym]; non-nil non-Fixnum raises. */
12223 VALUE val = rb_hash_aref(param, sym);
12228 else if (!
NIL_P(val)) {
12229 rb_raise(
rb_eTypeError,
"invalid %+"PRIsVALUE
" Fixnum: %+"PRIsVALUE,
/* iseq_build_kw: rebuild the rb_iseq_param_keyword structure (names,
 * required count, default values) from the dumped :keyword parameter array.
 * NOTE(review): lossy extraction -- several statements are missing. */
12235static const struct rb_iseq_param_keyword *
12241 VALUE key, sym, default_val;
12244 struct rb_iseq_param_keyword *keyword =
ZALLOC(
struct rb_iseq_param_keyword);
12246 ISEQ_BODY(iseq)->param.flags.has_kw = TRUE;
12248 keyword->num =
len;
12249#define SYM(s) ID2SYM(rb_intern_const(#s))
12250 (void)int_param(&keyword->bits_start, params, SYM(kwbits));
/* keyword ids live inside the iseq local table, just before kwbits */
12251 i = keyword->bits_start - keyword->num;
12252 ids = (
ID *)&ISEQ_BODY(iseq)->local_table[i];
12256 for (i = 0; i <
len; i++) {
12260 goto default_values;
12263 keyword->required_num++;
12267 default_len =
len - i;
12268 if (default_len == 0) {
12269 keyword->table = ids;
12272 else if (default_len < 0) {
12278 for (j = 0; i <
len; i++, j++) {
12292 rb_raise(
rb_eTypeError,
"keyword default has unsupported len %+"PRIsVALUE, key);
12298 keyword->table = ids;
12299 keyword->default_values = dvs;
/* iseq_insn_each_object_mark_and_move: GC callback for one operand slot. */
12305iseq_insn_each_object_mark_and_move(
VALUE * obj,
VALUE _)
12307 rb_gc_mark_and_move(obj);
/* Walk the INSN storage arena and mark/move every markable operand;
 * replays the same padding/alignment arithmetic used at allocation time. */
12314 size_t size =
sizeof(
INSN);
12315 size_t align = ALIGNMENT_SIZE_OF(
INSN);
12316 unsigned int pos = 0;
12319 size_t padding = calc_padding((
void *)&storage->buff[pos], align);
12320 size_t offset = pos + size + padding;
12321 if (offset > storage->size || offset > storage->pos) {
12323 storage = storage->next;
12326 pos += (int)padding;
12328 iobj = (
INSN *)&storage->buff[pos];
12330 if (iobj->operands) {
12331 iseq_insn_each_markable_object(iobj, iseq_insn_each_object_mark_and_move, (
VALUE)0);
/* rb_iseq_build_from_ary: top-level Array->ISeq loader.  Rebuilds local
 * table, parameter metadata (lead/opt/post/rest/kw/kwrest/block), exception
 * table and instruction body, then restores the dumped sizes.
 * NOTE(review): lossy extraction -- declarations and several statements
 * between the embedded original line numbers are missing. */
12351#define SYM(s) ID2SYM(rb_intern_const(#s))
12353 unsigned int arg_size, local_size, stack_max;
12355 struct st_table *labels_table = st_init_numtable();
12357 VALUE arg_opt_labels = rb_hash_aref(params, SYM(opt));
12358 VALUE keywords = rb_hash_aref(params, SYM(keyword));
12360 DECL_ANCHOR(anchor);
12361 INIT_ANCHOR(anchor);
/* local table: one ID per dumped local-variable name */
12364 ISEQ_BODY(iseq)->local_table_size =
len;
12365 ISEQ_BODY(iseq)->local_table = tbl =
len > 0 ? (
ID *)
ALLOC_N(
ID, ISEQ_BODY(iseq)->local_table_size) : NULL;
12367 for (i = 0; i <
len; i++) {
12370 if (sym_arg_rest == lv) {
/* integer parameter fields: presence of each sets the matching flag */
12378#define INT_PARAM(F) int_param(&ISEQ_BODY(iseq)->param.F, params, SYM(F))
12379 if (INT_PARAM(lead_num)) {
12380 ISEQ_BODY(iseq)->param.flags.has_lead = TRUE;
12382 if (INT_PARAM(post_num)) ISEQ_BODY(iseq)->param.flags.has_post = TRUE;
12383 if (INT_PARAM(post_start)) ISEQ_BODY(iseq)->param.flags.has_post = TRUE;
12384 if (INT_PARAM(rest_start)) ISEQ_BODY(iseq)->param.flags.has_rest = TRUE;
12385 if (INT_PARAM(block_start)) ISEQ_BODY(iseq)->param.flags.has_block = TRUE;
12388#define INT_PARAM(F) F = (int_param(&x, misc, SYM(F)) ? (unsigned int)x : 0)
12390 INT_PARAM(arg_size);
12391 INT_PARAM(local_size);
12392 INT_PARAM(stack_max);
12397#ifdef USE_ISEQ_NODE_ID
12398 node_ids = rb_hash_aref(misc,
ID2SYM(rb_intern(
"node_ids")));
/* optional-argument jump table: len-1 optional args, resolved via labels */
12406 ISEQ_BODY(iseq)->param.flags.has_opt = !!(
len - 1 >= 0);
12408 if (ISEQ_BODY(iseq)->param.flags.has_opt) {
12411 for (i = 0; i <
len; i++) {
12413 LABEL *label = register_label(iseq, labels_table, ent);
12414 opt_table[i] = (
VALUE)label;
12417 ISEQ_BODY(iseq)->param.opt_num =
len - 1;
12418 ISEQ_BODY(iseq)->param.opt_table = opt_table;
12421 else if (!
NIL_P(arg_opt_labels)) {
12422 rb_raise(
rb_eTypeError,
":opt param is not an array: %+"PRIsVALUE,
12427 ISEQ_BODY(iseq)->param.keyword = iseq_build_kw(iseq, params, keywords);
12429 else if (!
NIL_P(keywords)) {
12430 rb_raise(
rb_eTypeError,
":keywords param is not an array: %+"PRIsVALUE,
12434 if (
Qtrue == rb_hash_aref(params, SYM(ambiguous_param0))) {
12435 ISEQ_BODY(iseq)->param.flags.ambiguous_param0 = TRUE;
12438 if (
Qtrue == rb_hash_aref(params, SYM(use_block))) {
12439 ISEQ_BODY(iseq)->param.flags.use_block = TRUE;
/* kwrest may exist without any keyword args; allocate the struct lazily */
12442 if (int_param(&i, params, SYM(kwrest))) {
12443 struct rb_iseq_param_keyword *keyword = (
struct rb_iseq_param_keyword *)ISEQ_BODY(iseq)->param.keyword;
12444 if (keyword == NULL) {
12445 ISEQ_BODY(iseq)->param.keyword = keyword =
ZALLOC(
struct rb_iseq_param_keyword);
12447 keyword->rest_start = i;
12448 ISEQ_BODY(iseq)->param.flags.has_kwrest = TRUE;
12451 iseq_calc_param_size(iseq);
12454 iseq_build_from_ary_exception(iseq, labels_table, exception);
12457 iseq_build_from_ary_body(iseq, anchor, body, node_ids, labels_wrapper);
/* restore the sizes recorded in the dump (overrides computed values) */
12459 ISEQ_BODY(iseq)->param.size = arg_size;
12460 ISEQ_BODY(iseq)->local_table_size = local_size;
12461 ISEQ_BODY(iseq)->stack_max = stack_max;
/* local-variable lookup fragment: walk up parent iseqs through block/
 * rescue/ensure/eval/main scopes, scanning each local table for id. */
12471 while (body->type == ISEQ_TYPE_BLOCK ||
12472 body->type == ISEQ_TYPE_RESCUE ||
12473 body->type == ISEQ_TYPE_ENSURE ||
12474 body->type == ISEQ_TYPE_EVAL ||
12475 body->type == ISEQ_TYPE_MAIN
12479 for (i = 0; i < body->local_table_size; i++) {
12480 if (body->local_table[i] ==
id) {
12484 iseq = body->parent_iseq;
12485 body = ISEQ_BODY(iseq);
/* single-scope variant of the same lookup */
12498 for (i=0; i<body->local_table_size; i++) {
12499 if (body->local_table[i] ==
id) {
/* ---- ISeq Binary Format (IBF) serialization ---------------------------- */
12509#ifndef IBF_ISEQ_DEBUG
12510#define IBF_ISEQ_DEBUG 0
12513#ifndef IBF_ISEQ_ENABLE_LOCAL_BUFFER
12514#define IBF_ISEQ_ENABLE_LOCAL_BUFFER 0
/* All file positions inside an IBF dump are 32-bit offsets. */
12517typedef uint32_t ibf_offset_t;
12518#define IBF_OFFSET(ptr) ((ibf_offset_t)(VALUE)(ptr))
12520#define IBF_MAJOR_VERSION ISEQ_MAJOR_VERSION
/* devel builds fold a devel counter into the minor version */
12522#define IBF_DEVEL_VERSION 5
12523#define IBF_MINOR_VERSION (ISEQ_MINOR_VERSION * 10000 + IBF_DEVEL_VERSION)
12525#define IBF_MINOR_VERSION ISEQ_MINOR_VERSION
/* one-byte endianness marker written into the header */
12528static const char IBF_ENDIAN_MARK =
12529#ifdef WORDS_BIGENDIAN
/* struct ibf_header fields (struct declaration lost in extraction) */
12538 uint32_t major_version;
12539 uint32_t minor_version;
12541 uint32_t extra_size;
12543 uint32_t iseq_list_size;
12544 uint32_t global_object_list_size;
12545 ibf_offset_t iseq_list_offset;
12546 ibf_offset_t global_object_list_offset;
/* per-buffer object-list bookkeeping fields (enclosing struct lost) */
12567 unsigned int obj_list_size;
12568 ibf_offset_t obj_list_offset;
/* pinned_list: a GC-pinned, fixed-size array of VALUEs used to keep loaded
 * objects alive and addressable by index during IBF load. */
/* GC mark callback: mark every non-zero slot (pinned, no moving). */
12587pinned_list_mark(
void *ptr)
12591 for (i = 0; i < list->size; i++) {
12592 if (list->buffer[i]) {
12593 rb_gc_mark(list->buffer[i]);
/* Bounds-checked read of slot `offset`. */
12609pinned_list_fetch(
VALUE list,
long offset)
12615 if (offset >= ptr->size) {
12616 rb_raise(
rb_eIndexError,
"object index out of range: %ld", offset);
12619 return ptr->buffer[offset];
/* Bounds-checked write of `object` into slot `offset`. */
12623pinned_list_store(
VALUE list,
long offset,
VALUE object)
12629 if (offset >= ptr->size) {
12630 rb_raise(
rb_eIndexError,
"object index out of range: %ld", offset);
/* Allocate a zeroed pinned_list with `size` slots (flexible array member). */
12637pinned_list_new(
long size)
12639 size_t memsize = offsetof(
struct pinned_list, buffer) + size *
sizeof(
VALUE);
12640 VALUE obj_list = rb_data_typed_object_zalloc(0, memsize, &pinned_list_type);
12641 struct pinned_list * ptr = RTYPEDDATA_GET_DATA(obj_list);
/* Low-level IBF dump/load primitives.  The dump target is a Ruby String
 * (dump->current_buffer->str); loads read from a raw byte buffer.
 * NOTE(review): lossy extraction -- braces/#endif and error-raise lines
 * between the embedded original line numbers are missing. */
/* Current write position = length of the dump string (overflow-checked on
 * platforms where long is wider than int). */
12647ibf_dump_pos(
struct ibf_dump *dump)
12649 long pos = RSTRING_LEN(dump->current_buffer->str);
12650#if SIZEOF_LONG > SIZEOF_INT
12651 if (pos >= UINT_MAX) {
12655 return (
unsigned int)pos;
/* Pad the dump with zero bytes up to the next multiple of `align`. */
12659ibf_dump_align(
struct ibf_dump *dump,
size_t align)
12661 ibf_offset_t pos = ibf_dump_pos(dump);
12663 static const char padding[
sizeof(
VALUE)];
12664 size_t size = align - ((size_t)pos % align);
12665#if SIZEOF_LONG > SIZEOF_INT
12666 if (pos + size >= UINT_MAX) {
12670 for (; size >
sizeof(padding); size -=
sizeof(padding)) {
12671 rb_str_cat(dump->current_buffer->str, padding,
sizeof(padding));
12673 rb_str_cat(dump->current_buffer->str, padding, size);
/* Append `size` raw bytes; returns the offset they were written at. */
12678ibf_dump_write(
struct ibf_dump *dump,
const void *buff,
unsigned long size)
12680 ibf_offset_t pos = ibf_dump_pos(dump);
12681#if SIZEOF_LONG > SIZEOF_INT
12683 if (size >= UINT_MAX || pos + size >= UINT_MAX) {
12687 rb_str_cat(dump->current_buffer->str, (
const char *)buff, size);
/* Single-byte convenience wrapper around ibf_dump_write. */
12692ibf_dump_write_byte(
struct ibf_dump *dump,
unsigned char byte)
12694 return ibf_dump_write(dump, &
byte,
sizeof(
unsigned char));
/* Patch already-written bytes at `offset` (used to backfill headers). */
12698ibf_dump_overwrite(
struct ibf_dump *dump,
void *buff,
unsigned int size,
long offset)
12700 VALUE str = dump->current_buffer->str;
12701 char *ptr = RSTRING_PTR(str);
12702 if ((
unsigned long)(size + offset) > (
unsigned long)RSTRING_LEN(str))
12703 rb_bug(
"ibf_dump_overwrite: overflow");
12704 memcpy(ptr + offset, buff, size);
/* Borrow a pointer into the load buffer, advancing *offset by `size`. */
12708ibf_load_ptr(
const struct ibf_load *load, ibf_offset_t *offset,
int size)
12710 ibf_offset_t beg = *offset;
12712 return load->current_buffer->buff + beg;
/* Copy x*y bytes out of the load buffer into a fresh xmalloc2 allocation
 * (caller owns and frees the returned buffer). */
12716ibf_load_alloc(
const struct ibf_load *load, ibf_offset_t offset,
size_t x,
size_t y)
12718 void *buff = ruby_xmalloc2(x, y);
12719 size_t size = x * y;
12720 memcpy(buff, load->current_buffer->buff + offset, size);
/* Shorthand macros: IBF_W* write typed data into the dump, IBF_R reads a
 * typed array back out of the load buffer. */
12724#define IBF_W_ALIGN(type) (RUBY_ALIGNOF(type) > 1 ? ibf_dump_align(dump, RUBY_ALIGNOF(type)) : (void)0)
12726#define IBF_W(b, type, n) (IBF_W_ALIGN(type), (type *)(VALUE)IBF_WP(b, type, n))
12727#define IBF_WV(variable) ibf_dump_write(dump, &(variable), sizeof(variable))
12728#define IBF_WP(b, type, n) ibf_dump_write(dump, (b), sizeof(type) * (n))
12729#define IBF_R(val, type, n) (type *)ibf_load_alloc(load, IBF_OFFSET(val), sizeof(type), (n))
12730#define IBF_ZERO(variable) memset(&(variable), 0, sizeof(variable))
/* Look up `key` in an st_table of dedup indices; the not-found return value
 * is lost in this extraction (presumably -1 -- TODO confirm). */
12733ibf_table_lookup(
struct st_table *table, st_data_t key)
12737 if (st_lookup(table, key, &val)) {
/* Return the existing index for `key`, or assign and record the next one. */
12746ibf_table_find_or_insert(
struct st_table *table, st_data_t key)
12748 int index = ibf_table_lookup(table, key);
12751 index = (int)table->num_entries;
12752 st_insert(table, key, (st_data_t)index);
12760static void ibf_dump_object_list(
struct ibf_dump *dump, ibf_offset_t *obj_list_offset,
unsigned int *obj_list_size);
/* Fresh object-dedup table; nil is pre-registered at index 0. */
12766ibf_dump_object_table_new(
void)
12768 st_table *obj_table = st_init_numtable();
12769 st_insert(obj_table, (st_data_t)
Qnil, (st_data_t)0);
/* ibf_dump_object (fragment): object -> dedup index in the current buffer. */
12777 return ibf_table_find_or_insert(dump->current_buffer->obj_table, (st_data_t)obj);
/* ibf_dump_id (fragment): IDs without a name are dumped as Symbol objects. */
12783 if (
id == 0 || rb_id2name(
id) == NULL) {
12786 return ibf_dump_object(dump,
rb_id2sym(
id));
/* Inverse of ibf_dump_id: index 0 means the null ID, otherwise the object at
 * that index is a Symbol to re-intern. */
12790ibf_load_id(
const struct ibf_load *load,
const ID id_index)
12792 if (id_index == 0) {
12795 VALUE sym = ibf_load_object(load, id_index);
12805static ibf_offset_t ibf_dump_iseq_each(
struct ibf_dump *dump,
const rb_iseq_t *iseq);
/* ibf_dump_iseq (fragment): iseq -> dedup index (NULL handled specially). */
12810 if (iseq == NULL) {
12814 return ibf_table_find_or_insert(dump->iseq_table, (st_data_t)iseq);
/* Bounds-checked single-byte read from the load buffer. */
12818static unsigned char
12819ibf_load_byte(
const struct ibf_load *load, ibf_offset_t *offset)
12821 if (*offset >= load->current_buffer->size) { rb_raise(
rb_eRuntimeError,
"invalid bytecode"); }
12822 return (
unsigned char)load->current_buffer->buff[(*offset)++];
/* Variable-length integer codec used throughout IBF: small values take few
 * bytes; a prefix byte's trailing-zero count encodes the length.  Falls back
 * to fixed-width raw VALUEs on exotic platforms (VALUE > 8 bytes or
 * CHAR_BIT != 8).  NOTE(review): lossy extraction -- several lines between
 * the embedded original line numbers are missing. */
/* ibf_dump_write_small_value (fragment) */
12838 if (
sizeof(
VALUE) > 8 || CHAR_BIT != 8) {
12839 ibf_dump_write(dump, &x,
sizeof(
VALUE));
12843 enum { max_byte_length =
sizeof(
VALUE) + 1 };
12845 unsigned char bytes[max_byte_length];
/* emit big-endian payload bytes from the tail of the scratch array */
12848 for (n = 0; n <
sizeof(
VALUE) && (x >> (7 - n)); n++, x >>= 8) {
12849 bytes[max_byte_length - 1 - n] = (
unsigned char)x;
12855 bytes[max_byte_length - 1 - n] = (
unsigned char)x;
12858 ibf_dump_write(dump, bytes + max_byte_length - n, n);
/* Inverse of the encoder above. */
12862ibf_load_small_value(
const struct ibf_load *load, ibf_offset_t *offset)
12864 if (
sizeof(
VALUE) > 8 || CHAR_BIT != 8) {
12865 union {
char s[
sizeof(
VALUE)];
VALUE v; } x;
12867 memcpy(x.s, load->current_buffer->buff + *offset,
sizeof(
VALUE));
12868 *offset +=
sizeof(
VALUE);
12873 enum { max_byte_length =
sizeof(
VALUE) + 1 };
12875 const unsigned char *buffer = (
const unsigned char *)load->current_buffer->buff;
12876 const unsigned char c = buffer[*offset];
/* length = trailing-zero count of the prefix byte (+1); 0 means 9 bytes */
12880 c == 0 ? 9 : ntz_int32(c) + 1;
12883 if (*offset + n > load->current_buffer->size) {
12888 for (i = 1; i < n; i++) {
12890 x |= (
VALUE)buffer[*offset + i];
/* ibf_dump_builtin (fragment): index + name-length + name bytes. */
12904 ibf_dump_write_small_value(dump, (
VALUE)bf->index);
12906 size_t len = strlen(bf->name);
12907 ibf_dump_write_small_value(dump, (
VALUE)
len);
12908 ibf_dump_write(dump, bf->name,
len);
/* Resolve a dumped builtin back to the process-local builtin function table;
 * index/name must match exactly or we raise. */
12912ibf_load_builtin(
const struct ibf_load *load, ibf_offset_t *offset)
12914 int i = (int)ibf_load_small_value(load, offset);
12915 int len = (int)ibf_load_small_value(load, offset);
12916 const char *name = (
char *)ibf_load_ptr(load, offset,
len);
12919 fprintf(stderr,
"%.*s!!\n",
len, name);
12923 if (table == NULL) rb_raise(rb_eArgError,
"builtin function table is not provided");
12924 if (strncmp(table[i].name, name,
len) != 0) {
12925 rb_raise(rb_eArgError,
"builtin function index (%d) mismatch (expect %.*s but %s)",
12926 i,
len, name, table[i].name);
/* ibf_dump_code (fragment): serialize the original (pre-encoded) bytecode.
 * Each instruction is written as a small value followed by one encoded word
 * per operand, dispatched on the operand-type string.
 * NOTE(review): lossy extraction -- most case labels are missing. */
12937 const int iseq_size = body->iseq_size;
12939 const VALUE *orig_code = rb_iseq_original_iseq(iseq);
12941 ibf_offset_t offset = ibf_dump_pos(dump);
12943 for (code_index=0; code_index<iseq_size;) {
12944 const VALUE insn = orig_code[code_index++];
12945 const char *types = insn_op_types(insn);
12950 ibf_dump_write_small_value(dump, insn);
12953 for (op_index=0; types[op_index]; op_index++, code_index++) {
12954 VALUE op = orig_code[code_index];
12957 switch (types[op_index]) {
12960 wv = ibf_dump_object(dump, op);
12969 wv = ibf_dump_object(dump, arr);
/* inline-storage operands: dumped as index relative to their section */
12977 wv = is - ISEQ_IS_ENTRY_START(body, types[op_index]);
12985 wv = ibf_dump_id(dump, (
ID)op);
12997 ibf_dump_write_small_value(dump, wv);
/* cdhash_copy_i / cdhash_copy: clone a case-dispatch hash entry-by-entry. */
13007cdhash_copy_i(st_data_t key, st_data_t val, st_data_t arg)
13010 return ST_CONTINUE;
13016 rb_hash_stlike_foreach(src, cdhash_copy_i, dest);
/* ibf_load_code: decode the serialized bytecode back into iseq_encoded,
 * re-materializing objects, inline caches, call-data slots, IDs and builtin
 * functions, and rebuilding the GC mark bitmap for object-carrying slots.
 * NOTE(review): lossy extraction -- case labels, braces and some statements
 * between the embedded original line numbers are missing. */
13021ibf_load_code(
const struct ibf_load *load,
rb_iseq_t *iseq, ibf_offset_t bytecode_offset, ibf_offset_t bytecode_size,
unsigned int iseq_size)
13024 unsigned int code_index;
13025 ibf_offset_t reading_pos = bytecode_offset;
13029 struct rb_call_data *cd_entries = load_body->call_data;
13032 load_body->iseq_encoded = code;
13033 load_body->iseq_size = iseq_size;
/* mark bitmap: inline single word when it fits, heap list otherwise */
13035 iseq_bits_t * mark_offset_bits;
13036 if (ISEQ_MBITS_BUFLEN(iseq_size) == 1) {
13037 load_body->mark_bits.single = 0;
13038 mark_offset_bits = &load_body->mark_bits.single;
13041 load_body->mark_bits.list =
ZALLOC_N(iseq_bits_t, ISEQ_MBITS_BUFLEN(iseq_size));
13042 mark_offset_bits = load_body->mark_bits.list;
13044 bool needs_bitmap =
false;
13046 for (code_index=0; code_index<iseq_size;) {
13048 const VALUE insn = code[code_index] = ibf_load_small_value(load, &reading_pos);
13049 const char *types = insn_op_types(insn);
13055 for (op_index=0; types[op_index]; op_index++, code_index++) {
13056 const char operand_type = types[op_index];
13057 switch (operand_type) {
/* object operand: re-load and mark the slot for GC */
13060 VALUE op = ibf_load_small_value(load, &reading_pos);
13061 VALUE v = ibf_load_object(load, op);
13062 code[code_index] = v;
13065 ISEQ_MBITS_SET(mark_offset_bits, code_index);
13066 needs_bitmap =
true;
/* case-dispatch hash: deep-copied, rehashed, frozen shareable, and
 * pinned so later references by index resolve to this copy */
13072 VALUE op = ibf_load_small_value(load, &reading_pos);
13073 VALUE src = ibf_load_object(load, op);
13074 VALUE v = rb_hash_new_with_size_and_type(0,
RHASH_SIZE(src), &cdhash_type);
13075 rb_hash_rehash(cdhash_copy(v, src));
13076 RB_OBJ_SET_SHAREABLE(v);
13081 pinned_list_store(load->current_buffer->obj_list, (
long)op, v);
13083 code[code_index] = v;
13084 ISEQ_MBITS_SET(mark_offset_bits, code_index);
13086 needs_bitmap =
true;
/* nested iseq operand */
13091 VALUE op = (
VALUE)ibf_load_small_value(load, &reading_pos);
13093 code[code_index] = v;
13096 ISEQ_MBITS_SET(mark_offset_bits, code_index);
13097 needs_bitmap =
true;
/* constant inline cache: segments rebuilt from the dumped id list */
13103 VALUE op = ibf_load_small_value(load, &reading_pos);
13104 VALUE arr = ibf_load_object(load, op);
13106 IC ic = &ISEQ_IS_IC_ENTRY(load_body, ic_index++);
13107 ic->
segments = array_to_idlist(arr);
13109 code[code_index] = (
VALUE)ic;
/* ivc/icvarc/ise: index into the per-iseq inline-storage sections */
13116 unsigned int op = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13118 ISE ic = ISEQ_IS_ENTRY_START(load_body, operand_type) + op;
13119 code[code_index] = (
VALUE)ic;
13121 if (operand_type == TS_IVC) {
/* ivar caches start from a clean state; the setter form also
 * remembers the ivar name from the preceding operand */
13124 if (insn == BIN(setinstancevariable)) {
13125 ID iv_name = (
ID)code[code_index - 1];
13126 cache->iv_set_name = iv_name;
13127 cache->value = IVAR_CACHE_INIT;
13130 cache->iv_set_name = 0;
13131 cache->value = rb_getivar_cache_pack(ROOT_SHAPE_ID, ATTR_INDEX_NOT_SET);
/* call-data operand: consume the next preallocated slot in order */
13139 code[code_index] = (
VALUE)cd_entries++;
13144 VALUE op = ibf_load_small_value(load, &reading_pos);
13145 code[code_index] = ibf_load_id(load, (
ID)(
VALUE)op);
13152 code[code_index] = (
VALUE)ibf_load_builtin(load, &reading_pos);
/* default: plain small-value operand */
13155 code[code_index] = ibf_load_small_value(load, &reading_pos);
13159 if (insn_len(insn) != op_index+1) {
/* no object slots were set: drop the heap bitmap again */
13164 if (!needs_bitmap) {
13165 SIZED_FREE_N(load_body->mark_bits.list, ISEQ_MBITS_BUFLEN(iseq_size));
13166 load_body->mark_bits.list = NULL;
13170 RUBY_ASSERT(reading_pos == bytecode_offset + bytecode_size);
/* Optional-argument jump table and keyword-parameter dump/load.
 * NOTE(review): lossy extraction -- headers/braces partially missing. */
/* ibf_dump_param_opt_table (fragment): opt_num+1 raw VALUEs, VALUE-aligned. */
13177 int opt_num = ISEQ_BODY(iseq)->param.opt_num;
13180 IBF_W_ALIGN(
VALUE);
13181 return ibf_dump_write(dump, ISEQ_BODY(iseq)->param.opt_table,
sizeof(
VALUE) * (opt_num + 1));
13184 return ibf_dump_pos(dump);
/* Inverse: copy opt_num+1 VALUEs back out of the load buffer. */
13189ibf_load_param_opt_table(
const struct ibf_load *load, ibf_offset_t opt_table_offset,
int opt_num)
13193 MEMCPY(table, load->current_buffer->buff + opt_table_offset,
VALUE, opt_num+1);
/* ibf_dump_param_keyword: dump a copy of the keyword struct whose table/
 * default_values pointers are replaced by dumped indices. */
13204 const struct rb_iseq_param_keyword *kw = ISEQ_BODY(iseq)->param.keyword;
13207 struct rb_iseq_param_keyword dump_kw = *kw;
13208 int dv_num = kw->num - kw->required_num;
13213 for (i=0; i<kw->num; i++) ids[i] = (
ID)ibf_dump_id(dump, kw->table[i]);
13214 for (i=0; i<dv_num; i++) dvs[i] = (
VALUE)ibf_dump_object(dump, kw->default_values[i]);
13216 dump_kw.table = IBF_W(ids,
ID, kw->num);
13217 dump_kw.default_values = IBF_W(dvs,
VALUE, dv_num);
13218 IBF_W_ALIGN(
struct rb_iseq_param_keyword);
13219 return ibf_dump_write(dump, &dump_kw,
sizeof(
struct rb_iseq_param_keyword) * 1);
/* Inverse: re-read the struct and resolve the dumped indices back into
 * live IDs and default-value objects (offset 0 means "no keyword params"). */
13226static const struct rb_iseq_param_keyword *
13227ibf_load_param_keyword(
const struct ibf_load *load, ibf_offset_t param_keyword_offset)
13229 if (param_keyword_offset) {
13230 struct rb_iseq_param_keyword *kw = IBF_R(param_keyword_offset,
struct rb_iseq_param_keyword, 1);
13231 int dv_num = kw->num - kw->required_num;
13232 VALUE *dvs = dv_num ? IBF_R(kw->default_values,
VALUE, dv_num) : NULL;
13235 for (i=0; i<dv_num; i++) {
13236 dvs[i] = ibf_load_object(load, dvs[i]);
13242 kw->default_values = dvs;
/* insns_info serialization: per-entry line/node/event data, plus the
 * positions array stored delta-encoded as small values. */
/* ibf_dump_insns_info_body (fragment) */
13253 ibf_offset_t offset = ibf_dump_pos(dump);
13257 for (i = 0; i < ISEQ_BODY(iseq)->insns_info.size; i++) {
13258 ibf_dump_write_small_value(dump, entries[i].line_no);
13259#ifdef USE_ISEQ_NODE_ID
13260 ibf_dump_write_small_value(dump, entries[i].node_id);
13262 ibf_dump_write_small_value(dump, entries[i].events);
/* Inverse of the body dump above. */
13269ibf_load_insns_info_body(
const struct ibf_load *load, ibf_offset_t body_offset,
unsigned int size)
13271 ibf_offset_t reading_pos = body_offset;
13275 for (i = 0; i < size; i++) {
13276 entries[i].line_no = (int)ibf_load_small_value(load, &reading_pos);
13277#ifdef USE_ISEQ_NODE_ID
13278 entries[i].node_id = (int)ibf_load_small_value(load, &reading_pos);
13280 entries[i].events = (
rb_event_flag_t)ibf_load_small_value(load, &reading_pos);
/* Positions are monotonically increasing, so only the deltas are written. */
13287ibf_dump_insns_info_positions(
struct ibf_dump *dump,
const unsigned int *positions,
unsigned int size)
13289 ibf_offset_t offset = ibf_dump_pos(dump);
13291 unsigned int last = 0;
13293 for (i = 0; i < size; i++) {
13294 ibf_dump_write_small_value(dump, positions[i] - last);
13295 last = positions[i];
/* Inverse: accumulate the deltas back into absolute positions. */
13301static unsigned int *
13302ibf_load_insns_info_positions(
const struct ibf_load *load, ibf_offset_t positions_offset,
unsigned int size)
13304 ibf_offset_t reading_pos = positions_offset;
13305 unsigned int *positions =
ALLOC_N(
unsigned int, size);
13307 unsigned int last = 0;
13309 for (i = 0; i < size; i++) {
13310 positions[i] = last + (
unsigned int)ibf_load_small_value(load, &reading_pos);
13311 last = positions[i];
/* Local-variable table and lvar-state serialization. */
/* ibf_dump_local_table (fragment): each local is dumped as an ID index;
 * unnamed (numeric) locals fall back to a dumped Integer object. */
13321 const int size = body->local_table_size;
13325 for (i=0; i<size; i++) {
13326 VALUE v = ibf_dump_id(dump, body->local_table[i]);
13329 v = ibf_dump_object(dump,
ULONG2NUM(body->local_table[i]));
13335 return ibf_dump_write(dump, table,
sizeof(
ID) * size);
/* Inverse: re-intern each dumped index.  The common one-entry $! table is
 * deduplicated to the shared rb_iseq_shared_exc_local_tbl. */
13339ibf_load_local_table(
const struct ibf_load *load, ibf_offset_t local_table_offset,
int size)
13342 ID *table = IBF_R(local_table_offset,
ID, size);
13345 for (i=0; i<size; i++) {
13346 table[i] = ibf_load_id(load, table[i]);
13349 if (size == 1 && table[0] == idERROR_INFO) {
13350 ruby_xfree_sized(table,
sizeof(
ID) * size);
13351 return rb_iseq_shared_exc_local_tbl;
/* ibf_dump_lvar_states (fragment): raw array write; zero-length when the
 * iseq has no lvar_states. */
13366 const int size = body->local_table_size;
13367 IBF_W_ALIGN(
enum lvar_state);
13368 return ibf_dump_write(dump, body->lvar_states,
sizeof(
enum lvar_state) * (body->lvar_states ? size : 0));
/* Inverse; shared-exc local tables carry no lvar states. */
13371static enum lvar_state *
13372ibf_load_lvar_states(
const struct ibf_load *load, ibf_offset_t lvar_states_offset,
int size,
const ID *local_table)
13374 if (local_table == rb_iseq_shared_exc_local_tbl ||
13379 enum lvar_state *states = IBF_R(lvar_states_offset,
enum lvar_state, size);
/* Catch-table and call-info serialization. */
/* ibf_dump_catch_table (fragment): child iseqs are dumped first (so their
 * indices exist), then each entry's fields are written as small values. */
13390 int *iseq_indices =
ALLOCA_N(
int, table->size);
13393 for (i=0; i<table->size; i++) {
13394 iseq_indices[i] = ibf_dump_iseq(dump, table->entries[i].iseq);
13397 const ibf_offset_t offset = ibf_dump_pos(dump);
13399 for (i=0; i<table->size; i++) {
13400 ibf_dump_write_small_value(dump, iseq_indices[i]);
13401 ibf_dump_write_small_value(dump, table->entries[i].type);
13402 ibf_dump_write_small_value(dump, table->entries[i].start);
13403 ibf_dump_write_small_value(dump, table->entries[i].end);
13404 ibf_dump_write_small_value(dump, table->entries[i].cont);
13405 ibf_dump_write_small_value(dump, table->entries[i].sp);
13410 return ibf_dump_pos(dump);
/* Inverse: the table is attached to the parent iseq BEFORE loading child
 * iseqs so the GC can see the partially-filled entries; entry iseqs are
 * written with RB_OBJ_WRITE for the write barrier. */
13415ibf_load_catch_table(
const struct ibf_load *load, ibf_offset_t catch_table_offset,
unsigned int size,
const rb_iseq_t *parent_iseq)
13418 struct iseq_catch_table *table = ruby_xcalloc(1, iseq_catch_table_bytes(size));
13419 table->size = size;
13420 ISEQ_BODY(parent_iseq)->catch_table = table;
13422 ibf_offset_t reading_pos = catch_table_offset;
13425 for (i=0; i<table->size; i++) {
13426 int iseq_index = (int)ibf_load_small_value(load, &reading_pos);
13427 table->entries[i].type = (
enum rb_catch_type)ibf_load_small_value(load, &reading_pos);
13428 table->entries[i].start = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13429 table->entries[i].end = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13430 table->entries[i].cont = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13431 table->entries[i].sp = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13434 RB_OBJ_WRITE(parent_iseq, UNALIGNED_MEMBER_PTR(&table->entries[i], iseq), catch_iseq);
/* zero-size case: no table at all */
13438 ISEQ_BODY(parent_iseq)->catch_table = NULL;
/* ibf_dump_ci_entries (fragment): per call-info, write mid index, flag,
 * argc, then keyword count and keyword object indices (0 when no kwargs);
 * a missing mid is encoded as (VALUE)-1. */
13446 const unsigned int ci_size = body->ci_size;
13449 ibf_offset_t offset = ibf_dump_pos(dump);
13453 for (i = 0; i < ci_size; i++) {
13456 ibf_dump_write_small_value(dump, ibf_dump_id(dump, vm_ci_mid(ci)));
13457 ibf_dump_write_small_value(dump, vm_ci_flag(ci));
13458 ibf_dump_write_small_value(dump, vm_ci_argc(ci));
13462 int len = kwarg->keyword_len;
13463 ibf_dump_write_small_value(dump,
len);
13464 for (
int j=0; j<
len; j++) {
13465 VALUE keyword = ibf_dump_object(dump, kwarg->keywords[j]);
13466 ibf_dump_write_small_value(dump, keyword);
13470 ibf_dump_write_small_value(dump, 0);
13475 ibf_dump_write_small_value(dump, (
VALUE)-1);
/* Outer-variables table and call-info loading.
 * NOTE(review): lossy extraction -- braces and some lines are missing. */
/* rb_id_table_foreach callback: collect one (id, val) pair, caching the
 * name string for sorting. */
13493static enum rb_id_table_iterator_result
13494store_outer_variable(
ID id,
VALUE val,
void *dump)
13499 pair->name = rb_id2str(
id);
13501 return ID_TABLE_CONTINUE;
/* qsort comparator for outer-variable pairs; nameless entries sort
 * specially (comparison body mostly lost here). */
13505outer_variable_cmp(
const void *a,
const void *b,
void *arg)
13513 else if (!bp->name) {
/* ibf_dump_outer_variables (fragment): size, then (id index, val) pairs in
 * sorted order for a deterministic dump. */
13523 struct rb_id_table * ovs = ISEQ_BODY(iseq)->outer_variables;
13525 ibf_offset_t offset = ibf_dump_pos(dump);
13527 size_t size = ovs ? rb_id_table_size(ovs) : 0;
13528 ibf_dump_write_small_value(dump, (
VALUE)size);
13537 rb_id_table_foreach(ovs, store_outer_variable, ovlist);
13539 for (
size_t i = 0; i < size; ++i) {
13540 ID id = ovlist->pairs[i].id;
13541 ID val = ovlist->pairs[i].val;
13542 ibf_dump_write_small_value(dump, ibf_dump_id(dump,
id));
13543 ibf_dump_write_small_value(dump, val);
/* Rebuild call_data entries from the dumped ci stream; each cc starts
 * empty.  A mid index of (VALUE)-1 marks an absent call-info. */
13552ibf_load_ci_entries(
const struct ibf_load *load,
13553 ibf_offset_t ci_entries_offset,
13554 unsigned int ci_size,
13562 ibf_offset_t reading_pos = ci_entries_offset;
13569 for (i = 0; i < ci_size; i++) {
13570 VALUE mid_index = ibf_load_small_value(load, &reading_pos);
13571 if (mid_index != (
VALUE)-1) {
13572 ID mid = ibf_load_id(load, mid_index);
13573 unsigned int flag = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13574 unsigned int argc = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13577 int kwlen = (int)ibf_load_small_value(load, &reading_pos);
13580 kwarg->references = 0;
13581 kwarg->keyword_len = kwlen;
13582 for (
int j=0; j<kwlen; j++) {
13583 VALUE keyword = ibf_load_small_value(load, &reading_pos);
13584 kwarg->keywords[j] = ibf_load_object(load, keyword);
13588 cds[i].ci = vm_ci_new(mid, flag, argc, kwarg);
13590 cds[i].cc = vm_cc_empty();
/* Inverse of ibf_dump_outer_variables; nameless keys get fresh temporary
 * IDs so they remain distinct table keys. */
13601ibf_load_outer_variables(
const struct ibf_load * load, ibf_offset_t outer_variables_offset)
13603 ibf_offset_t reading_pos = outer_variables_offset;
13607 size_t table_size = (size_t)ibf_load_small_value(load, &reading_pos);
13609 if (table_size > 0) {
13610 tbl = rb_id_table_create(table_size);
13613 for (
size_t i = 0; i < table_size; i++) {
13614 ID key = ibf_load_id(load, (
ID)ibf_load_small_value(load, &reading_pos));
13615 VALUE value = ibf_load_small_value(load, &reading_pos);
13616 if (!key) key = rb_make_temporary_id(i);
13617 rb_id_table_insert(tbl, key, value);
/* Fragment of ibf_dump_iseq_each: serializes one iseq body.  Order matters:
 * every ibf_dump_* sub-section is written first (recording its offset), then
 * a fixed sequence of small-values encodes the body header referencing those
 * offsets.  The loader (ibf_load_iseq_each below) reads them back in the
 * exact same order. */
13626 RUBY_ASSERT(dump->current_buffer == &dump->global_buffer);
13628 unsigned int *positions;
13632 const VALUE location_pathobj_index = ibf_dump_object(dump, body->location.pathobj);
13633 const VALUE location_base_label_index = ibf_dump_object(dump, body->location.base_label);
13634 const VALUE location_label_index = ibf_dump_object(dump, body->location.label);
/* Optional per-iseq local buffer: the iseq is serialized into its own
 * buffer with its own object table, then appended to the global stream. */
13636#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
13637 ibf_offset_t iseq_start = ibf_dump_pos(dump);
13642 buffer.obj_table = ibf_dump_object_table_new();
13643 dump->current_buffer = &buffer;
13646 const ibf_offset_t bytecode_offset = ibf_dump_code(dump, iseq);
13647 const ibf_offset_t bytecode_size = ibf_dump_pos(dump) - bytecode_offset;
13648 const ibf_offset_t param_opt_table_offset = ibf_dump_param_opt_table(dump, iseq);
13649 const ibf_offset_t param_keyword_offset = ibf_dump_param_keyword(dump, iseq);
13650 const ibf_offset_t insns_info_body_offset = ibf_dump_insns_info_body(dump, iseq);
/* Positions are stored decoded; the decode allocates, so free with the
 * matching sized free after dumping. */
13652 positions = rb_iseq_insns_info_decode_positions(ISEQ_BODY(iseq));
13653 const ibf_offset_t insns_info_positions_offset = ibf_dump_insns_info_positions(dump, positions, body->insns_info.size);
13654 SIZED_FREE_N(positions, ISEQ_BODY(iseq)->insns_info.size);
13656 const ibf_offset_t local_table_offset = ibf_dump_local_table(dump, iseq);
13657 const ibf_offset_t lvar_states_offset = ibf_dump_lvar_states(dump, iseq)
13658 const unsigned int catch_table_size = body->catch_table ? body->catch_table->size : 0;
13659 const ibf_offset_t catch_table_offset = ibf_dump_catch_table(dump, iseq);
13660 const int parent_iseq_index = ibf_dump_iseq(dump, ISEQ_BODY(iseq)->parent_iseq);
13661 const int local_iseq_index = ibf_dump_iseq(dump, ISEQ_BODY(iseq)->local_iseq);
13662 const int mandatory_only_iseq_index = ibf_dump_iseq(dump, ISEQ_BODY(iseq)->mandatory_only_iseq);
13663 const ibf_offset_t ci_entries_offset = ibf_dump_ci_entries(dump, iseq);
13664 const ibf_offset_t outer_variables_offset = ibf_dump_outer_variables(dump, iseq);
13666#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
13667 ibf_offset_t local_obj_list_offset;
13668 unsigned int local_obj_list_size;
13670 ibf_dump_object_list(dump, &local_obj_list_offset, &local_obj_list_size);
13673 ibf_offset_t body_offset = ibf_dump_pos(dump);
/* Pack all boolean param flags into one bitset word; the loader unpacks
 * the same bit positions (see (param_flags >> n) & 1 below). */
13676 unsigned int param_flags =
13677 (body->param.flags.has_lead << 0) |
13678 (body->param.flags.has_opt << 1) |
13679 (body->param.flags.has_rest << 2) |
13680 (body->param.flags.has_post << 3) |
13681 (body->param.flags.has_kw << 4) |
13682 (body->param.flags.has_kwrest << 5) |
13683 (body->param.flags.has_block << 6) |
13684 (body->param.flags.ambiguous_param0 << 7) |
13685 (body->param.flags.accepts_no_kwarg << 8) |
13686 (body->param.flags.ruby2_keywords << 9) |
13687 (body->param.flags.anon_rest << 10) |
13688 (body->param.flags.anon_kwrest << 11) |
13689 (body->param.flags.use_block << 12) |
13690 (body->param.flags.forwardable << 13) |
13691 (body->param.flags.accepts_no_block << 14);
/* With a local buffer offsets are absolute within it; otherwise they are
 * encoded relative to body_offset (smaller small-values). */
13693#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
13694# define IBF_BODY_OFFSET(x) (x)
13696# define IBF_BODY_OFFSET(x) (body_offset - (x))
13699 ibf_dump_write_small_value(dump, body->type);
13700 ibf_dump_write_small_value(dump, body->iseq_size);
13701 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(bytecode_offset));
13702 ibf_dump_write_small_value(dump, bytecode_size);
13703 ibf_dump_write_small_value(dump, param_flags);
13704 ibf_dump_write_small_value(dump, body->param.size);
13705 ibf_dump_write_small_value(dump, body->param.lead_num);
13706 ibf_dump_write_small_value(dump, body->param.opt_num);
13707 ibf_dump_write_small_value(dump, body->param.rest_start);
13708 ibf_dump_write_small_value(dump, body->param.post_start);
13709 ibf_dump_write_small_value(dump, body->param.post_num);
13710 ibf_dump_write_small_value(dump, body->param.block_start);
13711 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(param_opt_table_offset));
13712 ibf_dump_write_small_value(dump, param_keyword_offset);
13713 ibf_dump_write_small_value(dump, location_pathobj_index);
13714 ibf_dump_write_small_value(dump, location_base_label_index);
13715 ibf_dump_write_small_value(dump, location_label_index);
13716 ibf_dump_write_small_value(dump, body->location.first_lineno);
13717 ibf_dump_write_small_value(dump, body->location.node_id);
13718 ibf_dump_write_small_value(dump, body->location.code_location.beg_pos.lineno);
13719 ibf_dump_write_small_value(dump, body->location.code_location.beg_pos.column);
13720 ibf_dump_write_small_value(dump, body->location.code_location.end_pos.lineno);
13721 ibf_dump_write_small_value(dump, body->location.code_location.end_pos.column);
13722 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(insns_info_body_offset));
13723 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(insns_info_positions_offset));
13724 ibf_dump_write_small_value(dump, body->insns_info.size);
13725 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(local_table_offset));
13726 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(lvar_states_offset));
13727 ibf_dump_write_small_value(dump, catch_table_size);
13728 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(catch_table_offset));
13729 ibf_dump_write_small_value(dump, parent_iseq_index);
13730 ibf_dump_write_small_value(dump, local_iseq_index);
13731 ibf_dump_write_small_value(dump, mandatory_only_iseq_index);
13732 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(ci_entries_offset));
13733 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(outer_variables_offset));
13734 ibf_dump_write_small_value(dump, body->variable.flip_count);
13735 ibf_dump_write_small_value(dump, body->local_table_size);
13736 ibf_dump_write_small_value(dump, body->ivc_size);
13737 ibf_dump_write_small_value(dump, body->icvarc_size);
13738 ibf_dump_write_small_value(dump, body->ise_size);
13739 ibf_dump_write_small_value(dump, body->ic_size);
13740 ibf_dump_write_small_value(dump, body->ci_size);
13741 ibf_dump_write_small_value(dump, body->stack_max);
13742 ibf_dump_write_small_value(dump, body->builtin_attrs);
13743 ibf_dump_write_small_value(dump, body->prism ? 1 : 0);
13745#undef IBF_BODY_OFFSET
/* Local-buffer epilogue: restore the global buffer, append the local
 * buffer's bytes, write the trailer locating it, and free its obj table. */
13747#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
13748 ibf_offset_t iseq_length_bytes = ibf_dump_pos(dump);
13750 dump->current_buffer = saved_buffer;
13751 ibf_dump_write(dump, RSTRING_PTR(buffer.str), iseq_length_bytes);
13753 ibf_offset_t offset = ibf_dump_pos(dump);
13754 ibf_dump_write_small_value(dump, iseq_start);
13755 ibf_dump_write_small_value(dump, iseq_length_bytes);
13756 ibf_dump_write_small_value(dump, body_offset);
13758 ibf_dump_write_small_value(dump, local_obj_list_offset);
13759 ibf_dump_write_small_value(dump, local_obj_list_size);
13761 st_free_table(buffer.obj_table);
13765 return body_offset;
/* ibf_load_location_str: loads a location string (label etc.) by object
 * index and interns it via rb_fstring (deduplicated, frozen). */
13770ibf_load_location_str(
const struct ibf_load *load,
VALUE str_index)
13772 VALUE str = ibf_load_object(load, str_index);
13774 str = rb_fstring(str);
/* Fragment of ibf_load_iseq_each: deserializes one iseq body.  Reads the
 * fixed small-value header in exactly the order ibf_dump_iseq_each wrote it,
 * then reconstructs the body's sub-structures.  Sampled lines omit several
 * declarations and braces. */
13784 ibf_offset_t reading_pos = offset;
/* Local-buffer prologue: read trailer values locating the per-iseq buffer
 * and its object list, then switch the load to that buffer. */
13786#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
13788 load->current_buffer = &load->global_buffer;
13790 const ibf_offset_t iseq_start = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
13791 const ibf_offset_t iseq_length_bytes = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
13792 const ibf_offset_t body_offset = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
13795 buffer.buff = load->global_buffer.buff + iseq_start;
13796 buffer.size = iseq_length_bytes;
13797 buffer.obj_list_offset = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
13798 buffer.obj_list_size = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
13799 buffer.obj_list = pinned_list_new(buffer.obj_list_size);
13801 load->current_buffer = &buffer;
13802 reading_pos = body_offset;
/* Mirror of the dump-side macro: absolute offsets in a local buffer,
 * body-relative otherwise. */
13805#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
13806# define IBF_BODY_OFFSET(x) (x)
13808# define IBF_BODY_OFFSET(x) (offset - (x))
13811 const unsigned int type = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13812 const unsigned int iseq_size = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13813 const ibf_offset_t bytecode_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13814 const ibf_offset_t bytecode_size = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
13815 const unsigned int param_flags = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13816 const unsigned int param_size = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13817 const int param_lead_num = (int)ibf_load_small_value(load, &reading_pos);
13818 const int param_opt_num = (int)ibf_load_small_value(load, &reading_pos);
13819 const int param_rest_start = (int)ibf_load_small_value(load, &reading_pos);
13820 const int param_post_start = (int)ibf_load_small_value(load, &reading_pos);
13821 const int param_post_num = (int)ibf_load_small_value(load, &reading_pos);
13822 const int param_block_start = (int)ibf_load_small_value(load, &reading_pos);
13823 const ibf_offset_t param_opt_table_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13824 const ibf_offset_t param_keyword_offset = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
13825 const VALUE location_pathobj_index = ibf_load_small_value(load, &reading_pos);
13826 const VALUE location_base_label_index = ibf_load_small_value(load, &reading_pos);
13827 const VALUE location_label_index = ibf_load_small_value(load, &reading_pos);
13828 const int location_first_lineno = (int)ibf_load_small_value(load, &reading_pos);
13829 const int location_node_id = (int)ibf_load_small_value(load, &reading_pos);
13830 const int location_code_location_beg_pos_lineno = (int)ibf_load_small_value(load, &reading_pos);
13831 const int location_code_location_beg_pos_column = (int)ibf_load_small_value(load, &reading_pos);
13832 const int location_code_location_end_pos_lineno = (int)ibf_load_small_value(load, &reading_pos);
13833 const int location_code_location_end_pos_column = (int)ibf_load_small_value(load, &reading_pos);
13834 const ibf_offset_t insns_info_body_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13835 const ibf_offset_t insns_info_positions_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13836 const unsigned int insns_info_size = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13837 const ibf_offset_t local_table_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13838 const ibf_offset_t lvar_states_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13839 const unsigned int catch_table_size = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13840 const ibf_offset_t catch_table_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13841 const int parent_iseq_index = (int)ibf_load_small_value(load, &reading_pos);
13842 const int local_iseq_index = (int)ibf_load_small_value(load, &reading_pos);
13843 const int mandatory_only_iseq_index = (int)ibf_load_small_value(load, &reading_pos);
13844 const ibf_offset_t ci_entries_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13845 const ibf_offset_t outer_variables_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13846 const rb_snum_t variable_flip_count = (rb_snum_t)ibf_load_small_value(load, &reading_pos);
13847 const unsigned int local_table_size = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13849 const unsigned int ivc_size = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13850 const unsigned int icvarc_size = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13851 const unsigned int ise_size = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13852 const unsigned int ic_size = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13854 const unsigned int ci_size = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13855 const unsigned int stack_max = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13856 const unsigned int builtin_attrs = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13857 const bool prism = (bool)ibf_load_small_value(load, &reading_pos);
/* Path/realpath reconstruction; a non-nil non-string realpath is rejected
 * as malformed input. */
13860 VALUE path = ibf_load_object(load, location_pathobj_index);
13865 realpath = path = rb_fstring(path);
13868 VALUE pathobj = path;
13874 if (!
NIL_P(realpath)) {
13876 rb_raise(rb_eArgError,
"unexpected realpath %"PRIxVALUE
13877 "(%x), path=%+"PRIsVALUE,
13878 realpath,
TYPE(realpath), path);
13880 realpath = rb_fstring(realpath);
13886 rb_iseq_pathobj_set(iseq, path, realpath);
/* Push a dummy frame so errors raised while loading report this path;
 * popped at the end (rb_vm_pop_frame_no_int below). */
13891 VALUE dummy_frame = rb_vm_push_frame_fname(ec, path);
13893#undef IBF_BODY_OFFSET
13895 load_body->type =
type;
13896 load_body->stack_max = stack_max;
13897 load_body->param.flags.has_lead = (param_flags >> 0) & 1;
13898 load_body->param.flags.has_opt = (param_flags >> 1) & 1;
13899 load_body->param.flags.has_rest = (param_flags >> 2) & 1;
13900 load_body->param.flags.has_post = (param_flags >> 3) & 1;
/* has_kw is deliberately deferred: set FALSE here and restored from bit 4
 * only after the keyword table is loaded (line 13952 below). */
13901 load_body->param.flags.has_kw = FALSE;
13902 load_body->param.flags.has_kwrest = (param_flags >> 5) & 1;
13903 load_body->param.flags.has_block = (param_flags >> 6) & 1;
13904 load_body->param.flags.ambiguous_param0 = (param_flags >> 7) & 1;
13905 load_body->param.flags.accepts_no_kwarg = (param_flags >> 8) & 1;
13906 load_body->param.flags.ruby2_keywords = (param_flags >> 9) & 1;
13907 load_body->param.flags.anon_rest = (param_flags >> 10) & 1;
13908 load_body->param.flags.anon_kwrest = (param_flags >> 11) & 1;
13909 load_body->param.flags.use_block = (param_flags >> 12) & 1;
13910 load_body->param.flags.forwardable = (param_flags >> 13) & 1;
13911 load_body->param.flags.accepts_no_block = (param_flags >> 14) & 1;
13912 load_body->param.size = param_size;
13913 load_body->param.lead_num = param_lead_num;
13914 load_body->param.opt_num = param_opt_num;
13915 load_body->param.rest_start = param_rest_start;
13916 load_body->param.post_start = param_post_start;
13917 load_body->param.post_num = param_post_num;
13918 load_body->param.block_start = param_block_start;
13919 load_body->local_table_size = local_table_size;
13920 load_body->ci_size = ci_size;
13921 load_body->insns_info.size = insns_info_size;
/* Coverage/original-iseq are never serialized; reset them. */
13923 ISEQ_COVERAGE_SET(iseq,
Qnil);
13924 ISEQ_ORIGINAL_ISEQ_CLEAR(iseq);
13925 load_body->variable.flip_count = variable_flip_count;
13926 load_body->variable.script_lines =
Qnil;
13928 load_body->location.first_lineno = location_first_lineno;
13929 load_body->location.node_id = location_node_id;
13930 load_body->location.code_location.beg_pos.lineno = location_code_location_beg_pos_lineno;
13931 load_body->location.code_location.beg_pos.column = location_code_location_beg_pos_column;
13932 load_body->location.code_location.end_pos.lineno = location_code_location_end_pos_lineno;
13933 load_body->location.code_location.end_pos.column = location_code_location_end_pos_column;
13934 load_body->builtin_attrs = builtin_attrs;
13935 load_body->prism = prism;
13937 load_body->ivc_size = ivc_size;
13938 load_body->icvarc_size = icvarc_size;
13939 load_body->ise_size = ise_size;
13940 load_body->ic_size = ic_size;
/* Inline-cache entries are allocated fresh (not serialized); NULL when
 * the iseq has none. */
13942 if (ISEQ_IS_SIZE(load_body)) {
13946 load_body->is_entries = NULL;
13948 ibf_load_ci_entries(load, ci_entries_offset, ci_size, &load_body->call_data);
13949 load_body->outer_variables = ibf_load_outer_variables(load, outer_variables_offset);
13950 load_body->param.opt_table = ibf_load_param_opt_table(load, param_opt_table_offset, param_opt_num);
13951 load_body->param.keyword = ibf_load_param_keyword(load, param_keyword_offset);
13952 load_body->param.flags.has_kw = (param_flags >> 4) & 1;
13953 load_body->insns_info.body = ibf_load_insns_info_body(load, insns_info_body_offset, insns_info_size);
13954 load_body->insns_info.positions = ibf_load_insns_info_positions(load, insns_info_positions_offset, insns_info_size);
13955 load_body->local_table = ibf_load_local_table(load, local_table_offset, local_table_size);
13956 load_body->lvar_states = ibf_load_lvar_states(load, lvar_states_offset, local_table_size, load_body->local_table);
13957 ibf_load_catch_table(load, catch_table_offset, catch_table_size, iseq);
/* Related iseqs are loaded by index (recursive ibf_load_iseq), then linked
 * with GC write barriers. */
13961 const rb_iseq_t *mandatory_only_iseq = ibf_load_iseq(load, (
const rb_iseq_t *)(
VALUE)mandatory_only_iseq_index);
13963 RB_OBJ_WRITE(iseq, &load_body->parent_iseq, parent_iseq);
13964 RB_OBJ_WRITE(iseq, &load_body->local_iseq, local_iseq);
13965 RB_OBJ_WRITE(iseq, &load_body->mandatory_only_iseq, mandatory_only_iseq);
/* keyword->table points into the freshly loaded local table; recompute the
 * pointer since the serialized form stores only indices. */
13968 if (load_body->param.keyword != NULL) {
13970 struct rb_iseq_param_keyword *keyword = (
struct rb_iseq_param_keyword *) load_body->param.keyword;
13971 keyword->table = &load_body->local_table[keyword->bits_start - keyword->num];
13974 ibf_load_code(load, iseq, bytecode_offset, bytecode_size, iseq_size);
13975#if VM_INSN_INFO_TABLE_IMPL == 2
13976 rb_iseq_insns_info_encode_positions(iseq);
13979 rb_iseq_translate_threaded_code(iseq);
13981#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
13982 load->current_buffer = &load->global_buffer;
/* Location labels are loaded from the global buffer (outside any local
 * buffer scope), hence the buffer switch above. */
13985 RB_OBJ_WRITE(iseq, &load_body->location.base_label, ibf_load_location_str(load, location_base_label_index));
13986 RB_OBJ_WRITE(iseq, &load_body->location.label, ibf_load_location_str(load, location_label_index));
13988#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
13989 load->current_buffer = saved_buffer;
13991 verify_call_cache(iseq);
13994 rb_vm_pop_frame_no_int(ec);
/* st_foreach callback: dump one iseq from the dedup table and record its
 * offset into the args->offset_list being accumulated. */
14004ibf_dump_iseq_list_i(st_data_t key, st_data_t val, st_data_t ptr)
14009 ibf_offset_t offset = ibf_dump_iseq_each(args->dump, iseq);
14012 return ST_CONTINUE;
/* ibf_dump_iseq_list fragment: dumps all collected iseqs, then writes an
 * aligned array of their offsets and records its position/size in the file
 * header. */
14022 args.offset_list = offset_list;
14024 st_foreach(dump->iseq_table, ibf_dump_iseq_list_i, (st_data_t)&args);
14027 st_index_t size = dump->iseq_table->num_entries;
14028 ibf_offset_t *offsets =
ALLOCA_N(ibf_offset_t, size);
14030 for (i = 0; i < size; i++) {
14034 ibf_dump_align(dump,
sizeof(ibf_offset_t));
14035 header->iseq_list_offset = ibf_dump_write(dump, offsets,
sizeof(ibf_offset_t) * size);
14036 header->iseq_list_size = (
unsigned int)size;
/* Per-object header bitfields (packed into one byte: 5+1+1+1 bits, see the
 * pack/unpack code further down). */
14046 unsigned int type: 5;
14047 unsigned int special_const: 1;
14048 unsigned int frozen: 1;
14049 unsigned int internal: 1;
/* Whitelist of dumpable class constants; anything else aborts the dump. */
14052enum ibf_object_class_index {
14053 IBF_OBJECT_CLASS_OBJECT,
14054 IBF_OBJECT_CLASS_ARRAY,
14055 IBF_OBJECT_CLASS_STANDARD_ERROR,
14056 IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_ERROR,
14057 IBF_OBJECT_CLASS_TYPE_ERROR,
14058 IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_KEY_ERROR,
/* Flexible-array payloads for serialized hashes and bignums. */
14068 long keyval[FLEX_ARY_LEN];
14081 BDIGIT digits[FLEX_ARY_LEN];
14084enum ibf_object_data_type {
14085 IBF_OBJECT_DATA_ENCODING,
/* Round offset up to the next multiple of align (align must divide the
 * buffer layout; used before casting raw buffer bytes to typed structs). */
14096#define IBF_ALIGNED_OFFSET(align, offset) \
14097 ((((offset) - 1) / (align) + 1) * (align))
/* Bounds-checked, alignment-adjusted pointer into the load buffer. */
14102#define IBF_OBJBODY(type, offset) \
ibf_load_check_offset(load, IBF_ALIGNED_OFFSET(RUBY_ALIGNOF(type), offset))
/* ibf_load_check_offset: validates an offset against the current buffer
 * size (untrusted binary input) and returns a pointer into the buffer. */
14106ibf_load_check_offset(
const struct ibf_load *load,
size_t offset)
14108 if (offset >= load->current_buffer->size) {
14109 rb_raise(
rb_eIndexError,
"object offset out of range: %"PRIdSIZE, offset);
14111 return load->current_buffer->buff + offset;
/* ibf_dump_object_unsupported: raises for object types the IBF format
 * cannot serialize; the raw obj info is formatted into buff for the
 * (sampled-away) error message. */
14114NORETURN(
static void ibf_dump_object_unsupported(
struct ibf_dump *dump,
VALUE obj));
14117ibf_dump_object_unsupported(
struct ibf_dump *dump,
VALUE obj)
14120 rb_raw_obj_info(buff,
sizeof(buff), obj);
14129 rb_raise(rb_eArgError,
"unsupported");
/* ibf_dump_object_class fragment: map a known class object to its enum
 * index (switch arms sampled away) and write the index; unknown classes
 * are a bug in the dumper. */
14136 enum ibf_object_class_index cindex;
14138 cindex = IBF_OBJECT_CLASS_OBJECT;
14141 cindex = IBF_OBJECT_CLASS_ARRAY;
14144 cindex = IBF_OBJECT_CLASS_STANDARD_ERROR;
14147 cindex = IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_ERROR;
14150 cindex = IBF_OBJECT_CLASS_TYPE_ERROR;
14153 cindex = IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_KEY_ERROR;
14156 rb_obj_info_dump(obj);
14158 rb_bug(
"unsupported class");
14160 ibf_dump_write_small_value(dump, (
VALUE)cindex);
/* ibf_load_object_class fragment: inverse mapping; an out-of-range index in
 * the binary raises ArgumentError (untrusted input, not rb_bug). */
14166 enum ibf_object_class_index cindex = (
enum ibf_object_class_index)ibf_load_small_value(load, &offset);
14169 case IBF_OBJECT_CLASS_OBJECT:
14171 case IBF_OBJECT_CLASS_ARRAY:
14173 case IBF_OBJECT_CLASS_STANDARD_ERROR:
14175 case IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_ERROR:
14177 case IBF_OBJECT_CLASS_TYPE_ERROR:
14179 case IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_KEY_ERROR:
14183 rb_raise(rb_eArgError,
"ibf_load_object_class: unknown class (%d)", (
int)cindex);
/* Float dump/load fragments: the raw double is written/memcpy'd (memcpy
 * avoids strict-aliasing and alignment issues on load); non-flonum floats
 * are marked shareable. */
14191 (void)IBF_W(&dbl,
double, 1);
14199 memcpy(&d, IBF_OBJBODY(
double, offset),
sizeof(d));
14201 if (!
FLONUM_P(f)) RB_OBJ_SET_SHAREABLE(f);
/* String dump: encoding index + length + raw bytes.  Non-builtin encodings
 * are encoded as BUILTIN_MAX + index-of-dumped-encoding-name-string. */
14208 long encindex = (long)rb_enc_get_index(obj);
14209 long len = RSTRING_LEN(obj);
14210 const char *ptr = RSTRING_PTR(obj);
14212 if (encindex > RUBY_ENCINDEX_BUILTIN_MAX) {
14213 rb_encoding *enc = rb_enc_from_index((
int)encindex);
14214 const char *enc_name = rb_enc_name(enc);
14215 encindex = RUBY_ENCINDEX_BUILTIN_MAX + ibf_dump_object(dump,
rb_str_new2(enc_name));
14218 ibf_dump_write_small_value(dump, encindex);
14219 ibf_dump_write_small_value(dump,
len);
14220 IBF_WP(ptr,
char,
len);
/* String load: inverse of the above; non-builtin encodings are resolved by
 * name at load time. */
14226 ibf_offset_t reading_pos = offset;
14228 int encindex = (int)ibf_load_small_value(load, &reading_pos);
14229 const long len = (long)ibf_load_small_value(load, &reading_pos);
14230 const char *ptr = load->current_buffer->buff + reading_pos;
14232 if (encindex > RUBY_ENCINDEX_BUILTIN_MAX) {
14233 VALUE enc_name_str = ibf_load_object(load, encindex - RUBY_ENCINDEX_BUILTIN_MAX);
14234 encindex = rb_enc_find_index(RSTRING_PTR(enc_name_str));
/* Frozen non-internal strings become deduplicated literal strings. */
14238 if (header->frozen && !header->internal) {
14239 str = rb_enc_literal_str(ptr,
len, rb_enc_from_index(encindex));
14242 str = rb_enc_str_new(ptr,
len, rb_enc_from_index(encindex));
14245 if (header->frozen) str = rb_fstring(str);
/* Regexp dump/load: stored as (option byte, source-string object index);
 * recompiled with rb_reg_compile on load. */
14256 regexp.srcstr = (long)ibf_dump_object(dump, srcstr);
14258 ibf_dump_write_byte(dump, (
unsigned char)regexp.option);
14259 ibf_dump_write_small_value(dump, regexp.srcstr);
14266 regexp.option = ibf_load_byte(load, &offset);
14267 regexp.srcstr = ibf_load_small_value(load, &offset);
14269 VALUE srcstr = ibf_load_object(load, regexp.srcstr);
14270 VALUE reg = rb_reg_compile(srcstr, (
int)regexp.option, NULL, 0);
14273 if (header->frozen) RB_OBJ_SET_SHAREABLE(
rb_obj_freeze(reg));
/* Array dump: length followed by one dumped-object index per element. */
14282 ibf_dump_write_small_value(dump,
len);
14283 for (i=0; i<
len; i++) {
14284 long index = (long)ibf_dump_object(dump,
RARRAY_AREF(obj, i));
14285 ibf_dump_write_small_value(dump, index);
/* Array load: inverse; frozen arrays get extra handling (sampled away). */
14292 ibf_offset_t reading_pos = offset;
14294 const long len = (long)ibf_load_small_value(load, &reading_pos);
14299 for (i=0; i<
len; i++) {
14300 const VALUE index = ibf_load_small_value(load, &reading_pos);
14304 if (header->frozen) {
/* Hash dump: st_foreach callback writing (key index, value index) pairs;
 * the entry count precedes them (line 14329). */
14313ibf_dump_object_hash_i(st_data_t key, st_data_t val, st_data_t ptr)
14317 VALUE key_index = ibf_dump_object(dump, (
VALUE)key);
14318 VALUE val_index = ibf_dump_object(dump, (
VALUE)val);
14320 ibf_dump_write_small_value(dump, key_index);
14321 ibf_dump_write_small_value(dump, val_index);
14322 return ST_CONTINUE;
14329 ibf_dump_write_small_value(dump, (
VALUE)
len);
/* Hash load: frozen hashes use a fixed-size allocation (capacity == len);
 * mutable ones a sized-but-growable one. */
14337 long len = (long)ibf_load_small_value(load, &offset);
14338 VALUE obj = header->frozen ? rb_hash_alloc_fixed_size(
rb_cHash,
len) : rb_hash_new_with_size(
len);
14341 for (i = 0; i <
len; i++) {
14342 VALUE key_index = ibf_load_small_value(load, &offset);
14343 VALUE val_index = ibf_load_small_value(load, &offset);
14345 VALUE key = ibf_load_object(load, key_index);
14346 VALUE val = ibf_load_object(load, val_index);
14347 rb_hash_aset(obj, key, val);
14351 if (header->frozen) {
14352 RB_OBJ_SET_FROZEN_SHAREABLE(obj);
/* Struct dump/load: only Range is supported (beg/end as object indices);
 * other struct classes raise NotImplementedError. */
14366 range.class_index = 0;
14369 range.beg = (long)ibf_dump_object(dump, beg);
14370 range.end = (long)ibf_dump_object(dump, end);
14376 rb_raise(
rb_eNotImpError,
"ibf_dump_object_struct: unsupported class %"PRIsVALUE,
14385 VALUE beg = ibf_load_object(load, range->beg);
14386 VALUE end = ibf_load_object(load, range->end);
14389 if (header->frozen) RB_OBJ_SET_FROZEN_SHAREABLE(obj);
/* Bignum dump: signed length (sign folded into slen's sign) followed by
 * the raw BDIGIT array. */
14396 ssize_t
len = BIGNUM_LEN(obj);
14397 ssize_t slen = BIGNUM_SIGN(obj) > 0 ?
len :
len * -1;
14398 BDIGIT *d = BIGNUM_DIGITS(obj);
14400 (void)IBF_W(&slen, ssize_t, 1);
14401 IBF_WP(d, BDIGIT,
len);
/* Bignum load: reconstructs via rb_integer_unpack from the digit array. */
14408 int sign = bignum->slen > 0;
14409 ssize_t
len = sign > 0 ? bignum->slen : -1 * bignum->slen;
14410 const int big_unpack_flags =
14413 VALUE obj = rb_integer_unpack(bignum->digits,
len,
sizeof(BDIGIT), 0,
14417 if (header->frozen) RB_OBJ_SET_FROZEN_SHAREABLE(obj);
/* T_DATA dump: only Encoding objects are supported, serialized as a type
 * tag + the NUL-terminated encoding name; anything else is unsupported. */
14424 if (rb_data_is_encoding(obj)) {
14426 const char *name = rb_enc_name(enc);
14427 long len = strlen(name) + 1;
14429 data[0] = IBF_OBJECT_DATA_ENCODING;
14431 (void)IBF_W(data,
long, 2);
14432 IBF_WP(name,
char,
len);
14435 ibf_dump_object_unsupported(dump, obj);
/* T_DATA load: dispatch on the stored type tag; encodings are re-looked-up
 * by name via rb_enc_find. */
14442 const long *body = IBF_OBJBODY(
long, offset);
14443 const enum ibf_object_data_type
type = (
enum ibf_object_data_type)body[0];
14445 const char *data = (
const char *)&body[2];
14448 case IBF_OBJECT_DATA_ENCODING:
14450 VALUE encobj = rb_enc_from_encoding(rb_enc_find(data));
14455 return ibf_load_object_unsupported(load, header, offset);
/* Complex/Rational share one representation: two dumped-object indices
 * (real/imag, or numerator/denominator via the same slots). */
14459ibf_dump_object_complex_rational(
struct ibf_dump *dump,
VALUE obj)
14462 data[0] = (long)ibf_dump_object(dump, RCOMPLEX(obj)->real);
14463 data[1] = (long)ibf_dump_object(dump, RCOMPLEX(obj)->imag);
14465 (void)IBF_W(data,
long, 2);
14469ibf_load_object_complex_rational(
const struct ibf_load *load,
const struct ibf_object_header *header, ibf_offset_t offset)
14472 VALUE a = ibf_load_object(load, nums->a);
14473 VALUE b = ibf_load_object(load, nums->b);
/* Symbol dump reuses the string dumper on rb_sym2str; symbol load mirrors
 * the string loader then interns with rb_intern3. */
14485 ibf_dump_object_string(dump,
rb_sym2str(obj));
14491 ibf_offset_t reading_pos = offset;
14493 int encindex = (int)ibf_load_small_value(load, &reading_pos);
14494 const long len = (long)ibf_load_small_value(load, &reading_pos);
14495 const char *ptr = load->current_buffer->buff + reading_pos;
14497 if (encindex > RUBY_ENCINDEX_BUILTIN_MAX) {
14498 VALUE enc_name_str = ibf_load_object(load, encindex - RUBY_ENCINDEX_BUILTIN_MAX);
14499 encindex = rb_enc_find_index(RSTRING_PTR(enc_name_str));
14502 ID id = rb_intern3(ptr,
len, rb_enc_from_index(encindex));
/* Dispatch table indexed by ruby_value_type (T_NONE..T_MASK): one dumper
 * per T_* tag, with ibf_dump_object_unsupported filling every tag the
 * format cannot serialize.  Order must match the T_* enum exactly. */
14506typedef void (*ibf_dump_object_function)(
struct ibf_dump *dump,
VALUE obj);
14507static const ibf_dump_object_function dump_object_functions[
RUBY_T_MASK+1] = {
14508 ibf_dump_object_unsupported,
14509 ibf_dump_object_unsupported,
14510 ibf_dump_object_class,
14511 ibf_dump_object_unsupported,
14512 ibf_dump_object_float,
14513 ibf_dump_object_string,
14514 ibf_dump_object_regexp,
14515 ibf_dump_object_array,
14516 ibf_dump_object_hash,
14517 ibf_dump_object_struct,
14518 ibf_dump_object_bignum,
14519 ibf_dump_object_unsupported,
14520 ibf_dump_object_data,
14521 ibf_dump_object_unsupported,
14522 ibf_dump_object_complex_rational,
14523 ibf_dump_object_complex_rational,
14524 ibf_dump_object_unsupported,
14525 ibf_dump_object_unsupported,
14526 ibf_dump_object_unsupported,
14527 ibf_dump_object_unsupported,
14528 ibf_dump_object_symbol,
14529 ibf_dump_object_unsupported,
14530 ibf_dump_object_unsupported,
14531 ibf_dump_object_unsupported,
14532 ibf_dump_object_unsupported,
14533 ibf_dump_object_unsupported,
14534 ibf_dump_object_unsupported,
14535 ibf_dump_object_unsupported,
14536 ibf_dump_object_unsupported,
14537 ibf_dump_object_unsupported,
14538 ibf_dump_object_unsupported,
14539 ibf_dump_object_unsupported,
/* Header pack: 5-bit type, then special_const/frozen/internal flags, into
 * one byte (unpacked by ibf_load_object_object_header below). */
14545 unsigned char byte =
14546 (header.type << 0) |
14547 (header.special_const << 5) |
14548 (header.frozen << 6) |
14549 (header.internal << 7);
/* Unpack the one-byte object header written by the pack code above; masks
 * mirror the pack shifts exactly. */
14555ibf_load_object_object_header(const struct
ibf_load *load, ibf_offset_t *offset)
14557 unsigned char byte = ibf_load_byte(load, offset);
14560 header.type = (
byte >> 0) & 0x1f;
14561 header.special_const = (
byte >> 5) & 0x01;
14562 header.frozen = (
byte >> 6) & 0x01;
14563 header.internal = (
byte >> 7) & 0x01;
/* ibf_dump_object_object fragment: write header + payload for one object
 * and return its (aligned) offset in the dump. */
14572 ibf_offset_t current_offset;
14573 IBF_ZERO(obj_header);
14574 obj_header.type =
TYPE(obj);
14576 IBF_W_ALIGN(ibf_offset_t);
14577 current_offset = ibf_dump_pos(dump);
/* Special constants (fixnum/symbol-ish immediates, nil/true/false) are
 * stored inline as a single small value -- no payload dumper needed. */
14582 obj_header.special_const = TRUE;
14583 obj_header.frozen = TRUE;
14584 obj_header.internal = TRUE;
14585 ibf_dump_object_object_header(dump, obj_header);
14586 ibf_dump_write_small_value(dump, obj);
/* Heap objects dispatch through dump_object_functions by T_* tag. */
14590 obj_header.special_const = FALSE;
14591 obj_header.frozen =
OBJ_FROZEN(obj) ? TRUE : FALSE;
14592 ibf_dump_object_object_header(dump, obj_header);
14593 (*dump_object_functions[obj_header.type])(dump, obj);
14596 return current_offset;
/* Loader dispatch table: must mirror dump_object_functions entry-for-entry
 * (same T_* ordering), with ibf_load_object_unsupported in the same slots. */
14600static const ibf_load_object_function load_object_functions[
RUBY_T_MASK+1] = {
14601 ibf_load_object_unsupported,
14602 ibf_load_object_unsupported,
14603 ibf_load_object_class,
14604 ibf_load_object_unsupported,
14605 ibf_load_object_float,
14606 ibf_load_object_string,
14607 ibf_load_object_regexp,
14608 ibf_load_object_array,
14609 ibf_load_object_hash,
14610 ibf_load_object_struct,
14611 ibf_load_object_bignum,
14612 ibf_load_object_unsupported,
14613 ibf_load_object_data,
14614 ibf_load_object_unsupported,
14615 ibf_load_object_complex_rational,
14616 ibf_load_object_complex_rational,
14617 ibf_load_object_unsupported,
14618 ibf_load_object_unsupported,
14619 ibf_load_object_unsupported,
14620 ibf_load_object_unsupported,
14621 ibf_load_object_symbol,
14622 ibf_load_object_unsupported,
14623 ibf_load_object_unsupported,
14624 ibf_load_object_unsupported,
14625 ibf_load_object_unsupported,
14626 ibf_load_object_unsupported,
14627 ibf_load_object_unsupported,
14628 ibf_load_object_unsupported,
14629 ibf_load_object_unsupported,
14630 ibf_load_object_unsupported,
14631 ibf_load_object_unsupported,
14632 ibf_load_object_unsupported,
/* ibf_load_object: memoized object load by index.  Index 0 is reserved
 * (sampled branch presumably raises/returns); already-loaded objects come
 * from the pinned list cache, otherwise the offset table is consulted and
 * the typed loader dispatched. */
14636ibf_load_object(
const struct ibf_load *load,
VALUE object_index)
14638 if (object_index == 0) {
14642 VALUE obj = pinned_list_fetch(load->current_buffer->obj_list, (
long)object_index);
14644 ibf_offset_t *offsets = (ibf_offset_t *)(load->current_buffer->obj_list_offset + load->current_buffer->buff);
14645 ibf_offset_t offset = offsets[object_index];
14646 const struct ibf_object_header header = ibf_load_object_object_header(load, &offset);
/* Debug tracing (compiled in conditionally -- guards sampled away). */
14649 fprintf(stderr,
"ibf_load_object: list=%#x offsets=%p offset=%#x\n",
14650 load->current_buffer->obj_list_offset, (
void *)offsets, offset);
14651 fprintf(stderr,
"ibf_load_object: type=%#x special=%d frozen=%d internal=%d\n",
14652 header.type, header.special_const, header.frozen, header.internal);
/* Untrusted binary: reject offsets outside the buffer before reading. */
14654 if (offset >= load->current_buffer->size) {
14655 rb_raise(
rb_eIndexError,
"object offset out of range: %u", offset);
14658 if (header.special_const) {
14659 ibf_offset_t reading_pos = offset;
14661 obj = ibf_load_small_value(load, &reading_pos);
14664 obj = (*load_object_functions[header.type])(load, &header, offset);
/* Cache in the pinned list so repeated indices return the same object. */
14667 pinned_list_store(load->current_buffer->obj_list, (
long)object_index, obj);
14670 fprintf(stderr,
"ibf_load_object: index=%#"PRIxVALUE
" obj=%#"PRIxVALUE
"\n",
14671 object_index, obj);
/* Object-list dump: foreach callback records per-object offsets, then the
 * aligned offset array and its size are emitted (mirrors iseq list dump). */
14684ibf_dump_object_list_i(st_data_t key, st_data_t val, st_data_t ptr)
14689 ibf_offset_t offset = ibf_dump_object_object(args->dump, obj);
14692 return ST_CONTINUE;
14696ibf_dump_object_list(
struct ibf_dump *dump, ibf_offset_t *obj_list_offset,
unsigned int *obj_list_size)
14698 st_table *obj_table = dump->current_buffer->obj_table;
14703 args.offset_list = offset_list;
14705 st_foreach(obj_table, ibf_dump_object_list_i, (st_data_t)&args);
14707 IBF_W_ALIGN(ibf_offset_t);
14708 *obj_list_offset = ibf_dump_pos(dump);
14710 st_index_t size = obj_table->num_entries;
14713 for (i=0; i<size; i++) {
14718 *obj_list_size = (
unsigned int)size;
14722ibf_dump_mark(
void *ptr)
14725 rb_gc_mark(dump->global_buffer.str);
14727 rb_mark_set(dump->global_buffer.obj_table);
14728 rb_mark_set(dump->iseq_table);
14732ibf_dump_free(
void *ptr)
14735 if (dump->global_buffer.obj_table) {
14736 st_free_table(dump->global_buffer.obj_table);
14737 dump->global_buffer.obj_table = 0;
14739 if (dump->iseq_table) {
14740 st_free_table(dump->iseq_table);
14741 dump->iseq_table = 0;
14746ibf_dump_memsize(
const void *ptr)
14750 if (dump->iseq_table) size += st_memsize(dump->iseq_table);
14751 if (dump->global_buffer.obj_table) size += st_memsize(dump->global_buffer.obj_table);
14757 {ibf_dump_mark, ibf_dump_free, ibf_dump_memsize,},
14764 dump->global_buffer.obj_table = NULL;
14765 dump->iseq_table = NULL;
14768 dump->global_buffer.obj_table = ibf_dump_object_table_new();
14769 dump->iseq_table = st_init_numtable();
14771 dump->current_buffer = &dump->global_buffer;
14782 if (ISEQ_BODY(iseq)->parent_iseq != NULL ||
14783 ISEQ_BODY(iseq)->local_iseq != iseq) {
14786 if (
RTEST(ISEQ_COVERAGE(iseq))) {
14791 ibf_dump_setup(dump, dump_obj);
14793 ibf_dump_write(dump, &header,
sizeof(header));
14794 ibf_dump_iseq(dump, iseq);
14796 header.magic[0] =
'Y';
14797 header.magic[1] =
'A';
14798 header.magic[2] =
'R';
14799 header.magic[3] =
'B';
14800 header.major_version = IBF_MAJOR_VERSION;
14801 header.minor_version = IBF_MINOR_VERSION;
14802 header.endian = IBF_ENDIAN_MARK;
14804 ibf_dump_iseq_list(dump, &header);
14805 ibf_dump_object_list(dump, &header.global_object_list_offset, &header.global_object_list_size);
14806 header.size = ibf_dump_pos(dump);
14809 VALUE opt_str = opt;
14812 ibf_dump_write(dump, ptr, header.extra_size);
14815 header.extra_size = 0;
14818 ibf_dump_overwrite(dump, &header,
sizeof(header), 0);
14820 str = dump->global_buffer.str;
14825static const ibf_offset_t *
14826ibf_iseq_list(
const struct ibf_load *load)
14828 return (
const ibf_offset_t *)(load->global_buffer.buff + load->header->iseq_list_offset);
14832rb_ibf_load_iseq_complete(
rb_iseq_t *iseq)
14836 ibf_offset_t offset = ibf_iseq_list(load)[iseq->aux.loader.index];
14839 fprintf(stderr,
"rb_ibf_load_iseq_complete: index=%#x offset=%#x size=%#x\n",
14840 iseq->aux.loader.index, offset,
14841 load->header->size);
14843 ibf_load_iseq_each(load, iseq, offset);
14844 ISEQ_COMPILE_DATA_CLEAR(iseq);
14846 rb_iseq_init_trace(iseq);
14847 load->iseq = prev_src_iseq;
14854 rb_ibf_load_iseq_complete((
rb_iseq_t *)iseq);
14862 int iseq_index = (int)(
VALUE)index_iseq;
14865 fprintf(stderr,
"ibf_load_iseq: index_iseq=%p iseq_list=%p\n",
14866 (
void *)index_iseq, (
void *)load->iseq_list);
14868 if (iseq_index == -1) {
14872 VALUE iseqv = pinned_list_fetch(load->iseq_list, iseq_index);
14875 fprintf(stderr,
"ibf_load_iseq: iseqv=%p\n", (
void *)iseqv);
14883 fprintf(stderr,
"ibf_load_iseq: new iseq=%p\n", (
void *)iseq);
14886 iseq->aux.loader.obj = load->loader_obj;
14887 iseq->aux.loader.index = iseq_index;
14889 fprintf(stderr,
"ibf_load_iseq: iseq=%p loader_obj=%p index=%d\n",
14890 (
void *)iseq, (
void *)load->loader_obj, iseq_index);
14892 pinned_list_store(load->iseq_list, iseq_index, (
VALUE)iseq);
14894 if (!USE_LAZY_LOAD || GET_VM()->builtin_function_table) {
14896 fprintf(stderr,
"ibf_load_iseq: loading iseq=%p\n", (
void *)iseq);
14898 rb_ibf_load_iseq_complete(iseq);
14902 fprintf(stderr,
"ibf_load_iseq: iseq=%p loaded %p\n",
14903 (
void *)iseq, (
void *)load->iseq);
14911ibf_load_setup_bytes(
struct ibf_load *load,
VALUE loader_obj,
const char *bytes,
size_t size)
14914 load->loader_obj = loader_obj;
14915 load->global_buffer.buff = bytes;
14916 load->header = header;
14917 load->global_buffer.size = header->size;
14918 load->global_buffer.obj_list_offset = header->global_object_list_offset;
14919 load->global_buffer.obj_list_size = header->global_object_list_size;
14920 RB_OBJ_WRITE(loader_obj, &load->iseq_list, pinned_list_new(header->iseq_list_size));
14921 RB_OBJ_WRITE(loader_obj, &load->global_buffer.obj_list, pinned_list_new(load->global_buffer.obj_list_size));
14924 load->current_buffer = &load->global_buffer;
14926 if (size < header->size) {
14929 if (strncmp(header->magic,
"YARB", 4) != 0) {
14932 if (header->major_version != IBF_MAJOR_VERSION ||
14933 header->minor_version != IBF_MINOR_VERSION) {
14935 header->major_version, header->minor_version, IBF_MAJOR_VERSION, IBF_MINOR_VERSION);
14937 if (header->endian != IBF_ENDIAN_MARK) {
14943 if (header->iseq_list_offset %
RUBY_ALIGNOF(ibf_offset_t)) {
14944 rb_raise(rb_eArgError,
"unaligned iseq list offset: %u",
14945 header->iseq_list_offset);
14947 if (load->global_buffer.obj_list_offset %
RUBY_ALIGNOF(ibf_offset_t)) {
14948 rb_raise(rb_eArgError,
"unaligned object list offset: %u",
14949 load->global_buffer.obj_list_offset);
14962 if (USE_LAZY_LOAD) {
14963 str =
rb_str_new(RSTRING_PTR(str), RSTRING_LEN(str));
14966 ibf_load_setup_bytes(load, loader_obj, RSTRING_PTR(str), RSTRING_LEN(str));
14971ibf_loader_mark(
void *ptr)
14974 rb_gc_mark(load->str);
14975 rb_gc_mark(load->iseq_list);
14976 rb_gc_mark(load->global_buffer.obj_list);
14980ibf_loader_free(
void *ptr)
14987ibf_loader_memsize(
const void *ptr)
14994 {ibf_loader_mark, ibf_loader_free, ibf_loader_memsize,},
14999rb_iseq_ibf_load(
VALUE str)
15005 ibf_load_setup(load, loader_obj, str);
15006 iseq = ibf_load_iseq(load, 0);
15013rb_iseq_ibf_load_bytes(
const char *bytes,
size_t size)
15019 ibf_load_setup_bytes(load, loader_obj, bytes, size);
15020 iseq = ibf_load_iseq(load, 0);
15027rb_iseq_ibf_load_extra_data(
VALUE str)
15033 ibf_load_setup(load, loader_obj, str);
15034 extra_str =
rb_str_new(load->global_buffer.buff + load->header->size, load->header->extra_size);
15039#include "prism_compile.c"
#define RBIMPL_ASSERT_OR_ASSUME(...)
This is either RUBY_ASSERT or RBIMPL_ASSUME, depending on RUBY_DEBUG.
#define RUBY_ASSERT(...)
Asserts that the given expression is truthy if and only if RUBY_DEBUG is truthy.
#define RUBY_ALIGNOF
Wraps (or simulates) alignof.
#define RUBY_EVENT_END
Encountered an end of a class clause.
#define RUBY_EVENT_C_CALL
A method, written in C, is called.
#define RUBY_EVENT_B_RETURN
Encountered a next statement.
#define RUBY_EVENT_CLASS
Encountered a new class.
#define RUBY_EVENT_NONE
No events.
#define RUBY_EVENT_LINE
Encountered a new line.
#define RUBY_EVENT_RETURN
Encountered a return statement.
#define RUBY_EVENT_C_RETURN
Return from a method, written in C.
#define RUBY_EVENT_B_CALL
Encountered an yield statement.
uint32_t rb_event_flag_t
Represents event(s).
#define RUBY_EVENT_CALL
A method, written in Ruby, is called.
#define RUBY_EVENT_RESCUE
Encountered a rescue statement.
#define rb_str_new2
Old name of rb_str_new_cstr.
#define T_COMPLEX
Old name of RUBY_T_COMPLEX.
#define TYPE(_)
Old name of rb_type.
#define NUM2ULONG
Old name of RB_NUM2ULONG.
#define NUM2LL
Old name of RB_NUM2LL.
#define ALLOCV
Old name of RB_ALLOCV.
#define RFLOAT_VALUE
Old name of rb_float_value.
#define T_STRING
Old name of RUBY_T_STRING.
#define Qundef
Old name of RUBY_Qundef.
#define INT2FIX
Old name of RB_INT2FIX.
#define OBJ_FROZEN
Old name of RB_OBJ_FROZEN.
#define rb_str_cat2
Old name of rb_str_cat_cstr.
#define T_NIL
Old name of RUBY_T_NIL.
#define UNREACHABLE
Old name of RBIMPL_UNREACHABLE.
#define T_FLOAT
Old name of RUBY_T_FLOAT.
#define ID2SYM
Old name of RB_ID2SYM.
#define T_BIGNUM
Old name of RUBY_T_BIGNUM.
#define SPECIAL_CONST_P
Old name of RB_SPECIAL_CONST_P.
#define OBJ_FREEZE
Old name of RB_OBJ_FREEZE.
#define ULONG2NUM
Old name of RB_ULONG2NUM.
#define UNREACHABLE_RETURN
Old name of RBIMPL_UNREACHABLE_RETURN.
#define SYM2ID
Old name of RB_SYM2ID.
#define FIX2UINT
Old name of RB_FIX2UINT.
#define ZALLOC
Old name of RB_ZALLOC.
#define CLASS_OF
Old name of rb_class_of.
#define FIXABLE
Old name of RB_FIXABLE.
#define xmalloc
Old name of ruby_xmalloc.
#define LONG2FIX
Old name of RB_INT2FIX.
#define FIX2INT
Old name of RB_FIX2INT.
#define NUM2UINT
Old name of RB_NUM2UINT.
#define ZALLOC_N
Old name of RB_ZALLOC_N.
#define ASSUME
Old name of RBIMPL_ASSUME.
#define T_RATIONAL
Old name of RUBY_T_RATIONAL.
#define T_HASH
Old name of RUBY_T_HASH.
#define ALLOC_N
Old name of RB_ALLOC_N.
#define FL_SET
Old name of RB_FL_SET.
#define FLONUM_P
Old name of RB_FLONUM_P.
#define Qtrue
Old name of RUBY_Qtrue.
#define NUM2INT
Old name of RB_NUM2INT.
#define Qnil
Old name of RUBY_Qnil.
#define Qfalse
Old name of RUBY_Qfalse.
#define FIX2LONG
Old name of RB_FIX2LONG.
#define T_ARRAY
Old name of RUBY_T_ARRAY.
#define NIL_P
Old name of RB_NIL_P.
#define T_SYMBOL
Old name of RUBY_T_SYMBOL.
#define DBL2NUM
Old name of rb_float_new.
#define BUILTIN_TYPE
Old name of RB_BUILTIN_TYPE.
#define NUM2LONG
Old name of RB_NUM2LONG.
#define FL_UNSET
Old name of RB_FL_UNSET.
#define UINT2NUM
Old name of RB_UINT2NUM.
#define FIXNUM_P
Old name of RB_FIXNUM_P.
#define CONST_ID
Old name of RUBY_CONST_ID.
#define ALLOCV_END
Old name of RB_ALLOCV_END.
#define SYMBOL_P
Old name of RB_SYMBOL_P.
#define T_REGEXP
Old name of RUBY_T_REGEXP.
#define ruby_debug
This variable controls whether the interpreter is in debug mode.
VALUE rb_eNotImpError
NotImplementedError exception.
VALUE rb_eStandardError
StandardError exception.
VALUE rb_eTypeError
TypeError exception.
VALUE rb_eNoMatchingPatternError
NoMatchingPatternError exception.
void rb_exc_fatal(VALUE mesg)
Raises a fatal error in the current thread.
VALUE rb_eRuntimeError
RuntimeError exception.
void rb_warn(const char *fmt,...)
Identical to rb_warning(), except it reports unless $VERBOSE is nil.
VALUE rb_eNoMatchingPatternKeyError
NoMatchingPatternKeyError exception.
VALUE rb_eIndexError
IndexError exception.
VALUE rb_eSyntaxError
SyntaxError exception.
@ RB_WARN_CATEGORY_STRICT_UNUSED_BLOCK
Warning is for checking unused block strictly.
VALUE rb_obj_reveal(VALUE obj, VALUE klass)
Make a hidden object visible again.
VALUE rb_cArray
Array class.
VALUE rb_cObject
Object class.
VALUE rb_obj_hide(VALUE obj)
Make the object invisible from Ruby code.
VALUE rb_cHash
Hash class.
VALUE rb_inspect(VALUE obj)
Generates a human-readable textual representation of the given object.
VALUE rb_cRange
Range class.
VALUE rb_obj_is_kind_of(VALUE obj, VALUE klass)
Queries if the given object is an instance (of possibly descendants) of the given class.
VALUE rb_obj_freeze(VALUE obj)
Just calls rb_obj_freeze_inline() inside.
#define RB_OBJ_WRITTEN(old, oldv, young)
Identical to RB_OBJ_WRITE(), except it doesn't write any values, but only a WB declaration.
#define RB_OBJ_WRITE(old, slot, young)
Declaration of a "back" pointer.
#define RB_POSFIXABLE(_)
Checks if the passed value is in range of fixnum, assuming it is a positive number.
Defines RBIMPL_HAS_BUILTIN.
VALUE rb_ary_reverse(VALUE ary)
Destructively reverses the passed array in-place.
VALUE rb_ary_dup(VALUE ary)
Duplicates an array.
VALUE rb_ary_cat(VALUE ary, const VALUE *train, long len)
Destructively appends multiple elements at the end of the array.
VALUE rb_ary_new(void)
Allocates a new, empty array.
VALUE rb_ary_new_capa(long capa)
Identical to rb_ary_new(), except it additionally specifies how many rooms of objects it should alloc...
VALUE rb_ary_hidden_new(long capa)
Allocates a hidden (no class) empty array.
VALUE rb_ary_clear(VALUE ary)
Destructively removes everything form an array.
VALUE rb_ary_push(VALUE ary, VALUE elem)
Special case of rb_ary_cat() that it adds only one element.
VALUE rb_ary_freeze(VALUE obj)
Freeze an array, preventing further modifications.
VALUE rb_ary_entry(VALUE ary, long off)
Queries an element of an array.
VALUE rb_ary_join(VALUE ary, VALUE sep)
Recursively stringises the elements of the passed array, flattens that result, then joins the sequenc...
void rb_ary_store(VALUE ary, long key, VALUE val)
Destructively stores the passed value to the passed array's passed index.
#define INTEGER_PACK_NATIVE_BYTE_ORDER
Means either INTEGER_PACK_MSBYTE_FIRST or INTEGER_PACK_LSBYTE_FIRST, depending on the host processor'...
#define INTEGER_PACK_NEGATIVE
Interprets the input as a signed negative number (unpack only).
#define INTEGER_PACK_LSWORD_FIRST
Stores/interprets the least significant word as the first word.
int rb_is_const_id(ID id)
Classifies the given ID, then sees if it is a constant.
int rb_is_attrset_id(ID id)
Classifies the given ID, then sees if it is an attribute writer.
int rb_range_values(VALUE range, VALUE *begp, VALUE *endp, int *exclp)
Deconstructs a range into its components.
VALUE rb_range_new(VALUE beg, VALUE end, int excl)
Creates a new Range.
VALUE rb_rational_new(VALUE num, VALUE den)
Constructs a Rational, with reduction.
int rb_reg_options(VALUE re)
Queries the options of the passed regular expression.
VALUE rb_str_append(VALUE dst, VALUE src)
Identical to rb_str_buf_append(), except it converts the right hand side before concatenating.
VALUE rb_str_tmp_new(long len)
Allocates a "temporary" string.
int rb_str_hash_cmp(VALUE str1, VALUE str2)
Compares two strings.
#define rb_str_new(str, len)
Allocates an instance of rb_cString.
st_index_t rb_str_hash(VALUE str)
Calculates a hash value of a string.
VALUE rb_str_cat(VALUE dst, const char *src, long srclen)
Destructively appends the passed contents to the string.
VALUE rb_str_buf_append(VALUE dst, VALUE src)
Identical to rb_str_cat_cstr(), except it takes Ruby's string instead of C's.
int rb_str_cmp(VALUE lhs, VALUE rhs)
Compares two strings, as in strcmp(3).
VALUE rb_str_concat(VALUE dst, VALUE src)
Identical to rb_str_append(), except it also accepts an integer as a codepoint.
VALUE rb_str_freeze(VALUE str)
This is the implementation of String#freeze.
#define rb_str_new_cstr(str)
Identical to rb_str_new, except it assumes the passed pointer is a pointer to a C string.
VALUE rb_class_name(VALUE obj)
Queries the name of the given object's class.
static ID rb_intern_const(const char *str)
This is a "tiny optimisation" over rb_intern().
VALUE rb_id2sym(ID id)
Allocates an instance of rb_cSymbol that has the given id.
VALUE rb_sym2str(VALUE symbol)
Obtain a frozen string representation of a symbol (not including the leading colon).
ID rb_sym2id(VALUE obj)
Converts an instance of rb_cSymbol into an ID.
int len
Length of the buffer.
#define RB_OBJ_SHAREABLE_P(obj)
Queries if the passed object has previously classified as shareable or not.
VALUE rb_ractor_make_shareable(VALUE obj)
Destructively transforms the passed object so that multiple Ractors can share it.
#define DECIMAL_SIZE_OF(expr)
An approximation of decimal representation size.
void ruby_qsort(void *, const size_t, const size_t, int(*)(const void *, const void *, void *), void *)
Reentrant implementation of quick sort.
#define rb_long2int
Just another name of rb_long2int_inline.
#define MEMCPY(p1, p2, type, n)
Handy macro to call memcpy.
#define ALLOCA_N(type, n)
#define MEMZERO(p, type, n)
Handy macro to erase a region of memory.
#define RB_GC_GUARD(v)
Prevents premature destruction of local objects.
#define RB_ALLOCV(v, n)
Identical to RB_ALLOCV_N(), except that it allocates a number of bytes and returns a void* .
VALUE type(ANYARGS)
ANYARGS-ed function type.
void rb_hash_foreach(VALUE q, int_type *w, VALUE e)
Iteration over the given hash.
#define RBIMPL_ATTR_NORETURN()
Wraps (or simulates) [[noreturn]]
#define RARRAY_LEN
Just another name of rb_array_len.
static int RARRAY_LENINT(VALUE ary)
Identical to rb_array_len(), except it differs for the return type.
static void RARRAY_ASET(VALUE ary, long i, VALUE v)
Assigns an object in an array.
#define RARRAY_AREF(a, i)
#define RARRAY_CONST_PTR
Just another name of rb_array_const_ptr.
static VALUE RBASIC_CLASS(VALUE obj)
Queries the class of an object.
#define RUBY_DEFAULT_FREE
This is a value you can set to RData::dfree.
void(* RUBY_DATA_FUNC)(void *)
This is the type of callbacks registered to RData.
#define RHASH_SIZE(h)
Queries the size of the hash.
static VALUE RREGEXP_SRC(VALUE rexp)
Convenient getter function.
#define StringValue(v)
Ensures that the parameter object is a String.
#define StringValuePtr(v)
Identical to StringValue, except it returns a char*.
static int RSTRING_LENINT(VALUE str)
Identical to RSTRING_LEN(), except it differs for the return type.
#define StringValueCStr(v)
Identical to StringValuePtr, except it additionally checks for the contents for viability as a C stri...
#define RTYPEDDATA_DATA(v)
Convenient getter macro.
#define RUBY_TYPED_FREE_IMMEDIATELY
Macros to see if each corresponding flag is defined.
#define TypedData_Get_Struct(obj, type, data_type, sval)
Obtains a C struct from inside of a wrapper Ruby object.
#define TypedData_Wrap_Struct(klass, data_type, sval)
Converts sval, a pointer to your struct, into a Ruby object.
#define TypedData_Make_Struct(klass, type, data_type, sval)
Identical to TypedData_Wrap_Struct, except it allocates a new data region internally instead of takin...
void rb_p(VALUE obj)
Inspects an object.
static bool RB_SPECIAL_CONST_P(VALUE obj)
Checks if the given object is of enum ruby_special_consts.
#define RTEST
This is an old name of RB_TEST.
#define _(args)
This was a transition path from K&R to ANSI.
Internal header for Complex.
Internal header for Rational.
const ID * segments
A null-terminated list of ids, used to represent a constant's path idNULL is used to represent the ::...
This is the struct that holds necessary info for a struct.
const char * wrap_struct_name
Name of structs of this kind.
uintptr_t ID
Type that represents a Ruby identifier such as a variable name.
#define SIZEOF_VALUE
Identical to sizeof(VALUE), except it is a macro that can also be used inside of preprocessor directi...
uintptr_t VALUE
Type that represents a Ruby object.
static bool RB_FLOAT_TYPE_P(VALUE obj)
Queries if the object is an instance of rb_cFloat.
static bool rb_integer_type_p(VALUE obj)
Queries if the object is an instance of rb_cInteger.
static bool RB_TYPE_P(VALUE obj, enum ruby_value_type t)
Queries if the given object is of given type.
@ RUBY_T_MASK
Bitmask of ruby_value_type.