4#include "internal/compile.h"
5#include "internal/class.h"
6#include "internal/hash.h"
7#include "internal/object.h"
8#include "internal/sanitizers.h"
9#include "internal/string.h"
10#include "internal/struct.h"
11#include "internal/variable.h"
12#include "internal/re.h"
14#include "probes_helper.h"
16#include "yjit_iface.h"
18#include "yjit_codegen.h"
22static codegen_fn gen_fns[VM_INSTRUCTION_SIZE] = { NULL };
25static st_table *yjit_method_codegen_table = NULL;
28static void *leave_exit_code;
31static uint32_t outline_full_cfunc_return_pos;
35 uint32_t inline_patch_pos;
36 uint32_t outlined_target_pos;
41static patch_array_t global_inval_patches = NULL;
50 VALUE path = rb_iseq_path(jit->iseq);
52 fprintf(stderr, "%s %.*s:%u\n", msg, (int)len, ptr, rb_iseq_line_no(jit->iseq, jit->insn_idx));
60 mov(cb, C_ARG_REGS[0], opnd);
61 call_ptr(cb, REG0, (void *)rb_obj_info_dump);
76 return jit->insn_idx + insn_len(jit_get_opcode(jit));
83 RUBY_ASSERT(arg_idx + 1 < (size_t)insn_len(jit_get_opcode(jit)));
84 return *(jit->pc + arg_idx + 1);
91 RUBY_ASSERT(reg.type == OPND_REG && reg.num_bits == 64);
94 mov(cb, reg, const_ptr_opnd((void *)ptr));
97 uint32_t ptr_offset = cb->write_pos - sizeof(VALUE);
100 if (!rb_darray_append(&jit->block->gc_object_offsets, ptr_offset)) {
101 rb_bug("allocation failed");
111 const VALUE *ec_pc = jit->ec->cfp->pc;
112 return (ec_pc == jit->pc);
126 VALUE *sp = jit->ec->cfp->sp;
128 return *(sp - 1 - n);
134 return jit->ec->cfp->self;
143 int32_t local_table_size = jit->iseq->body->local_table_size;
144 RUBY_ASSERT(n < (int)jit->iseq->body->local_table_size);
146 const VALUE *ep = jit->ec->cfp->ep;
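// Locals live at negative offsets below the EP's VM_ENV_DATA_SIZE bookkeeping slots; n indexes the iseq's local table.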
147 return ep[-VM_ENV_DATA_SIZE - local_table_size + n + 1];
156 mov(cb, scratch_reg, const_ptr_opnd(jit->pc + insn_len(jit->opcode)));
167 if (ctx->sp_offset != 0) {
168 x86opnd_t stack_pointer = ctx_sp_opnd(ctx, 0);
170 lea(cb, REG_SP, stack_pointer);
184 jit->record_boundary_patch_point = true;
185 jit_save_pc(jit, scratch_reg);
186 jit_save_sp(jit, ctx);
192record_global_inval_patch(const codeblock_t *cb, uint32_t outline_block_target_pos)
194 struct codepage_patch patch_point = { cb->write_pos, outline_block_target_pos };
195 if (!rb_darray_append(&global_inval_patches, patch_point)) rb_bug("allocation failed");
198static bool jit_guard_known_klass(jitstate_t *jit, ctx_t *ctx, VALUE known_klass, insn_opnd_t insn_opnd, VALUE sample_instance, const int max_chain_depth, uint8_t *side_exit);
204_add_comment(codeblock_t *cb, const char *comment_str)
211 size_t num_comments = rb_darray_size(yjit_code_comments);
212 if (num_comments > 0) {
213 struct yjit_comment last_comment = rb_darray_get(yjit_code_comments, num_comments - 1);
214 if (last_comment.offset == cb->write_pos && strcmp(last_comment.comment, comment_str) == 0) {
219 struct yjit_comment new_comment = (struct yjit_comment){ cb->write_pos, comment_str };
220 rb_darray_append(&yjit_code_comments, new_comment);
224#define ADD_COMMENT(cb, comment) _add_comment((cb), (comment))
234 VALUE self_val = jit_peek_at_self(jit, ctx);
235 if (type_diff(yjit_type_of_value(self_val), ctx->self_type) == INT_MAX) {
236 rb_bug("verify_ctx: ctx type (%s) incompatible with actual value of self: %s", yjit_type_name(ctx->self_type), rb_obj_info(self_val));
239 for (int i = 0; i < ctx->stack_size && i < MAX_TEMP_TYPES; i++) {
241 VALUE val = jit_peek_at_stack(jit, ctx, i);
242 val_type_t detected = yjit_type_of_value(val);
244 if (learned.mapping.kind == TEMP_SELF) {
245 if (self_val != val) {
246 rb_bug("verify_ctx: stack value was mapped to self, but values did not match\n"
250 rb_obj_info(self_val));
254 if (learned.mapping.kind == TEMP_LOCAL) {
255 int local_idx = learned.mapping.idx;
256 VALUE local_val = jit_peek_at_local(jit, ctx, local_idx);
257 if (local_val != val) {
258 rb_bug("verify_ctx: stack value was mapped to local, but values did not match\n"
263 rb_obj_info(local_val));
267 if (type_diff(detected, learned.type) == INT_MAX) {
268 rb_bug("verify_ctx: ctx type (%s) incompatible with actual value on stack: %s", yjit_type_name(learned.type), rb_obj_info(val));
272 int32_t local_table_size = jit->iseq->body->local_table_size;
273 for (int i = 0; i < local_table_size && i < MAX_TEMP_TYPES; i++) {
275 VALUE val = jit_peek_at_local(jit, ctx, i);
276 val_type_t detected = yjit_type_of_value(val);
278 if (type_diff(detected, learned) == INT_MAX) {
279 rb_bug("verify_ctx: ctx type (%s) incompatible with actual value of local: %s", yjit_type_name(learned), rb_obj_info(val));
286#define ADD_COMMENT(cb, comment) ((void)0)
287#define verify_ctx(jit, ctx) ((void)0)
294#define GEN_COUNTER_INC(cb, counter_name) _gen_counter_inc(cb, &(yjit_runtime_counters . counter_name))
298 if (!rb_yjit_opts.gen_stats) return;
301 mov(cb, REG1, const_ptr_opnd(counter));
302 cb_write_lock_prefix(cb);
303 add(cb, mem_opnd(64, REG1, 0), imm_opnd(1));
307#define COUNTED_EXIT(jit, side_exit, counter_name) _counted_side_exit(jit, side_exit, &(yjit_runtime_counters . counter_name))
309_counted_side_exit(jitstate_t* jit, uint8_t *existing_side_exit, int64_t *counter)
311 if (!rb_yjit_opts.gen_stats) return existing_side_exit;
313 uint8_t *start = cb_get_ptr(jit->ocb, jit->ocb->write_pos);
314 _gen_counter_inc(jit->ocb, counter);
315 jmp_ptr(jit->ocb, existing_side_exit);
321#define GEN_COUNTER_INC(cb, counter_name) ((void)0)
322#define COUNTED_EXIT(jit, side_exit, counter_name) side_exit
330 const uint32_t code_pos = cb->write_pos;
332 ADD_COMMENT(cb, "exit to interpreter");
336 if (ctx->sp_offset != 0) {
337 x86opnd_t stack_pointer = ctx_sp_opnd(ctx, 0);
338 lea(cb, REG_SP, stack_pointer);
343 mov(cb, RAX, const_ptr_opnd(exit_pc));
348 if (rb_yjit_opts.gen_stats) {
349 mov(cb, RDI, const_ptr_opnd(exit_pc));
350 call_ptr(cb, RSI, (void *)&yjit_count_side_exit_op);
358 mov(cb, RAX, imm_opnd(Qundef));
368 uint8_t *code_ptr = cb_get_ptr(cb, cb->write_pos);
374 GEN_COUNTER_INC(cb, leave_interp_return);
390gen_code_for_exit_from_stub(void)
393 code_for_exit_from_stub = cb_get_ptr(cb, cb->write_pos);
395 GEN_COUNTER_INC(cb, exit_from_branch_stub);
401 mov(cb, RAX, imm_opnd(Qundef));
420 if (!jit->side_exit_for_pc) {
422 uint32_t pos = yjit_gen_exit(jit->pc, ctx, ocb);
423 jit->side_exit_for_pc = cb_get_ptr(ocb, pos);
426 return jit->side_exit_for_pc;
435 if (block->entry_exit) return;
437 if (jit->insn_idx == block->blockid.idx) {
440 block->entry_exit = yjit_side_exit(jit, &block->ctx);
443 VALUE *pc = yjit_iseq_pc_at_idx(block->blockid.iseq, block->blockid.idx);
444 uint32_t pos = yjit_gen_exit(pc, &block->ctx, ocb);
445 block->entry_exit = cb_get_ptr(ocb, pos);
460 mov(cb, REG1, const_ptr_opnd(iseq->body->iseq_encoded));
464 uint32_t pc_is_zero = cb_new_label(cb, "pc_is_zero");
465 jz_label(cb, pc_is_zero);
468 GEN_COUNTER_INC(cb, leave_start_pc_non_zero);
474 mov(cb, RAX, imm_opnd(Qundef));
478 cb_write_label(cb, pc_is_zero);
500 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, cfp->self, me->def->original_id, me->called_id, me->owner, return_value);
504 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, me->owner, me->def->original_id);
508 ec->cfp->sp[0] = return_value;
514gen_full_cfunc_return(void)
517 outline_full_cfunc_return_pos = ocb->write_pos;
523 mov(cb, C_ARG_REGS[0], REG_EC);
524 mov(cb, C_ARG_REGS[1], RAX);
525 call_ptr(cb, REG0, (void *)full_cfunc_return);
528 GEN_COUNTER_INC(cb, traced_cfunc_return);
535 mov(cb, RAX, imm_opnd(Qundef));
548 enum { MAX_PROLOGUE_SIZE = 1024 };
551 if (cb->write_pos + MAX_PROLOGUE_SIZE >= cb->mem_size) {
555 const uint32_t old_write_pos = cb->write_pos;
558 cb_align_pos(cb, 64);
560 uint8_t *code_ptr = cb_get_ptr(cb, cb->write_pos);
561 ADD_COMMENT(cb, "yjit entry");
568 mov(cb, REG_EC, C_ARG_REGS[0]);
569 mov(cb, REG_CFP, C_ARG_REGS[1]);
576 mov(cb, REG0, const_ptr_opnd(leave_exit_code));
585 if (iseq->body->param.flags.has_opt) {
586 yjit_pc_guard(cb, iseq);
598yjit_check_ints(codeblock_t *cb, uint8_t *side_exit)
602 ADD_COMMENT(cb, "RUBY_VM_CHECK_INTS(ec)");
606 jnz_ptr(cb, side_exit);
616 ctx_t reset_depth = *current_context;
617 reset_depth.chain_depth = 0;
619 blockid_t jump_block = { jit->iseq, jit_next_insn_idx(jit) };
622 if (jit->record_boundary_patch_point) {
623 uint32_t exit_pos = yjit_gen_exit(jit->pc + insn_len(jit->opcode), &reset_depth, jit->ocb);
624 record_global_inval_patch(jit->cb, exit_pos);
625 jit->record_boundary_patch_point = false;
642 verify_blockid(blockid);
651 ctx_t ctx_copy = *start_ctx;
652 ctx_t *ctx = &ctx_copy;
655 *ctx = limit_block_versions(blockid, ctx);
658 block->blockid = blockid;
661 RUBY_ASSERT(!(blockid.idx == 0 && start_ctx->stack_size > 0));
663 const rb_iseq_t *iseq = block->blockid.iseq;
664 const unsigned int iseq_size = iseq->body->iseq_size;
665 uint32_t insn_idx = block->blockid.idx;
666 const uint32_t starting_insn_idx = insn_idx;
678 block->start_addr = cb_get_write_ptr(cb);
681 while (insn_idx < iseq_size) {
683 VALUE *pc = yjit_iseq_pc_at_idx(iseq, insn_idx);
684 int opcode = yjit_opcode_at_pc(iseq, pc);
685 RUBY_ASSERT(opcode >= 0 && opcode < VM_INSTRUCTION_SIZE);
689 if (opcode == BIN(opt_getinlinecache) && insn_idx > starting_insn_idx) {
690 jit_jump_to_next_insn(&jit, ctx);
695 jit.insn_idx = insn_idx;
698 jit.side_exit_for_pc = NULL;
701 if (jit.record_boundary_patch_point) {
703 uint32_t exit_pos = yjit_gen_exit(jit.pc, ctx, ocb);
704 record_global_inval_patch(cb, exit_pos);
705 jit.record_boundary_patch_point = false;
709 if (jit_at_current_insn(&jit)) {
710 verify_ctx(&jit, ctx);
714 codegen_fn gen_fn = gen_fns[opcode];
715 codegen_status_t status = YJIT_CANT_COMPILE;
718 fprintf(stderr, "compiling %d: %s\n", insn_idx, insn_name(opcode));
719 print_str(cb, insn_name(opcode));
725 GEN_COUNTER_INC(cb, exec_instruction);
728 ADD_COMMENT(cb, insn_name(opcode));
731 status = gen_fn(&jit, ctx, cb);
736 if (status == YJIT_CANT_COMPILE) {
740 uint32_t exit_off = yjit_gen_exit(jit.pc, ctx, cb);
744 if (insn_idx == block->blockid.idx) {
745 block->entry_exit = cb_get_ptr(cb, exit_off);
752 ctx->chain_depth = 0;
755 insn_idx += insn_len(opcode);
758 if (status == YJIT_END_BLOCK) {
764 block->end_addr = cb_get_write_ptr(cb);
767 block->end_idx = insn_idx;
774 if (cb->dropped_bytes || ocb->dropped_bytes) {
775 yjit_free_block(block);
779 if (YJIT_DUMP_MODE >= 2) {
781 fprintf(stderr, "Compiled the following for iseq=%p:\n", (void *)iseq);
782 for (uint32_t idx = block->blockid.idx; idx < insn_idx; ) {
783 int opcode = yjit_opcode_at_pc(iseq, yjit_iseq_pc_at_idx(iseq, idx));
784 fprintf(stderr, " %04d %s\n", idx, insn_name(opcode));
785 idx += insn_len(opcode);
794static codegen_status_t
798 return YJIT_KEEP_COMPILING;
801static codegen_status_t
805 x86opnd_t dup_val = ctx_stack_pop(ctx, 0);
809 x86opnd_t loc0 = ctx_stack_push_mapping(ctx, mapping);
810 mov(cb, REG0, dup_val);
813 return YJIT_KEEP_COMPILING;
817static codegen_status_t
820 rb_num_t n = (rb_num_t)jit_get_arg(jit, 0);
824 return YJIT_CANT_COMPILE;
827 x86opnd_t opnd1 = ctx_stack_opnd(ctx, 1);
828 x86opnd_t opnd0 = ctx_stack_opnd(ctx, 0);
832 x86opnd_t dst1 = ctx_stack_push_mapping(ctx, mapping1);
833 mov(cb, REG0, opnd1);
836 x86opnd_t dst0 = ctx_stack_push_mapping(ctx, mapping0);
837 mov(cb, REG0, opnd0);
840 return YJIT_KEEP_COMPILING;
846 x86opnd_t opnd0 = ctx_stack_opnd(ctx, offset0);
847 x86opnd_t opnd1 = ctx_stack_opnd(ctx, offset1);
852 mov(cb, reg0, opnd0);
853 mov(cb, reg1, opnd1);
854 mov(cb, opnd0, reg1);
855 mov(cb, opnd1, reg0);
857 ctx_set_opnd_mapping(ctx, OPND_STACK(offset0), mapping1);
858 ctx_set_opnd_mapping(ctx, OPND_STACK(offset1), mapping0);
862static codegen_status_t
865 stack_swap(ctx, cb, 0, 1, REG0, REG1);
866 return YJIT_KEEP_COMPILING;
870static codegen_status_t
873 rb_num_t n = (rb_num_t)jit_get_arg(jit, 0);
876 x86opnd_t top_val = ctx_stack_pop(ctx, 0);
877 x86opnd_t dst_opnd = ctx_stack_opnd(ctx, (int32_t)n);
878 mov(cb, REG0, top_val);
879 mov(cb, dst_opnd, REG0);
882 ctx_set_opnd_mapping(ctx, OPND_STACK(n), mapping);
884 return YJIT_KEEP_COMPILING;
888static codegen_status_t
891 int32_t n = (int32_t)jit_get_arg(jit, 0);
894 x86opnd_t top_n_val = ctx_stack_opnd(ctx, n);
897 x86opnd_t loc0 = ctx_stack_push_mapping(ctx, mapping);
898 mov(cb, REG0, top_n_val);
901 return YJIT_KEEP_COMPILING;
904static codegen_status_t
908 ctx_stack_pop(ctx, 1);
909 return YJIT_KEEP_COMPILING;
913static codegen_status_t
916 rb_num_t n = (rb_num_t)jit_get_arg(jit, 0);
917 ctx_stack_pop(ctx, n);
918 return YJIT_KEEP_COMPILING;
922static codegen_status_t
925 rb_num_t n = (rb_num_t)jit_get_arg(jit, 0);
928 jit_prepare_routine_call(jit, ctx, REG0);
930 x86opnd_t values_ptr = ctx_sp_opnd(ctx, -(sizeof(VALUE) * (uint32_t)n));
933 mov(cb, C_ARG_REGS[0], REG_EC);
934 mov(cb, C_ARG_REGS[1], imm_opnd(n));
935 lea(cb, C_ARG_REGS[2], values_ptr);
936 call_ptr(cb, REG0, (void *)rb_ec_ary_new_from_values);
938 ctx_stack_pop(ctx, n);
939 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_ARRAY);
940 mov(cb, stack_ret, RAX);
942 return YJIT_KEEP_COMPILING;
946static codegen_status_t
949 VALUE ary = jit_get_arg(jit, 0);
952 jit_prepare_routine_call(jit, ctx, REG0);
955 jit_mov_gc_ptr(jit, cb, C_ARG_REGS[0], ary);
958 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_ARRAY);
959 mov(cb, stack_ret, RAX);
961 return YJIT_KEEP_COMPILING;
965static codegen_status_t
968 VALUE hash = jit_get_arg(jit, 0);
971 jit_prepare_routine_call(jit, ctx, REG0);
974 jit_mov_gc_ptr(jit, cb, C_ARG_REGS[0], hash);
975 call_ptr(cb, REG0, (void *)rb_hash_resurrect);
977 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_HASH);
978 mov(cb, stack_ret, RAX);
980 return YJIT_KEEP_COMPILING;
983VALUE rb_vm_splat_array(VALUE flag, VALUE ary);
986static codegen_status_t
989 VALUE flag = (VALUE)jit_get_arg(jit, 0);
993 jit_prepare_routine_call(jit, ctx, REG0);
996 x86opnd_t ary_opnd = ctx_stack_pop(ctx, 1);
999 jit_mov_gc_ptr(jit, cb, C_ARG_REGS[0], flag);
1000 mov(cb, C_ARG_REGS[1], ary_opnd);
1001 call_ptr(cb, REG1, (void *)rb_vm_splat_array);
1003 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_ARRAY);
1004 mov(cb, stack_ret, RAX);
1006 return YJIT_KEEP_COMPILING;
1010static codegen_status_t
1013 rb_num_t flag = (rb_num_t)jit_get_arg(jit, 0);
1016 jit_prepare_routine_call(jit, ctx, REG0);
1019 mov(cb, C_ARG_REGS[0], ctx_stack_opnd(ctx, 1));
1020 mov(cb, C_ARG_REGS[1], ctx_stack_opnd(ctx, 0));
1021 mov(cb, C_ARG_REGS[2], imm_opnd(flag));
1024 ctx_stack_pop(ctx, 2);
1025 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_HEAP);
1026 mov(cb, stack_ret, RAX);
1028 return YJIT_KEEP_COMPILING;
1034 ADD_COMMENT(cb, "guard object is heap");
1038 jnz_ptr(cb, side_exit);
1041 cmp(cb, object_opnd, imm_opnd(Qnil));
1043 jbe_ptr(cb, side_exit);
1049 ADD_COMMENT(cb, "guard object is array");
1052 mov(cb, flags_opnd, member_opnd(object_opnd, struct RBasic, flags));
1056 cmp(cb, flags_opnd, imm_opnd(T_ARRAY));
1057 jne_ptr(cb, side_exit);
1061static codegen_status_t
1064 int flag = (int) jit_get_arg(jit, 1);
1068 GEN_COUNTER_INC(cb, expandarray_splat);
1069 return YJIT_CANT_COMPILE;
1074 GEN_COUNTER_INC(cb, expandarray_postarg);
1075 return YJIT_CANT_COMPILE;
1078 uint8_t *side_exit = yjit_side_exit(jit, ctx);
1082 int num = (int)jit_get_arg(jit, 0);
1083 val_type_t array_type = ctx_get_opnd_type(ctx, OPND_STACK(0));
1084 x86opnd_t array_opnd = ctx_stack_pop(ctx, 1);
1086 if (array_type.type == ETYPE_NIL) {
1089 for (int i = 0; i < num; i++) {
1090 x86opnd_t push = ctx_stack_push(ctx, TYPE_NIL);
1091 mov(cb, push, imm_opnd(Qnil));
1093 return YJIT_KEEP_COMPILING;
1097 mov(cb, REG0, array_opnd);
1098 guard_object_is_heap(cb, REG0, ctx, COUNTED_EXIT(jit, side_exit, expandarray_not_array));
1099 guard_object_is_array(cb, REG0, REG1, ctx, COUNTED_EXIT(jit, side_exit, expandarray_not_array));
1103 return YJIT_KEEP_COMPILING;
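// Read the length and element pointer without a runtime call: embedded arrays keep the length
// in the RBasic flags and the elements inline (as.ary); otherwise cmovz falls back to
// as.heap.len / as.heap.ptr.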
1108 mov(cb, REG1, flags_opnd);
1111 and(cb, REG1, imm_opnd(RARRAY_EMBED_LEN_MASK));
1115 test(cb, flags_opnd, imm_opnd(RARRAY_EMBED_FLAG));
1116 cmovz(cb, REG1, member_opnd(REG0, struct RArray, as.heap.len));
1120 cmp(cb, REG1, imm_opnd(num));
1121 jl_ptr(cb, COUNTED_EXIT(jit, side_exit, expandarray_rhs_too_small));
1125 lea(cb, REG1, member_opnd(REG0, struct RArray, as.ary));
1129 test(cb, flags_opnd, imm_opnd(RARRAY_EMBED_FLAG));
1130 cmovz(cb, REG1, member_opnd(REG0, struct RArray, as.heap.ptr));
1133 for (int32_t i = (int32_t) num - 1; i >= 0; i--) {
1134 x86opnd_t top = ctx_stack_push(ctx, TYPE_UNKNOWN);
1139 return YJIT_KEEP_COMPILING;
1143static codegen_status_t
1146 int32_t num = (int32_t)jit_get_arg(jit, 0);
1149 jit_prepare_routine_call(jit, ctx, REG0);
1153 mov(cb, C_ARG_REGS[0], imm_opnd(num / 2));
1154 call_ptr(cb, REG0, (void *)rb_hash_new_with_size);
1161 mov(cb, C_ARG_REGS[0], imm_opnd(num));
1162 lea(cb, C_ARG_REGS[1], ctx_stack_opnd(ctx, num - 1));
1163 mov(cb, C_ARG_REGS[2], RAX);
1169 ctx_stack_pop(ctx, num);
1170 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_HASH);
1171 mov(cb, stack_ret, RAX);
1177 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_HASH);
1178 mov(cb, stack_ret, RAX);
1181 return YJIT_KEEP_COMPILING;
1189 val_type_t val_type = yjit_type_of_value(arg);
1190 x86opnd_t stack_top = ctx_stack_push(ctx, val_type);
1202 if (imm.num_bits <= 32) {
1203 mov(cb, stack_top, imm);
1207 mov(cb, stack_top, REG0);
1213 jit_mov_gc_ptr(jit, cb, REG0, arg);
1216 mov(cb, stack_top, REG0);
1220static codegen_status_t
1223 jit_putobject(jit, ctx, Qnil);
1224 return YJIT_KEEP_COMPILING;
1227static codegen_status_t
1230 VALUE arg = jit_get_arg(jit, 0);
1232 jit_putobject(jit, ctx, arg);
1233 return YJIT_KEEP_COMPILING;
1236static codegen_status_t
1239 VALUE put_val = jit_get_arg(jit, 0);
1242 jit_prepare_routine_call(jit, ctx, REG0);
1244 mov(cb, C_ARG_REGS[0], REG_EC);
1245 jit_mov_gc_ptr(jit, cb, C_ARG_REGS[1], put_val);
1246 call_ptr(cb, REG0, (void *)rb_ec_str_resurrect);
1248 x86opnd_t stack_top = ctx_stack_push(ctx, TYPE_STRING);
1249 mov(cb, stack_top, RAX);
1251 return YJIT_KEEP_COMPILING;
1254static codegen_status_t
1257 int opcode = jit_get_opcode(jit);
1258 int cst_val = (opcode == BIN(putobject_INT2FIX_0_))? 0:1;
1260 jit_putobject(jit, ctx, INT2FIX(cst_val));
1261 return YJIT_KEEP_COMPILING;
1264static codegen_status_t
1271 x86opnd_t stack_top = ctx_stack_push_self(ctx);
1272 mov(cb, stack_top, REG0);
1274 return YJIT_KEEP_COMPILING;
1277static codegen_status_t
1280 enum vm_special_object_type type = (enum vm_special_object_type)jit_get_arg(jit, 0);
1282 if (type == VM_SPECIAL_OBJECT_VMCORE) {
1283 x86opnd_t stack_top = ctx_stack_push(ctx, TYPE_HEAP);
1284 jit_mov_gc_ptr(jit, cb, REG0, rb_mRubyVMFrozenCore);
1285 mov(cb, stack_top, REG0);
1286 return YJIT_KEEP_COMPILING;
1291 return YJIT_CANT_COMPILE;
1306 mov(cb, reg, mem_opnd(64, REG0, SIZEOF_VALUE * VM_ENV_DATA_INDEX_SPECVAL));
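// The specval slot holds a tagged pointer to the parent EP; masking off the low two bits recovers the plain pointer.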
1307 and(cb, reg, imm_opnd(~0x03));
1313slot_to_local_idx(const rb_iseq_t *iseq, int32_t slot_idx)
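// Convert an EP-relative slot index from the instruction operand into a zero-based index into the iseq's local table.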
1316 int32_t local_table_size = iseq->body->local_table_size;
1317 int32_t op = slot_idx - VM_ENV_DATA_SIZE;
1318 int32_t local_idx = local_table_size - op - 1;
1319 RUBY_ASSERT(local_idx >= 0 && local_idx < local_table_size);
1320 return (uint32_t)local_idx;
1323static codegen_status_t
1327 int32_t slot_idx = (int32_t)jit_get_arg(jit, 0);
1329 uint32_t local_idx = slot_to_local_idx(jit->iseq, slot_idx);
1332 gen_get_ep(cb, REG0, 0);
1335 mov(cb, REG0, mem_opnd(64, REG0, offs));
1338 x86opnd_t stack_top = ctx_stack_push_local(ctx, local_idx);
1339 mov(cb, stack_top, REG0);
1341 return YJIT_KEEP_COMPILING;
1344static codegen_status_t
1345gen_getlocal_generic(ctx_t *ctx, uint32_t local_idx, uint32_t level)
1347 gen_get_ep(cb, REG0, level);
1352 mov(cb, REG0, mem_opnd(64, REG0, offs));
1355 x86opnd_t stack_top = ctx_stack_push(ctx, TYPE_UNKNOWN);
1356 mov(cb, stack_top, REG0);
1358 return YJIT_KEEP_COMPILING;
1361static codegen_status_t
1364 int32_t idx = (int32_t)jit_get_arg(jit, 0);
1365 int32_t level = (int32_t)jit_get_arg(jit, 1);
1366 return gen_getlocal_generic(ctx, idx, level);
1369static codegen_status_t
1372 int32_t idx = (int32_t)jit_get_arg(jit, 0);
1373 return gen_getlocal_generic(ctx, idx, 1);
1376static codegen_status_t
1392 int32_t slot_idx = (int32_t)jit_get_arg(jit, 0);
1393 uint32_t local_idx = slot_to_local_idx(jit->iseq, slot_idx);
1396 gen_get_ep(cb, REG0, 0);
1399 x86opnd_t flags_opnd = mem_opnd(64, REG0, sizeof(VALUE) * VM_ENV_DATA_INDEX_FLAGS);
1400 test(cb, flags_opnd, imm_opnd(VM_ENV_FLAG_WB_REQUIRED));
1403 uint8_t *side_exit = yjit_side_exit(jit, ctx);
1406 jnz_ptr(cb, side_exit);
1409 val_type_t temp_type = ctx_get_opnd_type(ctx, OPND_STACK(0));
1410 ctx_set_local_type(ctx, local_idx, temp_type);
1413 x86opnd_t stack_top = ctx_stack_pop(ctx, 1);
1414 mov(cb, REG1, stack_top);
1417 const int32_t offs = -8 * slot_idx;
1418 mov(cb, mem_opnd(64, REG0, offs), REG1);
1420 return YJIT_KEEP_COMPILING;
1425static codegen_status_t
1430 if (jit->iseq->body->param.keyword->num >= 32) {
1431 return YJIT_CANT_COMPILE;
1435 int32_t bits_offset = (int32_t)jit_get_arg(jit, 0);
1438 int32_t index = (int32_t)jit_get_arg(jit, 1);
1441 gen_get_ep(cb, REG0, 0);
1444 x86opnd_t bits_opnd = mem_opnd(64, REG0, sizeof(VALUE) * -bits_offset);
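// kw_bits is a fixnum bitmap; shifting the tested bit one extra position left skips the fixnum tag bit, avoiding a FIX2ULONG conversion.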
1450 int64_t bit_test = 0x01 << (index + 1);
1451 test(cb, bits_opnd, imm_opnd(bit_test));
1452 mov(cb, REG0, imm_opnd(Qfalse));
1453 mov(cb, REG1, imm_opnd(Qtrue));
1454 cmovz(cb, REG0, REG1);
1456 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_IMM);
1457 mov(cb, stack_ret, REG0);
1459 return YJIT_KEEP_COMPILING;
1462static codegen_status_t
1463gen_setlocal_generic(jitstate_t *jit, ctx_t *ctx, uint32_t local_idx, uint32_t level)
1466 gen_get_ep(cb, REG0, level);
1469 x86opnd_t flags_opnd = mem_opnd(64, REG0, sizeof(VALUE) * VM_ENV_DATA_INDEX_FLAGS);
1470 test(cb, flags_opnd, imm_opnd(VM_ENV_FLAG_WB_REQUIRED));
1473 uint8_t *side_exit = yjit_side_exit(jit, ctx);
1476 jnz_ptr(cb, side_exit);
1479 x86opnd_t stack_top = ctx_stack_pop(ctx, 1);
1480 mov(cb, REG1, stack_top);
1484 mov(cb, mem_opnd(64, REG0, offs), REG1);
1486 return YJIT_KEEP_COMPILING;
1489static codegen_status_t
1492 int32_t idx = (int32_t)jit_get_arg(jit, 0);
1493 int32_t level = (int32_t)jit_get_arg(jit, 1);
1494 return gen_setlocal_generic(jit, ctx, idx, level);
1497static codegen_status_t
1500 int32_t idx = (int32_t)jit_get_arg(jit, 0);
1501 return gen_setlocal_generic(jit, ctx, idx, 1);
1505gen_jnz_to_target0(codeblock_t *cb, uint8_t *target0, uint8_t *target1, uint8_t shape)
1514 jnz_ptr(cb, target0);
1520gen_jz_to_target0(codeblock_t *cb, uint8_t *target0, uint8_t *target1, uint8_t shape)
1529 jz_ptr(cb, target0);
1535gen_jbe_to_target0(codeblock_t *cb, uint8_t *target0, uint8_t *target1, uint8_t shape)
1544 jbe_ptr(cb, target0);
1561jit_chain_guard(enum jcc_kinds jcc, jitstate_t *jit, const ctx_t *ctx, uint8_t depth_limit, uint8_t *side_exit)
1563 branchgen_fn target0_gen_fn;
1568 target0_gen_fn = gen_jnz_to_target0;
1572 target0_gen_fn = gen_jz_to_target0;
1576 target0_gen_fn = gen_jbe_to_target0;
1579 rb_bug("yjit: unimplemented jump kind");
1583 if (ctx->chain_depth < depth_limit) {
1584 ctx_t deeper = *ctx;
1585 deeper.chain_depth++;
1590 (blockid_t){ jit->iseq, jit->insn_idx },
1598 target0_gen_fn(cb, side_exit, NULL, SHAPE_DEFAULT);
1603 GETIVAR_MAX_DEPTH = 10,
1604 OPT_AREF_MAX_CHAIN_DEPTH = 2,
1608VALUE rb_vm_set_ivar_idx(VALUE obj, uint32_t idx, VALUE val);
1615static codegen_status_t
1616gen_set_ivar(jitstate_t *jit, ctx_t *ctx, VALUE recv, VALUE klass, ID ivar_name)
1620 jit_prepare_routine_call(jit, ctx, REG0);
1623 x86opnd_t val_opnd = ctx_stack_pop(ctx, 1);
1624 x86opnd_t recv_opnd = ctx_stack_pop(ctx, 1);
1626 uint32_t ivar_index = rb_obj_ensure_iv_index_mapping(recv, ivar_name);
1629 mov(cb, C_ARG_REGS[0], recv_opnd);
1630 mov(cb, C_ARG_REGS[1], imm_opnd(ivar_index));
1631 mov(cb, C_ARG_REGS[2], val_opnd);
1632 call_ptr(cb, REG0, (void *)rb_vm_set_ivar_idx);
1634 x86opnd_t out_opnd = ctx_stack_push(ctx, TYPE_UNKNOWN);
1635 mov(cb, out_opnd, RAX);
1637 return YJIT_KEEP_COMPILING;
1645static codegen_status_t
1646gen_get_ivar(jitstate_t *jit, ctx_t *ctx, const int max_chain_depth, VALUE comptime_receiver, ID ivar_name, insn_opnd_t reg0_opnd, uint8_t *side_exit)
1648 VALUE comptime_val_klass = CLASS_OF(comptime_receiver);
1649 const ctx_t starting_context = *ctx;
1659 ADD_COMMENT(cb, "call rb_ivar_get()");
1662 jit_prepare_routine_call(jit, ctx, REG1);
1664 mov(cb, C_ARG_REGS[0], REG0);
1665 mov(cb, C_ARG_REGS[1], imm_opnd((int64_t)ivar_name));
1668 if (!reg0_opnd.is_self) {
1669 (void)ctx_stack_pop(ctx, 1);
1672 x86opnd_t out_opnd = ctx_stack_push(ctx, TYPE_UNKNOWN);
1673 mov(cb, out_opnd, RAX);
1676 jit_jump_to_next_insn(jit, ctx);
1677 return YJIT_END_BLOCK;
1702 uint32_t ivar_index = rb_obj_ensure_iv_index_mapping(comptime_receiver, ivar_name);
1705 if (!reg0_opnd.is_self) {
1706 (void)ctx_stack_pop(ctx, 1);
1715 ADD_COMMENT(cb, "guard embedded getivar");
1717 test(cb, flags_opnd, imm_opnd(ROBJECT_EMBED));
1718 jit_chain_guard(JCC_JZ, jit, &starting_context, max_chain_depth, COUNTED_EXIT(jit, side_exit, getivar_megamorphic));
1722 mov(cb, REG1, ivar_opnd);
1725 cmp(cb, REG1, imm_opnd(Qundef));
1726 mov(cb, REG0, imm_opnd(Qnil));
1727 cmove(cb, REG1, REG0);
1730 x86opnd_t out_opnd = ctx_stack_push(ctx, TYPE_UNKNOWN);
1731 mov(cb, out_opnd, REG1);
1738 ADD_COMMENT(cb, "guard extended getivar");
1740 test(cb, flags_opnd, imm_opnd(ROBJECT_EMBED));
1741 jit_chain_guard(JCC_JNZ, jit, &starting_context, max_chain_depth, COUNTED_EXIT(jit, side_exit, getivar_megamorphic));
1747 cmp(cb, num_slots, imm_opnd(ivar_index));
1748 jle_ptr(cb, COUNTED_EXIT(jit, side_exit, getivar_idx_out_of_range));
1753 mov(cb, REG0, tbl_opnd);
1756 x86opnd_t ivar_opnd = mem_opnd(64, REG0, sizeof(VALUE) * ivar_index);
1757 mov(cb, REG0, ivar_opnd);
1760 cmp(cb, REG0, imm_opnd(Qundef));
1761 mov(cb, REG1, imm_opnd(Qnil));
1762 cmove(cb, REG0, REG1);
1765 x86opnd_t out_opnd = ctx_stack_push(ctx, TYPE_UNKNOWN);
1766 mov(cb, out_opnd, REG0);
1770 jit_jump_to_next_insn(jit, ctx);
1771 return YJIT_END_BLOCK;
1774static codegen_status_t
1778 if (!jit_at_current_insn(jit)) {
1779 defer_compilation(jit, ctx);
1780 return YJIT_END_BLOCK;
1783 ID ivar_name = (ID)jit_get_arg(jit, 0);
1785 VALUE comptime_val = jit_peek_at_self(jit, ctx);
1786 VALUE comptime_val_klass = CLASS_OF(comptime_val);
1789 uint8_t *side_exit = yjit_side_exit(jit, ctx);
1794 jit_guard_known_klass(jit, ctx, comptime_val_klass, OPND_SELF, comptime_val, GETIVAR_MAX_DEPTH, side_exit);
1796 return gen_get_ivar(jit, ctx, GETIVAR_MAX_DEPTH, comptime_val, ivar_name, OPND_SELF, side_exit);
1799void rb_vm_setinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, VALUE val, IVC ic);
1801static codegen_status_t
1804 ID id = (ID)jit_get_arg(jit, 0);
1805 IVC ic = (IVC)jit_get_arg(jit, 1);
1809 jit_prepare_routine_call(jit, ctx, REG0);
1812 x86opnd_t val_opnd = ctx_stack_pop(ctx, 1);
1816 mov(cb, C_ARG_REGS[3], val_opnd);
1817 mov(cb, C_ARG_REGS[2], imm_opnd(id));
1818 mov(cb, C_ARG_REGS[4], const_ptr_opnd(ic));
1819 jit_mov_gc_ptr(jit, cb, C_ARG_REGS[0], (VALUE)jit->iseq);
1820 call_ptr(cb, REG0, (void *)rb_vm_setinstancevariable);
1822 return YJIT_KEEP_COMPILING;
1827static codegen_status_t
1830 rb_num_t op_type = (rb_num_t)jit_get_arg(jit, 0);
1831 VALUE obj = (VALUE)jit_get_arg(jit, 1);
1832 VALUE pushval = (VALUE)jit_get_arg(jit, 2);
1836 jit_prepare_routine_call(jit, ctx, REG0);
1839 x86opnd_t v_opnd = ctx_stack_pop(ctx, 1);
1842 mov(cb, C_ARG_REGS[0], REG_EC);
1843 mov(cb, C_ARG_REGS[1], REG_CFP);
1844 mov(cb, C_ARG_REGS[2], imm_opnd(op_type));
1845 jit_mov_gc_ptr(jit, cb, C_ARG_REGS[3], (VALUE)obj);
1846 mov(cb, C_ARG_REGS[4], v_opnd);
1847 call_ptr(cb, REG0, (void *)rb_vm_defined);
1852 jit_mov_gc_ptr(jit, cb, REG1, (VALUE)pushval);
1853 cmp(cb, AL, imm_opnd(0));
1854 mov(cb, RAX, imm_opnd(Qnil));
1855 cmovnz(cb, RAX, REG1);
1859 x86opnd_t stack_ret = ctx_stack_push(ctx, out_type);
1860 mov(cb, stack_ret, RAX);
1862 return YJIT_KEEP_COMPILING;
1865static codegen_status_t
1868 enum ruby_value_type type_val = (enum ruby_value_type)jit_get_arg(jit, 0);
1871 val_type_t val_type = ctx_get_opnd_type(ctx, OPND_STACK(0));
1877 if ((type_val == T_STRING && val_type.type == ETYPE_STRING) ||
1878 (type_val == T_ARRAY && val_type.type == ETYPE_ARRAY) ||
1879 (type_val == T_HASH && val_type.type == ETYPE_HASH)) {
1881 stack_ret = ctx_stack_push(ctx, TYPE_TRUE);
1882 mov(cb, stack_ret, imm_opnd(Qtrue));
1883 return YJIT_KEEP_COMPILING;
1885 else if (val_type.is_imm || val_type.type != ETYPE_UNKNOWN) {
1887 stack_ret = ctx_stack_push(ctx, TYPE_FALSE);
1888 mov(cb, stack_ret, imm_opnd(Qfalse));
1889 return YJIT_KEEP_COMPILING;
1893 mov(cb, REG1, imm_opnd(Qfalse));
1895 uint32_t ret = cb_new_label(cb, "ret");
1897 if (!val_type.is_heap) {
1902 cmp(cb, REG0, imm_opnd(Qnil));
1907 mov(cb, REG0, mem_opnd(64, REG0, offsetof(struct RBasic, flags)));
1909 cmp(cb, REG0, imm_opnd(type_val));
1910 mov(cb, REG0, imm_opnd(Qtrue));
1912 cmove(cb, REG1, REG0);
1914 cb_write_label(cb, ret);
1915 stack_ret = ctx_stack_push(ctx, TYPE_IMM);
1916 mov(cb, stack_ret, REG1);
1919 return YJIT_KEEP_COMPILING;
1922 return YJIT_CANT_COMPILE;
1926static codegen_status_t
1929 rb_num_t n = (rb_num_t)jit_get_arg(jit, 0);
1932 jit_prepare_routine_call(jit, ctx, REG0);
1934 x86opnd_t values_ptr = ctx_sp_opnd(ctx, -(sizeof(VALUE) * (uint32_t)n));
1937 mov(cb, C_ARG_REGS[0], imm_opnd(n));
1938 lea(cb, C_ARG_REGS[1], values_ptr);
1939 call_ptr(cb, REG0, (void *)rb_str_concat_literals);
1941 ctx_stack_pop(ctx, n);
1942 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_STRING);
1943 mov(cb, stack_ret, RAX);
1945 return YJIT_KEEP_COMPILING;
1949guard_two_fixnums(ctx_t *ctx, uint8_t *side_exit)
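// Take the side exit if either operand is statically known not to be a fixnum; only operands whose type is still unknown get a runtime tag check below.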
1952 val_type_t arg1_type = ctx_get_opnd_type(ctx, OPND_STACK(0));
1953 val_type_t arg0_type = ctx_get_opnd_type(ctx, OPND_STACK(1));
1955 if (arg0_type.is_heap || arg1_type.is_heap) {
1956 jmp_ptr(cb, side_exit);
1960 if (arg0_type.type != ETYPE_FIXNUM && arg0_type.type != ETYPE_UNKNOWN) {
1961 jmp_ptr(cb, side_exit);
1965 if (arg1_type.type != ETYPE_FIXNUM && arg1_type.type != ETYPE_UNKNOWN) {
1966 jmp_ptr(cb, side_exit);
1972 RUBY_ASSERT(arg0_type.type == ETYPE_FIXNUM || arg0_type.type == ETYPE_UNKNOWN);
1973 RUBY_ASSERT(arg1_type.type == ETYPE_FIXNUM || arg1_type.type == ETYPE_UNKNOWN);
1976 x86opnd_t arg1 = ctx_stack_opnd(ctx, 0);
1977 x86opnd_t arg0 = ctx_stack_opnd(ctx, 1);
1980 if (arg0_type.type != ETYPE_FIXNUM) {
1981 ADD_COMMENT(cb, "guard arg0 fixnum");
1983 jz_ptr(cb, side_exit);
1985 if (arg1_type.type != ETYPE_FIXNUM) {
1986 ADD_COMMENT(cb, "guard arg1 fixnum");
1988 jz_ptr(cb, side_exit);
1992 ctx_upgrade_opnd_type(ctx, OPND_STACK(0), TYPE_FIXNUM);
1993 ctx_upgrade_opnd_type(ctx, OPND_STACK(1), TYPE_FIXNUM);
1999static codegen_status_t
2003 if (!jit_at_current_insn(jit)) {
2004 defer_compilation(jit, ctx);
2005 return YJIT_END_BLOCK;
2008 VALUE comptime_a = jit_peek_at_stack(jit, ctx, 1);
2009 VALUE comptime_b = jit_peek_at_stack(jit, ctx, 0);
2014 uint8_t *side_exit = yjit_side_exit(jit, ctx);
2016 if (!assume_bop_not_redefined(jit, INTEGER_REDEFINED_OP_FLAG, BOP_LT)) {
2017 return YJIT_CANT_COMPILE;
2021 guard_two_fixnums(ctx, side_exit);
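// Branchless result: REG0 starts at 0 (Qfalse) and the caller-supplied cmov_op conditionally moves Qtrue in after the compare.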
2028 xor(cb, REG0_32, REG0_32);
2029 mov(cb, REG1, arg0);
2030 cmp(cb, REG1, arg1);
2031 mov(cb, REG1, imm_opnd(Qtrue));
2032 cmov_op(cb, REG0, REG1);
2035 x86opnd_t dst = ctx_stack_push(ctx, TYPE_UNKNOWN);
2038 return YJIT_KEEP_COMPILING;
2041 return gen_opt_send_without_block(jit, ctx, cb);
2045static codegen_status_t
2048 return gen_fixnum_cmp(jit, ctx, cmovl);
2051static codegen_status_t
2054 return gen_fixnum_cmp(jit, ctx, cmovle);
2057static codegen_status_t
2060 return gen_fixnum_cmp(jit, ctx, cmovge);
2063static codegen_status_t
2066 return gen_fixnum_cmp(jit, ctx, cmovg);
2072gen_equality_specialized(jitstate_t *jit, ctx_t *ctx, uint8_t *side_exit)
2074 VALUE comptime_a = jit_peek_at_stack(jit, ctx, 1);
2075 VALUE comptime_b = jit_peek_at_stack(jit, ctx, 0);
2077 x86opnd_t a_opnd = ctx_stack_opnd(ctx, 1);
2078 x86opnd_t b_opnd = ctx_stack_opnd(ctx, 0);
2081 if (!assume_bop_not_redefined(jit, INTEGER_REDEFINED_OP_FLAG, BOP_EQ)) {
2086 guard_two_fixnums(ctx, side_exit);
2088 mov(cb, REG0, a_opnd);
2089 cmp(cb, REG0, b_opnd);
2091 mov(cb, REG0, imm_opnd(Qfalse));
2092 mov(cb, REG1, imm_opnd(Qtrue));
2093 cmove(cb, REG0, REG1);
2096 ctx_stack_pop(ctx, 2);
2097 x86opnd_t dst = ctx_stack_push(ctx, TYPE_IMM);
2104 if (!assume_bop_not_redefined(jit, STRING_REDEFINED_OP_FLAG, BOP_EQ)) {
2110 mov(cb, C_ARG_REGS[0], a_opnd);
2111 mov(cb, C_ARG_REGS[1], b_opnd);
2114 mov(cb, REG0, C_ARG_REGS[0]);
2115 jit_guard_known_klass(jit, ctx, rb_cString, OPND_STACK(1), comptime_a, SEND_MAX_DEPTH, side_exit);
2117 uint32_t ret = cb_new_label(cb, "ret");
2120 cmp(cb, C_ARG_REGS[0], C_ARG_REGS[1]);
2121 mov(cb, RAX, imm_opnd(Qtrue));
2125 if (ctx_get_opnd_type(ctx, OPND_STACK(0)).type != ETYPE_STRING) {
2126 mov(cb, REG0, C_ARG_REGS[1]);
2128 jit_guard_known_klass(jit, ctx, rb_cString, OPND_STACK(0), comptime_b, SEND_MAX_DEPTH, side_exit);
2132 call_ptr(cb, REG0, (void *)rb_str_eql_internal);
2135 cb_write_label(cb, ret);
2136 ctx_stack_pop(ctx, 2);
2137 x86opnd_t dst = ctx_stack_push(ctx, TYPE_IMM);
2148static codegen_status_t
2152 if (!jit_at_current_insn(jit)) {
2153 defer_compilation(jit, ctx);
2154 return YJIT_END_BLOCK;
2158 uint8_t *side_exit = yjit_side_exit(jit, ctx);
2160 if (gen_equality_specialized(jit, ctx, side_exit)) {
2161 jit_jump_to_next_insn(jit, ctx);
2162 return YJIT_END_BLOCK;
2165 return gen_opt_send_without_block(jit, ctx, cb);
2171static codegen_status_t
2177 return gen_send_general(jit, ctx, cd, NULL);
2180static codegen_status_t
2184 int32_t argc = (int32_t)vm_ci_argc(cd->ci);
2188 GEN_COUNTER_INC(cb, oaref_argc_not_one);
2189 return YJIT_CANT_COMPILE;
2193 if (!jit_at_current_insn(jit)) {
2194 defer_compilation(jit, ctx);
2195 return YJIT_END_BLOCK;
2199 const ctx_t starting_context = *ctx;
2202 VALUE comptime_idx = jit_peek_at_stack(jit, ctx, 0);
2203 VALUE comptime_recv = jit_peek_at_stack(jit, ctx, 1);
2206 uint8_t *side_exit = yjit_side_exit(jit, ctx);
2209 if (!assume_bop_not_redefined(jit, ARRAY_REDEFINED_OP_FLAG, BOP_AREF)) {
2210 return YJIT_CANT_COMPILE;
2214 x86opnd_t idx_opnd = ctx_stack_pop(ctx, 1);
2215 x86opnd_t recv_opnd = ctx_stack_pop(ctx, 1);
2216 mov(cb, REG0, recv_opnd);
2221 jnz_ptr(cb, side_exit);
2222 cmp(cb, REG0, imm_opnd(Qfalse));
2223 je_ptr(cb, side_exit);
2224 cmp(cb, REG0, imm_opnd(Qnil));
2225 je_ptr(cb, side_exit);
2229 mov(cb, REG1, mem_opnd(64, REG0, offsetof(struct RBasic, klass)));
2230 mov(cb, REG0, const_ptr_opnd((void *)rb_cArray));
2231 cmp(cb, REG0, REG1);
2232 jit_chain_guard(JCC_JNE, jit, &starting_context, OPT_AREF_MAX_CHAIN_DEPTH, side_exit);
2235 mov(cb, REG1, idx_opnd);
2237 jz_ptr(cb, COUNTED_EXIT(jit, side_exit, oaref_arg_not_fixnum));
2242 mov(cb, RDI, recv_opnd);
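// Untag the fixnum index: an arithmetic shift right by one drops the tag bit and leaves the C integer value.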
2243 sar(cb, REG1, imm_opnd(1));
2245 call_ptr(cb, REG0, (void *)rb_ary_entry_internal);
2248 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_UNKNOWN);
2249 mov(cb, stack_ret, RAX);
2253 jit_jump_to_next_insn(jit, ctx);
2254 return YJIT_END_BLOCK;
2257 if (!assume_bop_not_redefined(jit, HASH_REDEFINED_OP_FLAG, BOP_AREF)) {
2258 return YJIT_CANT_COMPILE;
2261 x86opnd_t key_opnd = ctx_stack_opnd(ctx, 0);
2262 x86opnd_t recv_opnd = ctx_stack_opnd(ctx, 1);
2265 mov(cb, REG0, recv_opnd);
2266 jit_guard_known_klass(jit, ctx, rb_cHash, OPND_STACK(1), comptime_recv, OPT_AREF_MAX_CHAIN_DEPTH, side_exit);
2269 mov(cb, C_ARG_REGS[0], REG0);
2270 mov(cb, C_ARG_REGS[1], key_opnd);
2273 jit_prepare_routine_call(jit, ctx, REG0);
2278 (void)ctx_stack_pop(ctx, 2);
2281 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_UNKNOWN);
2282 mov(cb, stack_ret, RAX);
2285 jit_jump_to_next_insn(jit, ctx);
2286 return YJIT_END_BLOCK;
2290 return gen_opt_send_without_block(jit, ctx, cb);
2294static codegen_status_t
2298 if (!jit_at_current_insn(jit)) {
2299 defer_compilation(jit, ctx);
2300 return YJIT_END_BLOCK;
2303 VALUE comptime_recv = jit_peek_at_stack(jit, ctx, 2);
2304 VALUE comptime_key = jit_peek_at_stack(jit, ctx, 1);
2307 x86opnd_t recv = ctx_stack_opnd(ctx, 2);
2312 uint8_t *side_exit = yjit_side_exit(jit, ctx);
2315 mov(cb, REG0, recv);
2316 jit_guard_known_klass(jit, ctx, rb_cArray, OPND_STACK(2), comptime_recv, SEND_MAX_DEPTH, side_exit);
2320 jit_guard_known_klass(jit, ctx, rb_cInteger, OPND_STACK(1), comptime_key, SEND_MAX_DEPTH, side_exit);
2323 mov(cb, C_ARG_REGS[0], recv);
2324 mov(cb, C_ARG_REGS[1], key);
2325 sar(cb, C_ARG_REGS[1], imm_opnd(1));
2326 mov(cb, C_ARG_REGS[2], val);
2329 jit_prepare_routine_call(jit, ctx, REG0);
2335 mov(cb, REG0, ctx_stack_opnd(ctx, 0));
2338 ctx_stack_pop(ctx, 3);
2339 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_UNKNOWN);
2340 mov(cb, stack_ret, REG0);
2342 jit_jump_to_next_insn(jit, ctx);
2343 return YJIT_END_BLOCK;
2346 uint8_t *side_exit = yjit_side_exit(jit, ctx);
2349 mov(cb, REG0, recv);
2350 jit_guard_known_klass(jit, ctx, rb_cHash, OPND_STACK(2), comptime_recv, SEND_MAX_DEPTH, side_exit);
2353 mov(cb, C_ARG_REGS[0], recv);
2354 mov(cb, C_ARG_REGS[1], key);
2355 mov(cb, C_ARG_REGS[2], val);
2358 jit_prepare_routine_call(jit, ctx, REG0);
2363 ctx_stack_pop(ctx, 3);
2364 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_UNKNOWN);
2365 mov(cb, stack_ret, RAX);
2367 jit_jump_to_next_insn(jit, ctx);
2368 return YJIT_END_BLOCK;
2371 return gen_opt_send_without_block(jit, ctx, cb);
2375static codegen_status_t
2379 if (!jit_at_current_insn(jit)) {
2380 defer_compilation(jit, ctx);
2381 return YJIT_END_BLOCK;
2384 VALUE comptime_a = jit_peek_at_stack(jit, ctx, 1);
2385 VALUE comptime_b = jit_peek_at_stack(jit, ctx, 0);
2390 uint8_t *side_exit = yjit_side_exit(jit, ctx);
2392 if (!assume_bop_not_redefined(jit, INTEGER_REDEFINED_OP_FLAG, BOP_AND)) {
2393 return YJIT_CANT_COMPILE;
2397 guard_two_fixnums(ctx, side_exit);
2404 mov(cb, REG0, arg0);
2405 and(cb, REG0, arg1);
2408 x86opnd_t dst = ctx_stack_push(ctx, TYPE_FIXNUM);
2411 return YJIT_KEEP_COMPILING;
2415 return gen_opt_send_without_block(jit, ctx, cb);
2419static codegen_status_t
2423 if (!jit_at_current_insn(jit)) {
2424 defer_compilation(jit, ctx);
2425 return YJIT_END_BLOCK;
2428 VALUE comptime_a = jit_peek_at_stack(jit, ctx, 1);
2429 VALUE comptime_b = jit_peek_at_stack(jit, ctx, 0);
2434 uint8_t *side_exit = yjit_side_exit(jit, ctx);
2436 if (!assume_bop_not_redefined(jit, INTEGER_REDEFINED_OP_FLAG, BOP_OR)) {
2437 return YJIT_CANT_COMPILE;
2441 guard_two_fixnums(ctx, side_exit);
2448 mov(cb, REG0, arg0);
2452 x86opnd_t dst = ctx_stack_push(ctx, TYPE_FIXNUM);
2455 return YJIT_KEEP_COMPILING;
2459 return gen_opt_send_without_block(jit, ctx, cb);
2463static codegen_status_t
2467 if (!jit_at_current_insn(jit)) {
2468 defer_compilation(jit, ctx);
2469 return YJIT_END_BLOCK;
2472 VALUE comptime_a = jit_peek_at_stack(jit, ctx, 1);
2473 VALUE comptime_b = jit_peek_at_stack(jit, ctx, 0);
2478 uint8_t *side_exit = yjit_side_exit(jit, ctx);
2480 if (!assume_bop_not_redefined(jit, INTEGER_REDEFINED_OP_FLAG, BOP_MINUS)) {
2481 return YJIT_CANT_COMPILE;
2485 guard_two_fixnums(ctx, side_exit);
2492 mov(cb, REG0, arg0);
2493 sub(cb, REG0, arg1);
2494 jo_ptr(cb, side_exit);
2495 add(cb, REG0, imm_opnd(1));
2498 x86opnd_t dst = ctx_stack_push(ctx, TYPE_FIXNUM);
2501 return YJIT_KEEP_COMPILING;
2505 return gen_opt_send_without_block(jit, ctx, cb);
2509static codegen_status_t
2513 if (!jit_at_current_insn(jit)) {
2514 defer_compilation(jit, ctx);
2515 return YJIT_END_BLOCK;
2518 VALUE comptime_a = jit_peek_at_stack(jit, ctx, 1);
2519 VALUE comptime_b = jit_peek_at_stack(jit, ctx, 0);
2524 uint8_t *side_exit = yjit_side_exit(jit, ctx);
2526 if (!assume_bop_not_redefined(jit, INTEGER_REDEFINED_OP_FLAG, BOP_PLUS)) {
2527 return YJIT_CANT_COMPILE;
2531 guard_two_fixnums(ctx, side_exit);
2538 mov(cb, REG0, arg0);
2539 sub(cb, REG0, imm_opnd(1));
2540 add(cb, REG0, arg1);
2541 jo_ptr(cb, side_exit);
2544 x86opnd_t dst = ctx_stack_push(ctx, TYPE_FIXNUM);
2547 return YJIT_KEEP_COMPILING;
2551 return gen_opt_send_without_block(jit, ctx, cb);
2555static codegen_status_t
2559 return gen_opt_send_without_block(jit, ctx, cb);
2562static codegen_status_t
2566 return gen_opt_send_without_block(jit, ctx, cb);
2569VALUE rb_vm_opt_mod(VALUE recv, VALUE obj);
2571static codegen_status_t
2576 jit_prepare_routine_call(jit, ctx, REG0);
2578 uint8_t *side_exit = yjit_side_exit(jit, ctx);
2585 mov(cb, C_ARG_REGS[0], arg0);
2586 mov(cb, C_ARG_REGS[1], arg1);
2587 call_ptr(cb, REG0, (void *)rb_vm_opt_mod);
2590 cmp(cb, RAX, imm_opnd(Qundef));
2591 je_ptr(cb, side_exit);
2594 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_UNKNOWN);
2595 mov(cb, stack_ret, RAX);
2597 return YJIT_KEEP_COMPILING;
2600static codegen_status_t
2604 return gen_opt_send_without_block(jit, ctx, cb);
2607static codegen_status_t
2611 return gen_opt_send_without_block(jit, ctx, cb);
2614static codegen_status_t
2618 return gen_opt_send_without_block(jit, ctx, cb);
2621static codegen_status_t
2624 if (!assume_bop_not_redefined(jit, STRING_REDEFINED_OP_FLAG, BOP_FREEZE)) {
2625 return YJIT_CANT_COMPILE;
2628 VALUE str = jit_get_arg(jit, 0);
2629 jit_mov_gc_ptr(jit, cb, REG0, str);
2632 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_STRING);
2633 mov(cb, stack_ret, REG0);
2635 return YJIT_KEEP_COMPILING;
2638static codegen_status_t
2641 if (!assume_bop_not_redefined(jit, STRING_REDEFINED_OP_FLAG, BOP_UMINUS)) {
2642 return YJIT_CANT_COMPILE;
2645 VALUE str = jit_get_arg(jit, 0);
2646 jit_mov_gc_ptr(jit, cb, REG0, str);
2649 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_STRING);
2650 mov(cb, stack_ret, REG0);
2652 return YJIT_KEEP_COMPILING;
2655static codegen_status_t
2658 return gen_opt_send_without_block(jit, ctx, cb);
2661static codegen_status_t
2664 return gen_opt_send_without_block(jit, ctx, cb);
2667static codegen_status_t
2670 return gen_opt_send_without_block(jit, ctx, cb);
2673static codegen_status_t
2676 return gen_opt_send_without_block(jit, ctx, cb);
2679static codegen_status_t
2690 ctx_stack_pop(ctx, 1);
2692 return YJIT_KEEP_COMPILING;
2696gen_branchif_branch(codeblock_t *cb, uint8_t *target0, uint8_t *target1, uint8_t shape)
2700 jz_ptr(cb, target1);
2704 jnz_ptr(cb, target0);
2708 jnz_ptr(cb, target0);
2709 jmp_ptr(cb, target1);
2714static codegen_status_t
2717 int32_t jump_offset = (int32_t)jit_get_arg(jit, 0);
2720 if (jump_offset < 0) {
2721 uint8_t *side_exit = yjit_side_exit(jit, ctx);
2722 yjit_check_ints(cb, side_exit);
2728 x86opnd_t val_opnd = ctx_stack_pop(ctx, 1);
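// Truthiness check: val & ~Qnil is zero only for Qnil and Qfalse, so the Z flag separates falsey from truthy values.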
2729 test(cb, val_opnd, imm_opnd(~Qnil));
2732 uint32_t next_idx = jit_next_insn_idx(jit);
2733 uint32_t jump_idx = next_idx + jump_offset;
2734 blockid_t next_block = { jit->iseq, next_idx };
2735 blockid_t jump_block = { jit->iseq, jump_idx };
2748 return YJIT_END_BLOCK;
2752gen_branchunless_branch(codeblock_t *cb, uint8_t *target0, uint8_t *target1, uint8_t shape)
2756 jnz_ptr(cb, target1);
2760 jz_ptr(cb, target0);
2764 jz_ptr(cb, target0);
2765 jmp_ptr(cb, target1);
2770static codegen_status_t
2773 int32_t jump_offset = (int32_t)jit_get_arg(jit, 0);
2776 if (jump_offset < 0) {
2777 uint8_t *side_exit = yjit_side_exit(jit, ctx);
2778 yjit_check_ints(cb, side_exit);
2784 x86opnd_t val_opnd = ctx_stack_pop(ctx, 1);
2785 test(cb, val_opnd, imm_opnd(~Qnil));
2788 uint32_t next_idx = jit_next_insn_idx(jit);
2789 uint32_t jump_idx = next_idx + jump_offset;
2790 blockid_t next_block = { jit->iseq, next_idx };
2791 blockid_t jump_block = { jit->iseq, jump_idx };
2801 gen_branchunless_branch
2804 return YJIT_END_BLOCK;
2808gen_branchnil_branch(codeblock_t *cb, uint8_t *target0, uint8_t *target1, uint8_t shape)
2812 jne_ptr(cb, target1);
2816 je_ptr(cb, target0);
2820 je_ptr(cb, target0);
2821 jmp_ptr(cb, target1);
2826static codegen_status_t
2829 int32_t jump_offset = (int32_t)jit_get_arg(jit, 0);
2832 if (jump_offset < 0) {
2833 uint8_t *side_exit = yjit_side_exit(jit, ctx);
2834 yjit_check_ints(cb, side_exit);
2839 x86opnd_t val_opnd = ctx_stack_pop(ctx, 1);
2840 cmp(cb, val_opnd, imm_opnd(Qnil));
2843 uint32_t next_idx = jit_next_insn_idx(jit);
2844 uint32_t jump_idx = next_idx + jump_offset;
2845 blockid_t next_block = { jit->iseq, next_idx };
2846 blockid_t jump_block = { jit->iseq, jump_idx };
2856 gen_branchnil_branch
2859 return YJIT_END_BLOCK;
2862static codegen_status_t
2865 int32_t jump_offset = (int32_t)jit_get_arg(jit, 0);
2868 if (jump_offset < 0) {
2869 uint8_t *side_exit = yjit_side_exit(jit, ctx);
2870 yjit_check_ints(cb, side_exit);
2874 uint32_t jump_idx = jit_next_insn_idx(jit) + jump_offset;
2875 blockid_t jump_block = { jit->iseq, jump_idx };
2884 return YJIT_END_BLOCK;
2896jit_guard_known_klass(jitstate_t *jit, ctx_t *ctx, VALUE known_klass, insn_opnd_t insn_opnd, VALUE sample_instance, const int max_chain_depth, uint8_t *side_exit)
2898 val_type_t val_type = ctx_get_opnd_type(ctx, insn_opnd);
2900 if (known_klass == rb_cNilClass) {
2902 if (val_type.type != ETYPE_NIL) {
2905 ADD_COMMENT(cb, "guard object is nil");
2906 cmp(cb, REG0, imm_opnd(Qnil));
2907 jit_chain_guard(JCC_JNE, jit, ctx, max_chain_depth, side_exit);
2909 ctx_upgrade_opnd_type(ctx, insn_opnd, TYPE_NIL);
2912 else if (known_klass == rb_cTrueClass) {
2914 if (val_type.type != ETYPE_TRUE) {
2917 ADD_COMMENT(cb, "guard object is true");
2918 cmp(cb, REG0, imm_opnd(Qtrue));
2919 jit_chain_guard(JCC_JNE, jit, ctx, max_chain_depth, side_exit);
2921 ctx_upgrade_opnd_type(ctx, insn_opnd, TYPE_TRUE);
2924 else if (known_klass == rb_cFalseClass) {
2926 if (val_type.type != ETYPE_FALSE) {
2929 ADD_COMMENT(cb, "guard object is false");
2930 STATIC_ASSERT(qfalse_is_zero, Qfalse == 0);
2931 test(cb, REG0, REG0);
2932 jit_chain_guard(JCC_JNZ, jit, ctx, max_chain_depth, side_exit);
2934 ctx_upgrade_opnd_type(ctx, insn_opnd, TYPE_FALSE);
2941 if (val_type.type != ETYPE_FIXNUM || !val_type.is_imm) {
2944 ADD_COMMENT(cb, "guard object is fixnum");
2946 jit_chain_guard(JCC_JZ, jit, ctx, max_chain_depth, side_exit);
2947 ctx_upgrade_opnd_type(ctx, insn_opnd, TYPE_FIXNUM);
2954 if (val_type.type != ETYPE_SYMBOL || !val_type.is_imm) {
2957 ADD_COMMENT(cb, "guard object is static symbol");
2960 jit_chain_guard(JCC_JNE, jit, ctx, max_chain_depth, side_exit);
2961 ctx_upgrade_opnd_type(ctx, insn_opnd, TYPE_STATIC_SYMBOL);
2966 if (val_type.type != ETYPE_FLONUM || !val_type.is_imm) {
2970 ADD_COMMENT(cb, "guard object is flonum");
2971 mov(cb, REG1, REG0);
2974 jit_chain_guard(JCC_JNE, jit, ctx, max_chain_depth, side_exit);
2975 ctx_upgrade_opnd_type(ctx, insn_opnd, TYPE_FLONUM);
2989 ADD_COMMENT(cb, "guard known object with singleton class");
2991 jit_mov_gc_ptr(jit, cb, REG1, sample_instance);
2992 cmp(cb, REG0, REG1);
2993 jit_chain_guard(JCC_JNE, jit, ctx, max_chain_depth, side_exit);
3000 if (!val_type.is_heap) {
3001 ADD_COMMENT(cb, "guard not immediate");
3004 jit_chain_guard(JCC_JNZ, jit, ctx, max_chain_depth, side_exit);
3005 cmp(cb, REG0, imm_opnd(Qnil));
3006 jit_chain_guard(JCC_JBE, jit, ctx, max_chain_depth, side_exit);
3008 ctx_upgrade_opnd_type(ctx, insn_opnd, TYPE_HEAP);
3011 x86opnd_t klass_opnd = mem_opnd(64, REG0, offsetof(struct RBasic, klass));
3015 ADD_COMMENT(cb, "guard known class");
3016 jit_mov_gc_ptr(jit, cb, REG1, known_klass);
3017 cmp(cb, klass_opnd, REG1);
3018 jit_chain_guard(JCC_JNE, jit, ctx, max_chain_depth, side_exit);
3031 jit_mov_gc_ptr(jit, cb, C_ARG_REGS[1], cme->defined_class);
3034 call_ptr(cb, REG0, (void *)&rb_obj_is_kind_of);
3036 jz_ptr(cb, COUNTED_EXIT(jit, side_exit, send_se_protected_check_failed));
3050yjit_reg_method(VALUE klass, const char *mid_str, method_codegen_t gen_fn)
3061 RUBY_ASSERT(me->def->type == VM_METHOD_TYPE_CFUNC);
3063 st_insert(yjit_method_codegen_table, (st_data_t)me->def->method_serial, (st_data_t)gen_fn);
3072 const val_type_t recv_opnd = ctx_get_opnd_type(ctx, OPND_STACK(0));
3074 if (recv_opnd.type == ETYPE_NIL || recv_opnd.type == ETYPE_FALSE) {
3075 ADD_COMMENT(cb, "rb_obj_not(nil_or_false)");
3076 ctx_stack_pop(ctx, 1);
3077 x86opnd_t out_opnd = ctx_stack_push(ctx, TYPE_TRUE);
3078 mov(cb, out_opnd, imm_opnd(Qtrue));
3080 else if (recv_opnd.is_heap || recv_opnd.type != ETYPE_UNKNOWN) {
3082 ADD_COMMENT(cb, "rb_obj_not(truthy)");
3083 ctx_stack_pop(ctx, 1);
3084 x86opnd_t out_opnd = ctx_stack_push(ctx, TYPE_FALSE);
3085 mov(cb, out_opnd, imm_opnd(Qfalse));
3100 ADD_COMMENT(cb, "nil? == true");
3101 ctx_stack_pop(ctx, 1);
3102 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_TRUE);
3103 mov(cb, stack_ret, imm_opnd(Qtrue));
3111 ADD_COMMENT(cb, "nil? == false");
3112 ctx_stack_pop(ctx, 1);
3113 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_FALSE);
3114 mov(cb, stack_ret, imm_opnd(Qfalse));
3123 ADD_COMMENT(cb, "equal?");
3127 mov(cb, REG0, obj1);
3128 cmp(cb, REG0, obj2);
3129 mov(cb, REG0, imm_opnd(Qtrue));
3130 mov(cb, REG1, imm_opnd(Qfalse));
3131 cmovne(cb, REG0, REG1);
3133 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_IMM);
3134 mov(cb, stack_ret, REG0);
3139yjit_str_bytesize(VALUE str)
3147 ADD_COMMENT(cb, "String#bytesize");
3150 mov(cb, C_ARG_REGS[0], recv);
3151 call_ptr(cb, REG0, (void *)&yjit_str_bytesize);
3153 x86opnd_t out_opnd = ctx_stack_push(ctx, TYPE_FIXNUM);
3154 mov(cb, out_opnd, RAX);
3166 if (recv_known_klass && *recv_known_klass == rb_cString) {
3167 ADD_COMMENT(cb, "to_s on plain string");
3178 ADD_COMMENT(cb, "Thread.current");
3179 ctx_stack_pop(ctx, 1);
3185 mov(cb, REG0, member_opnd(REG0, rb_thread_t, self));
3187 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_HEAP);
3188 mov(cb, stack_ret, REG0);
3193static method_codegen_t
3196 method_codegen_t gen_fn;
3197 if (st_lookup(yjit_method_codegen_table, def->method_serial, (st_data_t *)&gen_fn)) {
3205c_method_tracing_currently_enabled(const jitstate_t *jit)
3207 rb_event_flag_t tracing_events;
3208 if (rb_multi_ractor_p()) {
3209 tracing_events = ruby_vm_event_enabled_global_flags;
3215 tracing_events = rb_ec_ractor_hooks(jit->ec)->events;
3221static codegen_status_t
3227 if (cfunc->argc < 0 && cfunc->argc != -1) {
3228 GEN_COUNTER_INC(cb, send_cfunc_ruby_array_varg);
3229 return YJIT_CANT_COMPILE;
3233 if (cfunc->argc >= 0 && cfunc->argc != argc) {
3234 GEN_COUNTER_INC(cb, send_cfunc_argc_mismatch);
3235 return YJIT_CANT_COMPILE;
3239 if (cfunc->argc >= 0 && argc + 1 > NUM_C_ARG_REGS) {
3240 GEN_COUNTER_INC(cb, send_cfunc_toomany_args);
3241 return YJIT_CANT_COMPILE;
3244 if (c_method_tracing_currently_enabled(jit)) {
3246 GEN_COUNTER_INC(cb, send_cfunc_tracing);
3247 return YJIT_CANT_COMPILE;
3252 method_codegen_t known_cfunc_codegen;
3253 if ((known_cfunc_codegen = lookup_cfunc_codegen(cme->def))) {
3254 if (known_cfunc_codegen(jit, ctx, ci, cme, block, argc, recv_known_klass)) {
3257 jit_jump_to_next_insn(jit, ctx);
3258 return YJIT_END_BLOCK;
3273 uint8_t *side_exit = yjit_side_exit(jit, ctx);
3276 yjit_check_ints(cb, side_exit);
3282 cmp(cb, REG_CFP, REG0);
3283 jle_ptr(cb, COUNTED_EXIT(jit, side_exit, send_se_cf_overflow));
3286 x86opnd_t recv = ctx_stack_opnd(ctx, argc);
3289 jit_save_pc(jit, REG0);
3295 jit_mov_gc_ptr(jit, cb, REG0, (VALUE)block);
3301 lea(cb, REG0, ctx_sp_opnd(ctx, sizeof(VALUE) * 3));
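// Fill the three env data slots just below REG0: the callee's method entry (cref_or_me), the block handler / specval, and the frame type flags.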
3307 jit_mov_gc_ptr(jit, cb, REG1, (VALUE)cme);
3308 mov(cb, mem_opnd(64, REG0, 8 * -3), REG1);
3315 or(cb, REG1, imm_opnd(1));
3316 mov(cb, mem_opnd(64, REG0, 8 * -2), REG1);
3319 mov(cb, mem_opnd(64, REG0, 8 * -2), imm_opnd(VM_BLOCK_HANDLER_NONE));
3324 uint64_t frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
3325 mov(cb, mem_opnd(64, REG0, 8 * -1), imm_opnd(frame_type));
3350 sub(cb, REG0, imm_opnd(sizeof(VALUE)));
3352 mov(cb, REG0, recv);
3356 if (YJIT_CHECK_MODE > 0) {
3358 mov(cb, C_ARG_REGS[0], recv);
3359 jit_mov_gc_ptr(jit, cb, C_ARG_REGS[1], (VALUE)ci);
3360 mov(cb, C_ARG_REGS[2], const_ptr_opnd((void *)cfunc->func));
3361 jit_mov_gc_ptr(jit, cb, C_ARG_REGS[3], (VALUE)cme);
3362 call_ptr(cb, REG0, (void *)&check_cfunc_dispatch);
3366 lea(cb, RAX, ctx_sp_opnd(ctx, 0));
3369 ctx_stack_pop(ctx, argc + 1);
3373 jit_save_sp(jit, ctx);
3376 if (cfunc->argc >= 0) {
3379 for (int32_t i = 0; i < argc + 1; ++i)
3383 mov(cb, c_arg_reg, stack_opnd);
3387 if (cfunc->argc == -1) {
3390 mov(cb, C_ARG_REGS[0], imm_opnd(argc));
3391 lea(cb, C_ARG_REGS[1], mem_opnd(64, RAX, -(argc) * SIZEOF_VALUE));
3392 mov(cb, C_ARG_REGS[2], mem_opnd(64, RAX, -(argc + 1) * SIZEOF_VALUE));
3399 call_ptr(cb, REG0, (void *)cfunc->func);
3402 record_global_inval_patch(cb, outline_full_cfunc_return_pos);
3405 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_UNKNOWN);
3406 mov(cb, stack_ret, RAX);
3416 ctx_clear_local_types(ctx);
3423 jit_jump_to_next_insn(jit, ctx);
3424 return YJIT_END_BLOCK;
3428gen_return_branch(codeblock_t *cb, uint8_t *target0, uint8_t *target1, uint8_t shape)
3437 mov(cb, REG0, const_ptr_opnd(target0));
3445rb_leaf_invokebuiltin_iseq_p(const rb_iseq_t *iseq)
3447 unsigned int invokebuiltin_len = insn_len(BIN(opt_invokebuiltin_delegate_leave));
3448 unsigned int leave_len = insn_len(BIN(leave));
3450 return (iseq->body->iseq_size == (invokebuiltin_len + leave_len) &&
3451 rb_vm_insn_addr2opcode((void *)iseq->body->iseq_encoded[0]) == BIN(opt_invokebuiltin_delegate_leave) &&
3452 rb_vm_insn_addr2opcode((void *)iseq->body->iseq_encoded[invokebuiltin_len]) == BIN(leave) &&
3453 iseq->body->builtin_inline_p
3459rb_leaf_builtin_function(const rb_iseq_t *iseq)
3461 if (!rb_leaf_invokebuiltin_iseq_p(iseq))
3466static codegen_status_t
3469 const rb_iseq_t *iseq = def_iseq_ptr(cme->def);
3476 const bool doing_kw_call = iseq->body->param.flags.has_kw;
3477 const bool supplying_kws = vm_ci_flag(ci) & VM_CALL_KWARG;
3479 if (vm_ci_flag(ci) & VM_CALL_TAILCALL) {
3481 GEN_COUNTER_INC(cb, send_iseq_tailcall);
3482 return YJIT_CANT_COMPILE;
3487 if (iseq->body->param.flags.has_rest ||
3488 iseq->body->param.flags.has_post ||
3489 iseq->body->param.flags.has_kwrest) {
3490 GEN_COUNTER_INC(cb, send_iseq_complex_callee);
3491 return YJIT_CANT_COMPILE;
3497 if (supplying_kws && !iseq->body->param.flags.has_kw) {
3498 GEN_COUNTER_INC(cb, send_iseq_complex_callee);
3499 return YJIT_CANT_COMPILE;
3504 if (supplying_kws && iseq->body->param.flags.accepts_no_kwarg) {
3505 GEN_COUNTER_INC(cb, send_iseq_complex_callee);
3506 return YJIT_CANT_COMPILE;
3510 int num_params = iseq->body->param.size;
3513 if (iseq->body->param.flags.has_block) {
3514 if (iseq->body->local_iseq == iseq) {
3523 GEN_COUNTER_INC(cb, send_iseq_complex_callee);
3524 return YJIT_CANT_COMPILE;
3528 uint32_t start_pc_offset = 0;
3530 const int required_num = iseq->body->param.lead_num;
3535 const int kw_arg_num = kw_arg ? kw_arg->keyword_len : 0;
3538 const int opts_filled = argc - required_num - kw_arg_num;
3539 const int opt_num = iseq->body->param.opt_num;
3540 const int opts_missing = opt_num - opts_filled;
3542 if (opts_filled < 0 || opts_filled > opt_num) {
3543 GEN_COUNTER_INC(cb, send_iseq_arity_error);
3544 return YJIT_CANT_COMPILE;
3550 if (doing_kw_call && opts_missing > 0) {
3551 GEN_COUNTER_INC(cb, send_iseq_complex_callee);
3552 return YJIT_CANT_COMPILE;
3556 num_params -= opt_num - opts_filled;
3557 start_pc_offset = (uint32_t)iseq->body->param.opt_table[opts_filled];
3560 if (doing_kw_call) {
3566 const struct rb_iseq_param_keyword *keyword = iseq->body->param.keyword;
3568 int required_kwargs_filled = 0;
3570 if (keyword->num > 30) {
3574 GEN_COUNTER_INC(cb, send_iseq_complex_callee);
3575 return YJIT_CANT_COMPILE;
3579 if (supplying_kws) {
3582 const ID *callee_kwargs = keyword->table;
3589 ID *caller_kwargs = ALLOCA_N(VALUE, kw_arg->keyword_len);
3590 for (int kwarg_idx = 0; kwarg_idx < kw_arg->keyword_len; kwarg_idx++)
3591 caller_kwargs[kwarg_idx] = SYM2ID(kw_arg->keywords[kwarg_idx]);
3596 for (int caller_idx = 0; caller_idx < kw_arg->keyword_len; caller_idx++) {
3599 for (callee_idx = 0; callee_idx < keyword->num; callee_idx++) {
3600 if (caller_kwargs[caller_idx] == callee_kwargs[callee_idx]) {
3608 if (callee_idx == keyword->num) {
3609 GEN_COUNTER_INC(cb, send_iseq_kwargs_mismatch);
3610 return YJIT_CANT_COMPILE;
3614 if (callee_idx < keyword->required_num) {
3615 required_kwargs_filled++;
3620 RUBY_ASSERT(required_kwargs_filled <= keyword->required_num);
3621 if (required_kwargs_filled != keyword->required_num) {
3622 GEN_COUNTER_INC(cb, send_iseq_kwargs_mismatch);
3623 return YJIT_CANT_COMPILE;
3628 const int num_locals = iseq->body->local_table_size - num_params;
3631 uint8_t *side_exit = yjit_side_exit(jit, ctx);
3634 yjit_check_ints(cb, side_exit);
3638 if (leaf_builtin && !block && leaf_builtin->argc + 1 <= NUM_C_ARG_REGS) {
3639 ADD_COMMENT(cb, "inlined leaf builtin");
3642 mov(cb, C_ARG_REGS[0], REG_EC);
3645 for (int32_t i = 0; i < leaf_builtin->argc + 1; i++) {
3646 x86opnd_t stack_opnd = ctx_stack_opnd(ctx, leaf_builtin->argc - i);
3647 x86opnd_t c_arg_reg = C_ARG_REGS[i + 1];
3648 mov(cb, c_arg_reg, stack_opnd);
3650 ctx_stack_pop(ctx, leaf_builtin->argc + 1);
3651 call_ptr(cb, REG0, (void *)leaf_builtin->func_ptr);
3654 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_UNKNOWN);
3655 mov(cb, stack_ret, RAX);
3660 return YJIT_KEEP_COMPILING;
3666 ADD_COMMENT(cb, "stack overflow check");
3667 lea(cb, REG0, ctx_sp_opnd(ctx, sizeof(VALUE) * (num_locals + iseq->body->stack_max) + 2 * sizeof(rb_control_frame_t)));
3668 cmp(cb, REG_CFP, REG0);
3669 jle_ptr(cb, COUNTED_EXIT(jit, side_exit, send_se_cf_overflow));
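// Keyword-argument handling: push default values for any optional keywords the caller
// did not supply (tracking them in unspecified_bits), then shuffle the caller-supplied
// keyword values on the stack into the order the callee expects.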
3671 if (doing_kw_call) {
3677 const int args_before_kw = required_num + opt_num;
3681 int caller_keyword_len = 0;
3682 const VALUE *caller_keywords = NULL;
3683 if (vm_ci_kwarg(ci)) {
3684 caller_keyword_len = vm_ci_kwarg(ci)->keyword_len;
3685 caller_keywords = &vm_ci_kwarg(ci)->keywords[0];
3690 const struct rb_iseq_param_keyword *const keyword = iseq->body->param.keyword;
3692 ADD_COMMENT(cb, "keyword args");
3696 const ID *callee_kwargs = keyword->table;
3698 int total_kwargs = keyword->num;
3705 ID *caller_kwargs = ALLOCA_N(VALUE, total_kwargs);
3706 int kwarg_idx; // reused below when appending default keywords
3707 for (kwarg_idx = 0; kwarg_idx < caller_keyword_len; kwarg_idx++) {
3708 caller_kwargs[kwarg_idx] = SYM2ID(caller_keywords[kwarg_idx]);
3711 int unspecified_bits = 0;
3713 for (int callee_idx = keyword->required_num; callee_idx < total_kwargs; callee_idx++) {
3714 bool already_passed = false;
3715 ID callee_kwarg = callee_kwargs[callee_idx];
3717 for (int caller_idx = 0; caller_idx < caller_keyword_len; caller_idx++) {
3718 if (caller_kwargs[caller_idx] == callee_kwarg) {
3719 already_passed = true;
3724 if (!already_passed) {
3729 x86opnd_t default_arg = ctx_stack_push(ctx, TYPE_UNKNOWN);
3730 VALUE default_value = keyword->default_values[callee_idx - keyword->required_num];
3732 if (default_value == Qundef) {
3736 unspecified_bits |= 0x01 << (callee_idx - keyword->required_num);
3737 default_value = Qnil;
3741 jit_mov_gc_ptr(jit, cb, REG0, default_value);
3742 mov(cb, default_arg, REG0);
3744 caller_kwargs[kwarg_idx++] = callee_kwarg;
3752 for (kwarg_idx = 0; kwarg_idx < total_kwargs; kwarg_idx++) {
3753 ID callee_kwarg = callee_kwargs[kwarg_idx];
3758 if (callee_kwarg == caller_kwargs[kwarg_idx]) continue;
3763 for (int swap_idx = kwarg_idx + 1; swap_idx < total_kwargs; swap_idx++) {
3764 if (callee_kwarg == caller_kwargs[swap_idx]) {
3767 stack_swap(ctx, cb, argc - 1 - swap_idx - args_before_kw, argc - 1 - kwarg_idx - args_before_kw, REG1, REG0);
3772 ID tmp = caller_kwargs[kwarg_idx];
3773 caller_kwargs[kwarg_idx] = caller_kwargs[swap_idx];
3774 caller_kwargs[swap_idx] = tmp;
3784 mov(cb, ctx_stack_opnd(ctx, -1), imm_opnd(INT2FIX(unspecified_bits)));
3787 x86opnd_t recv = ctx_stack_opnd(ctx, argc);
3790 ADD_COMMENT(cb, "store caller sp");
3791 lea(cb, REG0, ctx_sp_opnd(ctx, sizeof(VALUE) * -(argc + 1)));
3795 jit_save_pc(jit, REG0);
3801 jit_mov_gc_ptr(jit, cb, REG0, (VALUE)block);
3806 lea(cb, REG0, ctx_sp_opnd(ctx, sizeof(VALUE) * (3 + num_locals + doing_kw_call)));
3809 for (int i = 0; i < num_locals; i++) {
3810 mov(cb, mem_opnd(64, REG0, sizeof(VALUE) * (i - num_locals - 3)), imm_opnd(Qnil));
3813 ADD_COMMENT(cb, "push env");
3816 jit_mov_gc_ptr(jit, cb, REG1, (VALUE)cme);
3819 mov(cb, mem_opnd(64, REG0, 8 * -3), REG1);
3826 or(cb, REG1, imm_opnd(1));
3827 mov(cb, mem_opnd(64, REG0, 8 * -2), REG1);
3830 mov(cb, mem_opnd(64, REG0, 8 * -2), imm_opnd(VM_BLOCK_HANDLER_NONE));
3835 uint64_t frame_type = VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL;
3836 mov(cb, mem_opnd(64, REG0, 8 * -1), imm_opnd(frame_type));
3838 ADD_COMMENT(cb, "push callee CFP");
3853 mov(cb, REG1, recv);
3855 mov(cb, REG_SP, REG0);
3858 sub(cb, REG0, imm_opnd(sizeof(VALUE)));
3860 jit_mov_gc_ptr(jit, cb, REG0, (VALUE)iseq);
3870 blockid_t return_block = { jit->iseq, jit_next_insn_idx(jit) };
3873 ctx_t callee_ctx = DEFAULT_CTX;
3876 for (int32_t arg_idx = 0; arg_idx < argc; ++arg_idx) {
3877 val_type_t arg_type = ctx_get_opnd_type(ctx, OPND_STACK(argc - arg_idx - 1));
3878 ctx_set_local_type(&callee_ctx, arg_idx, arg_type);
3880 val_type_t recv_type = ctx_get_opnd_type(ctx, OPND_STACK(argc));
3881 ctx_upgrade_opnd_type(&callee_ctx, OPND_SELF, recv_type);
3884 ctx_clear_local_types(ctx);
3889 ctx_t return_ctx = *ctx;
3890 ctx_stack_pop(&return_ctx, argc + 1);
3891 ctx_stack_push(&return_ctx, TYPE_UNKNOWN);
3892 return_ctx.sp_offset = 1;
3893 return_ctx.chain_depth = 0;
3916 return YJIT_END_BLOCK;
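// Codegen for Struct member readers (gen_struct_aref): loads the member either from the
// embedded array or from the heap pointer of the RStruct, depending on its layout.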
3919static codegen_status_t
3921 if (vm_ci_argc(ci) != 0) {
3922 return YJIT_CANT_COMPILE;
3925 const unsigned int off = cme->def->body.optimized.index;
3934 uint64_t native_off = (uint64_t)off * (uint64_t)SIZEOF_VALUE;
3935 if (native_off > (uint64_t)INT32_MAX) {
3936 return YJIT_CANT_COMPILE;
3944 bool embedded = FL_TEST_RAW(comptime_recv, RSTRUCT_EMBED_LEN_MASK);
3946 ADD_COMMENT(cb, "struct aref");
3950 mov(cb, REG0, recv);
3953 mov(cb, REG0, member_opnd_idx(REG0, struct RStruct, as.ary, off));
3956 mov(cb, REG0, member_opnd(REG0, struct RStruct, as.heap.ptr));
3960 x86opnd_t ret = ctx_stack_push(ctx, TYPE_UNKNOWN);
3963 jit_jump_to_next_insn(jit, ctx);
3964 return YJIT_END_BLOCK;
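// Codegen for Struct member writers (gen_struct_aset): passes the receiver, member index
// and value to the runtime and pushes the result.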
3967static codegen_status_t
3969 if (vm_ci_argc(ci) != 1) {
3970 return YJIT_CANT_COMPILE;
3973 const unsigned int off = cme->def->body.optimized.index;
3979 ADD_COMMENT(cb, "struct aset");
3984 mov(cb, C_ARG_REGS[0], recv);
3985 mov(cb, C_ARG_REGS[1], imm_opnd(off));
3986 mov(cb, C_ARG_REGS[2], val);
3989 x86opnd_t ret = ctx_stack_push(ctx, TYPE_UNKNOWN);
3992 jit_jump_to_next_insn(jit, ctx);
3993 return YJIT_END_BLOCK;
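// Common entry point for send-like instructions (gen_send_general): guards the receiver's
// class, checks method visibility, then dispatches on the resolved method entry type
// (ISeq, cfunc, ivar getter/setter, optimized methods, etc.).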
3999static codegen_status_t
4014 int32_t argc = (int32_t)vm_ci_argc(ci);
4015 ID mid = vm_ci_mid(ci);
4018 if (vm_ci_flag(ci) & VM_CALL_KW_SPLAT) {
4019 GEN_COUNTER_INC(cb, send_kw_splat);
4020 return YJIT_CANT_COMPILE;
4025 if ((vm_ci_flag(ci) & VM_CALL_ARGS_SPLAT) != 0) {
4026 GEN_COUNTER_INC(cb, send_args_splat);
4027 return YJIT_CANT_COMPILE;
4029 if ((vm_ci_flag(ci) & VM_CALL_ARGS_BLOCKARG) != 0) {
4030 GEN_COUNTER_INC(cb, send_block_arg);
4031 return YJIT_CANT_COMPILE;
4035 if (!jit_at_current_insn(jit)) {
4036 defer_compilation(jit, ctx);
4037 return YJIT_END_BLOCK;
4040 VALUE comptime_recv = jit_peek_at_stack(jit, ctx, argc);
4041 VALUE comptime_recv_klass = CLASS_OF(comptime_recv);
4044 uint8_t *side_exit = yjit_side_exit(jit, ctx);
4047 x86opnd_t recv = ctx_stack_opnd(ctx, argc);
4049 mov(cb, REG0, recv);
4050 if (!jit_guard_known_klass(jit, ctx, comptime_recv_klass, recv_opnd, comptime_recv, SEND_MAX_DEPTH, side_exit)) {
4051 return YJIT_CANT_COMPILE;
4058 return YJIT_CANT_COMPILE;
4061 switch (METHOD_ENTRY_VISI(cme)) {
4062 case METHOD_VISI_PUBLIC:
4065 case METHOD_VISI_PRIVATE:
4066 if (!(vm_ci_flag(ci) & VM_CALL_FCALL)) {
4069 return YJIT_CANT_COMPILE;
4072 case METHOD_VISI_PROTECTED:
4073 jit_protected_callee_ancestry_guard(jit, cb, cme, side_exit);
4075 case METHOD_VISI_UNDEF:
4076 RUBY_ASSERT(false && "cmes should always have a visibility");
4082 assume_method_lookup_stable(comptime_recv_klass, cme, jit);
4087 switch (cme->def->type) {
4088 case VM_METHOD_TYPE_ISEQ:
4089 return gen_send_iseq(jit, ctx, ci, cme, block, argc);
4090 case VM_METHOD_TYPE_CFUNC:
4091 if ((vm_ci_flag(ci) & VM_CALL_KWARG) != 0) {
4092 GEN_COUNTER_INC(cb, send_cfunc_kwargs);
4093 return YJIT_CANT_COMPILE;
4095 return gen_send_cfunc(jit, ctx, ci, cme, block, argc, &comptime_recv_klass);
4096 case VM_METHOD_TYPE_IVAR:
4099 GEN_COUNTER_INC(cb, send_getter_arity);
4100 return YJIT_CANT_COMPILE;
4102 if (c_method_tracing_currently_enabled(jit)) {
4112 GEN_COUNTER_INC(cb, send_cfunc_tracing);
4113 return YJIT_CANT_COMPILE;
4116 mov(cb, REG0, recv);
4118 ID ivar_name = cme->def->body.attr.id;
4119 return gen_get_ivar(jit, ctx, SEND_MAX_DEPTH, comptime_recv, ivar_name, recv_opnd, side_exit);
4120 case VM_METHOD_TYPE_ATTRSET:
4121 if ((vm_ci_flag(ci) & VM_CALL_KWARG) != 0) {
4122 GEN_COUNTER_INC(cb, send_attrset_kwargs);
4123 return YJIT_CANT_COMPILE;
4126 GEN_COUNTER_INC(cb, send_ivar_set_method);
4127 return YJIT_CANT_COMPILE;
4129 else if (c_method_tracing_currently_enabled(jit)) {
4132 GEN_COUNTER_INC(cb, send_cfunc_tracing);
4133 return YJIT_CANT_COMPILE;
4136 ID ivar_name = cme->def->body.attr.id;
4137 return gen_set_ivar(jit, ctx, comptime_recv, comptime_recv_klass, ivar_name);
4140 case VM_METHOD_TYPE_BMETHOD:
4141 GEN_COUNTER_INC(cb, send_bmethod);
4142 return YJIT_CANT_COMPILE;
4143 case VM_METHOD_TYPE_ZSUPER:
4144 GEN_COUNTER_INC(cb, send_zsuper_method);
4145 return YJIT_CANT_COMPILE;
4146 case VM_METHOD_TYPE_ALIAS: {
4148 cme = rb_aliased_callable_method_entry(cme);
4151 case VM_METHOD_TYPE_UNDEF:
4152 GEN_COUNTER_INC(cb, send_undef_method);
4153 return YJIT_CANT_COMPILE;
4154 case VM_METHOD_TYPE_NOTIMPLEMENTED:
4155 GEN_COUNTER_INC(cb, send_not_implemented_method);
4156 return YJIT_CANT_COMPILE;
4158 case VM_METHOD_TYPE_OPTIMIZED:
4159 switch (cme->def->body.optimized.type) {
4160 case OPTIMIZED_METHOD_TYPE_SEND:
4161 GEN_COUNTER_INC(cb, send_optimized_method_send);
4162 return YJIT_CANT_COMPILE;
4163 case OPTIMIZED_METHOD_TYPE_CALL:
4164 GEN_COUNTER_INC(cb, send_optimized_method_call);
4165 return YJIT_CANT_COMPILE;
4166 case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
4167 GEN_COUNTER_INC(cb, send_optimized_method_block_call);
4168 return YJIT_CANT_COMPILE;
4169 case OPTIMIZED_METHOD_TYPE_STRUCT_AREF:
4170 return gen_struct_aref(jit, ctx, ci, cme, comptime_recv, comptime_recv_klass);
4171 case OPTIMIZED_METHOD_TYPE_STRUCT_ASET:
4172 return gen_struct_aset(jit, ctx, ci, cme, comptime_recv, comptime_recv_klass);
4174 rb_bug("unknown optimized method type (%d)", cme->def->body.optimized.type);
4177 case VM_METHOD_TYPE_MISSING:
4178 GEN_COUNTER_INC(cb, send_missing_method);
4179 return YJIT_CANT_COMPILE;
4180 case VM_METHOD_TYPE_REFINED:
4181 GEN_COUNTER_INC(cb, send_refined_method);
4182 return YJIT_CANT_COMPILE;
4191static codegen_status_t
4195 return gen_send_general(jit, ctx, cd, NULL);
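// send: passes the attached block along to the general send path.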
4198static codegen_status_t
4203 return gen_send_general(jit, ctx, cd, block);
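// invokesuper: resolve the superclass method at compile time, guard at run time that the
// method entry and the "no block handler" state still hold, then reuse the ISeq/cfunc
// send paths.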
4206static codegen_status_t
4213 if (!jit_at_current_insn(jit)) {
4214 defer_compilation(jit, ctx);
4215 return YJIT_END_BLOCK;
4220 return YJIT_CANT_COMPILE;
4224 VALUE current_defined_class = me->defined_class;
4225 ID mid = me->def->original_id;
4227 if (me != rb_callable_method_entry(current_defined_class, me->called_id)) {
4231 return YJIT_CANT_COMPILE;
4236 return YJIT_CANT_COMPILE;
4238 VALUE comptime_superclass = RCLASS_SUPER(RCLASS_ORIGIN(current_defined_class));
4241 int32_t argc = (int32_t)vm_ci_argc(ci);
4245 if ((vm_ci_flag(ci) & VM_CALL_ARGS_SPLAT) != 0) {
4246 GEN_COUNTER_INC(cb, send_args_splat);
4247 return YJIT_CANT_COMPILE;
4249 if ((vm_ci_flag(ci) & VM_CALL_KWARG) != 0) {
4250 GEN_COUNTER_INC(cb, send_keywords);
4251 return YJIT_CANT_COMPILE;
4253 if ((vm_ci_flag(ci) & VM_CALL_KW_SPLAT) != 0) {
4254 GEN_COUNTER_INC(cb, send_kw_splat);
4255 return YJIT_CANT_COMPILE;
4257 if ((vm_ci_flag(ci) & VM_CALL_ARGS_BLOCKARG) != 0) {
4258 GEN_COUNTER_INC(cb, send_block_arg);
4259 return YJIT_CANT_COMPILE;
4267 VALUE comptime_recv = jit_peek_at_stack(jit, ctx, argc);
4268 if (!rb_obj_is_kind_of(comptime_recv, current_defined_class)) {
4269 return YJIT_CANT_COMPILE;
4276 return YJIT_CANT_COMPILE;
4280 switch (cme->def->type) {
4281 case VM_METHOD_TYPE_ISEQ:
4282 case VM_METHOD_TYPE_CFUNC:
4286 return YJIT_CANT_COMPILE;
4290 uint8_t *side_exit = yjit_side_exit(jit, ctx);
4292 if (jit->ec->cfp->ep[VM_ENV_DATA_INDEX_ME_CREF] != (VALUE)me) {
4294 return YJIT_CANT_COMPILE;
4297 ADD_COMMENT(cb, "guard known me");
4300 jit_mov_gc_ptr(jit, cb, REG1, (VALUE)me);
4301 cmp(cb, ep_me_opnd, REG1);
4302 jne_ptr(cb, COUNTED_EXIT(jit, side_exit, invokesuper_me_changed));
4311 ADD_COMMENT(cb, "guard no block given");
4314 cmp(cb, ep_specval_opnd, imm_opnd(VM_BLOCK_HANDLER_NONE));
4315 jne_ptr(cb, COUNTED_EXIT(jit, side_exit, invokesuper_block));
4319 x86opnd_t recv = ctx_stack_opnd(ctx, argc);
4320 mov(cb, REG0, recv);
4324 assume_method_lookup_stable(current_defined_class, me, jit);
4325 assume_method_lookup_stable(comptime_superclass, cme, jit);
4328 ctx_clear_local_types(ctx);
4330 switch (cme->def->type) {
4331 case VM_METHOD_TYPE_ISEQ:
4332 return gen_send_iseq(jit, ctx, ci, cme, block, argc);
4333 case VM_METHOD_TYPE_CFUNC:
4334 return gen_send_cfunc(jit, ctx, ci, cme, block, argc, NULL);
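// leave: check for interrupts, pop the return value, store it for the caller, and jump to
// the JIT return address saved in the control frame.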
4342static codegen_status_t
4349 uint8_t *side_exit = yjit_side_exit(jit, ctx);
4355 ADD_COMMENT(cb, "check for interrupts");
4356 yjit_check_ints(cb, COUNTED_EXIT(jit, side_exit, leave_se_interrupt));
4359 mov(cb, REG0, ctx_stack_pop(ctx, 1));
4369 mov(cb, mem_opnd(64, REG_SP, 0), REG0);
4373 jmp_rm(cb, mem_opnd(64, REG_CFP, offset_to_jit_return));
4375 return YJIT_END_BLOCK;
4378RUBY_EXTERN rb_serial_t ruby_vm_global_constant_state;
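// getglobal / setglobal: flush PC and SP, then call rb_gvar_get / rb_gvar_set at run time.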
4380static codegen_status_t
4383 ID gid = jit_get_arg(jit, 0);
4386 jit_prepare_routine_call(jit, ctx, REG0);
4388 mov(cb, C_ARG_REGS[0], imm_opnd(gid));
4390 call_ptr(cb, REG0, (void *)&rb_gvar_get);
4392 x86opnd_t top = ctx_stack_push(ctx, TYPE_UNKNOWN);
4395 return YJIT_KEEP_COMPILING;
4398static codegen_status_t
4401 ID gid = jit_get_arg(jit, 0);
4405 jit_prepare_routine_call(jit, ctx, REG0);
4407 mov(cb, C_ARG_REGS[0], imm_opnd(gid));
4411 mov(cb, C_ARG_REGS[1], val);
4413 call_ptr(cb, REG0, (void *)&rb_gvar_set);
4415 return YJIT_KEEP_COMPILING;
4418static codegen_status_t
4423 jit_prepare_routine_call(jit, ctx, REG0);
4428 mov(cb, C_ARG_REGS[0], str);
4429 mov(cb, C_ARG_REGS[1], val);
4431 call_ptr(cb, REG0, (void *)rb_obj_as_string_result);
4434 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_STRING);
4435 mov(cb, stack_ret, RAX);
4437 return YJIT_KEEP_COMPILING;
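// objtostring: when the receiver is already known to be a String at compile time, leave it
// on the stack untouched; otherwise fall back to the general send path.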
4440static codegen_status_t
4443 if (!jit_at_current_insn(jit)) {
4444 defer_compilation(jit, ctx);
4445 return YJIT_END_BLOCK;
4448 x86opnd_t recv = ctx_stack_opnd(ctx, 0);
4449 VALUE comptime_recv = jit_peek_at_stack(jit, ctx, 0);
4452 uint8_t *side_exit = yjit_side_exit(jit, ctx);
4454 mov(cb, REG0, recv);
4455 jit_guard_known_klass(jit, ctx, CLASS_OF(comptime_recv), OPND_STACK(0), comptime_recv, SEND_MAX_DEPTH, side_exit);
4457 return YJIT_KEEP_COMPILING;
4461 return gen_send_general(jit, ctx, cd, NULL);
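// toregexp: collect cnt stack values into a temporary array, build the Regexp with
// rb_reg_new_ary, then clean up and push the result.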
4465static codegen_status_t
4468 rb_num_t opt = jit_get_arg(jit, 0);
4469 rb_num_t cnt = jit_get_arg(jit, 1);
4473 jit_prepare_routine_call(jit, ctx, REG0);
4475 x86opnd_t values_ptr = ctx_sp_opnd(ctx, -(sizeof(VALUE) * (uint32_t)cnt));
4476 ctx_stack_pop(ctx, cnt);
4478 mov(cb, C_ARG_REGS[0], imm_opnd(0));
4479 mov(cb, C_ARG_REGS[1], imm_opnd(cnt));
4480 lea(cb, C_ARG_REGS[2], values_ptr);
4481 call_ptr(cb, REG0, (void *)&rb_ary_tmp_new_from_values);
4486 mov(cb, C_ARG_REGS[0], RAX);
4487 mov(cb, C_ARG_REGS[1], imm_opnd(opt));
4488 call_ptr(cb, REG0, (void *)&rb_reg_new_ary);
4493 pop(cb, C_ARG_REGS[0]);
4496 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_UNKNOWN);
4497 mov(cb, stack_ret, RAX);
4502 return YJIT_KEEP_COMPILING;
4505static codegen_status_t
4509 jit_prepare_routine_call(jit, ctx, REG0);
4513 mov(cb, C_ARG_REGS[0], str);
4518 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_UNKNOWN);
4519 mov(cb, stack_ret, RAX);
4521 return YJIT_KEEP_COMPILING;
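// getspecial: fetch back-reference data (such as $~ and $1) via rb_backref_get and the
// matching rb_reg_* helper, selected by the type operand.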
4524static codegen_status_t
4531 rb_num_t type = jit_get_arg(jit, 1);
4535 return YJIT_CANT_COMPILE;
4537 else if (type & 0x01) {
4541 jit_prepare_routine_call(jit, ctx, REG0);
4544 ADD_COMMENT(cb, "rb_backref_get");
4546 mov(cb, C_ARG_REGS[0], RAX);
4548 switch (type >> 1) {
4550 ADD_COMMENT(cb, "rb_reg_last_match");
4554 ADD_COMMENT(cb, "rb_reg_match_pre");
4558 ADD_COMMENT(cb, "rb_reg_match_post");
4562 ADD_COMMENT(cb, "rb_reg_match_last");
4566 rb_bug("invalid back-ref");
4569 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_UNKNOWN);
4570 mov(cb, stack_ret, RAX);
4572 return YJIT_KEEP_COMPILING;
4578 jit_prepare_routine_call(jit, ctx, REG0);
4581 ADD_COMMENT(cb, "rb_backref_get");
4585 ADD_COMMENT(cb, "rb_reg_nth_match");
4586 mov(cb, C_ARG_REGS[0], imm_opnd(type >> 1));
4587 mov(cb, C_ARG_REGS[1], RAX);
4590 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_UNKNOWN);
4591 mov(cb, stack_ret, RAX);
4593 return YJIT_KEEP_COMPILING;
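// getclassvariable / setclassvariable: call the rb_vm_getclassvariable /
// rb_vm_setclassvariable runtime helpers with the current control frame.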
4600static codegen_status_t
4604 jit_prepare_routine_call(jit, ctx, REG0);
4607 mov(cb, C_ARG_REGS[1], REG_CFP);
4608 mov(cb, C_ARG_REGS[2], imm_opnd(jit_get_arg(jit, 0)));
4609 mov(cb, C_ARG_REGS[3], imm_opnd(jit_get_arg(jit, 1)));
4611 call_ptr(cb, REG0, (void *)rb_vm_getclassvariable);
4613 x86opnd_t stack_top = ctx_stack_push(ctx, TYPE_UNKNOWN);
4614 mov(cb, stack_top, RAX);
4616 return YJIT_KEEP_COMPILING;
4622static codegen_status_t
4626 jit_prepare_routine_call(jit, ctx, REG0);
4629 mov(cb, C_ARG_REGS[1], REG_CFP);
4630 mov(cb, C_ARG_REGS[2], imm_opnd(jit_get_arg(jit, 0)));
4631 mov(cb, C_ARG_REGS[3], ctx_stack_pop(ctx, 1));
4632 mov(cb, C_ARG_REGS[4], imm_opnd(jit_get_arg(jit, 1)));
4634 call_ptr(cb, REG0, (void *)rb_vm_setclassvariable);
4636 return YJIT_KEEP_COMPILING;
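// opt_getinlinecache: if the constant cache is warm, either re-check it at run time with
// rb_vm_ic_hit_p, or assume single-ractor mode and a stable constant state and bake the
// cached value in, then jump over the cache-filling code.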
4639static codegen_status_t
4642 VALUE jump_offset = jit_get_arg(jit, 0);
4643 VALUE const_cache_as_value = jit_get_arg(jit, 1);
4644 IC ic = (IC)const_cache_as_value;
4649 GET_IC_SERIAL(ice) != ruby_vm_global_constant_state) {
4652 return YJIT_CANT_COMPILE;
4657 jit_ensure_block_entry_exit(jit);
4661 uint8_t *side_exit = yjit_side_exit(jit, ctx);
4664 bool rb_vm_ic_hit_p(IC ic, const VALUE *reg_ep);
4665 mov(cb, C_ARG_REGS[0], const_ptr_opnd((void *)ic));
4667 call_ptr(cb, REG0, (void *)rb_vm_ic_hit_p);
4671 jz_ptr(cb, COUNTED_EXIT(jit, side_exit, opt_getinlinecache_miss));
4674 mov(cb, REG0, const_ptr_opnd((void *)ic));
4676 x86opnd_t stack_top = ctx_stack_push(ctx, TYPE_UNKNOWN);
4678 mov(cb, stack_top, REG0);
4683 if (!assume_single_ractor_mode(jit))
return YJIT_CANT_COMPILE;
4687 assume_stable_global_constant_state(jit);
4689 jit_putobject(jit, ctx, ice->value);
4693 uint32_t jump_idx = jit_next_insn_idx(jit) + (int32_t)jump_offset;
4697 (blockid_t){ .iseq = jit->iseq, .idx = jump_idx }
4700 return YJIT_END_BLOCK;
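// getblockparamproxy: guard that the block parameter has not been modified and that the
// block handler is an ISeq block, then push rb_block_param_proxy.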
4706static codegen_status_t
4711 uint8_t *side_exit = yjit_side_exit(jit, ctx);
4714 uint32_t level = (uint32_t)jit_get_arg(jit, 1);
4717 gen_get_ep(cb, REG0, level);
4720 test(cb, mem_opnd(64, REG0, SIZEOF_VALUE * VM_ENV_DATA_INDEX_FLAGS), imm_opnd(VM_FRAME_FLAG_MODIFIED_BLOCK_PARAM));
4721 jnz_ptr(cb, COUNTED_EXIT(jit, side_exit, gbpp_block_param_modified));
4725 mov(cb, REG0, mem_opnd(64, REG0, SIZEOF_VALUE * VM_ENV_DATA_INDEX_SPECVAL));
4728 and(cb, REG0_8, imm_opnd(0x3));
4731 cmp(cb, REG0_8, imm_opnd(0x1));
4732 jnz_ptr(cb, COUNTED_EXIT(jit, side_exit, gbpp_block_handler_not_iseq));
4735 mov(cb, REG0, const_ptr_opnd((void *)rb_block_param_proxy));
4737 x86opnd_t top = ctx_stack_push(ctx, TYPE_HEAP);
4740 return YJIT_KEEP_COMPILING;
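// invokebuiltin: call the builtin's C function with the EC and arguments taken from the
// stack; bail out if there are more arguments than available C argument registers.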
4743static codegen_status_t
4749 if (bf->argc + 2 > NUM_C_ARG_REGS) {
4750 return YJIT_CANT_COMPILE;
4754 jit_prepare_routine_call(jit, ctx, REG0);
4757 mov(cb, C_ARG_REGS[0], REG_EC);
4761 for (int32_t i = 0; i < bf->argc; i++) {
4762 x86opnd_t stack_opnd = ctx_stack_opnd(ctx, bf->argc - i - 1);
4763 x86opnd_t c_arg_reg = C_ARG_REGS[2 + i];
4764 mov(cb, c_arg_reg, stack_opnd);
4767 call_ptr(cb, REG0, (void *)bf->func_ptr);
4770 ctx_stack_pop(ctx, bf->argc);
4771 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_UNKNOWN);
4772 mov(cb, stack_ret, RAX);
4774 return YJIT_KEEP_COMPILING;
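// opt_invokebuiltin_delegate(_leave): like invokebuiltin, but the arguments are read
// straight out of the caller's locals starting at start_index.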
4780static codegen_status_t
4784 int32_t start_index = (int32_t)jit_get_arg(jit, 1);
4787 if (bf->argc + 2 > NUM_C_ARG_REGS) {
4788 return YJIT_CANT_COMPILE;
4792 jit_prepare_routine_call(jit, ctx, REG0);
4800 mov(cb, C_ARG_REGS[0], REG_EC);
4804 for (int32_t i = 0; i < bf->argc; i++) {
4805 const int32_t offs = -jit->iseq->body->local_table_size - VM_ENV_DATA_SIZE + 1 + start_index + i;
4807 x86opnd_t c_arg_reg = C_ARG_REGS[i + 2];
4808 mov(cb, c_arg_reg, local_opnd);
4810 call_ptr(cb, REG0, (void *)bf->func_ptr);
4813 x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_UNKNOWN);
4814 mov(cb, stack_ret, RAX);
4816 return YJIT_KEEP_COMPILING;
4819 static int tracing_invalidate_all_i(void *vstart, void *vend, size_t stride, void *data);
4820 static void invalidate_all_blocks_for_tracing(const rb_iseq_t *iseq);
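// Invalidate all generated code when TracePoint instrumentation is enabled: patch every
// recorded inline position to jump to its outlined target, freeze the bytes written so
// far, and drop the block metadata of every ISeq in the heap.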
4843 rb_yjit_tracing_invalidate_all(void)
4845 if (!rb_yjit_enabled_p()) return;
4852 rb_objspace_each_objects(tracing_invalidate_all_i, NULL);
4855 const uint32_t old_pos = cb->write_pos;
4856 rb_darray_for(global_inval_patches, patch_idx) {
4857 struct codepage_patch patch = rb_darray_get(global_inval_patches, patch_idx);
4858 cb_set_pos(cb, patch.inline_patch_pos);
4859 uint8_t *jump_target = cb_get_ptr(ocb, patch.outlined_target_pos);
4860 jmp_ptr(cb, jump_target);
4862 cb_set_pos(cb, old_pos);
4869 RUBY_ASSERT_ALWAYS(yjit_codepage_frozen_bytes <= old_pos && "frozen bytes should increase monotonically");
4870 yjit_codepage_frozen_bytes = old_pos;
4872 cb_mark_all_executable(ocb);
4873 cb_mark_all_executable(cb);
4878 tracing_invalidate_all_i(void *vstart, void *vend, size_t stride, void *data)
4880 VALUE v = (VALUE)vstart;
4881 for (; v != (VALUE)vend; v += stride) {
4882 void *ptr = asan_poisoned_object_p(v);
4883 asan_unpoison_object(v, false);
4885 if (rb_obj_is_iseq(v)) {
4887 invalidate_all_blocks_for_tracing(iseq);
4890 asan_poison_object_if(ptr, v);
4896 invalidate_all_blocks_for_tracing(const rb_iseq_t *iseq)
4901 ASSERT_vm_locking();
4908 rb_darray_for(body->yjit_blocks, version_array_idx) {
4910 rb_darray_for(version_array, version_idx) {
4912 block_t *block = rb_darray_get(version_array, version_idx);
4913 yjit_unlink_method_lookup_dependency(block);
4914 yjit_block_assumptions_free(block);
4916 rb_darray_free(version_array);
4918 rb_darray_free(body->yjit_blocks);
4919 body->yjit_blocks = NULL;
4923 body->jit_func = NULL;
4928 yjit_reg_op(int opcode, codegen_fn gen_fn)
4930 RUBY_ASSERT(opcode >= 0 && opcode < VM_INSTRUCTION_SIZE);
4934 gen_fns[opcode] = gen_fn;
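// One-time codegen initialization: allocate the executable memory region, split it between
// the inline and outlined codeblocks, generate the exit/return trampolines, then register
// a codegen function per bytecode instruction and the specialized C method table below.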
4938 yjit_init_codegen(void)
4941 uint32_t mem_size = rb_yjit_opts.exec_mem_size * 1024 * 1024;
4942 uint8_t *mem_block = alloc_exec_mem(mem_size);
4945 cb_init(cb, mem_block, mem_size/2);
4947 ocb = &outline_block;
4948 cb_init(ocb, mem_block + mem_size/2, mem_size/2);
4951 leave_exit_code = yjit_gen_leave_exit(cb);
4954 gen_full_cfunc_return();
4955 cb_mark_all_executable(cb);
4958 yjit_reg_op(BIN(nop), gen_nop);
4959 yjit_reg_op(BIN(dup), gen_dup);
4960 yjit_reg_op(BIN(dupn), gen_dupn);
4961 yjit_reg_op(BIN(swap), gen_swap);
4962 yjit_reg_op(BIN(setn), gen_setn);
4963 yjit_reg_op(BIN(topn), gen_topn);
4964 yjit_reg_op(BIN(pop), gen_pop);
4965 yjit_reg_op(BIN(adjuststack), gen_adjuststack);
4966 yjit_reg_op(BIN(newarray), gen_newarray);
4967 yjit_reg_op(BIN(duparray), gen_duparray);
4968 yjit_reg_op(BIN(duphash), gen_duphash);
4969 yjit_reg_op(BIN(splatarray), gen_splatarray);
4970 yjit_reg_op(BIN(expandarray), gen_expandarray);
4971 yjit_reg_op(BIN(newhash), gen_newhash);
4972 yjit_reg_op(BIN(newrange), gen_newrange);
4973 yjit_reg_op(BIN(concatstrings), gen_concatstrings);
4974 yjit_reg_op(BIN(putnil), gen_putnil);
4975 yjit_reg_op(BIN(putobject), gen_putobject);
4976 yjit_reg_op(BIN(putstring), gen_putstring);
4977 yjit_reg_op(BIN(putobject_INT2FIX_0_), gen_putobject_int2fix);
4978 yjit_reg_op(BIN(putobject_INT2FIX_1_), gen_putobject_int2fix);
4979 yjit_reg_op(BIN(putself), gen_putself);
4980 yjit_reg_op(BIN(putspecialobject), gen_putspecialobject);
4981 yjit_reg_op(BIN(getlocal), gen_getlocal);
4982 yjit_reg_op(BIN(getlocal_WC_0), gen_getlocal_wc0);
4983 yjit_reg_op(BIN(getlocal_WC_1), gen_getlocal_wc1);
4984 yjit_reg_op(BIN(setlocal), gen_setlocal);
4985 yjit_reg_op(BIN(setlocal_WC_0), gen_setlocal_wc0);
4986 yjit_reg_op(BIN(setlocal_WC_1), gen_setlocal_wc1);
4987 yjit_reg_op(BIN(getinstancevariable), gen_getinstancevariable);
4988 yjit_reg_op(BIN(setinstancevariable), gen_setinstancevariable);
4989 yjit_reg_op(BIN(defined), gen_defined);
4990 yjit_reg_op(BIN(checktype), gen_checktype);
4991 yjit_reg_op(BIN(checkkeyword), gen_checkkeyword);
4992 yjit_reg_op(BIN(opt_lt), gen_opt_lt);
4993 yjit_reg_op(BIN(opt_le), gen_opt_le);
4994 yjit_reg_op(BIN(opt_ge), gen_opt_ge);
4995 yjit_reg_op(BIN(opt_gt), gen_opt_gt);
4996 yjit_reg_op(BIN(opt_eq), gen_opt_eq);
4997 yjit_reg_op(BIN(opt_neq), gen_opt_neq);
4998 yjit_reg_op(BIN(opt_aref), gen_opt_aref);
4999 yjit_reg_op(BIN(opt_aset), gen_opt_aset);
5000 yjit_reg_op(BIN(opt_and), gen_opt_and);
5001 yjit_reg_op(BIN(opt_or), gen_opt_or);
5002 yjit_reg_op(BIN(opt_minus), gen_opt_minus);
5003 yjit_reg_op(BIN(opt_plus), gen_opt_plus);
5004 yjit_reg_op(BIN(opt_mult), gen_opt_mult);
5005 yjit_reg_op(BIN(opt_div), gen_opt_div);
5006 yjit_reg_op(BIN(opt_mod), gen_opt_mod);
5007 yjit_reg_op(BIN(opt_ltlt), gen_opt_ltlt);
5008 yjit_reg_op(BIN(opt_nil_p), gen_opt_nil_p);
5009 yjit_reg_op(BIN(opt_empty_p), gen_opt_empty_p);
5010 yjit_reg_op(BIN(opt_str_freeze), gen_opt_str_freeze);
5011 yjit_reg_op(BIN(opt_str_uminus), gen_opt_str_uminus);
5012 yjit_reg_op(BIN(opt_not), gen_opt_not);
5013 yjit_reg_op(BIN(opt_size), gen_opt_size);
5014 yjit_reg_op(BIN(opt_length), gen_opt_length);
5015 yjit_reg_op(BIN(opt_regexpmatch2), gen_opt_regexpmatch2);
5016 yjit_reg_op(BIN(opt_getinlinecache), gen_opt_getinlinecache);
5017 yjit_reg_op(BIN(invokebuiltin), gen_invokebuiltin);
5018 yjit_reg_op(BIN(opt_invokebuiltin_delegate), gen_opt_invokebuiltin_delegate);
5019 yjit_reg_op(BIN(opt_invokebuiltin_delegate_leave), gen_opt_invokebuiltin_delegate);
5020 yjit_reg_op(BIN(opt_case_dispatch), gen_opt_case_dispatch);
5021 yjit_reg_op(BIN(branchif), gen_branchif);
5022 yjit_reg_op(BIN(branchunless), gen_branchunless);
5023 yjit_reg_op(BIN(branchnil), gen_branchnil);
5024 yjit_reg_op(BIN(jump), gen_jump);
5025 yjit_reg_op(BIN(getblockparamproxy), gen_getblockparamproxy);
5026 yjit_reg_op(BIN(opt_send_without_block), gen_opt_send_without_block);
5027 yjit_reg_op(BIN(send), gen_send);
5028 yjit_reg_op(BIN(invokesuper), gen_invokesuper);
5029 yjit_reg_op(BIN(leave), gen_leave);
5030 yjit_reg_op(BIN(getglobal), gen_getglobal);
5031 yjit_reg_op(BIN(setglobal), gen_setglobal);
5032 yjit_reg_op(BIN(anytostring), gen_anytostring);
5033 yjit_reg_op(BIN(objtostring), gen_objtostring);
5034 yjit_reg_op(BIN(toregexp), gen_toregexp);
5035 yjit_reg_op(BIN(intern), gen_intern);
5036 yjit_reg_op(BIN(getspecial), gen_getspecial);
5037 yjit_reg_op(BIN(getclassvariable), gen_getclassvariable);
5038 yjit_reg_op(BIN(setclassvariable), gen_setclassvariable);
5040 yjit_method_codegen_table = st_init_numtable();
5043 yjit_reg_method(rb_cBasicObject, "!", jit_rb_obj_not);
5045 yjit_reg_method(rb_cNilClass, "nil?", jit_rb_true);
5046 yjit_reg_method(rb_mKernel, "nil?", jit_rb_false);
5048 yjit_reg_method(rb_cBasicObject, "==", jit_rb_obj_equal);
5049 yjit_reg_method(rb_cBasicObject, "equal?", jit_rb_obj_equal);
5050 yjit_reg_method(rb_mKernel, "eql?", jit_rb_obj_equal);
5051 yjit_reg_method(rb_cModule, "==", jit_rb_obj_equal);
5052 yjit_reg_method(rb_cSymbol, "==", jit_rb_obj_equal);
5053 yjit_reg_method(rb_cSymbol, "===", jit_rb_obj_equal);
5056 yjit_reg_method(rb_cString, "to_s", jit_rb_str_to_s);
5057 yjit_reg_method(rb_cString, "to_str", jit_rb_str_to_s);
5058 yjit_reg_method(rb_cString, "bytesize", jit_rb_str_bytesize);
5061 yjit_reg_method(rb_singleton_class(rb_cThread), "current", jit_thread_s_current);