#include "vm_callinfo.h"
#include "internal/compile.h"
#include "internal/class.h"
#include "yjit_iface.h"
#include "yjit_codegen.h"
#ifdef HAVE_LIBCAPSTONE
#include <capstone/capstone.h>
static VALUE cYjitDisasm;
static VALUE cYjitDisasmInsn;
static VALUE cYjitBlock;
static VALUE cYjitCodeComment;
extern const int rb_vm_max_insn_name_size;
static int64_t exit_op_count[VM_INSTRUCTION_SIZE] = { 0 };
extern st_table *rb_encoded_insn_data;
#define CODE_PAGE_SIZE 16 * 1024
#define PAGES_PER_ALLOC 512
    0, 0, RUBY_TYPED_FREE_IMMEDIATELY
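// Get the program counter for a given instruction index in an iseq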
yjit_iseq_pc_at_idx(const rb_iseq_t *iseq, uint32_t insn_idx)
    VALUE *encoded = iseq->body->iseq_encoded;
    VALUE *pc = &encoded[insn_idx];
    VALUE disassembly = rb_iseq_disasm(iseq);
    fprintf(stderr, "%.*s\n", (int)len, ptr);
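// Decode the opcode at a given PC by mapping the encoded (address-threaded)
// instruction back to its opcode number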
yjit_opcode_at_pc(const rb_iseq_t *iseq, const VALUE *pc)
    const VALUE at_pc = *pc;
    return rb_vm_insn_addr2opcode((const void *)at_pc);
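// Debug check: verify that generated code calls the C function the compiler
// saw at compile time, and that the cme it relied on has not been invalidated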
    if (METHOD_ENTRY_INVALIDATED(compile_time_cme)) {
        rb_bug("yjit: output code uses invalidated cme %p", (void *)compile_time_cme);
    bool callee_correct = false;
    if (cme->def->type == VM_METHOD_TYPE_CFUNC) {
        if ((void *)cfunc->func == callee) {
            callee_correct = true;
    if (!callee_correct) {
        rb_bug("yjit: output code calls wrong method");
MJIT_FUNC_EXPORTED VALUE rb_hash_has_key(VALUE hash, VALUE key);
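// Set of blocks compiled under the assumption that a basic operator (BOP)
// has not been redefined; invalidated from rb_yjit_bop_redefined()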
static st_table *blocks_assuming_bops;
assume_bop_not_redefined(jitstate_t *jit, int redefined_flag, enum ruby_basic_operators bop)
    if (BASIC_OP_UNREDEFINED_P(bop, redefined_flag)) {
        jit_ensure_block_entry_exit(jit);
        st_insert(blocks_assuming_bops, (st_data_t)jit->block, 0);
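// Invalidation dependency tables:
//  - method_lookup_dependency maps a receiver class to an ID table keyed by
//    method ID, each entry holding the set of blocks that assumed that
//    method lookup result
//  - cme_validity_dependency maps a callable method entry (cme) to the set
//    of blocks that assumed the cme remains valid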
static st_table *method_lookup_dependency;
static st_table *cme_validity_dependency;
add_cme_validity_dependency_i(st_data_t *key, st_data_t *value, st_data_t new_block, int existing)
    block_set = st_init_numtable();
    *value = (st_data_t)block_set;
    st_insert(block_set, new_block, 1);
add_lookup_dependency_i(st_data_t *key, st_data_t *value, st_data_t data, int existing)
    id2blocks = (void *)*value;
    id2blocks = rb_id_table_create(1);
    *value = (st_data_t)id2blocks;
    if (rb_id_table_lookup(id2blocks, info->mid, &blocks)) {
    block_set = st_init_numtable();
    rb_id_table_insert(id2blocks, info->mid, (VALUE)block_set);
    st_insert(block_set, (st_data_t)info->block, 1);
    RUBY_ASSERT(rb_callable_method_entry(receiver_klass, cme->called_id) == cme);
    jit_ensure_block_entry_exit(jit);
    rb_darray_append(&block->cme_dependencies, cme_dep);
    st_update(cme_validity_dependency, (st_data_t)cme, add_cme_validity_dependency_i, (st_data_t)block);
    st_update(method_lookup_dependency, (st_data_t)receiver_klass, add_lookup_dependency_i, (st_data_t)&info);
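// Blocks compiled assuming the VM is running with a single Ractor;
// invalidated from rb_yjit_before_ractor_spawn()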
static st_table *blocks_assuming_single_ractor_mode;
    if (rb_multi_ractor_p()) return false;
    jit_ensure_block_entry_exit(jit);
    st_insert(blocks_assuming_single_ractor_mode, (st_data_t)jit->block, 1);
static st_table *blocks_assuming_stable_global_constant_state;
assume_stable_global_constant_state(jitstate_t *jit)
    jit_ensure_block_entry_exit(jit);
    st_insert(blocks_assuming_stable_global_constant_state, (st_data_t)jit->block, 1);
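// GC hooks for the YJIT root object: mark and pin the keys of the dependency
// tables (classes and cmes) so they are neither collected nor moved while
// generated code depends on them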
mark_and_pin_keys_i(st_data_t k, st_data_t v, st_data_t ignore)
yjit_root_mark(void *ptr)
    if (method_lookup_dependency) {
        st_foreach(method_lookup_dependency, mark_and_pin_keys_i, 0);
    if (cme_validity_dependency) {
        st_foreach(cme_validity_dependency, mark_and_pin_keys_i, 0);
yjit_root_free(void *ptr)
yjit_root_memsize(const void *ptr)
    return st_memsize(method_lookup_dependency);
yjit_root_update_references(void *ptr)
    {yjit_root_mark, yjit_root_free, yjit_root_memsize, yjit_root_update_references},
    0, 0, RUBY_TYPED_FREE_IMMEDIATELY
block_set_invalidate_i(st_data_t key, st_data_t v, st_data_t ignore)
    invalidate_block_version(version);
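// Callback for the VM: a method was defined or redefined on klass, so
// invalidate every block that depended on looking up mid on klass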
rb_yjit_method_lookup_change(VALUE klass, ID mid)
    if (!method_lookup_dependency) return;
    st_data_t key = (st_data_t)klass;
    if (st_lookup(method_lookup_dependency, key, &image)) {
        if (rb_id_table_lookup(id2blocks, mid, &blocks)) {
            rb_id_table_delete(id2blocks, mid);
            yjit_runtime_counters.invalidate_method_lookup += block_set->num_entries;
            st_foreach(block_set, block_set_invalidate_i, 0);
            st_free_table(block_set);
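// Callback for the VM: a callable method entry was invalidated, so throw
// away every block that assumed the cme was still valid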
rb_yjit_cme_invalidate(VALUE cme)
    if (!cme_validity_dependency) return;
    st_data_t cme_as_st_data = (st_data_t)cme;
    if (st_delete(cme_validity_dependency, &cme_as_st_data, &blocks)) {
        yjit_runtime_counters.invalidate_method_lookup += block_set->num_entries;
        st_foreach(block_set, block_set_invalidate_i, 0);
        st_free_table(block_set);
rb_yjit_invalidate_all_method_lookup_assumptions(void)
    st_data_t key = (st_data_t)receiver_klass;
    if (st_lookup(method_lookup_dependency, key, &image)) {
        ID mid = callee_cme->called_id;
        if (rb_id_table_lookup(id2blocks, mid, &blocks)) {
            st_data_t block_as_st_data = (st_data_t)block;
            (void)st_delete(block_set, &block_as_st_data, NULL);
            if (block_set->num_entries == 0) {
                rb_id_table_delete(id2blocks, mid);
                st_free_table(block_set);
    if (st_lookup(cme_validity_dependency, (st_data_t)callee_cme, &blocks)) {
        st_data_t block_as_st_data = (st_data_t)block;
        (void)st_delete(block_set, &block_as_st_data, NULL);
yjit_unlink_method_lookup_dependency(block_t *block)
    rb_darray_foreach(block->cme_dependencies, cme_dependency_idx, cme_dep) {
    rb_darray_free(block->cme_dependencies);
yjit_block_assumptions_free(block_t *block)
    st_data_t as_st_data = (st_data_t)block;
    if (blocks_assuming_stable_global_constant_state) {
        st_delete(blocks_assuming_stable_global_constant_state, &as_st_data, NULL);
    if (blocks_assuming_single_ractor_mode) {
        st_delete(blocks_assuming_single_ractor_mode, &as_st_data, NULL);
    if (blocks_assuming_bops) {
        st_delete(blocks_assuming_bops, &as_st_data, NULL);
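// Compile an entry point for an iseq; when compilation fails, jit_func is
// left as 0 so the interpreter keeps executing the iseq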
#if (OPT_DIRECT_THREADED_CODE || OPT_CALL_THREADED_CODE) && JIT_ENABLED
    uint8_t *code_ptr = gen_entry_point(iseq, 0, ec);
    iseq->body->jit_func = (yjit_func_t)code_ptr;
    iseq->body->jit_func = 0;
yjit_blocks_for(VALUE mod, VALUE rb_iseq)
    if (CLASS_OF(rb_iseq) != rb_cISeq) {
    const rb_iseq_t *iseq = rb_iseqw_to_iseq(rb_iseq);
    rb_darray_for(iseq->body->yjit_blocks, version_array_idx) {
        rb_darray_for(versions, block_idx) {
            block_t *block = rb_darray_get(versions, block_idx);
block_address(VALUE self)
    return LONG2NUM((intptr_t)block->start_addr);
block_code(VALUE self)
        (const char *)block->start_addr,
        block->end_addr - block->start_addr
iseq_start_index(VALUE self)
    return INT2NUM(block->blockid.idx);
iseq_end_index(VALUE self)
    return INT2NUM(block->end_idx);
rb_yjit_bop_redefined(VALUE klass, const rb_method_entry_t *me, enum ruby_basic_operators bop)
    if (blocks_assuming_bops) {
        yjit_runtime_counters.invalidate_bop_redefined += blocks_assuming_bops->num_entries;
        st_foreach(blocks_assuming_bops, block_set_invalidate_i, 0);
rb_yjit_constant_state_changed(void)
    if (blocks_assuming_stable_global_constant_state) {
        yjit_runtime_counters.constant_state_bumps++;
        yjit_runtime_counters.invalidate_constant_state_bump += blocks_assuming_stable_global_constant_state->num_entries;
        st_foreach(blocks_assuming_stable_global_constant_state, block_set_invalidate_i, 0);
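// Invalidate the block versions that start at an opt_getinlinecache
// instruction when its inline constant cache (IC) is filled or updated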
rb_yjit_constant_ic_update(const rb_iseq_t *const iseq, IC ic)
    if (!rb_yjit_enabled_p()) return;
    if (ic->entry->ic_cref || rb_multi_ractor_p()) {
    VALUE *code = body->iseq_encoded;
    const unsigned get_insn_idx = ic->get_insn_idx;
    RUBY_ASSERT(rb_vm_insn_addr2insn((const void *)code[get_insn_idx]) == BIN(opt_getinlinecache));
    RUBY_ASSERT(insn_op_type(BIN(opt_getinlinecache), 1) == TS_IC);
    if (ic == (IC)code[get_insn_idx + 1 + 1]) {
        const int32_t initial_version_count = rb_darray_size(getinlinecache_blocks);
        for (int32_t iteration = 0; iteration < initial_version_count; ++iteration) {
            getinlinecache_blocks = yjit_get_version_array(iseq, get_insn_idx);
            if (rb_darray_size(getinlinecache_blocks) > 0) {
                block_t *block = rb_darray_get(getinlinecache_blocks, 0);
                invalidate_block_version(block);
                yjit_runtime_counters.invalidate_constant_ic_fill++;
        RUBY_ASSERT(0 == rb_darray_size(yjit_get_version_array(iseq, get_insn_idx)));
        RUBY_ASSERT(false && "ic->get_insn_idx not set properly");
rb_yjit_before_ractor_spawn(void)
    if (blocks_assuming_single_ractor_mode) {
        yjit_runtime_counters.invalidate_ractor_spawn += blocks_assuming_single_ractor_mode->num_entries;
        st_foreach(blocks_assuming_single_ractor_mode, block_set_invalidate_i, 0);
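// Ruby-visible disassembly support (YJIT::Disasm) backed by libcapstone;
// only compiled in when HAVE_LIBCAPSTONE is defined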
#ifdef HAVE_LIBCAPSTONE
    {0, (void (*)(void *))cs_close, 0, },
    0, 0, RUBY_TYPED_FREE_IMMEDIATELY
yjit_disasm_init(VALUE klass)
    if (cs_open(CS_ARCH_X86, CS_MODE_64, handle) != CS_ERR_OK) {
        rb_raise(rb_eRuntimeError, "failed to make Capstone handle");
yjit_disasm(VALUE self, VALUE code, VALUE from)
    for (size_t i = 0; i < count; i++) {
    cs_free(insns, count);
    uint8_t *start = (void *)NUM2ULL(start_address);
    uint8_t *end = (void *)NUM2ULL(end_address);
    rb_darray_for(yjit_code_comments, i) {
        struct yjit_comment comment = rb_darray_get(yjit_code_comments, i);
        uint8_t *comment_pos = cb_get_ptr(cb, comment.offset);
        if (comment_pos >= end) {
        if (comment_pos >= start) {
            LL2NUM((long long)comment_pos),
    return comment_array;
    return RBOOL(YJIT_STATS && rb_yjit_opts.gen_stats);
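// Build a Ruby hash of runtime statistics: code block write positions, the
// named counters packed into yjit_runtime_counters (names parsed out of
// yjit_counter_names), and per-opcode side-exit counts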
    VALUE value = LL2NUM((long long)cb->write_pos);
    value = LL2NUM((long long)ocb->write_pos);
    if (rb_yjit_opts.gen_stats) {
    int64_t *counter_reader = (int64_t *)&yjit_runtime_counters;
    int64_t *counter_reader_end = &yjit_runtime_counters.last_member;
    char *name_reader = yjit_counter_names;
    char *counter_name_end = yjit_counter_names + sizeof(yjit_counter_names);
    while (name_reader < counter_name_end && counter_reader < counter_reader_end) {
        if (*name_reader == ',' || *name_reader == ' ') {
        name_end = strchr(name_reader, ',');
        if (name_end == NULL) break;
        name_len = (int)(name_end - name_reader);
        VALUE value = LL2NUM((long long)*counter_reader);
        name_reader = name_end;
    char key_string[rb_vm_max_insn_name_size + 6];
    for (int i = 0; i < VM_INSTRUCTION_SIZE; i++) {
        const char *i_name = insn_name(i);
        snprintf(key_string, rb_vm_max_insn_name_size + 6, "%s%s", "exit_", i_name);
        VALUE value = LL2NUM((long long)exit_op_count[i]);
    memset(&exit_op_count, 0, sizeof(exit_op_count));
    memset(&yjit_runtime_counters, 0, sizeof(yjit_runtime_counters));
    cb_set_pos(cb, cb->mem_size - 1);
    cb_set_pos(ocb, ocb->mem_size - 1);
rb_yjit_collect_vm_usage_insn(int insn)
    yjit_runtime_counters.vm_insns_count++;
rb_yjit_collect_binding_alloc(void)
    yjit_runtime_counters.binding_allocations++;
rb_yjit_collect_binding_set(void)
    yjit_runtime_counters.binding_set++;
yjit_count_side_exit_op(const VALUE *exit_pc)
    int insn = rb_vm_insn_addr2opcode((const void *)*exit_pc);
    exit_op_count[insn]++;
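// GC mark/update for per-iseq YJIT data: walk every block version and visit
// the cme dependencies, the branch target iseqs, and the VALUEs embedded in
// the generated machine code (located via gc_object_offsets)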
    rb_darray_for(body->yjit_blocks, version_array_idx) {
        rb_darray_for(version_array, block_idx) {
            block_t *block = rb_darray_get(version_array, block_idx);
            rb_darray_foreach(block->cme_dependencies, cme_dependency_idx, cme_dep) {
            rb_darray_for(block->outgoing, branch_idx) {
                branch_t *branch = rb_darray_get(block->outgoing, branch_idx);
                for (int i = 0; i < 2; ++i) {
            uint32_t *offset_element;
            rb_darray_foreach(block->gc_object_offsets, offset_idx, offset_element) {
                uint32_t offset_to_value = *offset_element;
                uint8_t *value_address = cb_get_ptr(cb, offset_to_value);
    rb_darray_for(body->yjit_blocks, version_array_idx) {
        rb_darray_for(version_array, block_idx) {
            block_t *block = rb_darray_get(version_array, block_idx);
            rb_darray_foreach(block->cme_dependencies, cme_dependency_idx, cme_dep) {
                cme_dep->receiver_klass = rb_gc_location(cme_dep->receiver_klass);
            rb_darray_for(block->outgoing, branch_idx) {
                branch_t *branch = rb_darray_get(block->outgoing, branch_idx);
                for (int i = 0; i < 2; ++i) {
                    branch->targets[i].iseq = (const void *)rb_gc_location((VALUE)branch->targets[i].iseq);
            uint32_t *offset_element;
            rb_darray_foreach(block->gc_object_offsets, offset_idx, offset_element) {
                uint32_t offset_to_value = *offset_element;
                uint8_t *value_address = cb_get_ptr(cb, offset_to_value);
                if (possibly_moved != object) {
                    cb_mark_position_writeable(cb, offset_to_value);
                    cb_mark_position_writeable(cb, offset_to_value + SIZEOF_VALUE - 1);
    cb_mark_all_executable(cb);
    cb_mark_all_executable(ocb);
    rb_darray_for(body->yjit_blocks, version_array_idx) {
        rb_darray_for(version_array, block_idx) {
            block_t *block = rb_darray_get(version_array, block_idx);
            yjit_free_block(block);
        rb_darray_free(version_array);
    rb_darray_free(body->yjit_blocks);
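// Executable memory is managed in fixed-size code pages (CODE_PAGE_SIZE),
// allocated PAGES_PER_ALLOC at a time and kept on a freelist. The first
// sizeof(VALUE) bytes of each page store the page's Ruby wrapper object, so a
// code pointer can be mapped back to its page by masking with the page size.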
static VALUE yjit_cur_code_page = Qfalse;
yjit_code_page_free(void *voidp)
    code_page->_next = code_page_freelist;
    code_page_freelist = code_page;
    {NULL, yjit_code_page_free, NULL, NULL},
    0, 0, RUBY_TYPED_FREE_IMMEDIATELY
rb_yjit_code_page_alloc(void)
    if (!code_page_freelist) {
        uint8_t *code_chunk = alloc_exec_mem(PAGES_PER_ALLOC * CODE_PAGE_SIZE);
        for (int i = PAGES_PER_ALLOC - 1; i >= 0; --i) {
            code_page->mem_block = code_chunk + i * CODE_PAGE_SIZE;
            assert((intptr_t)code_page->mem_block % CODE_PAGE_SIZE == 0);
            code_page->page_size = CODE_PAGE_SIZE;
            code_page->_next = code_page_freelist;
            code_page_freelist = code_page;
    code_page_freelist = code_page_freelist->_next;
    *((VALUE *)code_page->mem_block) = wrapper;
    uint8_t *page_start = code_page->mem_block + sizeof(VALUE);
    uint8_t *page_end = code_page->mem_block + CODE_PAGE_SIZE;
    uint32_t halfsize = (uint32_t)(page_end - page_start) / 2;
    cb_init(&code_page->cb, page_start, halfsize);
    cb_init(&code_page->ocb, page_start + halfsize, halfsize);
rb_yjit_code_page_unwrap(VALUE cp_obj)
rb_yjit_code_page_from_ptr(uint8_t *code_ptr)
    VALUE *page_start = (VALUE *)((intptr_t)code_ptr & ~(CODE_PAGE_SIZE - 1));
    VALUE wrapper = *page_start;
    VALUE page_wrapper = rb_yjit_code_page_from_ptr(code_ptr);
    code_page_t *code_page = rb_yjit_code_page_unwrap(page_wrapper);
    uint8_t *mem_block = code_page->mem_block + sizeof(VALUE);
    uint32_t mem_size = (code_page->page_size / 2) - sizeof(VALUE);
    cb_init(cb, mem_block, mem_size);
    VALUE page_wrapper = rb_yjit_code_page_from_ptr(code_ptr);
    code_page_t *code_page = rb_yjit_code_page_unwrap(page_wrapper);
    uint8_t *mem_block = code_page->mem_block + (code_page->page_size / 2);
    uint32_t mem_size = code_page->page_size / 2;
    cb_init(cb, mem_block, mem_size);
yjit_get_code_page(uint32_t cb_bytes_needed, uint32_t ocb_bytes_needed)
    if (yjit_cur_code_page == Qfalse) {
        yjit_cur_code_page = rb_yjit_code_page_alloc();
    code_page_t *code_page = rb_yjit_code_page_unwrap(yjit_cur_code_page);
    uint32_t cb_bytes_left = code_page->cb.mem_size - code_page->cb.write_pos;
    uint32_t ocb_bytes_left = code_page->ocb.mem_size - code_page->ocb.write_pos;
    if (cb_bytes_needed <= cb_bytes_left && ocb_bytes_needed <= ocb_bytes_left) {
        return yjit_cur_code_page;
    yjit_cur_code_page = rb_yjit_code_page_alloc();
    code_page_t *new_code_page = rb_yjit_code_page_unwrap(yjit_cur_code_page);
    jmp_ptr(&code_page->cb, cb_get_ptr(&new_code_page->cb, 0));
    return yjit_cur_code_page;
rb_yjit_enabled_p(void)
    return rb_yjit_opts.yjit_enabled;
rb_yjit_call_threshold(void)
    return rb_yjit_opts.call_threshold;
# define PTR2NUM(x) (LONG2NUM((long)(x)))
    return PTR2NUM(block);
outgoing_ids(VALUE self)
    rb_darray_for(block->outgoing, branch_idx) {
        branch_t *out_branch = rb_darray_get(block->outgoing, branch_idx);
        for (size_t succ_idx = 0; succ_idx < 2; succ_idx++) {
            block_t *succ = out_branch->blocks[succ_idx];
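// Initialize YJIT: validate and normalize the command-line options, allocate
// the invalidation tables, and set up code generation. Bails out early when
// YJIT is unsupported on this platform or the JIT is disabled.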
    if (!YJIT_SUPPORTED_P || !JIT_ENABLED) {
    rb_yjit_opts = *options;
    rb_yjit_opts.yjit_enabled = true;
    rb_yjit_opts.gen_stats = rb_yjit_opts.gen_stats || getenv("RUBY_YJIT_STATS");
    if (rb_yjit_opts.gen_stats) {
        rb_warning("--yjit-stats requires that Ruby is compiled with CPPFLAGS='-DYJIT_STATS=1' or CPPFLAGS='-DRUBY_DEBUG=1'");
    if (rb_yjit_opts.exec_mem_size < 1) {
        rb_yjit_opts.exec_mem_size = 256;
    if (rb_yjit_opts.call_threshold < 1) {
        rb_yjit_opts.call_threshold = YJIT_DEFAULT_CALL_THRESHOLD;
    if (rb_yjit_opts.max_versions < 1) {
        rb_yjit_opts.max_versions = 4;
    if (rb_yjit_opts.no_type_prop) {
        rb_yjit_opts.max_versions = 1;
    blocks_assuming_stable_global_constant_state = st_init_numtable();
    blocks_assuming_single_ractor_mode = st_init_numtable();
    blocks_assuming_bops = st_init_numtable();
    yjit_init_codegen();
#ifdef HAVE_LIBCAPSTONE
    method_lookup_dependency = st_init_numtable();
    cme_validity_dependency = st_init_numtable();
    (void)yjit_get_code_page;