Ruby 4.1.0dev (2026-05-15 revision fff4b3ef2e3e309e7a84288de53c189aa3d45fed)
vm_method.c (fff4b3ef2e3e309e7a84288de53c189aa3d45fed)
1/*
2 * This file is included by vm.c
3 */
4
5#include "id_table.h"
6#include "yjit.h"
7
8#define METHOD_DEBUG 0
9
10static int vm_redefinition_check_flag(VALUE klass);
11static void rb_vm_check_redefinition_opt_method(const rb_method_entry_t *me, VALUE klass);
12static inline rb_method_entry_t *lookup_method_table(VALUE klass, ID id);
13
14#define object_id idObject_id
15#define added idMethod_added
16#define singleton_added idSingleton_method_added
17#define removed idMethod_removed
18#define singleton_removed idSingleton_method_removed
19#define undefined idMethod_undefined
20#define singleton_undefined idSingleton_method_undefined
21
22#define ruby_running (GET_VM()->running)
23/* int ruby_running = 0; */
24
/* GC mark callback for one cc-table entry (one method id's ccs list).
 * If the cme was invalidated, invalidate and drop the whole ccs so we stop
 * marking a dead cme; otherwise keep the cme and every cc alive (movable). */
static enum rb_id_table_iterator_result
mark_cc_entry_i(VALUE ccs_ptr, void *data)
{
    struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_ptr;

    VM_ASSERT(vm_ccs_p(ccs));

    if (METHOD_ENTRY_INVALIDATED(ccs->cme)) {
        /* Before detaching the CCs from this class, we need to invalidate the cc
         * since we will no longer be marking the cme on their behalf.
         */
        for (int i = 0; i < ccs->len; i++) {
            const struct rb_callcache *cc = ccs->entries[i].cc;
            if (cc->klass == Qundef) continue; // already invalidated
            VM_ASSERT(cc->klass == Qundef || vm_cc_check_cme(cc, ccs->cme));
            VM_ASSERT(!vm_cc_super_p(cc) && !vm_cc_refinement_p(cc));
            vm_cc_invalidate(cc);
        }
        /* Free the ccs storage itself and remove the table entry. */
        ruby_xfree_sized(ccs, vm_ccs_alloc_size(ccs->capa));
        return ID_TABLE_DELETE;
    }
    else {
        rb_gc_mark_movable((VALUE)ccs->cme);

        for (int i = 0; i < ccs->len; i++) {
            const struct rb_callcache *cc = ccs->entries[i].cc;
            VM_ASSERT(cc->klass == Qundef || vm_cc_check_cme(cc, ccs->cme));

            rb_gc_mark_movable((VALUE)cc);
        }
        return ID_TABLE_CONTINUE;
    }
}
58
59static void
60vm_cc_table_mark(void *data)
61{
62 struct rb_id_table *tbl = (struct rb_id_table *)data;
63 if (tbl) {
64 rb_id_table_foreach_values(tbl, mark_cc_entry_i, NULL);
65 }
66}
67
68static enum rb_id_table_iterator_result
69cc_table_free_i(VALUE ccs_ptr, void *data)
70{
71 struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_ptr;
72 VM_ASSERT(vm_ccs_p(ccs));
73
74 ruby_xfree_sized(ccs, vm_ccs_alloc_size(ccs->capa));
75
76 return ID_TABLE_CONTINUE;
77}
78
/* dfree hook for cc tables: free each ccs payload first, then delegate to
 * the parent managed-id-table type to free the table structure itself. */
static void
vm_cc_table_free(void *data)
{
    struct rb_id_table *tbl = (struct rb_id_table *)data;

    rb_id_table_foreach_values(tbl, cc_table_free_i, NULL);
    rb_managed_id_table_type.function.dfree(data);
}
87
88static enum rb_id_table_iterator_result
89cc_table_memsize_i(VALUE ccs_ptr, void *data_ptr)
90{
91 size_t *total_size = data_ptr;
92 struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_ptr;
93 *total_size += sizeof(*ccs);
94 *total_size += sizeof(ccs->entries[0]) * ccs->capa;
95 return ID_TABLE_CONTINUE;
96}
97
98static size_t
99vm_cc_table_memsize(const void *data)
100{
101 size_t memsize = rb_managed_id_table_type.function.dsize(data);
102 struct rb_id_table *tbl = (struct rb_id_table *)data;
103 rb_id_table_foreach_values(tbl, cc_table_memsize_i, &memsize);
104 return memsize;
105}
106
107static enum rb_id_table_iterator_result
108compact_cc_entry_i(VALUE ccs_ptr, void *data)
109{
110 struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_ptr;
111
112 ccs->cme = (const struct rb_callable_method_entry_struct *)rb_gc_location((VALUE)ccs->cme);
113 VM_ASSERT(vm_ccs_p(ccs));
114
115 for (int i=0; i<ccs->len; i++) {
116 ccs->entries[i].cc = (const struct rb_callcache *)rb_gc_location((VALUE)ccs->entries[i].cc);
117 }
118
119 return ID_TABLE_CONTINUE;
120}
121
122static void
123vm_cc_table_compact(void *data)
124{
125 struct rb_id_table *tbl = (struct rb_id_table *)data;
126 rb_id_table_foreach_values(tbl, compact_cc_entry_i, NULL);
127}
128
/* TypedData type for per-class call-cache tables. Parents the generic
 * managed-id-table type, but layers cc-aware mark/free/memsize/compact
 * hooks on top so ccs payloads are handled alongside the table itself. */
static const rb_data_type_t cc_table_type = {
    .wrap_struct_name = "VM/cc_table",
    .function = {
        .dmark = vm_cc_table_mark,
        .dfree = vm_cc_table_free,
        .dsize = vm_cc_table_memsize,
        .dcompact = vm_cc_table_compact,
    },
    .parent = &rb_managed_id_table_type,
    .flags = RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE,
};
140
141VALUE
142rb_vm_cc_table_create(size_t capa)
143{
144 return rb_managed_id_table_create(&cc_table_type, capa);
145}
146
/* foreach callback for rb_vm_cc_table_dup: deep-copy one ccs into new_table.
 * Entries with an invalidated cme are skipped. The copy is allocated zeroed
 * (xcalloc) and inserted *before* memcpy; that is safe because a GC triggered
 * by the insert would see len == 0 in the zeroed struct. */
static enum rb_id_table_iterator_result
vm_cc_table_dup_i(ID key, VALUE old_ccs_ptr, void *data)
{
    VALUE new_table = (VALUE)data;
    struct rb_class_cc_entries *old_ccs = (struct rb_class_cc_entries *)old_ccs_ptr;

    if (METHOD_ENTRY_INVALIDATED(old_ccs->cme)) {
        // Invalidated CME. This entry will be removed from the old table on
        // the next GC mark, so it's unsafe (and undesirable) to copy
        return ID_TABLE_CONTINUE;
    }

    size_t memsize = vm_ccs_alloc_size(old_ccs->capa);
    struct rb_class_cc_entries *new_ccs = ruby_xcalloc(1, memsize);
    rb_managed_id_table_insert(new_table, key, (VALUE)new_ccs);

    // We hold the VM lock, so invalidation should not have happened between
    // our earlier invalidation check and now.
    VM_ASSERT(!METHOD_ENTRY_INVALIDATED(old_ccs->cme));

    memcpy(new_ccs, old_ccs, memsize);

#if VM_CHECK_MODE > 0
    /* Re-stamp the debug signature for the copy's own address. */
    new_ccs->debug_sig = ~(VALUE)new_ccs;
#endif

    /* Write barriers: new_table now references the cme and every cc. */
    RB_OBJ_WRITTEN(new_table, Qundef, (VALUE)new_ccs->cme);
    for (int index = 0; index < new_ccs->len; index++) {
        RB_OBJ_WRITTEN(new_table, Qundef, new_ccs->entries[index].cc);
    }
    return ID_TABLE_CONTINUE;
}
179
180VALUE
181rb_vm_cc_table_dup(VALUE old_table)
182{
183 ASSERT_vm_locking();
184 VALUE new_table = rb_vm_cc_table_create(rb_managed_id_table_size(old_table));
185 rb_managed_id_table_foreach(old_table, vm_cc_table_dup_i, (void *)new_table);
186 return new_table;
187}
188
189static void
190vm_ccs_invalidate(struct rb_class_cc_entries *ccs)
191{
192 for (int i=0; i<ccs->len; i++) {
193 const struct rb_callcache *cc = ccs->entries[i].cc;
194 VM_ASSERT(!vm_cc_super_p(cc) && !vm_cc_refinement_p(cc));
195 vm_cc_invalidate(cc);
196 }
197}
198
/* Invalidate every cc in ccs, then free its storage. */
static void
rb_vm_ccs_invalidate_and_free(struct rb_class_cc_entries *ccs)
{
    RB_DEBUG_COUNTER_INC(ccs_free);
    vm_ccs_invalidate(ccs);
    ruby_xfree_sized(ccs, vm_ccs_alloc_size(ccs->capa));
}
206
207void
208rb_vm_cc_table_delete(VALUE table, ID mid)
209{
210 VALUE ccs_obj;
211 if (rb_managed_id_table_lookup(table, mid, &ccs_obj)) {
212 struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_obj;
213 rb_managed_id_table_delete(table, mid);
214 rb_vm_ccs_invalidate_and_free(ccs);
215 }
216}
217
/* Debug helper: print one ccs (method name, length, cme, and each cc) to
 * stderr. The data argument is unused. */
static enum rb_id_table_iterator_result
vm_ccs_dump_i(ID mid, VALUE val, void *data)
{
    const struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)val;
    fprintf(stderr, " | %s (len:%d) ", rb_id2name(mid), ccs->len);
    rp(ccs->cme);

    for (int i=0; i<ccs->len; i++) {
        rp_m( " | \t", ccs->entries[i].cc);
    }

    return ID_TABLE_CONTINUE;
}
231
/* Debug helper: dump klass's cc table to stderr — just the entry for
 * target_mid when it is non-zero, otherwise every entry. */
static void
vm_ccs_dump(VALUE klass, ID target_mid)
{
    VALUE cc_tbl = RCLASS_WRITABLE_CC_TBL(klass);
    if (cc_tbl) {
        VALUE ccs;
        if (target_mid) {
            if (rb_managed_id_table_lookup(cc_tbl, target_mid, &ccs)) {
                fprintf(stderr, " [CCTB] %p\n", (void *)cc_tbl);
                vm_ccs_dump_i(target_mid, ccs, NULL);
            }
        }
        else {
            fprintf(stderr, " [CCTB] %p\n", (void *)cc_tbl);
            rb_managed_id_table_foreach(cc_tbl, vm_ccs_dump_i, (void *)target_mid);
        }
    }
}
250
251static enum rb_id_table_iterator_result
252vm_cme_dump_i(ID mid, VALUE val, void *data)
253{
254 ID target_mid = (ID)data;
255 if (target_mid == 0 || mid == target_mid) {
256 rp_m(" > ", val);
257 }
258 return ID_TABLE_CONTINUE;
259}
260
/* Debug helper: walk klass's ancestry and dump, for each ancestor, its
 * method table, callable method table, and cc table — filtered to
 * target_mid when non-zero. Returns Qnil. */
static VALUE
vm_mtbl_dump(VALUE klass, ID target_mid)
{
    fprintf(stderr, "# vm_mtbl\n");
    while (klass) {
        rp_m(" -> ", klass);
        VALUE me;

        if (RCLASS_M_TBL(klass)) {
            if (target_mid != 0) {
                if (rb_id_table_lookup(RCLASS_M_TBL(klass), target_mid, &me)) {
                    rp_m(" [MTBL] ", me);
                }
            }
            else {
                fprintf(stderr, " ## RCLASS_M_TBL (%p)\n", (void *)RCLASS_M_TBL(klass));
                rb_id_table_foreach(RCLASS_M_TBL(klass), vm_cme_dump_i, NULL);
            }
        }
        else {
            fprintf(stderr, " MTBL: NULL\n");
        }
        if (RCLASS_WRITABLE_CALLABLE_M_TBL(klass)) {
            if (target_mid != 0) {
                if (rb_id_table_lookup(RCLASS_WRITABLE_CALLABLE_M_TBL(klass), target_mid, &me)) {
                    rp_m(" [CM**] ", me);
                }
            }
            else {
                fprintf(stderr, " ## RCLASS_CALLABLE_M_TBL\n");
                rb_id_table_foreach(RCLASS_WRITABLE_CALLABLE_M_TBL(klass), vm_cme_dump_i, NULL);
            }
        }
        if (RCLASS_WRITABLE_CC_TBL(klass)) {
            vm_ccs_dump(klass, target_mid);
        }
        klass = RCLASS_SUPER(klass);
    }
    return Qnil;
}
301
302void
303rb_vm_mtbl_dump(const char *msg, VALUE klass, ID target_mid)
304{
305 fprintf(stderr, "[%s] ", msg);
306 vm_mtbl_dump(klass, target_mid);
307}
308
/* Mark cme as invalidated so caches holding it are rejected on next use,
 * and notify both JITs so compiled code depending on it can deoptimize. */
static inline void
vm_cme_invalidate(rb_callable_method_entry_t *cme)
{
    VM_ASSERT(IMEMO_TYPE_P(cme, imemo_ment), "cme: %d", imemo_type((VALUE)cme));
    VM_ASSERT(callable_method_entry_p(cme));
    METHOD_ENTRY_INVALIDATED_SET(cme);
    RB_DEBUG_COUNTER_INC(cc_cme_invalidate);

    rb_yjit_cme_invalidate(cme);
    rb_zjit_cme_invalidate(cme);
}
320
/* foreach callback: clear one inline constant cache entry. */
static int
rb_clear_constant_cache_for_id_i(st_data_t ic, st_data_t arg)
{
    ((IC) ic)->entry = NULL;
    return ST_CONTINUE;
}
327
328void
330{
331 VALUE lookup_result;
332 rb_vm_t *vm = GET_VM();
333
334 if (rb_id_table_lookup(&vm->constant_cache, id, &lookup_result)) {
335 set_table *ics = (set_table *)lookup_result;
336 set_table_foreach(ics, rb_clear_constant_cache_for_id_i, (st_data_t) NULL);
337 ruby_vm_constant_cache_invalidations += ics->num_entries;
338 }
339
340 rb_yjit_constant_state_changed(id);
341 rb_zjit_constant_state_changed(id);
342}
343
/* Remove mid's entry from the VM-wide negative method cache (methods known
 * not to exist) and invalidate the placeholder cme so cached misses expire. */
static void
invalidate_negative_cache(ID mid)
{
    VALUE cme;
    rb_vm_t *vm = GET_VM();

    if (rb_id_table_lookup(&vm->negative_cme_table, mid, &cme)) {
        rb_id_table_delete(&vm->negative_cme_table, mid);
        vm_cme_invalidate((rb_callable_method_entry_t *)cme);
        RB_DEBUG_COUNTER_INC(cc_invalidate_negative);
    }
}
356
357const rb_method_entry_t * rb_method_entry_clone(const rb_method_entry_t *src_me);
358static const rb_callable_method_entry_t *complemented_callable_method_entry(VALUE klass, ID id);
359static const rb_callable_method_entry_t *lookup_overloaded_cme(const rb_callable_method_entry_t *cme);
360
/* Drop mid's ccs from the given cc table: invalidate the cme in both JITs,
 * clear the negative cache too when the cme is a negative entry (nil owner),
 * then free the ccs and remove the table entry. */
static void
invalidate_method_cache_in_cc_table(VALUE tbl, ID mid)
{
    VALUE ccs_data;
    if (tbl && rb_managed_id_table_lookup(tbl, mid, &ccs_data)) {
        struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_data;
        rb_yjit_cme_invalidate((rb_callable_method_entry_t *)ccs->cme);
        rb_zjit_cme_invalidate((rb_callable_method_entry_t *)ccs->cme);
        if (NIL_P(ccs->cme->owner)) invalidate_negative_cache(mid);
        rb_vm_ccs_invalidate_and_free(ccs);
        rb_managed_id_table_delete(tbl, mid);
        RB_DEBUG_COUNTER_INC(cc_invalidate_leaf_ccs);
    }
}
375
/* Remove mid's cached callable method entry from tbl (if any), notifying
 * both JITs so code compiled against it deoptimizes. */
static void
invalidate_callable_method_entry_in_callable_m_table(struct rb_id_table *tbl, ID mid)
{
    VALUE cme;
    if (tbl && rb_id_table_lookup(tbl, mid, &cme)) {
        rb_yjit_cme_invalidate((rb_callable_method_entry_t *)cme);
        rb_zjit_cme_invalidate((rb_callable_method_entry_t *)cme);
        rb_id_table_delete(tbl, mid);
        RB_DEBUG_COUNTER_INC(cc_invalidate_leaf_callable);
    }
}
387
389 VALUE klass;
390 ID mid;
391 const rb_method_entry_t *cme;
392 const rb_method_entry_t *newer;
393};
394
395static void
396invalidate_callable_method_entry_in_every_m_table_i(rb_classext_t *ext, bool is_prime, VALUE box_value, void *data)
397{
398 st_data_t me;
400 struct rb_id_table *tbl = RCLASSEXT_M_TBL(ext);
401
402 if (rb_id_table_lookup(tbl, arg->mid, &me) && arg->cme == (const rb_method_entry_t *)me) {
403 rb_method_table_insert(arg->klass, tbl, arg->mid, arg->newer);
404 }
405}
406
407static void
408invalidate_callable_method_entry_in_every_m_table(VALUE klass, ID mid, const rb_callable_method_entry_t *cme)
409{
410 // The argument cme must be invalidated later in the caller side
411 const rb_method_entry_t *newer = rb_method_entry_clone((const rb_method_entry_t *)cme);
413 .klass = klass,
414 .mid = mid,
415 .cme = (const rb_method_entry_t *) cme,
416 .newer = newer,
417 };
418 rb_class_classext_foreach(klass, invalidate_callable_method_entry_in_every_m_table_i, (void *)&arg);
419}
420
/* Same as invalidate_callable_method_entry_in_callable_m_table, but counts
 * against the tree (complemented) invalidation statistic. */
static void
invalidate_complemented_method_entry_in_callable_m_table(struct rb_id_table *tbl, ID mid)
{
    VALUE cme;
    if (tbl && rb_id_table_lookup(tbl, mid, &cme)) {
        rb_yjit_cme_invalidate((rb_callable_method_entry_t *)cme);
        rb_zjit_cme_invalidate((rb_callable_method_entry_t *)cme);
        rb_id_table_delete(tbl, mid);
        RB_DEBUG_COUNTER_INC(cc_invalidate_tree_callable);
    }
}
432
/* Invalidate all method caches for mid rooted at klass. Runs under the VM
 * lock with a barrier. Leaf classes (no subclasses) only need their own
 * cc/callable tables cleared; otherwise the matching cme is looked up and
 * invalidated so every downstream inline cache expires. */
static void
clear_method_cache_by_id_in_class(VALUE klass, ID mid)
{
    VM_ASSERT_TYPE2(klass, T_CLASS, T_ICLASS);
    if (rb_objspace_garbage_object_p(klass)) return;

    RB_VM_LOCKING() {
        rb_vm_barrier();

        if (LIKELY(!RCLASS_SUBCLASSES(klass)) &&
            !FL_TEST_RAW(klass, RCLASS_HAS_SUBCLASSES) &&
            // Non-refinement ICLASSes (from module inclusion) previously had
            // subclasses reparented onto them, so they need the tree path for
            // broader cme-based invalidation even though they now have no subclasses.
            !(RB_TYPE_P(klass, T_ICLASS) && NIL_P(RCLASS_REFINED_CLASS(klass)))) {
            // no subclasses
            // check only current class

            // invalidate CCs
            VALUE cc_tbl = RCLASS_WRITABLE_CC_TBL(klass);
            invalidate_method_cache_in_cc_table(cc_tbl, mid);
            if (RCLASS_CC_TBL_NOT_PRIME_P(klass, cc_tbl)) {
                invalidate_method_cache_in_cc_table(RCLASS_PRIME_CC_TBL(klass), mid);
            }

            // remove from callable_m_tbl, if exists
            struct rb_id_table *cm_tbl = RCLASS_WRITABLE_CALLABLE_M_TBL(klass);
            invalidate_callable_method_entry_in_callable_m_table(cm_tbl, mid);
            if (RCLASS_CALLABLE_M_TBL_NOT_PRIME_P(klass, cm_tbl)) {
                invalidate_callable_method_entry_in_callable_m_table(RCLASS_PRIME_CALLABLE_M_TBL(klass), mid);
            }

            RB_DEBUG_COUNTER_INC(cc_invalidate_leaf);
        }
        else {
            const rb_callable_method_entry_t *cme = complemented_callable_method_entry(klass, mid);

            if (cme) {
                // invalidate cme if found to invalidate the inline method cache.
                if (METHOD_ENTRY_CACHED(cme)) {
                    if (METHOD_ENTRY_COMPLEMENTED(cme)) {
                        // do nothing
                    }
                    else {
                        // invalidate cc by invalidating cc->cme
                        VALUE owner = cme->owner;
                        VM_ASSERT_TYPE(owner, T_CLASS);
                        VALUE klass_housing_cme;
                        if (cme->def->type == VM_METHOD_TYPE_REFINED && !cme->def->body.refined.orig_me) {
                            klass_housing_cme = owner;
                        }
                        else {
                            klass_housing_cme = RCLASS_ORIGIN(owner);
                        }

                        // replace the cme that will be invalid in the all classexts
                        invalidate_callable_method_entry_in_every_m_table(klass_housing_cme, mid, cme);
                    }

                    vm_cme_invalidate((rb_callable_method_entry_t *)cme);
                    RB_DEBUG_COUNTER_INC(cc_invalidate_tree_cme);

                    // In case of refinement ME, also invalidate the wrapped ME that
                    // could be cached at some callsite and is unreachable from any
                    // RCLASS_WRITABLE_CC_TBL.
                    if (cme->def->type == VM_METHOD_TYPE_REFINED && cme->def->body.refined.orig_me) {
                        vm_cme_invalidate((rb_callable_method_entry_t *)cme->def->body.refined.orig_me);
                    }

                    if (cme->def->iseq_overload) {
                        rb_callable_method_entry_t *monly_cme = (rb_callable_method_entry_t *)lookup_overloaded_cme(cme);
                        if (monly_cme) {
                            vm_cme_invalidate(monly_cme);
                        }
                    }
                }

                // invalidate complement tbl
                if (METHOD_ENTRY_COMPLEMENTED(cme)) {
                    VALUE defined_class = cme->defined_class;
                    struct rb_id_table *cm_tbl = RCLASS_WRITABLE_CALLABLE_M_TBL(defined_class);
                    invalidate_complemented_method_entry_in_callable_m_table(cm_tbl, mid);
                    if (RCLASS_CALLABLE_M_TBL_NOT_PRIME_P(defined_class, cm_tbl)) {
                        struct rb_id_table *prime_cm_table = RCLASS_PRIME_CALLABLE_M_TBL(defined_class);
                        invalidate_complemented_method_entry_in_callable_m_table(prime_cm_table, mid);
                    }
                }

                RB_DEBUG_COUNTER_INC(cc_invalidate_tree);
            }
            else {
                invalidate_negative_cache(mid);
            }
        }

        rb_gccct_clear_table();
    }
}
531
532static void
533clear_iclass_method_cache_by_id(VALUE iclass, VALUE d)
534{
535 VM_ASSERT_TYPE(iclass, T_ICLASS);
536 ID mid = (ID)d;
537 clear_method_cache_by_id_in_class(iclass, mid);
538}
539
540static void
541clear_iclass_method_cache_by_id_for_refinements(VALUE klass, VALUE d)
542{
543 if (RB_TYPE_P(klass, T_ICLASS)) {
544 ID mid = (ID)d;
545 clear_method_cache_by_id_in_class(klass, mid);
546 }
547}
548
/* Invalidate method caches for mid on klass_or_module. Modules fan out to
 * every iclass created from them; refinement modules additionally clear the
 * refined class, its subtree, and all refinement call caches. */
void
rb_clear_method_cache(VALUE klass_or_module, ID mid)
{
    if (RB_TYPE_P(klass_or_module, T_MODULE)) {
        VALUE module = klass_or_module; // alias

        if (FL_TEST(module, RMODULE_IS_REFINEMENT)) {
            VALUE refined_class = rb_refinement_module_get_refined_class(module);
            rb_clear_method_cache(refined_class, mid);
            rb_class_foreach_subclass(refined_class, clear_iclass_method_cache_by_id_for_refinements, mid);
            rb_clear_all_refinement_method_cache();
        }
        rb_class_foreach_subclass(module, clear_iclass_method_cache_by_id, mid);
    }
    else {
        clear_method_cache_by_id_in_class(klass_or_module, mid);
    }
}
567
/* foreach_values callback: invalidate one cme cached in an iclass
 * callable_m_tbl and delete its entry. */
static enum rb_id_table_iterator_result
invalidate_method_entry_in_iclass_callable_m_tbl(VALUE cme, void *data)
{
    vm_cme_invalidate((rb_callable_method_entry_t *)cme);
    return ID_TABLE_DELETE;
}
574
/* foreach_values callback: invalidate one ccs's cme, free the ccs storage,
 * and delete its entry from the iclass cc table. */
static enum rb_id_table_iterator_result
invalidate_ccs_in_iclass_cc_tbl(VALUE value, void *data)
{
    struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)value;
    vm_cme_invalidate((rb_callable_method_entry_t *)ccs->cme);
    ruby_xfree_sized(ccs, vm_ccs_alloc_size(ccs->capa));
    return ID_TABLE_DELETE;
}
583
584void
585rb_invalidate_method_caches(struct rb_id_table *cm_tbl, VALUE cc_tbl)
586{
587 if (cm_tbl) {
588 rb_id_table_foreach_values(cm_tbl, invalidate_method_entry_in_iclass_callable_m_tbl, NULL);
589 }
590 if (cc_tbl) {
591 rb_managed_id_table_foreach_values(cc_tbl, invalidate_ccs_in_iclass_cc_tbl, NULL);
592 }
593}
594
/* Hash a callinfo over mid, flag, argc, and any keyword names, so that
 * callinfos equal under vm_ci_hash_cmp land in the same ci_table bucket. */
static st_index_t
vm_ci_hash(VALUE v)
{
    const struct rb_callinfo *ci = (const struct rb_callinfo *)v;
    st_index_t h;
    h = rb_hash_start(ci->mid);
    h = rb_hash_uint(h, ci->flag);
    h = rb_hash_uint(h, ci->argc);
    if (ci->kwarg) {
        for (int i = 0; i < ci->kwarg->keyword_len; i++) {
            h = rb_hash_uint(h, ci->kwarg->keywords[i]);
        }
    }
    return h;
}
610
611static int
612vm_ci_hash_cmp(VALUE v1, VALUE v2)
613{
614 const struct rb_callinfo *ci1 = (const struct rb_callinfo *)v1;
615 const struct rb_callinfo *ci2 = (const struct rb_callinfo *)v2;
616 if (ci1->mid != ci2->mid) return 1;
617 if (ci1->flag != ci2->flag) return 1;
618 if (ci1->argc != ci2->argc) return 1;
619 if (ci1->kwarg != NULL) {
620 VM_ASSERT(ci2->kwarg != NULL); // implied by matching flags
621
622 if (ci1->kwarg->keyword_len != ci2->kwarg->keyword_len)
623 return 1;
624
625 for (int i = 0; i < ci1->kwarg->keyword_len; i++) {
626 if (ci1->kwarg->keywords[i] != ci2->kwarg->keywords[i]) {
627 return 1;
628 }
629 }
630 }
631 else {
632 VM_ASSERT(ci2->kwarg == NULL); // implied by matching flags
633 }
634 return 0;
635}
636
/* st hash type used to intern callinfo objects in vm->ci_table. */
static const struct st_hash_type vm_ci_hashtype = {
    vm_ci_hash_cmp,
    vm_ci_hash
};
641
/* st_update callback for callinfo interning. On a hit: return the existing
 * ci through *ret, unless it is a garbage object — then delete the entry and
 * leave *ret NULL so the caller's loop retries. On a miss: install the
 * candidate ci as both key and value. */
static int
ci_lookup_i(st_data_t *key, st_data_t *value, st_data_t data, int existing)
{
    const struct rb_callinfo *ci = (const struct rb_callinfo *)*key;
    st_data_t *ret = (st_data_t *)data;

    if (existing) {
        if (rb_objspace_garbage_object_p((VALUE)ci)) {
            *ret = (st_data_t)NULL;
            return ST_DELETE;
        }
        else {
            *ret = *key;
            return ST_STOP;
        }
    }
    else {
        *key = *value = *ret = (st_data_t)ci;
        return ST_CONTINUE;
    }
}
663
/* Return the interned callinfo for (mid, flag, argc, kwarg), creating one if
 * needed. A candidate is always built first, then reconciled against
 * vm->ci_table under the VM lock; the loop retries when ci_lookup_i found a
 * garbage entry (it deletes it and leaves ci NULL). kwarg's reference count
 * is bumped up-front since the candidate holds it. */
const struct rb_callinfo *
rb_vm_ci_lookup(ID mid, unsigned int flag, unsigned int argc, const struct rb_callinfo_kwarg *kwarg)
{
    rb_vm_t *vm = GET_VM();
    const struct rb_callinfo *ci = NULL;

    if (kwarg) {
        ((struct rb_callinfo_kwarg *)kwarg)->references++;
    }

    struct rb_callinfo *new_ci = SHAREABLE_IMEMO_NEW(struct rb_callinfo, imemo_callinfo, (VALUE)kwarg);
    new_ci->mid = mid;
    new_ci->flag = flag;
    new_ci->argc = argc;

    RB_VM_LOCKING() {
        st_table *ci_table = &vm->ci_table;

        do {
            st_update(ci_table, (st_data_t)new_ci, ci_lookup_i, (st_data_t)&ci);
        } while (ci == NULL);
    }

    VM_ASSERT(ci);

    return ci;
}
691
692void
693rb_vm_ci_free(const struct rb_callinfo *ci)
694{
695 ASSERT_vm_locking();
696
697 rb_vm_t *vm = GET_VM();
698
699 st_data_t key = (st_data_t)ci;
700 st_delete(&vm->ci_table, &key, NULL);
701}
702
704 VALUE *entries;
705 size_t len;
706 size_t capa;
707};
708
709static void
710cc_refinement_set_free(void *ptr)
711{
712 struct cc_refinement_entries *e = ptr;
713 xfree(e->entries);
714}
715
716static size_t
717cc_refinement_set_memsize(const void *ptr)
718{
719 const struct cc_refinement_entries *e = ptr;
720 return e->capa * sizeof(VALUE);
721}
722
723static void
724cc_refinement_set_compact(void *ptr)
725{
726 struct cc_refinement_entries *e = ptr;
727 for (size_t i = 0; i < e->len; i++) {
728 e->entries[i] = rb_gc_location(e->entries[i]);
729 }
730}
731
/* Weak-reference GC hook: compact the array in place, keeping only entries
 * whose referent survived collection (read index scans, write index packs). */
static void
cc_refinement_set_handle_weak_references(void *ptr)
{
    struct cc_refinement_entries *e = ptr;
    size_t write = 0;
    for (size_t read = 0; read < e->len; read++) {
        if (rb_gc_handle_weak_references_alive_p(e->entries[read])) {
            e->entries[write++] = e->entries[read];
        }
    }
    e->len = write;
}
744
/* TypedData type for the VM-global refinement cc set. No dmark: entries are
 * weak references, pruned via the handle_weak_references hook instead. */
static const rb_data_type_t cc_refinement_set_type = {
    "VM/cc_refinement_set",
    {
        NULL,
        cc_refinement_set_free,
        cc_refinement_set_memsize,
        cc_refinement_set_compact,
        cc_refinement_set_handle_weak_references,
    },
    0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
};
756
/* Allocate the empty refinement cc set and register it with the GC as a
 * weak-reference holder. Returns the wrapping TypedData object. */
VALUE
rb_cc_refinement_set_create(void)
{
    struct cc_refinement_entries *e;
    VALUE obj = TypedData_Make_Struct(0, struct cc_refinement_entries, &cc_refinement_set_type, e);

    e->entries = NULL;
    e->len = 0;
    e->capa = 0;

    rb_gc_declare_weak_references(obj);

    return obj;
}
771
/* Append cc to the VM-global refinement cc set under the VM lock, growing
 * the backing array geometrically (16, 32, ...) as needed. */
void
rb_vm_insert_cc_refinement(const struct rb_callcache *cc)
{
    rb_vm_t *vm = GET_VM();
    RB_VM_LOCK_ENTER();
    {
        struct cc_refinement_entries *e = RTYPEDDATA_GET_DATA(vm->cc_refinement_set);
        if (e->len == e->capa) {
            size_t new_capa = e->capa == 0 ? 16 : e->capa * 2;
            SIZED_REALLOC_N(e->entries, VALUE, new_capa, e->capa);
            e->capa = new_capa;
        }
        e->entries[e->len++] = (VALUE)cc;

        // We never mark the cc, but we need to issue a writebarrier so that
        // the refinement set can be added to the remembered set
        RB_OBJ_WRITTEN(vm->cc_refinement_set, Qundef, (VALUE)cc);
    }
    RB_VM_LOCK_LEAVE();
}
792
/* Invalidate every refinement call cache in the VM and reset the set, then
 * tell YJIT to drop its method-lookup assumptions. Entries are guaranteed
 * live here because dead ones were pruned by the weak-reference hook. */
void
rb_clear_all_refinement_method_cache(void)
{
    rb_vm_t *vm = GET_VM();

    RB_VM_LOCK_ENTER();
    {
        struct cc_refinement_entries *e = RTYPEDDATA_GET_DATA(vm->cc_refinement_set);
        for (size_t i = 0; i < e->len; i++) {
            VALUE v = e->entries[i];

            // All objects should be live as weak references are pruned in
            // cc_refinement_set_handle_weak_references
            VM_ASSERT(rb_gc_pointer_to_heap_p(v));
            VM_ASSERT(!rb_objspace_garbage_object_p(v));

            const struct rb_callcache *cc = (const struct rb_callcache *)v;
            VM_ASSERT(vm_cc_refinement_p(cc));

            if (vm_cc_valid(cc)) {
                vm_cc_invalidate(cc);
            }
        }
        e->len = 0;
    }
    RB_VM_LOCK_LEAVE();

    rb_yjit_invalidate_all_method_lookup_assumptions();
}
822
/* Insert me into a method table under the VM lock, computing whether the
 * table is an ICLASS-shared m_tbl (affects the write-barrier owner). */
void
rb_method_table_insert(VALUE klass, struct rb_id_table *table, ID method_id, const rb_method_entry_t *me)
{
    RB_VM_LOCKING() {
        rb_method_table_insert0(klass, table, method_id, me, RB_TYPE_P(klass, T_ICLASS) && !RICLASS_OWNS_M_TBL_P(klass));
    }
}
830
/* Insert me into table. When an ICLASS shares its m_tbl with the module it
 * was created from, the write barrier must target the real table owner
 * (RBASIC(klass)->klass) rather than the ICLASS itself. */
void
rb_method_table_insert0(VALUE klass, struct rb_id_table *table, ID method_id, const rb_method_entry_t *me, bool iclass_shared_mtbl)
{
    VALUE table_owner = klass;
    if (iclass_shared_mtbl) {
        table_owner = RBASIC(table_owner)->klass;
    }
    VM_ASSERT_TYPE3(table_owner, T_CLASS, T_ICLASS, T_MODULE);
    rb_id_table_insert(table, method_id, (VALUE)me);
    RB_OBJ_WRITTEN(table_owner, Qundef, (VALUE)me);
}
842
843// rb_f_notimplement has an extra trailing argument to distinguish it from other methods
844// at compile-time to override arity to be -1. But the trailing argument introduces a
845// signature mismatch between caller and callee, so rb_define_method family inserts a
846// method entry with rb_f_notimplement_internal, which has canonical arity=-1 signature,
847// instead of rb_f_notimplement.
848NORETURN(static VALUE rb_f_notimplement_internal(int argc, const VALUE *argv, VALUE obj));
849
850static VALUE
851rb_f_notimplement_internal(int argc, const VALUE *argv, VALUE obj)
852{
854
856}
857
VALUE
rb_f_notimplement(int argc, const VALUE *argv, VALUE obj, VALUE marker)
{
    /* `marker` exists only to give this symbol a distinct compile-time
     * signature (see the comment above); it is never read. */
    rb_f_notimplement_internal(argc, argv, obj);
}
863
/* Register mid as not-implemented. (void *)1 is a dummy non-NULL opts
 * pointer so rb_method_definition_set takes the NOTIMPLEMENTED branch. */
static void
rb_define_notimplement_method_id(VALUE mod, ID id, rb_method_visibility_t visi)
{
    rb_add_method(mod, id, VM_METHOD_TYPE_NOTIMPLEMENTED, (void *)1, visi);
}
869
870void
871rb_add_method_cfunc(VALUE klass, ID mid, VALUE (*func)(ANYARGS), int argc, rb_method_visibility_t visi)
872{
873 if (argc < -2 || 15 < argc) rb_raise(rb_eArgError, "arity out of range: %d for -2..15", argc);
874 if (func != (VALUE(*)(ANYARGS))rb_f_notimplement) {
876 opt.func = func;
877 opt.argc = argc;
878 rb_add_method(klass, mid, VM_METHOD_TYPE_CFUNC, &opt, visi);
879 }
880 else {
881 rb_define_notimplement_method_id(klass, mid, visi);
882 }
883}
884
885void
886rb_add_method_optimized(VALUE klass, ID mid, enum method_optimized_type opt_type, unsigned int index, rb_method_visibility_t visi)
887{
889 .type = opt_type,
890 .index = index,
891 };
892 rb_add_method(klass, mid, VM_METHOD_TYPE_OPTIMIZED, &opt, visi);
893}
894
/* Drop one reference from def (atomically); free it when the count hits
 * zero. A NULL def is a no-op. */
static void
method_definition_release(rb_method_definition_t *def)
{
    if (def != NULL) {
        const unsigned int reference_count_was = RUBY_ATOMIC_FETCH_SUB(def->reference_count, 1);

        /* Releasing a definition with no outstanding references is a bug. */
        RUBY_ASSERT_ALWAYS(reference_count_was != 0);

        if (reference_count_was == 1) {
            if (METHOD_DEBUG) fprintf(stderr, "-%p-%s:1->0 (remove)\n", (void *)def,
                                      rb_id2name(def->original_id));
            SIZED_FREE(def);
        }
        else {
            if (METHOD_DEBUG) fprintf(stderr, "-%p-%s:%d->%d (dec)\n", (void *)def, rb_id2name(def->original_id),
                                      reference_count_was, reference_count_was - 1);
        }
    }
}
914
915void
916rb_method_definition_release(rb_method_definition_t *def)
917{
918 method_definition_release(def);
919}
920
921static void delete_overloaded_cme(const rb_callable_method_entry_t *cme);
922
/* Drop VM-global weak references held on behalf of me — currently only the
 * overloaded-cme table entry for iseq_overload methods — before the method
 * entry itself is freed. */
void
rb_free_method_entry_vm_weak_references(const rb_method_entry_t *me)
{
    if (me->def && me->def->iseq_overload) {
        delete_overloaded_cme((const rb_callable_method_entry_t *)me);
    }
}
930
/* Tear down a method entry being freed: notify ZJIT for cached entries and
 * release the shared definition. */
void
rb_free_method_entry(const rb_method_entry_t *me)
{
#if USE_ZJIT
    if (METHOD_ENTRY_CACHED(me)) {
        rb_zjit_cme_free((const rb_callable_method_entry_t *)me);
    }
#endif

#if USE_YJIT
    // NOTE(review): this comment appears truncated in the source. It seems
    // to say YJIT roots CMEs via rb_yjit_root_mark() in `Invariants`, so
    // presumably nothing needs to be removed here — TODO confirm upstream.
#endif

    method_definition_release(me->def);
}
947
948static inline rb_method_entry_t *search_method(VALUE klass, ID id, VALUE *defined_class_ptr);
949extern int rb_method_definition_eq(const rb_method_definition_t *d1, const rb_method_definition_t *d2);
950
/* Select the invoker thunk matching a cfunc arity (-2..15). The thread's
 * ext_config.ractor_safe flag chooses between the plain call_cfunc_* thunks
 * and the ractor_safe_* variants; out-of-range arity is a VM bug (callers
 * validate in rb_add_method_cfunc). */
static VALUE
(*call_cfunc_invoker_func(int argc))(VALUE recv, int argc, const VALUE *, VALUE (*func)(ANYARGS))
{
    if (!GET_THREAD()->ext_config.ractor_safe) {
        switch (argc) {
          case -2: return &call_cfunc_m2;
          case -1: return &call_cfunc_m1;
          case 0: return &call_cfunc_0;
          case 1: return &call_cfunc_1;
          case 2: return &call_cfunc_2;
          case 3: return &call_cfunc_3;
          case 4: return &call_cfunc_4;
          case 5: return &call_cfunc_5;
          case 6: return &call_cfunc_6;
          case 7: return &call_cfunc_7;
          case 8: return &call_cfunc_8;
          case 9: return &call_cfunc_9;
          case 10: return &call_cfunc_10;
          case 11: return &call_cfunc_11;
          case 12: return &call_cfunc_12;
          case 13: return &call_cfunc_13;
          case 14: return &call_cfunc_14;
          case 15: return &call_cfunc_15;
          default:
            rb_bug("unsupported length: %d", argc);
        }
    }
    else {
        switch (argc) {
          case -2: return &ractor_safe_call_cfunc_m2;
          case -1: return &ractor_safe_call_cfunc_m1;
          case 0: return &ractor_safe_call_cfunc_0;
          case 1: return &ractor_safe_call_cfunc_1;
          case 2: return &ractor_safe_call_cfunc_2;
          case 3: return &ractor_safe_call_cfunc_3;
          case 4: return &ractor_safe_call_cfunc_4;
          case 5: return &ractor_safe_call_cfunc_5;
          case 6: return &ractor_safe_call_cfunc_6;
          case 7: return &ractor_safe_call_cfunc_7;
          case 8: return &ractor_safe_call_cfunc_8;
          case 9: return &ractor_safe_call_cfunc_9;
          case 10: return &ractor_safe_call_cfunc_10;
          case 11: return &ractor_safe_call_cfunc_11;
          case 12: return &ractor_safe_call_cfunc_12;
          case 13: return &ractor_safe_call_cfunc_13;
          case 14: return &ractor_safe_call_cfunc_14;
          case 15: return &ractor_safe_call_cfunc_15;
          default:
            rb_bug("unsupported length: %d", argc);
        }
    }
}
1003
1004static void
1005setup_method_cfunc_struct(rb_method_cfunc_t *cfunc, VALUE (*func)(ANYARGS), int argc)
1006{
1007 cfunc->func = func;
1008 cfunc->argc = argc;
1009 cfunc->invoker = call_cfunc_invoker_func(argc);
1010}
1011
1012
1014method_definition_addref(rb_method_definition_t *def, bool complemented)
1015{
1016 unsigned int reference_count_was = RUBY_ATOMIC_FETCH_ADD(def->reference_count, 1);
1017 if (!complemented && reference_count_was > 0) {
1018 /* TODO: A Ractor can reach this via UnboundMethod#bind */
1019 def->aliased = true;
1020 }
1021 if (METHOD_DEBUG) fprintf(stderr, "+%p-%s:%d->%d\n", (void *)def, rb_id2name(def->original_id), reference_count_was, reference_count_was+1);
1022
1023 return def;
1024}
1025
/* Public non-complemented addref; may mark def as aliased (see the static
 * helper above). */
void
rb_method_definition_addref(rb_method_definition_t *def)
{
    method_definition_addref(def, false);
}
1031
/* Attach def to me (releasing any previous definition and taking a ref on
 * the new one), then initialize the type-specific body from opts. When opts
 * is NULL only the reference swap happens. Write barriers (RB_OBJ_WRITE /
 * RB_OBJ_WRITTEN via helpers) are issued for every VALUE stored into def. */
void
rb_method_definition_set(const rb_method_entry_t *me, rb_method_definition_t *def, void *opts)
{
    method_definition_release(me->def);
    /* me->def is const-qualified in the struct; cast away to install def. */
    *(rb_method_definition_t **)&me->def = method_definition_addref(def, METHOD_ENTRY_COMPLEMENTED(me));

    if (!ruby_running) add_opt_method_entry(me);

    if (opts != NULL) {
        switch (def->type) {
          case VM_METHOD_TYPE_ISEQ:
            {
                rb_method_iseq_t *iseq_body = (rb_method_iseq_t *)opts;
                const rb_iseq_t *iseq = iseq_body->iseqptr;
                rb_cref_t *method_cref, *cref = iseq_body->cref;

                /* setup iseq first (before invoking GC) */
                RB_OBJ_WRITE(me, &def->body.iseq.iseqptr, iseq);

                // Methods defined in `with_jit` should be considered METHOD_ENTRY_BASIC
                if (rb_iseq_attr_p(iseq, BUILTIN_ATTR_C_TRACE)) {
                    METHOD_ENTRY_BASIC_SET((rb_method_entry_t *)me, TRUE);
                }

                if (ISEQ_BODY(iseq)->mandatory_only_iseq) def->iseq_overload = 1;

                if (0) vm_cref_dump("rb_method_definition_create", cref);

                if (cref) {
                    method_cref = cref;
                }
                else {
                    method_cref = vm_cref_new_toplevel(GET_EC()); /* TODO: can we reuse? */
                }

                RB_OBJ_WRITE(me, &def->body.iseq.cref, method_cref);
                return;
            }
          case VM_METHOD_TYPE_CFUNC:
            {
                rb_method_cfunc_t *cfunc = (rb_method_cfunc_t *)opts;
                setup_method_cfunc_struct(UNALIGNED_MEMBER_PTR(def, body.cfunc), cfunc->func, cfunc->argc);
                return;
            }
          case VM_METHOD_TYPE_ATTRSET:
          case VM_METHOD_TYPE_IVAR:
            {
                const rb_execution_context_t *ec = GET_EC();
                rb_control_frame_t *cfp;
                int line;

                /* opts carries the attribute's ID directly, not a pointer. */
                def->body.attr.id = (ID)(VALUE)opts;

                cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);

                /* Record the defining Ruby source location when available. */
                if (cfp && (line = rb_vm_get_sourceline(cfp))) {
                    VALUE location = rb_ary_new3(2, rb_iseq_path(CFP_ISEQ(cfp)), INT2FIX(line));
                    rb_ary_freeze(location);
                    RB_OBJ_SET_SHAREABLE(location);
                    RB_OBJ_WRITE(me, &def->body.attr.location, location);
                }
                else {
                    VM_ASSERT(def->body.attr.location == 0);
                }
                return;
            }
          case VM_METHOD_TYPE_BMETHOD:
            RB_OBJ_WRITE(me, &def->body.bmethod.proc, (VALUE)opts);
            def->body.bmethod.defined_ractor_id = rb_ec_ractor_id(GET_EC());
            return;
          case VM_METHOD_TYPE_NOTIMPLEMENTED:
            setup_method_cfunc_struct(UNALIGNED_MEMBER_PTR(def, body.cfunc), (VALUE(*)(ANYARGS))rb_f_notimplement_internal, -1);
            return;
          case VM_METHOD_TYPE_OPTIMIZED:
            def->body.optimized = *(rb_method_optimized_t *)opts;
            return;
          case VM_METHOD_TYPE_REFINED:
            {
                RB_OBJ_WRITE(me, &def->body.refined.orig_me, (rb_method_entry_t *)opts);
                return;
            }
          case VM_METHOD_TYPE_ALIAS:
            RB_OBJ_WRITE(me, &def->body.alias.original_me, (rb_method_entry_t *)opts);
            return;
          case VM_METHOD_TYPE_ZSUPER:
          case VM_METHOD_TYPE_UNDEF:
          case VM_METHOD_TYPE_MISSING:
            /* No body payload for these method types. */
            return;
        }
    }
}
1123
/* Re-run GC write barriers for every object reference reachable from me's
 * definition body; used after a definition is attached to or copied into
 * me so the GC learns about the new edges. */
static void
method_definition_reset(const rb_method_entry_t *me)
{
    rb_method_definition_t *def = me->def;

    switch (def->type) {
      case VM_METHOD_TYPE_ISEQ:
        RB_OBJ_WRITTEN(me, Qundef, def->body.iseq.iseqptr);
        RB_OBJ_WRITTEN(me, Qundef, def->body.iseq.cref);
        break;
      case VM_METHOD_TYPE_ATTRSET:
      case VM_METHOD_TYPE_IVAR:
        RB_OBJ_WRITTEN(me, Qundef, def->body.attr.location);
        break;
      case VM_METHOD_TYPE_BMETHOD:
        RB_OBJ_WRITTEN(me, Qundef, def->body.bmethod.proc);
        break;
      case VM_METHOD_TYPE_REFINED:
        RB_OBJ_WRITTEN(me, Qundef, def->body.refined.orig_me);
        break;
      case VM_METHOD_TYPE_ALIAS:
        RB_OBJ_WRITTEN(me, Qundef, def->body.alias.original_me);
        break;
      /* the remaining types hold no VALUE references */
      case VM_METHOD_TYPE_CFUNC:
      case VM_METHOD_TYPE_ZSUPER:
      case VM_METHOD_TYPE_MISSING:
      case VM_METHOD_TYPE_OPTIMIZED:
      case VM_METHOD_TYPE_UNDEF:
      case VM_METHOD_TYPE_NOTIMPLEMENTED:
        break;
    }
}
1156
/* Monotonically increasing serial handed to each new method definition. */
static rb_atomic_t method_serial = 1;

/* Allocate a fresh rb_method_definition_t of `type` whose original_id is
 * `mid`, stamped with a unique serial and the current box. */
rb_method_definition_create(rb_method_type_t type, ID mid)
{
    def->type = type;
    def->original_id = mid;
    def->method_serial = (uintptr_t)RUBY_ATOMIC_FETCH_ADD(method_serial, 1);
    def->box = rb_current_box();
    return def;
}
1170
/* Allocate a new method-entry imemo.  Takes a reference on `def` when one
 * is supplied, and registers the entry as a pinning object so tables that
 * hold it by raw address (e.g. overloaded_cme_table) stay valid under a
 * moving GC. */
static rb_method_entry_t *
rb_method_entry_alloc(ID called_id, VALUE owner, VALUE defined_class, rb_method_definition_t *def, bool complement)
{
    if (def) method_definition_addref(def, complement);
    if (RTEST(defined_class)) {
        // not negative cache
        VM_ASSERT_TYPE2(defined_class, T_CLASS, T_ICLASS);
    }
    rb_method_entry_t *me = SHAREABLE_IMEMO_NEW(rb_method_entry_t, imemo_ment, defined_class);

    // mark_and_move_method_entry pins itself when it is in the overloaded_cme table
    rb_gc_register_pinning_obj((VALUE)me);

    *((rb_method_definition_t **)&me->def) = def;
    me->called_id = called_id;
    me->owner = owner;

    return me;
}
1190
1191static VALUE
1192filter_defined_class(VALUE klass)
1193{
1194 switch (BUILTIN_TYPE(klass)) {
1195 case T_CLASS:
1196 return klass;
1197 case T_MODULE:
1198 return 0;
1199 case T_ICLASS:
1200 break;
1201 default:
1202 break;
1203 }
1204 rb_bug("filter_defined_class: %s", rb_obj_info(klass));
1205}
1206
/* Build a method entry owned by klass with visibility visi.  The BASIC
 * flag is set while the VM is still booting (ruby_running is false). */
rb_method_entry_create(ID called_id, VALUE klass, rb_method_visibility_t visi, rb_method_definition_t *def)
{
    rb_method_entry_t *me = rb_method_entry_alloc(called_id, klass, filter_defined_class(klass), def, false);
    METHOD_ENTRY_FLAGS_SET(me, visi, ruby_running ? FALSE : TRUE);
    if (def != NULL) method_definition_reset(me);
    return me;
}
1215
// Return a cloned ME that's not invalidated (MEs are disposable for caching).
const rb_method_entry_t *
rb_method_entry_clone(const rb_method_entry_t *src_me)
{
    rb_method_entry_t *me = rb_method_entry_alloc(src_me->called_id, src_me->owner, src_me->defined_class, src_me->def, METHOD_ENTRY_COMPLEMENTED(src_me));

    METHOD_ENTRY_FLAGS_COPY(me, src_me);

    // Also clone inner ME in case of refinement ME
    if (src_me->def &&
        src_me->def->type == VM_METHOD_TYPE_REFINED &&
        src_me->def->body.refined.orig_me) {
        const rb_method_entry_t *orig_me = src_me->def->body.refined.orig_me;
        VM_ASSERT(orig_me->def->type != VM_METHOD_TYPE_REFINED);

        rb_method_entry_t *orig_clone = rb_method_entry_alloc(orig_me->called_id,
                orig_me->owner, orig_me->defined_class, orig_me->def, METHOD_ENTRY_COMPLEMENTED(orig_me));
        METHOD_ENTRY_FLAGS_COPY(orig_clone, orig_me);

        // Clone definition, since writing a VALUE to a shared definition
        // can create reference edges we can't run WBs for.
        rb_method_definition_t *clone_def =
            rb_method_definition_create(VM_METHOD_TYPE_REFINED, src_me->called_id);
        rb_method_definition_set(me, clone_def, orig_clone);
    }
    return me;
}
1243
/* Create a "complemented" callable method entry: a copy of src_me whose
 * defined_class slot is filled in with `defined_class` (used when src_me
 * lives in a module and so has no defined_class of its own).  For refined
 * placeholders the inner orig_me is cloned too and wrapped in a fresh
 * REFINED definition, so the complement owns its own reference edges. */
rb_method_entry_complement_defined_class(const rb_method_entry_t *src_me, ID called_id, VALUE defined_class)
{
    rb_method_definition_t *def = src_me->def;
    const rb_method_entry_t *refined_orig_me = NULL;

    if (!src_me->defined_class &&
        def->type == VM_METHOD_TYPE_REFINED &&
        def->body.refined.orig_me) {
        const rb_method_entry_t *orig_me =
            rb_method_entry_clone(def->body.refined.orig_me);
        RB_OBJ_WRITE((VALUE)orig_me, &orig_me->defined_class, defined_class);
        refined_orig_me = orig_me;
        def = NULL;
    }

    me = rb_method_entry_alloc(called_id, src_me->owner, defined_class, def, true);
    METHOD_ENTRY_FLAGS_COPY(me, src_me);
    METHOD_ENTRY_COMPLEMENTED_SET(me);
    if (!def) {
        def = rb_method_definition_create(VM_METHOD_TYPE_REFINED, called_id);
        rb_method_definition_set(me, def, (void *)refined_orig_me);
    }

    VM_ASSERT_TYPE(me->owner, T_MODULE);

    return (rb_callable_method_entry_t *)me;
}
1273
/* Overwrite dst in place with src's definition, identity and flags,
 * releasing dst's previous definition and taking a reference on src's. */
void
rb_method_entry_copy(rb_method_entry_t *dst, const rb_method_entry_t *src)
{
    method_definition_release(dst->def);
    *(rb_method_definition_t **)&dst->def = method_definition_addref(src->def, METHOD_ENTRY_COMPLEMENTED(src));
    method_definition_reset(dst);
    dst->called_id = src->called_id;
    RB_OBJ_WRITE((VALUE)dst, &dst->owner, src->owner);
    RB_OBJ_WRITE((VALUE)dst, &dst->defined_class, src->defined_class);
    METHOD_ENTRY_FLAGS_COPY(dst, src);
}
1285
/* Convert me into a VM_METHOD_TYPE_REFINED entry in place: the current
 * definition is preserved in a cloned "orig_me" stored inside a new
 * refined definition, and the entry becomes public.  No-op when me is
 * already refined. */
static void
make_method_entry_refined(VALUE owner, rb_method_entry_t *me)
{
    if (me->def->type == VM_METHOD_TYPE_REFINED) {
        return;
    }
    else {

        rb_vm_check_redefinition_opt_method(me, me->owner);

        struct rb_method_entry_struct *orig_me =
            rb_method_entry_alloc(me->called_id,
                                  me->owner,
                                  me->defined_class,
                                  me->def,
                                  true);
        METHOD_ENTRY_FLAGS_COPY(orig_me, me);

        def = rb_method_definition_create(VM_METHOD_TYPE_REFINED, me->called_id);
        rb_method_definition_set(me, def, orig_me);
        METHOD_ENTRY_VISI_SET(me, METHOD_VISI_PUBLIC);
    }
}
1310
1311static inline rb_method_entry_t *
1312lookup_method_table(VALUE klass, ID id)
1313{
1314 st_data_t body;
1315 struct rb_id_table *m_tbl = RCLASS_M_TBL(klass);
1316
1317 if (rb_id_table_lookup(m_tbl, id, &body)) {
1318 return (rb_method_entry_t *) body;
1319 }
1320 else {
1321 return 0;
1322 }
1323}
1324
1325void
1326rb_add_refined_method_entry(VALUE refined_class, ID mid)
1327{
1328 rb_method_entry_t *me = lookup_method_table(refined_class, mid);
1329
1330 if (me) {
1331 make_method_entry_refined(refined_class, me);
1332 rb_clear_method_cache(refined_class, mid);
1333 }
1334 else {
1335 rb_add_method(refined_class, mid, VM_METHOD_TYPE_REFINED, 0, METHOD_VISI_PUBLIC);
1336 }
1337}
1338
/* Recursively scan klass and its subclass tree for an override of the
 * optimizable method mid, deoptimizing (via
 * rb_vm_check_redefinition_opt_method) where a different entry now wins
 * the lookup.  arg carries the ID cast to VALUE. */
static void
check_override_opt_method_i(VALUE klass, VALUE arg)
{
    if (RB_TYPE_P(klass, T_ICLASS)) {
        // ICLASS from a module's subclass list: check the includer and
        // recurse into the includer's T_CLASS subclasses.
        VALUE includer = RCLASS_INCLUDER(klass);
        if (!UNDEF_P(includer) && includer) {
            check_override_opt_method_i(includer, arg);
        }
        return;
    }

    ID mid = (ID)arg;
    const rb_method_entry_t *me, *newme;

    if (vm_redefinition_check_flag(klass)) {
        me = lookup_method_table(RCLASS_ORIGIN(klass), mid);
        if (me) {
            newme = rb_method_entry(klass, mid);
            /* a different winning entry means me has been shadowed */
            if (newme != me) rb_vm_check_redefinition_opt_method(me, me->owner);
        }
    }
    rb_class_foreach_subclass(klass, check_override_opt_method_i, (VALUE)mid);
}
1364
1365static void
1366check_override_opt_method(VALUE klass, VALUE mid)
1367{
1368 if (rb_vm_check_optimizable_mid(mid)) {
1369 check_override_opt_method_i(klass, mid);
1370 }
1371}
1372
/* cfunc body substituted for a ZSUPER definition inside a refinement
 * (see rb_method_entry_make): forwards all arguments, including keywords,
 * to super. */
static VALUE
zsuper_to_super(int argc, VALUE *argv, VALUE self)
{
    return rb_call_super_kw(argc, argv, RB_PASS_CALLED_KEYWORDS);
}
1378
1379static inline rb_method_entry_t* search_method0(VALUE klass, ID id, VALUE *defined_class_ptr, bool skip_refined);
1380/*
1381 * klass->method_table[mid] = method_entry(defined_class, visi, def)
1382 *
 * If def is given (!= NULL), then just use it and ignore original_id and opts.
1384 * If not given, then make a new def with original_id and opts.
1385 */
static rb_method_entry_t *
rb_method_entry_make(VALUE klass, ID mid, VALUE defined_class, rb_method_visibility_t visi,
                     rb_method_type_t type, rb_method_definition_t *def, ID original_id, void *opts)
{
    struct rb_id_table *mtbl;
    st_data_t data;
    int make_refined = 0;
    VALUE orig_klass;
    bool turn_zsuper_to_super = false;

    if (NIL_P(klass)) {
        klass = rb_cObject;
    }
    orig_klass = klass;

    /* a handful of core hook methods are forced private on normal definition */
    if (!RCLASS_SINGLETON_P(klass) &&
        type != VM_METHOD_TYPE_NOTIMPLEMENTED &&
        type != VM_METHOD_TYPE_ZSUPER) {
        switch (mid) {
          case idInitialize:
          case idInitialize_copy:
          case idInitialize_clone:
          case idInitialize_dup:
          case idRespond_to_missing:
            visi = METHOD_VISI_PRIVATE;
        }
    }

    if (type != VM_METHOD_TYPE_REFINED) {
        rb_class_modify_check(klass);
    }

    if (RB_TYPE_P(klass, T_MODULE) && FL_TEST(klass, RMODULE_IS_REFINEMENT)) {
        VALUE refined_class = rb_refinement_module_get_refined_class(klass);
        if (type == VM_METHOD_TYPE_ZSUPER) {
            /* emulated below with a cfunc calling zsuper_to_super */
            turn_zsuper_to_super = true;
        }
        rb_add_refined_method_entry(refined_class, mid);
    }
    if (type == VM_METHOD_TYPE_REFINED) {
        rb_method_entry_t *old_me = lookup_method_table(RCLASS_ORIGIN(klass), mid);
        if (old_me) rb_vm_check_redefinition_opt_method(old_me, klass);
    }
    else {
        klass = RCLASS_ORIGIN(klass);
        if (klass != orig_klass) {
            rb_clear_method_cache(orig_klass, mid);
        }
    }
    mtbl = RCLASS_WRITABLE_M_TBL(klass);

    /* check re-definition */
    if (rb_id_table_lookup(mtbl, mid, &data)) {
        rb_method_entry_t *old_me = (rb_method_entry_t *)data;
        rb_method_definition_t *old_def = old_me->def;

        if (rb_method_definition_eq(old_def, def)) return old_me;
        rb_vm_check_redefinition_opt_method(old_me, klass);

        if (old_def->type == VM_METHOD_TYPE_REFINED) make_refined = 1;

        /* warn on redefinition under -W, except for aliases/zsuper/undef
         * and definitions explicitly marked no_redef_warning */
        if (RTEST(ruby_verbose) &&
            type != VM_METHOD_TYPE_UNDEF &&
            (old_def->aliased == false) &&
            (!old_def->no_redef_warning) &&
            !make_refined &&
            old_def->type != VM_METHOD_TYPE_UNDEF &&
            old_def->type != VM_METHOD_TYPE_ZSUPER &&
            old_def->type != VM_METHOD_TYPE_ALIAS) {
            const rb_iseq_t *iseq = 0;

            switch (old_def->type) {
              case VM_METHOD_TYPE_ISEQ:
                iseq = def_iseq_ptr(old_def);
                break;
              case VM_METHOD_TYPE_BMETHOD:
                iseq = rb_proc_get_iseq(old_def->body.bmethod.proc, 0);
                break;
              default:
                break;
            }
            if (iseq) {
                rb_warning(
                    "method redefined; discarding old %"PRIsVALUE"\n%s:%d: warning: previous definition of %"PRIsVALUE" was here",
                    rb_id2str(mid),
                    RSTRING_PTR(rb_iseq_path(iseq)),
                    ISEQ_BODY(iseq)->location.first_lineno,
                    rb_id2str(old_def->original_id)
                );
            }
            else {
                rb_warning("method redefined; discarding old %"PRIsVALUE, rb_id2str(mid));
            }
        }
    }

    /* create method entry */
    me = rb_method_entry_create(mid, defined_class, visi, NULL);
    if (def == NULL) {
        def = rb_method_definition_create(type, original_id);
        if (turn_zsuper_to_super) {
            def->type = VM_METHOD_TYPE_CFUNC;
            def->body.cfunc.func = (rb_cfunc_t)zsuper_to_super;
            def->body.cfunc.invoker = ractor_safe_call_cfunc_m1;
            def->body.cfunc.argc = -1;
        }
    }
    rb_method_definition_set(me, def, opts);

    rb_clear_method_cache(klass, mid);

    /* check mid */
    if (klass == rb_cObject) {
        switch (mid) {
          case idInitialize:
          case idRespond_to_missing:
          case idMethodMissing:
          case idRespond_to:
            rb_warn("redefining Object#%s may cause infinite loop", rb_id2name(mid));
        }
    }
    /* check mid */
    if (mid == object_id || mid == id__id__ || mid == id__send__) {
        if (type != VM_METHOD_TYPE_CFUNC && search_method(klass, mid, 0)) {
            rb_warn("redefining '%s' may cause serious problems", rb_id2name(mid));
        }
    }

    if (make_refined) {
        make_method_entry_refined(klass, me);
    }

    rb_method_table_insert(klass, mtbl, mid, me);

    VM_ASSERT(me->def != NULL);

    /* check optimized method override by a prepended module */
    if (RB_TYPE_P(orig_klass, T_MODULE)) {
        check_override_opt_method(klass, (VALUE)mid);
    }

    return me;
}
1530
/* Process-global table mapping a cme to its mandatory-only ("overloaded")
 * counterpart cme. */
static st_table *
overloaded_cme_table(void)
{
    return &GET_VM()->overloaded_cme_table;
}

#if VM_CHECK_MODE > 0
/* Debug: print one key/value pair of the overloaded_cme table. */
static int
vm_dump_overloaded_cme_table(st_data_t key, st_data_t val, st_data_t dmy)
{
    fprintf(stderr, "key: "); rp(key);
    fprintf(stderr, "val: "); rp(val);
    return ST_CONTINUE;
}

/* Debug: dump the whole overloaded_cme table to stderr. */
void
rb_vm_dump_overloaded_cme_table(void)
{
    fprintf(stderr, "== rb_vm_dump_overloaded_cme_table\n");
    st_foreach(overloaded_cme_table(), vm_dump_overloaded_cme_table, 0);
}
#endif
1553
/* st_update callback for lookup_overloaded_cme(): fetch the mandatory-only
 * cme for *key into *(data), deleting the pair when either side has
 * already become a garbage object. */
static int
lookup_overloaded_cme_i(st_data_t *key, st_data_t *value, st_data_t data, int existing)
{
    if (existing) {
        const rb_callable_method_entry_t *cme = (const rb_callable_method_entry_t *)*key;
        const rb_callable_method_entry_t *monly_cme = (const rb_callable_method_entry_t *)*value;
        const rb_callable_method_entry_t **ptr = (const rb_callable_method_entry_t **)data;

        if (rb_objspace_garbage_object_p((VALUE)cme) ||
            rb_objspace_garbage_object_p((VALUE)monly_cme)) {
            *ptr = NULL;
            return ST_DELETE;
        }
        else {
            *ptr = monly_cme;
        }
    }

    return ST_STOP;
}
1574
/* Return the mandatory-only cme registered for cme, or NULL.  Needs the
 * VM lock: the lookup may prune stale (garbage) pairs from the table. */
static const rb_callable_method_entry_t *
lookup_overloaded_cme(const rb_callable_method_entry_t *cme)
{
    ASSERT_vm_locking();

    const rb_callable_method_entry_t *monly_cme = NULL;
    st_update(overloaded_cme_table(), (st_data_t)cme, lookup_overloaded_cme_i, (st_data_t)&monly_cme);
    return monly_cme;
}

#if VM_CHECK_MODE > 0
rb_vm_lookup_overloaded_cme(const rb_callable_method_entry_t *cme)
{
    return lookup_overloaded_cme(cme);
}
#endif

/* Drop cme's entry from the overloaded_cme table, if present. */
static void
delete_overloaded_cme(const rb_callable_method_entry_t *cme)
{
    st_data_t cme_data = (st_data_t)cme;
    ASSERT_vm_locking();
    st_delete(overloaded_cme_table(), &cme_data, NULL);
}
1600
/* Return (creating and registering on first use) the cme wrapping cme's
 * mandatory-only iseq.  The created entry copies cme's identity and
 * visibility but its ISEQ definition points at mandatory_only_iseq. */
static const rb_callable_method_entry_t *
get_overloaded_cme(const rb_callable_method_entry_t *cme)
{
    const rb_callable_method_entry_t *monly_cme = lookup_overloaded_cme(cme);

    if (monly_cme && !METHOD_ENTRY_INVALIDATED(monly_cme)) {
        return monly_cme;
    }
    else {
        // create
        rb_method_definition_t *def = rb_method_definition_create(VM_METHOD_TYPE_ISEQ, cme->def->original_id);
        rb_method_entry_t *me = rb_method_entry_alloc(cme->called_id,
                                                      cme->owner,
                                                      cme->defined_class,
                                                      def,
                                                      false);

        RB_OBJ_WRITE(me, &def->body.iseq.cref, cme->def->body.iseq.cref);
        RB_OBJ_WRITE(me, &def->body.iseq.iseqptr, ISEQ_BODY(cme->def->body.iseq.iseqptr)->mandatory_only_iseq);

        ASSERT_vm_locking();
        st_insert(overloaded_cme_table(), (st_data_t)cme, (st_data_t)me);

        METHOD_ENTRY_VISI_SET(me, METHOD_ENTRY_VISI(cme));
        return (rb_callable_method_entry_t *)me;
    }
}
1628
/* If cme has a mandatory-only variant and the call site ci is a simple,
 * non-forwarding call passing exactly the leading (mandatory) arguments,
 * substitute the overloaded cme so the cheaper iseq runs. */
rb_check_overloaded_cme(const rb_callable_method_entry_t *cme, const struct rb_callinfo * const ci)
{
    if (UNLIKELY(cme->def->iseq_overload) &&
        (vm_ci_flag(ci) & (VM_CALL_ARGS_SIMPLE)) &&
        (!(vm_ci_flag(ci) & VM_CALL_FORWARDING)) &&
        (int)vm_ci_argc(ci) == ISEQ_BODY(method_entry_iseqptr(cme))->param.lead_num) {
        VM_ASSERT(cme->def->type == VM_METHOD_TYPE_ISEQ, "type: %d", cme->def->type); // iseq_overload is marked only on ISEQ methods

        cme = get_overloaded_cme(cme);

        VM_ASSERT(cme != NULL);
        METHOD_ENTRY_CACHED_SET((struct rb_callable_method_entry_struct *)cme);
    }

    return cme;
}
1646
/* Invoke the Ruby-level hook `hook` (e.g. method_added) on klass with
 * ID2SYM(mid); for a singleton class the corresponding singleton_* hook
 * is sent to the attached object instead. */
#define CALL_METHOD_HOOK(klass, hook, mid) do {         \
        const VALUE arg = ID2SYM(mid);                  \
        VALUE recv_class = (klass);                     \
        ID hook_id = (hook);                            \
        if (RCLASS_SINGLETON_P((klass))) {              \
            recv_class = RCLASS_ATTACHED_OBJECT((klass)); \
            hook_id = singleton_##hook;                 \
        }                                               \
        rb_funcallv(recv_class, hook_id, 1, &arg);      \
    } while (0)

/* Fire the method_added hook; suppressed while the VM is still booting. */
static void
method_added(VALUE klass, ID mid)
{
    if (ruby_running) {
        CALL_METHOD_HOOK(klass, added, mid);
    }
}
1665
/* Define klass#mid of the given type (payload in opts, interpreted by
 * rb_method_definition_set) with visibility visi, under the VM lock, then
 * fire method_added unless the entry is an undef/refined placeholder. */
void
rb_add_method(VALUE klass, ID mid, rb_method_type_t type, void *opts, rb_method_visibility_t visi)
{
    RB_VM_LOCKING() {
        rb_method_entry_make(klass, mid, klass, visi, type, NULL, mid, opts);
    }

    if (type != VM_METHOD_TYPE_UNDEF && type != VM_METHOD_TYPE_REFINED) {
        method_added(klass, mid);
    }
}
1677
1678void
1679rb_add_method_iseq(VALUE klass, ID mid, const rb_iseq_t *iseq, rb_cref_t *cref, rb_method_visibility_t visi)
1680{
1681 struct { /* should be same fields with rb_method_iseq_struct */
1682 const rb_iseq_t *iseqptr;
1683 rb_cref_t *cref;
1684 } iseq_body;
1685
1686 iseq_body.iseqptr = iseq;
1687 iseq_body.cref = cref;
1688
1689 rb_add_method(klass, mid, VM_METHOD_TYPE_ISEQ, &iseq_body, visi);
1690}
1691
/* Insert an existing entry me as klass#mid under the VM lock.  When the
 * table already held this exact entry, just refresh its flags and mark
 * the definition so future redefinitions don't warn. */
static rb_method_entry_t *
method_entry_set(VALUE klass, ID mid, const rb_method_entry_t *me,
                 rb_method_visibility_t visi, VALUE defined_class)
{
    rb_method_entry_t *newme;
    RB_VM_LOCKING() {
        newme = rb_method_entry_make(klass, mid, defined_class, visi,
                                     me->def->type, me->def, 0, NULL);
        if (newme == me) {
            me->def->no_redef_warning = TRUE;
            METHOD_ENTRY_FLAGS_SET(newme, visi, FALSE);
        }
    }

    method_added(klass, mid);
    return newme;
}
1709
/* Public wrapper for method_entry_set() with defined_class == klass. */
rb_method_entry_set(VALUE klass, ID mid, const rb_method_entry_t *me, rb_method_visibility_t visi)
{
    return method_entry_set(klass, mid, me, visi, klass);
}

/* Sentinel allocator value meaning "allocation explicitly undefined". */
#define UNDEF_ALLOC_FUNC ((rb_alloc_func_t)-1)
1717
1718static void
1719propagate_alloc_func(VALUE subclass, VALUE arg)
1720{
1721 if (RB_TYPE_P(subclass, T_CLASS) &&
1722 !RCLASS_SINGLETON_P(subclass) &&
1723 !FL_TEST_RAW(subclass, RCLASS_ALLOCATOR_DEFINED)) {
1724 RCLASS_SET_ALLOCATOR(subclass, (rb_alloc_func_t)arg);
1725 rb_class_foreach_subclass(subclass, propagate_alloc_func, arg);
1726 }
1727}
1728
/* Install klass's allocator, mark it as explicitly defined, and propagate
 * it to subclasses that have not declared their own.  Rejects singleton
 * classes. */
void
rb_define_alloc_func(VALUE klass, VALUE (*func)(VALUE))
{
    Check_Type(klass, T_CLASS);
    if (RCLASS_SINGLETON_P(klass)) {
        rb_raise(rb_eTypeError, "can't define an allocator for a singleton class");
    }
    RCLASS_SET_ALLOCATOR(klass, func);
    FL_SET_RAW(klass, RCLASS_ALLOCATOR_DEFINED);
    rb_class_foreach_subclass(klass, propagate_alloc_func, (VALUE)func);
}
1740
/* NOTE(review): the name line of this definition falls outside this view.
 * The body undefines klass's allocator by installing the UNDEF_ALLOC_FUNC
 * sentinel and propagating it to subclasses. */
void
{
    Check_Type(klass, T_CLASS);
    RCLASS_SET_ALLOCATOR(klass, UNDEF_ALLOC_FUNC);
    FL_SET_RAW(klass, RCLASS_ALLOCATOR_DEFINED);
    rb_class_foreach_subclass(klass, propagate_alloc_func, (VALUE)UNDEF_ALLOC_FUNC);
}

{
    RBIMPL_ASSERT_TYPE(klass, T_CLASS);

    /* report the "explicitly undefined" sentinel as no allocator (0) */
    rb_alloc_func_t allocator = RCLASS_ALLOCATOR(klass);
    if (allocator == UNDEF_ALLOC_FUNC) return 0;
    RUBY_ASSERT(allocator);
    return allocator;
}

/* Look up id directly in klass's own method table (no ancestor search). */
const rb_method_entry_t *
rb_method_entry_at(VALUE klass, ID id)
{
    return lookup_method_table(klass, id);
}
1766
/* Walk klass's ancestry looking for id's method entry.  When skip_refined
 * is set, refined placeholders with no orig_me are skipped and the search
 * continues in the superclass.  *defined_class_ptr receives the class the
 * entry was found in (or 0 when the chain was exhausted). */
static inline rb_method_entry_t*
search_method0(VALUE klass, ID id, VALUE *defined_class_ptr, bool skip_refined)
{
    rb_method_entry_t *me = NULL;

    RB_DEBUG_COUNTER_INC(mc_search);

    for (; klass; klass = RCLASS_SUPER(klass)) {
        RB_DEBUG_COUNTER_INC(mc_search_super);
        if ((me = lookup_method_table(klass, id)) != 0) {
            if (!skip_refined || me->def->type != VM_METHOD_TYPE_REFINED ||
                me->def->body.refined.orig_me) {
                break;
            }
        }
    }

    if (defined_class_ptr) *defined_class_ptr = klass;

    if (me == NULL) RB_DEBUG_COUNTER_INC(mc_search_notfound);

    VM_ASSERT(me == NULL || !METHOD_ENTRY_INVALIDATED(me),
              "invalid me, mid:%s, klass:%s(%s)",
              rb_id2name(id),
              RTEST(rb_mod_name(klass)) ? RSTRING_PTR(rb_mod_name(klass)) : "anonymous",
              rb_obj_info(klass));
    return me;
}
1795
/* search_method0() without refinement-placeholder skipping. */
static inline rb_method_entry_t*
search_method(VALUE klass, ID id, VALUE *defined_class_ptr)
{
    return search_method0(klass, id, defined_class_ptr, false);
}
1801
1802static rb_method_entry_t *
1803search_method_protect(VALUE klass, ID id, VALUE *defined_class_ptr)
1804{
1805 rb_method_entry_t *me = search_method(klass, id, defined_class_ptr);
1806
1807 if (!UNDEFINED_METHOD_ENTRY_P(me)) {
1808 return me;
1809 }
1810 else {
1811 return NULL;
1812 }
1813}
1814
/* Public ancestry lookup for klass#id; undef entries come back as NULL. */
const rb_method_entry_t *
rb_method_entry(VALUE klass, ID id)
{
    return search_method_protect(klass, id, NULL);
}
1820
/* Turn me (found via defined_class) into a callable method entry.
 * Entries that already carry a defined_class are callable as-is; module
 * methods (defined_class == 0) are "complemented" with defined_class and
 * memoized in that class's callable_m_tbl.  When create is FALSE, a
 * missing complement yields NULL instead of being built. */
static inline const rb_callable_method_entry_t *
prepare_callable_method_entry(VALUE defined_class, ID id, const rb_method_entry_t * const me, int create)
{
    struct rb_id_table *mtbl;
    const rb_callable_method_entry_t *cme;
    VALUE cme_data;
    int cme_found = 0;

    if (me) {
        if (me->defined_class == 0) {
            RB_DEBUG_COUNTER_INC(mc_cme_complement);
            VM_ASSERT_TYPE2(defined_class, T_ICLASS, T_MODULE);

            mtbl = RCLASS_WRITABLE_CALLABLE_M_TBL(defined_class);
            if (mtbl && rb_id_table_lookup(mtbl, id, &cme_data)) {
                cme = (rb_callable_method_entry_t *)cme_data;
                cme_found = 1;
            }
            if (cme_found) {
                RB_DEBUG_COUNTER_INC(mc_cme_complement_hit);
                VM_ASSERT(callable_method_entry_p(cme));
                VM_ASSERT(!METHOD_ENTRY_INVALIDATED(cme));
            }
            else if (create) {
                /* build the complement and memoize it on defined_class */
                if (!mtbl) {
                    mtbl = rb_id_table_create(0);
                    RCLASS_WRITE_CALLABLE_M_TBL(defined_class, mtbl);
                }
                cme = rb_method_entry_complement_defined_class(me, me->called_id, defined_class);
                rb_id_table_insert(mtbl, id, (VALUE)cme);
                RB_OBJ_WRITTEN(defined_class, Qundef, (VALUE)cme);
                VM_ASSERT(callable_method_entry_p(cme));
            }
            else {
                return NULL;
            }
        }
        else {
            cme = (const rb_callable_method_entry_t *)me;
            VM_ASSERT(callable_method_entry_p(cme));
            VM_ASSERT(!METHOD_ENTRY_INVALIDATED(cme));
        }
        return cme;
    }
    else {
        return NULL;
    }
}
1869
/* Lookup that only returns an already-complemented cme (never creates). */
static const rb_callable_method_entry_t *
complemented_callable_method_entry(VALUE klass, ID id)
{
    VALUE defined_class;
    rb_method_entry_t *me = search_method(klass, id, &defined_class);
    return prepare_callable_method_entry(defined_class, id, me, FALSE);
}
1877
/* Consult klass's call-cache table for mid (VM lock held).  A hit whose
 * cme has been invalidated is evicted — after a VM barrier — before the
 * miss is reported. */
static const rb_callable_method_entry_t *
cached_callable_method_entry(VALUE klass, ID mid)
{
    ASSERT_vm_locking();

    VALUE cc_tbl = RCLASS_WRITABLE_CC_TBL(klass);
    VALUE ccs_data;

    if (cc_tbl && rb_managed_id_table_lookup(cc_tbl, mid, &ccs_data)) {
        struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_data;
        VM_ASSERT(vm_ccs_p(ccs));

        if (LIKELY(!METHOD_ENTRY_INVALIDATED(ccs->cme))) {
            VM_ASSERT(ccs->cme->called_id == mid);
            RB_DEBUG_COUNTER_INC(ccs_found);
            return ccs->cme;
        }
        else {
            rb_vm_barrier();

            rb_managed_id_table_delete(cc_tbl, mid);
            rb_vm_ccs_invalidate_and_free(ccs);
        }
    }

    RB_DEBUG_COUNTER_INC(ccs_not_found);
    return NULL;
}
1906
/* Record cme in klass's call-cache table under mid (VM lock held).  When
 * several Ractors run, the table is duplicated and swapped in atomically
 * so concurrent lock-free readers never observe a partial update. */
static void
cache_callable_method_entry(VALUE klass, ID mid, const rb_callable_method_entry_t *cme)
{
    ASSERT_vm_locking();
    VM_ASSERT(cme != NULL);

    VALUE cc_tbl = RCLASS_WRITABLE_CC_TBL(klass);
    VALUE ccs_data;

    if (!cc_tbl) {
        cc_tbl = rb_vm_cc_table_create(2);
        RCLASS_WRITE_CC_TBL(klass, cc_tbl);
    }

    if (rb_managed_id_table_lookup(cc_tbl, mid, &ccs_data)) {
#if VM_CHECK_MODE > 0
        struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_data;
        VM_ASSERT(ccs->cme == cme);
#endif
    }
    else {
        if (rb_multi_ractor_p()) {
            VALUE new_cc_tbl = rb_vm_cc_table_dup(cc_tbl);
            vm_ccs_create(klass, new_cc_tbl, mid, cme);
            RB_OBJ_ATOMIC_WRITE(klass, &RCLASSEXT_CC_TBL(RCLASS_EXT_WRITABLE(klass)), new_cc_tbl);
        }
        else {
            vm_ccs_create(klass, cc_tbl, mid, cme);
        }
    }
}
1938
/* Return the interned "negative cache" cme for mid: a definition-less
 * entry caching the fact that mid is not defined.  One per mid, shared
 * VM-wide via vm->negative_cme_table. */
static const rb_callable_method_entry_t *
negative_cme(ID mid)
{
    rb_vm_t *vm = GET_VM();
    const rb_callable_method_entry_t *cme;
    VALUE cme_data;

    if (rb_id_table_lookup(&vm->negative_cme_table, mid, &cme_data)) {
        cme = (rb_callable_method_entry_t *)cme_data;
    }
    else {
        cme = (rb_callable_method_entry_t *)rb_method_entry_alloc(mid, Qnil, Qnil, NULL, false);
        rb_id_table_insert(&vm->negative_cme_table, mid, (VALUE)cme);
    }

    VM_ASSERT(cme != NULL);
    return cme;
}
1957
/* Resolve klass#mid to a callable cme, caching the result on klass.  A
 * failed lookup is cached too, as the shared negative cme (callers filter
 * it with UNDEFINED_METHOD_ENTRY_P).  The first probe reads the cc table
 * without the VM lock; only a miss or a stale hit takes the locked path. */
static const rb_callable_method_entry_t *
callable_method_entry_or_negative(VALUE klass, ID mid, VALUE *defined_class_ptr)
{
    const rb_callable_method_entry_t *cme;

    VM_ASSERT_TYPE2(klass, T_CLASS, T_ICLASS);

    /* Fast path: lock-free read from cache */
    VALUE cc_tbl = RUBY_ATOMIC_VALUE_LOAD(RCLASS_WRITABLE_CC_TBL(klass));
    if (cc_tbl) {
        VALUE ccs_data;
        if (rb_managed_id_table_lookup(cc_tbl, mid, &ccs_data)) {
            struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_data;
            VM_ASSERT(vm_ccs_p(ccs));

            if (LIKELY(!METHOD_ENTRY_INVALIDATED(ccs->cme))) {
                VM_ASSERT(ccs->cme->called_id == mid);
                if (defined_class_ptr != NULL) *defined_class_ptr = ccs->cme->defined_class;
                RB_DEBUG_COUNTER_INC(ccs_found);
                return ccs->cme;
            }
        }
    }

    /* Slow path: need to lock and potentially populate cache */
    RB_VM_LOCKING() {
        cme = cached_callable_method_entry(klass, mid);

        if (cme) {
            if (defined_class_ptr != NULL) *defined_class_ptr = cme->defined_class;
        }
        else {
            VALUE defined_class;
            rb_method_entry_t *me = search_method(klass, mid, &defined_class);
            if (defined_class_ptr) *defined_class_ptr = defined_class;

            if (me != NULL) {
                cme = prepare_callable_method_entry(defined_class, mid, me, TRUE);
            }
            else {
                cme = negative_cme(mid);
            }

            cache_callable_method_entry(klass, mid, cme);
        }
    }

    return cme;
}
2007
// This is exposed for YJIT so that we can make assumptions that methods are
// not defined.
rb_callable_method_entry_or_negative(VALUE klass, ID mid)
{
    return callable_method_entry_or_negative(klass, mid, NULL);
}

/* As above, but the negative (undefined) cme is mapped to NULL. */
static const rb_callable_method_entry_t *
callable_method_entry(VALUE klass, ID mid, VALUE *defined_class_ptr)
{
    const rb_callable_method_entry_t *cme;
    cme = callable_method_entry_or_negative(klass, mid, defined_class_ptr);
    return !UNDEFINED_METHOD_ENTRY_P(cme) ? cme : NULL;
}

rb_callable_method_entry(VALUE klass, ID mid)
{
    return callable_method_entry(klass, mid, NULL);
}
2029
2030static const rb_method_entry_t *resolve_refined_method(VALUE refinements, const rb_method_entry_t *me, VALUE *defined_class_ptr);
2031
/* Ancestry lookup for klass#id that resolves refined placeholders.  With
 * with_refinement set, refinements active at the current cref are
 * honored; otherwise the placeholder resolves straight to its original
 * method.  Unresolvable or undef results come back as NULL. */
static const rb_method_entry_t *
method_entry_resolve_refinement(VALUE klass, ID id, int with_refinement, VALUE *defined_class_ptr)
{
    const rb_method_entry_t *me = search_method_protect(klass, id, defined_class_ptr);

    if (me) {
        if (me->def->type == VM_METHOD_TYPE_REFINED) {
            if (with_refinement) {
                const rb_cref_t *cref = rb_vm_cref();
                VALUE refinements = cref ? CREF_REFINEMENTS(cref) : Qnil;
                me = resolve_refined_method(refinements, me, defined_class_ptr);
            }
            else {
                me = resolve_refined_method(Qnil, me, defined_class_ptr);
            }

            if (UNDEFINED_METHOD_ENTRY_P(me)) me = NULL;
        }
    }

    return me;
}
2054
/* Lookup honoring the refinements active at the current cref. */
const rb_method_entry_t *
rb_method_entry_with_refinements(VALUE klass, ID id, VALUE *defined_class_ptr)
{
    return method_entry_resolve_refinement(klass, id, TRUE, defined_class_ptr);
}

/* Upgrade cme when it is a refined placeholder: redo the lookup with (or
 * without) refinements and complement the result into a callable entry. */
static const rb_callable_method_entry_t *
callable_method_entry_refinements0(VALUE klass, ID id, VALUE *defined_class_ptr, bool with_refinements,
                                   const rb_callable_method_entry_t *cme)
{
    if (cme == NULL || LIKELY(cme->def->type != VM_METHOD_TYPE_REFINED)) {
        return cme;
    }
    else {
        VALUE defined_class, *dcp = defined_class_ptr ? defined_class_ptr : &defined_class;
        const rb_method_entry_t *me = method_entry_resolve_refinement(klass, id, with_refinements, dcp);
        return prepare_callable_method_entry(*dcp, id, me, TRUE);
    }
}

/* Callable lookup, then refinement resolution on the result. */
static const rb_callable_method_entry_t *
callable_method_entry_refinements(VALUE klass, ID id, VALUE *defined_class_ptr, bool with_refinements)
{
    const rb_callable_method_entry_t *cme = callable_method_entry(klass, id, defined_class_ptr);
    return callable_method_entry_refinements0(klass, id, defined_class_ptr, with_refinements, cme);
}

rb_callable_method_entry_with_refinements(VALUE klass, ID id, VALUE *defined_class_ptr)
{
    return callable_method_entry_refinements(klass, id, defined_class_ptr, true);
}

/* Same resolution but ignoring any active refinements. */
static const rb_callable_method_entry_t *
callable_method_entry_without_refinements(VALUE klass, ID id, VALUE *defined_class_ptr)
{
    return callable_method_entry_refinements(klass, id, defined_class_ptr, false);
}

/* Entry-level lookup ignoring any active refinements. */
const rb_method_entry_t *
rb_method_entry_without_refinements(VALUE klass, ID id, VALUE *defined_class_ptr)
{
    return method_entry_resolve_refinement(klass, id, FALSE, defined_class_ptr);
}

rb_callable_method_entry_without_refinements(VALUE klass, ID id, VALUE *defined_class_ptr)
{
    VALUE defined_class, *dcp = defined_class_ptr ? defined_class_ptr : &defined_class;
    const rb_method_entry_t *me = method_entry_resolve_refinement(klass, id, FALSE, dcp);
    return prepare_callable_method_entry(*dcp, id, me, TRUE);
}
2107
/* Resolve a refined method entry `me` against the refinement table
 * `refinements` (a hash mapping refined modules to refinement modules,
 * or Qnil). Loops because the resolved target may itself be another
 * refined entry further up the ancestry.
 * Returns the concrete entry, or 0 if nothing is found. */
static const rb_method_entry_t *
resolve_refined_method(VALUE refinements, const rb_method_entry_t *me, VALUE *defined_class_ptr)
{
    while (me && me->def->type == VM_METHOD_TYPE_REFINED) {
        VALUE refinement;
        const rb_method_entry_t *tmp_me;
        VALUE super;

        /* 1) Is there an active refinement of me->owner? If so, prefer
         *    the method defined inside the refinement module. */
        refinement = find_refinement(refinements, me->owner);
        if (!NIL_P(refinement)) {
            tmp_me = search_method_protect(refinement, me->called_id, defined_class_ptr);

            if (tmp_me && tmp_me->def->type != VM_METHOD_TYPE_REFINED) {
                return tmp_me;
            }
        }

        /* 2) Fall back to the original method the refinement stub
         *    shadows, propagating its defined_class when present. */
        tmp_me = me->def->body.refined.orig_me;
        if (tmp_me) {
            if (!tmp_me->defined_class) {
                VM_ASSERT_TYPE(tmp_me->owner, T_MODULE);
            }
            else if (defined_class_ptr) {
                *defined_class_ptr = tmp_me->defined_class;
            }
            return tmp_me;
        }

        /* 3) No original entry: continue the search in the superclass. */
        super = RCLASS_SUPER(me->owner);
        if (!super) {
            return 0;
        }

        me = search_method_protect(super, me->called_id, defined_class_ptr);
    }
    return me;
}
2145
/* Public wrapper around resolve_refined_method() that discards the
 * defined_class result. */
const rb_method_entry_t *
rb_resolve_refined_method(VALUE refinements, const rb_method_entry_t *me)
{
    return resolve_refined_method(refinements, me, NULL);
}
2151
2153rb_resolve_refined_method_callable(VALUE refinements, const rb_callable_method_entry_t *me)
2154{
2155 VALUE defined_class = me->defined_class;
2156 const rb_method_entry_t *resolved_me = resolve_refined_method(refinements, (const rb_method_entry_t *)me, &defined_class);
2157
2158 if (resolved_me && resolved_me->defined_class == 0) {
2159 return rb_method_entry_complement_defined_class(resolved_me, me->called_id, defined_class);
2160 }
2161 else {
2162 return (const rb_callable_method_entry_t *)resolved_me;
2163 }
2164}
2165
/* Remove method `mid` from `klass` (Module#remove_method backend).
 * Raises NameError if the method is not defined directly on the class;
 * fires the method_removed / singleton_method_removed hook on success. */
static void
remove_method(VALUE klass, ID mid)
{
    VALUE data;
    rb_method_entry_t *me = 0;
    VALUE self = klass;    /* keep the pre-origin class for hooks/caches */

    rb_class_modify_check(klass);
    /* Operate on the origin class so prepended modules are skipped. */
    klass = RCLASS_ORIGIN(klass);
    if (mid == object_id || mid == id__id__ || mid == id__send__ || mid == idInitialize) {
        rb_warn("removing '%s' may cause serious problems", rb_id2name(mid));
    }

    /* The entry must exist in this class's own m_tbl and be a real,
     * defined method (not an undef marker or an empty refinement). */
    if (!rb_id_table_lookup(RCLASS_M_TBL(klass), mid, &data) ||
        !(me = (rb_method_entry_t *)data) ||
        (!me->def || me->def->type == VM_METHOD_TYPE_UNDEF) ||
        UNDEFINED_REFINED_METHOD_P(me->def)) {
        rb_name_err_raise("method '%1$s' not defined in %2$s",
                          klass, ID2SYM(mid));
    }

    /* Invalidate caches on both the visible class and its origin
     * before deleting the table entry. */
    if (klass != self) {
        rb_clear_method_cache(self, mid);
    }
    rb_clear_method_cache(klass, mid);
    rb_id_table_delete(RCLASS_WRITABLE_M_TBL(klass), mid);

    rb_vm_check_redefinition_opt_method(me, klass);

    /* Removing a refined method leaves a fresh refinement stub so the
     * refinement remains reachable. */
    if (me->def->type == VM_METHOD_TYPE_REFINED) {
        rb_add_refined_method_entry(klass, mid);
    }

    CALL_METHOD_HOOK(self, removed, mid);
}
2201
2202void
2204{
2205 remove_method(klass, mid);
2206}
2207
/* Public API: remove the method named by the C string `name`. */
void
rb_remove_method(VALUE klass, const char *name)
{
    remove_method(klass, rb_intern(name));
}
2213
2214/*
2215 * call-seq:
2216 * remove_method(symbol) -> self
2217 * remove_method(string) -> self
2218 *
2219 * Removes the method identified by _symbol_ from the current
2220 * class. For an example, see Module#undef_method.
2221 * String arguments are converted to symbols.
2222 */
2223
2224static VALUE
2225rb_mod_remove_method(int argc, VALUE *argv, VALUE mod)
2226{
2227 int i;
2228
2229 for (i = 0; i < argc; i++) {
2230 VALUE v = argv[i];
2231 ID id = rb_check_id(&v);
2232 if (!id) {
2233 rb_name_err_raise("method '%1$s' not defined in %2$s",
2234 mod, v);
2235 }
2236 remove_method(mod, id);
2237 }
2238 return mod;
2239}
2240
2241static void
2242rb_export_method(VALUE klass, ID name, rb_method_visibility_t visi)
2243{
2245 VALUE defined_class;
2246 VALUE origin_class = RCLASS_ORIGIN(klass);
2247
2248 me = search_method0(origin_class, name, &defined_class, true);
2249
2250 if (!me && RB_TYPE_P(klass, T_MODULE)) {
2251 me = search_method(rb_cObject, name, &defined_class);
2252 }
2253
2254 if (UNDEFINED_METHOD_ENTRY_P(me) ||
2255 UNDEFINED_REFINED_METHOD_P(me->def)) {
2256 rb_print_undef(klass, name, METHOD_VISI_UNDEF);
2257 }
2258
2259 if (METHOD_ENTRY_VISI(me) != visi) {
2260 rb_vm_check_redefinition_opt_method(me, klass);
2261
2262 if (klass == defined_class || origin_class == defined_class) {
2263 if (me->def->type == VM_METHOD_TYPE_REFINED) {
2264 // Refinement method entries should always be public because the refinement
2265 // search is always performed.
2266 if (me->def->body.refined.orig_me) {
2267 METHOD_ENTRY_VISI_SET((rb_method_entry_t *)me->def->body.refined.orig_me, visi);
2268 }
2269 }
2270 else {
2271 METHOD_ENTRY_VISI_SET(me, visi);
2272 }
2273 rb_clear_method_cache(klass, name);
2274 }
2275 else {
2276 rb_add_method(klass, name, VM_METHOD_TYPE_ZSUPER, 0, visi);
2277 }
2278 }
2279}
2280
#define BOUND_PRIVATE  0x01
#define BOUND_RESPONDS 0x02

/* Core of Method#method_defined?/respond_to? checks.
 * `ex` is a bitmask: BOUND_RESPONDS uses refinement-aware lookup and
 * treats protected as unbound; any other nonzero bit excludes private
 * methods. Returns 1 if bound, 0 if not, and 2 for a
 * NOTIMPLEMENTED method when BOUND_RESPONDS is requested. */
static int
method_boundp(VALUE klass, ID id, int ex)
{
    const rb_callable_method_entry_t *cme;

    VM_ASSERT_TYPE2(klass, T_CLASS, T_ICLASS);

    if (ex & BOUND_RESPONDS) {
        cme = rb_callable_method_entry_with_refinements(klass, id, NULL);
    }
    else {
        cme = callable_method_entry_without_refinements(klass, id, NULL);
    }

    if (cme != NULL) {
        /* Any flag other than BOUND_RESPONDS filters by visibility. */
        if (ex & ~BOUND_RESPONDS) {
            switch (METHOD_ENTRY_VISI(cme)) {
              case METHOD_VISI_PRIVATE:
                return 0;
              case METHOD_VISI_PROTECTED:
                if (ex & BOUND_RESPONDS) return 0;
              default:
                break;
            }
        }

        if (cme->def->type == VM_METHOD_TYPE_NOTIMPLEMENTED) {
            /* 2 lets respond_to? distinguish "defined but not
             * implemented on this platform". */
            if (ex & BOUND_RESPONDS) return 2;
            return 0;
        }
        return 1;
    }
    return 0;
}
2318
// deprecated
/* Legacy public wrapper kept for API compatibility; use the
 * internal method_boundp() directly within this file. */
int
rb_method_boundp(VALUE klass, ID id, int ex)
{
    return method_boundp(klass, id, ex);
}
2325
/* Set the default method visibility (and module_function flag) stored
 * in the current cref's scope_visi. Casts away const because the cref
 * is mutated in place. */
static void
vm_cref_set_visibility(rb_method_visibility_t method_visi, int module_func)
{
    rb_scope_visibility_t *scope_visi = (rb_scope_visibility_t *)&rb_vm_cref()->scope_visi;
    scope_visi->method_visi = method_visi;
    scope_visi->module_func = module_func;
}
2333
/* Public API: set default visibility for subsequently defined methods
 * in the current scope (clears module_function). */
void
rb_scope_visibility_set(rb_method_visibility_t visi)
{
    vm_cref_set_visibility(visi, FALSE);
}
2339
static void
scope_visibility_check(void)
{
    /* Check for public/protected/private/module_function called inside a method */
    /* cfp+1 is the caller's frame: the visibility keyword only affects
     * the cref, so calling it inside a method body is usually a mistake. */
    rb_control_frame_t *cfp = GET_EC()->cfp+1;
    if (cfp && CFP_ISEQ(cfp) && ISEQ_BODY(CFP_ISEQ(cfp))->type == ISEQ_TYPE_METHOD) {
        rb_warn("calling %s without arguments inside a method may not have the intended effect",
                rb_id2name(rb_frame_this_func()));
    }
}
2350
/* Enter module_function mode: new methods become private instance
 * methods plus public singleton copies. Warns if called inside a
 * method body (see scope_visibility_check). */
static void
rb_scope_module_func_set(void)
{
    scope_visibility_check();
    vm_cref_set_visibility(METHOD_VISI_PRIVATE, TRUE);
}
2357
const rb_cref_t *rb_vm_cref_in_context(VALUE self, VALUE cbase);
/* Define attribute accessor(s) for `id` on `klass`.
 * `read`/`write` select attr_reader / attr_writer behavior; `ex` != 0
 * makes the accessors honor the current scope visibility (otherwise
 * they are public, e.g. when called from C without a cref). */
void
rb_attr(VALUE klass, ID id, int read, int write, int ex)
{
    ID attriv;
    rb_method_visibility_t visi;
    const rb_execution_context_t *ec = GET_EC();
    const rb_cref_t *cref = rb_vm_cref_in_context(klass, klass);

    if (!ex || !cref) {
        visi = METHOD_VISI_PUBLIC;
    }
    else {
        switch (vm_scope_visibility_get(ec)) {
          case METHOD_VISI_PRIVATE:
            if (vm_scope_module_func_check(ec)) {
                rb_warning("attribute accessor as module_function");
            }
            visi = METHOD_VISI_PRIVATE;
            break;
          case METHOD_VISI_PROTECTED:
            visi = METHOD_VISI_PROTECTED;
            break;
          default:
            visi = METHOD_VISI_PUBLIC;
            break;
        }
    }

    /* The accessor body is an IVAR/ATTRSET method keyed by the
     * instance-variable name "@<id>". */
    attriv = rb_intern_str(rb_sprintf("@%"PRIsVALUE, rb_id2str(id)));
    if (read) {
        rb_add_method(klass, id, VM_METHOD_TYPE_IVAR, (void *)attriv, visi);
    }
    if (write) {
        rb_add_method(klass, rb_id_attrset(id), VM_METHOD_TYPE_ATTRSET, (void *)attriv, visi);
    }
}
2395
2396void
2398{
2399 const rb_method_entry_t *me;
2400
2401 if (NIL_P(klass)) {
2402 rb_raise(rb_eTypeError, "no class to undef method");
2403 }
2404 rb_class_modify_check(klass);
2405 if (id == object_id || id == id__id__ || id == id__send__ || id == idInitialize) {
2406 rb_warn("undefining '%s' may cause serious problems", rb_id2name(id));
2407 }
2408
2409 me = search_method(klass, id, 0);
2410 if (me && me->def->type == VM_METHOD_TYPE_REFINED) {
2411 me = rb_resolve_refined_method(Qnil, me);
2412 }
2413
2414 if (UNDEFINED_METHOD_ENTRY_P(me) ||
2415 UNDEFINED_REFINED_METHOD_P(me->def)) {
2416 rb_method_name_error(klass, rb_id2str(id));
2417 }
2418
2419 rb_add_method(klass, id, VM_METHOD_TYPE_UNDEF, 0, METHOD_VISI_PUBLIC);
2420
2421 CALL_METHOD_HOOK(klass, undefined, id);
2422}
2423
2424/*
2425 * call-seq:
2426 * undef_method(symbol) -> self
2427 * undef_method(string) -> self
2428 *
2429 * Prevents the current class from responding to calls to the named
2430 * method. Contrast this with <code>remove_method</code>, which deletes
2431 * the method from the particular class; Ruby will still search
2432 * superclasses and mixed-in modules for a possible receiver.
2433 * String arguments are converted to symbols.
2434 *
2435 * class Parent
2436 * def hello
2437 * puts "In parent"
2438 * end
2439 * end
2440 * class Child < Parent
2441 * def hello
2442 * puts "In child"
2443 * end
2444 * end
2445 *
2446 *
2447 * c = Child.new
2448 * c.hello
2449 *
2450 *
2451 * class Child
2452 * remove_method :hello # remove from child, still in parent
2453 * end
2454 * c.hello
2455 *
2456 *
2457 * class Child
2458 * undef_method :hello # prevent any calls to 'hello'
2459 * end
2460 * c.hello
2461 *
2462 * <em>produces:</em>
2463 *
2464 * In child
2465 * In parent
2466 * prog.rb:23: undefined method 'hello' for #<Child:0x401b3bb4> (NoMethodError)
2467 */
2468
2469static VALUE
2470rb_mod_undef_method(int argc, VALUE *argv, VALUE mod)
2471{
2472 int i;
2473 for (i = 0; i < argc; i++) {
2474 VALUE v = argv[i];
2475 ID id = rb_check_id(&v);
2476 if (!id) {
2477 rb_method_name_error(mod, v);
2478 }
2479 rb_undef(mod, id);
2480 }
2481 return mod;
2482}
2483
/* Shared backend for Module#{public_,private_,protected_,}method_defined?.
 * Parses (name [, inherit]) from argv and returns the visibility of the
 * named method, or METHOD_VISI_UNDEF when not defined / not visible
 * under the requested lookup. */
static rb_method_visibility_t
check_definition_visibility(VALUE mod, int argc, VALUE *argv)
{
    const rb_method_entry_t *me;
    VALUE mid, include_super, lookup_mod = mod;
    int inc_super;
    ID id;

    rb_scan_args(argc, argv, "11", &mid, &include_super);
    id = rb_check_id(&mid);
    /* Unknown symbol: can't be a defined method. */
    if (!id) return METHOD_VISI_UNDEF;

    if (argc == 1) {
        inc_super = 1;   /* inherit defaults to true */
    }
    else {
        inc_super = RTEST(include_super);
        if (!inc_super) {
            /* Restrict lookup to the class itself (skip prepends). */
            lookup_mod = RCLASS_ORIGIN(mod);
        }
    }

    me = rb_method_entry_without_refinements(lookup_mod, id, NULL);
    if (me) {
        if (me->def->type == VM_METHOD_TYPE_NOTIMPLEMENTED) return METHOD_VISI_UNDEF;
        /* With inherit=false, entries owned by ancestors don't count. */
        if (!inc_super && me->owner != mod) return METHOD_VISI_UNDEF;
        return METHOD_ENTRY_VISI(me);
    }
    return METHOD_VISI_UNDEF;
}
2514
2515/*
2516 * call-seq:
2517 * mod.method_defined?(symbol, inherit=true) -> true or false
2518 * mod.method_defined?(string, inherit=true) -> true or false
2519 *
2520 * Returns +true+ if the named method is defined by
2521 * _mod_. If _inherit_ is set, the lookup will also search _mod_'s
2522 * ancestors. Public and protected methods are matched.
2523 * String arguments are converted to symbols.
2524 *
2525 * module A
2526 * def method1() end
2527 * def protected_method1() end
2528 * protected :protected_method1
2529 * end
2530 * class B
2531 * def method2() end
2532 * def private_method2() end
2533 * private :private_method2
2534 * end
2535 * class C < B
2536 * include A
2537 * def method3() end
2538 * end
2539 *
2540 * A.method_defined? :method1 #=> true
2541 * C.method_defined? "method1" #=> true
2542 * C.method_defined? "method2" #=> true
2543 * C.method_defined? "method2", true #=> true
2544 * C.method_defined? "method2", false #=> false
2545 * C.method_defined? "method3" #=> true
2546 * C.method_defined? "protected_method1" #=> true
2547 * C.method_defined? "method4" #=> false
2548 * C.method_defined? "private_method2" #=> false
2549 */
2550
/* Module#method_defined?: true for public or protected methods only. */
static VALUE
rb_mod_method_defined(int argc, VALUE *argv, VALUE mod)
{
    rb_method_visibility_t visi = check_definition_visibility(mod, argc, argv);
    return RBOOL(visi == METHOD_VISI_PUBLIC || visi == METHOD_VISI_PROTECTED);
}
2557
/* True iff the named method exists with exactly visibility `visi`. */
static VALUE
check_definition(VALUE mod, int argc, VALUE *argv, rb_method_visibility_t visi)
{
    return RBOOL(check_definition_visibility(mod, argc, argv) == visi);
}
2563
2564/*
2565 * call-seq:
2566 * mod.public_method_defined?(symbol, inherit=true) -> true or false
2567 * mod.public_method_defined?(string, inherit=true) -> true or false
2568 *
2569 * Returns +true+ if the named public method is defined by
2570 * _mod_. If _inherit_ is set, the lookup will also search _mod_'s
2571 * ancestors.
2572 * String arguments are converted to symbols.
2573 *
2574 * module A
2575 * def method1() end
2576 * end
2577 * class B
2578 * protected
2579 * def method2() end
2580 * end
2581 * class C < B
2582 * include A
2583 * def method3() end
2584 * end
2585 *
2586 * A.method_defined? :method1 #=> true
2587 * C.public_method_defined? "method1" #=> true
2588 * C.public_method_defined? "method1", true #=> true
2589 * C.public_method_defined? "method1", false #=> true
2590 * C.public_method_defined? "method2" #=> false
2591 * C.method_defined? "method2" #=> true
2592 */
2593
/* Module#public_method_defined? */
static VALUE
rb_mod_public_method_defined(int argc, VALUE *argv, VALUE mod)
{
    return check_definition(mod, argc, argv, METHOD_VISI_PUBLIC);
}
2599
2600/*
2601 * call-seq:
2602 * mod.private_method_defined?(symbol, inherit=true) -> true or false
2603 * mod.private_method_defined?(string, inherit=true) -> true or false
2604 *
2605 * Returns +true+ if the named private method is defined by
2606 * _mod_. If _inherit_ is set, the lookup will also search _mod_'s
2607 * ancestors.
2608 * String arguments are converted to symbols.
2609 *
2610 * module A
2611 * def method1() end
2612 * end
2613 * class B
2614 * private
2615 * def method2() end
2616 * end
2617 * class C < B
2618 * include A
2619 * def method3() end
2620 * end
2621 *
2622 * A.method_defined? :method1 #=> true
2623 * C.private_method_defined? "method1" #=> false
2624 * C.private_method_defined? "method2" #=> true
2625 * C.private_method_defined? "method2", true #=> true
2626 * C.private_method_defined? "method2", false #=> false
2627 * C.method_defined? "method2" #=> false
2628 */
2629
/* Module#private_method_defined? */
static VALUE
rb_mod_private_method_defined(int argc, VALUE *argv, VALUE mod)
{
    return check_definition(mod, argc, argv, METHOD_VISI_PRIVATE);
}
2635
2636/*
2637 * call-seq:
2638 * mod.protected_method_defined?(symbol, inherit=true) -> true or false
2639 * mod.protected_method_defined?(string, inherit=true) -> true or false
2640 *
 * Returns +true+ if the named protected method is defined by
 * _mod_. If _inherit_ is set, the lookup will also search _mod_'s
2643 * ancestors.
2644 * String arguments are converted to symbols.
2645 *
2646 * module A
2647 * def method1() end
2648 * end
2649 * class B
2650 * protected
2651 * def method2() end
2652 * end
2653 * class C < B
2654 * include A
2655 * def method3() end
2656 * end
2657 *
2658 * A.method_defined? :method1 #=> true
2659 * C.protected_method_defined? "method1" #=> false
2660 * C.protected_method_defined? "method2" #=> true
2661 * C.protected_method_defined? "method2", true #=> true
2662 * C.protected_method_defined? "method2", false #=> false
2663 * C.method_defined? "method2" #=> true
2664 */
2665
/* Module#protected_method_defined? */
static VALUE
rb_mod_protected_method_defined(int argc, VALUE *argv, VALUE mod)
{
    return check_definition(mod, argc, argv, METHOD_VISI_PROTECTED);
}
2671
/* Two method entries are equal iff their (resolved) definitions are. */
int
rb_method_entry_eq(const rb_method_entry_t *m1, const rb_method_entry_t *m2)
{
    return rb_method_definition_eq(m1->def, m2->def);
}
2677
2678static const rb_method_definition_t *
2679original_method_definition(const rb_method_definition_t *def)
2680{
2681 again:
2682 if (def) {
2683 switch (def->type) {
2684 case VM_METHOD_TYPE_REFINED:
2685 if (def->body.refined.orig_me) {
2686 def = def->body.refined.orig_me->def;
2687 goto again;
2688 }
2689 break;
2690 case VM_METHOD_TYPE_ALIAS:
2691 def = def->body.alias.original_me->def;
2692 goto again;
2693 default:
2694 break;
2695 }
2696 }
2697 return def;
2698}
2699
/* Structural equality of two method definitions, after resolving
 * alias/refinement indirection. Used by Method#== and friends. */
int
rb_method_definition_eq(const rb_method_definition_t *d1, const rb_method_definition_t *d2)
{
    d1 = original_method_definition(d1);
    d2 = original_method_definition(d2);

    if (d1 == d2) return 1;
    if (!d1 || !d2) return 0;
    if (d1->type != d2->type) return 0;

    switch (d1->type) {
      case VM_METHOD_TYPE_ISEQ:
        return d1->body.iseq.iseqptr == d2->body.iseq.iseqptr;
      case VM_METHOD_TYPE_CFUNC:
        return
          d1->body.cfunc.func == d2->body.cfunc.func &&
          d1->body.cfunc.argc == d2->body.cfunc.argc;
      case VM_METHOD_TYPE_ATTRSET:
      case VM_METHOD_TYPE_IVAR:
        return d1->body.attr.id == d2->body.attr.id;
      case VM_METHOD_TYPE_BMETHOD:
        /* Proc equality, not identity: equivalent procs compare equal. */
        return RTEST(rb_equal(d1->body.bmethod.proc, d2->body.bmethod.proc));
      case VM_METHOD_TYPE_MISSING:
        return d1->original_id == d2->original_id;
      case VM_METHOD_TYPE_ZSUPER:
      case VM_METHOD_TYPE_NOTIMPLEMENTED:
      case VM_METHOD_TYPE_UNDEF:
        /* These carry no distinguishing body data. */
        return 1;
      case VM_METHOD_TYPE_OPTIMIZED:
        return (d1->body.optimized.type == d2->body.optimized.type) &&
               (d1->body.optimized.index == d2->body.optimized.index);
      case VM_METHOD_TYPE_REFINED:
      case VM_METHOD_TYPE_ALIAS:
        /* Already resolved away by original_method_definition(). */
        break;
    }
    rb_bug("rb_method_definition_eq: unsupported type: %d", d1->type);
}
2737
/* Fold a method definition into hash value `hash` (Method#hash).
 * NOTE(review): def->type is mixed in *before* resolving alias/refined
 * indirection, while rb_method_definition_eq compares *after*
 * resolving — an alias and its original compare equal but may hash
 * differently. Verify against Method#eql?/#hash expectations. */
static st_index_t
rb_hash_method_definition(st_index_t hash, const rb_method_definition_t *def)
{
    hash = rb_hash_uint(hash, def->type);
    def = original_method_definition(def);

    if (!def) return hash;

    switch (def->type) {
      case VM_METHOD_TYPE_ISEQ:
        return rb_hash_uint(hash, (st_index_t)def->body.iseq.iseqptr->body);
      case VM_METHOD_TYPE_CFUNC:
        hash = rb_hash_uint(hash, (st_index_t)def->body.cfunc.func);
        return rb_hash_uint(hash, def->body.cfunc.argc);
      case VM_METHOD_TYPE_ATTRSET:
      case VM_METHOD_TYPE_IVAR:
        return rb_hash_uint(hash, def->body.attr.id);
      case VM_METHOD_TYPE_BMETHOD:
        return rb_hash_proc(hash, def->body.bmethod.proc);
      case VM_METHOD_TYPE_MISSING:
        return rb_hash_uint(hash, def->original_id);
      case VM_METHOD_TYPE_ZSUPER:
      case VM_METHOD_TYPE_NOTIMPLEMENTED:
      case VM_METHOD_TYPE_UNDEF:
        return hash;
      case VM_METHOD_TYPE_OPTIMIZED:
        hash = rb_hash_uint(hash, def->body.optimized.index);
        return rb_hash_uint(hash, def->body.optimized.type);
      case VM_METHOD_TYPE_REFINED:
      case VM_METHOD_TYPE_ALIAS:
        break; /* unreachable */
    }
    rb_bug("rb_hash_method_definition: unsupported method type (%d)", def->type);
}
2772
/* Hash a method entry by its definition (pairs with rb_method_entry_eq). */
st_index_t
rb_hash_method_entry(st_index_t hash, const rb_method_entry_t *me)
{
    return rb_hash_method_definition(hash, me->def);
}
2778
/* Public API: define `alias_name` on `klass` as an alias for
 * `original_name` (backend of `alias` / Module#alias_method).
 * Raises TypeError for nil klass and NameError when the original is
 * not defined. */
void
rb_alias(VALUE klass, ID alias_name, ID original_name)
{
    const VALUE target_klass = klass;   /* klass is reassigned while chasing ZSUPER */
    VALUE defined_class;
    const rb_method_entry_t *orig_me;
    rb_method_visibility_t visi = METHOD_VISI_UNDEF;

    if (NIL_P(klass)) {
        rb_raise(rb_eTypeError, "no class to make alias");
    }

    rb_class_modify_check(klass);

  again:
    orig_me = search_method(klass, original_name, &defined_class);

    if (orig_me && orig_me->def->type == VM_METHOD_TYPE_REFINED) {
        orig_me = rb_resolve_refined_method(Qnil, orig_me);
    }

    if (UNDEFINED_METHOD_ENTRY_P(orig_me) ||
        UNDEFINED_REFINED_METHOD_P(orig_me->def)) {
        /* Modules additionally fall back to Object's methods. */
        if ((!RB_TYPE_P(klass, T_MODULE)) ||
            (orig_me = search_method(rb_cObject, original_name, &defined_class),
             UNDEFINED_METHOD_ENTRY_P(orig_me))) {
            rb_print_undef(target_klass, original_name, METHOD_VISI_UNDEF);
        }
    }

    switch (orig_me->def->type) {
      case VM_METHOD_TYPE_ZSUPER:
        /* Visibility stub: remember its visibility and keep searching
         * from the superclass for the real body. */
        klass = RCLASS_SUPER(klass);
        original_name = orig_me->def->original_id;
        visi = METHOD_ENTRY_VISI(orig_me);
        goto again;
      case VM_METHOD_TYPE_ALIAS:
        /* Alias of an alias: collapse to the ultimate original. */
        visi = METHOD_ENTRY_VISI(orig_me);
        orig_me = orig_me->def->body.alias.original_me;
        VM_ASSERT(orig_me->def->type != VM_METHOD_TYPE_ALIAS);
        break;
      default: break;
    }

    if (visi == METHOD_VISI_UNDEF) visi = METHOD_ENTRY_VISI(orig_me);

    if (orig_me->defined_class == 0) {
        /* Original came from a module: wrap it in an ALIAS entry. */
        rb_method_entry_make(target_klass, alias_name, target_klass, visi,
                             VM_METHOD_TYPE_ALIAS, NULL, orig_me->called_id,
                             (void *)rb_method_entry_clone(orig_me));
        method_added(target_klass, alias_name);
    }
    else {
        rb_method_entry_t *alias_me;

        alias_me = method_entry_set(target_klass, alias_name, orig_me, visi, orig_me->owner);
        RB_OBJ_WRITE(alias_me, &alias_me->owner, target_klass);

        if (RB_TYPE_P(target_klass, T_MODULE)) {
            // defined_class should not be set
        }
        else {
            RB_OBJ_WRITE(alias_me, &alias_me->defined_class, orig_me->defined_class);
        }
    }
}
2845
2846/*
2847 * call-seq:
2848 * alias_method(new_name, old_name) -> symbol
2849 *
2850 * Makes <i>new_name</i> a new copy of the method <i>old_name</i>. This can
2851 * be used to retain access to methods that are overridden.
2852 *
2853 * module Mod
2854 * alias_method :orig_exit, :exit #=> :orig_exit
2855 * def exit(code=0)
2856 * puts "Exiting with code #{code}"
2857 * orig_exit(code)
2858 * end
2859 * end
2860 * include Mod
2861 * exit(99)
2862 *
2863 * <em>produces:</em>
2864 *
2865 * Exiting with code 99
2866 */
2867
/* Module#alias_method: alias new_name to old_name; returns the new
 * name as a Symbol. The old name must already be a known symbol. */
static VALUE
rb_mod_alias_method(VALUE mod, VALUE newname, VALUE oldname)
{
    ID oldid = rb_check_id(&oldname);
    if (!oldid) {
        rb_print_undef_str(mod, oldname);
    }
    VALUE id = rb_to_id(newname);
    rb_alias(mod, id, oldid);
    return ID2SYM(id);
}
2879
/* Convert `name` to an ID (raising NameError for unknown symbols) and
 * apply visibility `visi` via rb_export_method(). */
static void
check_and_export_method(VALUE self, VALUE name, rb_method_visibility_t visi)
{
    ID id = rb_check_id(&name);
    if (!id) {
        rb_print_undef_str(self, name);
    }
    rb_export_method(self, id, visi);
}
2889
2890static void
2891set_method_visibility(VALUE self, int argc, const VALUE *argv, rb_method_visibility_t visi)
2892{
2893 int i;
2894
2895 rb_check_frozen(self);
2896 if (argc == 0) {
2897 rb_warning("%"PRIsVALUE" with no argument is just ignored",
2898 QUOTE_ID(rb_frame_callee()));
2899 return;
2900 }
2901
2902
2903 VALUE v;
2904
2905 if (argc == 1 && (v = rb_check_array_type(argv[0])) != Qnil) {
2906 long j;
2907
2908 for (j = 0; j < RARRAY_LEN(v); j++) {
2909 check_and_export_method(self, RARRAY_AREF(v, j), visi);
2910 }
2911 }
2912 else {
2913 for (i = 0; i < argc; i++) {
2914 check_and_export_method(self, argv[i], visi);
2915 }
2916 }
2917}
2918
/* Shared body of Module#public/#protected/#private.
 * No args: switch the scope's default visibility and return nil.
 * One arg: apply and return it; many args: apply and return them as
 * an Array. */
static VALUE
set_visibility(int argc, const VALUE *argv, VALUE module, rb_method_visibility_t visi)
{
    if (argc == 0) {
        scope_visibility_check();
        rb_scope_visibility_set(visi);
        return Qnil;
    }

    set_method_visibility(module, argc, argv, visi);
    if (argc == 1) {
        return argv[0];
    }
    return rb_ary_new_from_values(argc, argv);
}
2934
2935/*
2936 * call-seq:
2937 * public -> nil
2938 * public(method_name) -> method_name
2939 * public(method_name, method_name, ...) -> array
2940 * public(array) -> array
2941 *
2942 * With no arguments, sets the default visibility for subsequently
2943 * defined methods to public. With arguments, sets the named methods to
2944 * have public visibility.
2945 * String arguments are converted to symbols.
2946 * An Array of Symbols and/or Strings is also accepted.
2947 * If a single argument is passed, it is returned.
2948 * If no argument is passed, nil is returned.
2949 * If multiple arguments are passed, the arguments are returned as an array.
2950 */
2951
/* Module#public */
static VALUE
rb_mod_public(int argc, VALUE *argv, VALUE module)
{
    return set_visibility(argc, argv, module, METHOD_VISI_PUBLIC);
}
2957
2958/*
2959 * call-seq:
2960 * protected -> nil
2961 * protected(method_name) -> method_name
2962 * protected(method_name, method_name, ...) -> array
2963 * protected(array) -> array
2964 *
2965 * Sets the visibility of a section or of a list of method names as protected.
2966 * Accepts no arguments, a splat of method names (symbols or strings) or an
2967 * array of method names. Returns the arguments that it received.
2968 *
2969 * == Important difference between protected in other languages
2970 *
2971 * Protected methods in Ruby are different from other languages such as Java,
2972 * where methods are marked as protected to give access to subclasses. In Ruby,
2973 * subclasses <b>already have access to all methods defined in the parent
2974 * class</b>, even private ones.
2975 *
2976 * Marking a method as protected allows <b>different objects of the same
2977 * class</b> to call it.
2978 *
2979 * One use case is for comparison methods, such as <code>==</code>, if we want
2980 * to expose a method for comparison between objects of the same class without
2981 * making the method public to objects of other classes.
2982 *
2983 * == Performance considerations
2984 *
2985 * Protected methods are slower than others because they can't use inline
2986 * cache.
2987 *
2988 * == Example
2989 *
2990 * class Account
2991 * # Mark balance as protected, so that we can compare between accounts
2992 * # without making it public.
2993 * attr_reader :balance
2994 * protected :balance
2995 *
2996 * def initialize(balance)
2997 * @balance = balance
2998 * end
2999 *
3000 * def >(other)
3001 * # The invocation to `other.balance` is allowed because `other` is a
3002 * # different object of the same class (Account).
3003 * balance > other.balance
3004 * end
3005 * end
3006 *
3007 * account1 = Account.new(100)
3008 * account2 = Account.new(50)
3009 *
3010 * account1 > account2 # => true (works)
3011 * account1.balance # => NoMethodError (fails because balance is not public)
3012 *
3013 * To show a private method on RDoc, use <code>:doc:</code> instead of this.
3014 */
3015
/* Module#protected */
static VALUE
rb_mod_protected(int argc, VALUE *argv, VALUE module)
{
    return set_visibility(argc, argv, module, METHOD_VISI_PROTECTED);
}
3021
3022/*
3023 * call-seq:
3024 * private -> nil
3025 * private(method_name) -> method_name
3026 * private(method_name, method_name, ...) -> array
3027 * private(array) -> array
3028 *
3029 * With no arguments, sets the default visibility for subsequently
3030 * defined methods to private. With arguments, sets the named methods
3031 * to have private visibility.
3032 * String arguments are converted to symbols.
3033 * An Array of Symbols and/or Strings is also accepted.
3034 * If a single argument is passed, it is returned.
3035 * If no argument is passed, nil is returned.
3036 * If multiple arguments are passed, the arguments are returned as an array.
3037 *
3038 * module Mod
3039 * def a() end
3040 * def b() end
3041 * private
3042 * def c() end
3043 * private :a
3044 * end
3045 * Mod.private_instance_methods #=> [:a, :c]
3046 *
3047 * Note that to show a private method on RDoc, use <code>:doc:</code>.
3048 */
3049
/* Module#private */
static VALUE
rb_mod_private(int argc, VALUE *argv, VALUE module)
{
    return set_visibility(argc, argv, module, METHOD_VISI_PRIVATE);
}
3055
3056/*
3057 * call-seq:
3058 * ruby2_keywords(method_name, ...) -> nil
3059 *
3060 * For the given method names, marks the method as passing keywords through
3061 * a normal argument splat. This should only be called on methods that
3062 * accept an argument splat (<tt>*args</tt>) but not explicit keywords or
3063 * a keyword splat. It marks the method such that if the method is called
3064 * with keyword arguments, the final hash argument is marked with a special
3065 * flag such that if it is the final element of a normal argument splat to
3066 * another method call, and that method call does not include explicit
3067 * keywords or a keyword splat, the final element is interpreted as keywords.
3068 * In other words, keywords will be passed through the method to other
3069 * methods.
3070 *
3071 * This should only be used for methods that delegate keywords to another
3072 * method, and only for backwards compatibility with Ruby versions before 3.0.
3073 * See https://www.ruby-lang.org/en/news/2019/12/12/separation-of-positional-and-keyword-arguments-in-ruby-3-0/
3074 * for details on why +ruby2_keywords+ exists and when and how to use it.
3075 *
3076 * This method will probably be removed at some point, as it exists only
3077 * for backwards compatibility. As it does not exist in Ruby versions before
3078 * 2.7, check that the module responds to this method before calling it:
3079 *
3080 * module Mod
3081 * def foo(meth, *args, &block)
3082 * send(:"do_#{meth}", *args, &block)
3083 * end
3084 * ruby2_keywords(:foo) if respond_to?(:ruby2_keywords, true)
3085 * end
3086 *
3087 * However, be aware that if the +ruby2_keywords+ method is removed, the
3088 * behavior of the +foo+ method using the above approach will change so that
3089 * the method does not pass through keywords.
3090 */
3091
3092static VALUE
3093rb_mod_ruby2_keywords(int argc, VALUE *argv, VALUE module)
3094{
3095 int i;
3096 VALUE origin_class = RCLASS_ORIGIN(module);
3097
3099 rb_check_frozen(module);
3100
3101 for (i = 0; i < argc; i++) {
3102 VALUE v = argv[i];
3103 ID name = rb_check_id(&v);
3105 VALUE defined_class;
3106
3107 if (!name) {
3108 rb_print_undef_str(module, v);
3109 }
3110
3111 me = search_method(origin_class, name, &defined_class);
3112 if (!me && RB_TYPE_P(module, T_MODULE)) {
3113 me = search_method(rb_cObject, name, &defined_class);
3114 }
3115
3116 if (UNDEFINED_METHOD_ENTRY_P(me) ||
3117 UNDEFINED_REFINED_METHOD_P(me->def)) {
3118 rb_print_undef(module, name, METHOD_VISI_UNDEF);
3119 }
3120
3121 if (module == defined_class || origin_class == defined_class) {
3122 switch (me->def->type) {
3123 case VM_METHOD_TYPE_ISEQ:
3124 if (ISEQ_BODY(me->def->body.iseq.iseqptr)->param.flags.has_rest &&
3125 !ISEQ_BODY(me->def->body.iseq.iseqptr)->param.flags.has_post &&
3126 !ISEQ_BODY(me->def->body.iseq.iseqptr)->param.flags.has_kw &&
3127 !ISEQ_BODY(me->def->body.iseq.iseqptr)->param.flags.has_kwrest) {
3128 ISEQ_BODY(me->def->body.iseq.iseqptr)->param.flags.ruby2_keywords = 1;
3129 rb_clear_method_cache(module, name);
3130 }
3131 else {
3132 rb_warn("Skipping set of ruby2_keywords flag for %"PRIsVALUE" (method accepts keywords or post arguments or method does not accept argument splat)", QUOTE_ID(name));
3133 }
3134 break;
3135 case VM_METHOD_TYPE_BMETHOD: {
3136 VALUE procval = me->def->body.bmethod.proc;
3137 if (vm_block_handler_type(procval) == block_handler_type_proc) {
3138 procval = vm_proc_to_block_handler(VM_BH_TO_PROC(procval));
3139 }
3140
3141 if (vm_block_handler_type(procval) == block_handler_type_iseq) {
3142 const struct rb_captured_block *captured = VM_BH_TO_ISEQ_BLOCK(procval);
3143 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
3144 if (ISEQ_BODY(iseq)->param.flags.has_rest &&
3145 !ISEQ_BODY(iseq)->param.flags.has_post &&
3146 !ISEQ_BODY(iseq)->param.flags.has_kw &&
3147 !ISEQ_BODY(iseq)->param.flags.has_kwrest) {
3148 ISEQ_BODY(iseq)->param.flags.ruby2_keywords = 1;
3149 rb_clear_method_cache(module, name);
3150 }
3151 else {
3152 rb_warn("Skipping set of ruby2_keywords flag for %"PRIsVALUE" (method accepts keywords or post arguments or method does not accept argument splat)", QUOTE_ID(name));
3153 }
3154 break;
3155 }
3156 }
3157 /* fallthrough */
3158 default:
3159 rb_warn("Skipping set of ruby2_keywords flag for %"PRIsVALUE" (method not defined in Ruby)", QUOTE_ID(name));
3160 break;
3161 }
3162 }
3163 else {
3164 rb_warn("Skipping set of ruby2_keywords flag for %"PRIsVALUE" (can only set in method defining module)", QUOTE_ID(name));
3165 }
3166 }
3167 return Qnil;
3168}
3169
3170/*
3171 * call-seq:
3172 * mod.public_class_method(symbol, ...) -> mod
3173 * mod.public_class_method(string, ...) -> mod
3174 * mod.public_class_method(array) -> mod
3175 *
3176 * Makes a list of existing class methods public.
3177 *
3178 * String arguments are converted to symbols.
3179 * An Array of Symbols and/or Strings is also accepted.
3180 */
3181
3182static VALUE
3183rb_mod_public_method(int argc, VALUE *argv, VALUE obj)
3184{
3185 set_method_visibility(rb_singleton_class(obj), argc, argv, METHOD_VISI_PUBLIC);
3186 return obj;
3187}
3188
3189/*
3190 * call-seq:
3191 * mod.private_class_method(symbol, ...) -> mod
3192 * mod.private_class_method(string, ...) -> mod
3193 * mod.private_class_method(array) -> mod
3194 *
3195 * Makes existing class methods private. Often used to hide the default
3196 * constructor <code>new</code>.
3197 *
3198 * String arguments are converted to symbols.
3199 * An Array of Symbols and/or Strings is also accepted.
3200 *
3201 * class SimpleSingleton # Not thread safe
3202 * private_class_method :new
3203 * def SimpleSingleton.create(*args, &block)
3204 * @me = new(*args, &block) if ! @me
3205 * @me
3206 * end
3207 * end
3208 */
3209
3210static VALUE
3211rb_mod_private_method(int argc, VALUE *argv, VALUE obj)
3212{
3213 set_method_visibility(rb_singleton_class(obj), argc, argv, METHOD_VISI_PRIVATE);
3214 return obj;
3215}
3216
3217/*
3218 * call-seq:
3219 * public
3220 * public(symbol, ...)
3221 * public(string, ...)
3222 * public(array)
3223 *
3224 * With no arguments, sets the default visibility for subsequently
3225 * defined methods to public. With arguments, sets the named methods to
3226 * have public visibility.
3227 *
3228 * String arguments are converted to symbols.
3229 * An Array of Symbols and/or Strings is also accepted.
3230 */
3231
3232static VALUE
3233top_public(int argc, VALUE *argv, VALUE _)
3234{
3235 return rb_mod_public(argc, argv, rb_top_main_class("public"));
3236}
3237
3238/*
3239 * call-seq:
3240 * private
3241 * private(symbol, ...)
3242 * private(string, ...)
3243 * private(array)
3244 *
3245 * With no arguments, sets the default visibility for subsequently
3246 * defined methods to private. With arguments, sets the named methods to
3247 * have private visibility.
3248 *
3249 * String arguments are converted to symbols.
3250 * An Array of Symbols and/or Strings is also accepted.
3251 */
3252static VALUE
3253top_private(int argc, VALUE *argv, VALUE _)
3254{
3255 return rb_mod_private(argc, argv, rb_top_main_class("private"));
3256}
3257
3258/*
3259 * call-seq:
3260 * ruby2_keywords(method_name, ...) -> self
3261 *
3262 * For the given method names, marks the method as passing keywords through
3263 * a normal argument splat. See Module#ruby2_keywords in detail.
3264 */
3265static VALUE
3266top_ruby2_keywords(int argc, VALUE *argv, VALUE module)
3267{
3268 return rb_mod_ruby2_keywords(argc, argv, rb_top_main_class("ruby2_keywords"));
3269}
3270
3271/*
3272 * call-seq:
3273 * module_function -> nil
3274 * module_function(method_name) -> method_name
3275 * module_function(method_name, method_name, ...) -> array
3276 *
3277 * Creates module functions for the named methods. These functions may
3278 * be called with the module as a receiver, and also become available
3279 * as instance methods to classes that mix in the module. Module
3280 * functions are copies of the original, and so may be changed
3281 * independently. The instance-method versions are made private. If
3282 * used with no arguments, subsequently defined methods become module
3283 * functions.
3284 * String arguments are converted to symbols.
3285 * If a single argument is passed, it is returned.
3286 * If no argument is passed, nil is returned.
3287 * If multiple arguments are passed, the arguments are returned as an array.
3288 *
3289 * module Mod
3290 * def one
3291 * "This is one"
3292 * end
3293 * module_function :one
3294 * end
3295 * class Cls
3296 * include Mod
3297 * def call_one
3298 * one
3299 * end
3300 * end
3301 * Mod.one #=> "This is one"
3302 * c = Cls.new
3303 * c.call_one #=> "This is one"
3304 * module Mod
3305 * def one
3306 * "This is the new one"
3307 * end
3308 * end
3309 * Mod.one #=> "This is one"
3310 * c.call_one #=> "This is the new one"
3311 */
3312
/* Module#module_function implementation.
 *
 * With no arguments, switches the current scope's default visibility to
 * "module function".  With arguments, makes each named instance method
 * private and copies it onto the module's singleton class as a public
 * method.  Returns nil, the single name, or an array of names, matching
 * the argument count. */
static VALUE
rb_mod_modfunc(int argc, VALUE *argv, VALUE module)
{
    int i;
    ID id;
    const rb_method_entry_t *me;

    /* module_function is only meaningful on T_MODULE, not on classes. */
    if (!RB_TYPE_P(module, T_MODULE)) {
        rb_raise(rb_eTypeError, "module_function must be called for modules");
    }

    /* No names given: set the scope-wide default and stop. */
    if (argc == 0) {
        rb_scope_module_func_set();
        return Qnil;
    }

    /* Instance-method versions become private. */
    set_method_visibility(module, argc, argv, METHOD_VISI_PRIVATE);

    for (i = 0; i < argc; i++) {
        VALUE m = module;

        id = rb_to_id(argv[i]);
        for (;;) {
            me = search_method(m, id, 0);
            if (me == 0) {
                /* Not found in the module itself; fall back to Object's
                 * methods, which modules can also see. */
                me = search_method(rb_cObject, id, 0);
            }
            if (UNDEFINED_METHOD_ENTRY_P(me)) {
                rb_print_undef(module, id, METHOD_VISI_UNDEF);
            }
            if (me->def->type != VM_METHOD_TYPE_ZSUPER) {
                break; /* normal case: need not to follow 'super' link */
            }
            /* ZSUPER entry: keep walking up the ancestry to find the
             * real definition to copy. */
            m = RCLASS_SUPER(m);
            if (!m)
                break;
        }
        /* Copy the found entry onto the singleton class as public. */
        rb_method_entry_set(rb_singleton_class(module), id, me, METHOD_VISI_PUBLIC);
    }
    if (argc == 1) {
        return argv[0];
    }
    return rb_ary_new_from_values(argc, argv);
}
3357
3358#ifdef __GNUC__
3359#pragma push_macro("rb_method_basic_definition_p")
3360#undef rb_method_basic_definition_p
3361#endif
3362int
3363rb_method_basic_definition_p(VALUE klass, ID id)
3364{
3365 const rb_callable_method_entry_t *cme;
3366 if (!klass) return TRUE; /* hidden object cannot be overridden */
3367 cme = rb_callable_method_entry(klass, id);
3368 return (cme && METHOD_ENTRY_BASIC(cme)) ? TRUE : FALSE;
3369}
3370#ifdef __GNUC__
3371#pragma pop_macro("rb_method_basic_definition_p")
3372#endif
3373
3374static VALUE
3375call_method_entry(rb_execution_context_t *ec, VALUE defined_class, VALUE obj, ID id,
3376 const rb_callable_method_entry_t *cme, int argc, const VALUE *argv, int kw_splat)
3377{
3378 VALUE passed_block_handler = vm_passed_block_handler(ec);
3379 VALUE result = rb_vm_call_kw(ec, obj, id, argc, argv, cme, kw_splat);
3380 vm_passed_block_handler_set(ec, passed_block_handler);
3381 return result;
3382}
3383
3384static VALUE
3385basic_obj_respond_to_missing(rb_execution_context_t *ec, VALUE klass, VALUE obj,
3386 VALUE mid, VALUE priv)
3387{
3388 VALUE defined_class, args[2];
3389 const ID rtmid = idRespond_to_missing;
3390 const rb_callable_method_entry_t *const cme = callable_method_entry(klass, rtmid, &defined_class);
3391
3392 if (!cme || METHOD_ENTRY_BASIC(cme)) return Qundef;
3393 args[0] = mid;
3394 args[1] = priv;
3395 return call_method_entry(ec, defined_class, obj, rtmid, cme, 2, args, RB_NO_KEYWORDS);
3396}
3397
3398static inline int
3399basic_obj_respond_to(rb_execution_context_t *ec, VALUE obj, ID id, int pub)
3400{
3401 VALUE klass = CLASS_OF(obj);
3402 VALUE ret;
3403
3404 switch (method_boundp(klass, id, pub|BOUND_RESPONDS)) {
3405 case 2:
3406 return FALSE;
3407 case 0:
3408 ret = basic_obj_respond_to_missing(ec, klass, obj, ID2SYM(id),
3409 RBOOL(!pub));
3410 return RTEST(ret) && !UNDEF_P(ret);
3411 default:
3412 return TRUE;
3413 }
3414}
3415
3416static int
3417vm_respond_to(rb_execution_context_t *ec, VALUE klass, VALUE obj, ID id, int priv)
3418{
3419 VALUE defined_class;
3420 const ID resid = idRespond_to;
3421 const rb_callable_method_entry_t *const cme = callable_method_entry(klass, resid, &defined_class);
3422
3423 if (!cme) return -1;
3424 if (METHOD_ENTRY_BASIC(cme)) {
3425 return -1;
3426 }
3427 else {
3428 int argc = 1;
3429 VALUE args[2];
3430 VALUE result;
3431
3432 args[0] = ID2SYM(id);
3433 args[1] = Qtrue;
3434 if (priv) {
3435 argc = rb_method_entry_arity((const rb_method_entry_t *)cme);
3436 if (argc > 2) {
3437 rb_raise(rb_eArgError,
3438 "respond_to? must accept 1 or 2 arguments (requires %d)",
3439 argc);
3440 }
3441 if (argc != 1) {
3442 argc = 2;
3443 }
3444 else if (!NIL_P(ruby_verbose)) {
3445 VALUE location = rb_method_entry_location((const rb_method_entry_t *)cme);
3447 "%"PRIsVALUE"%c""respond_to?(:%"PRIsVALUE") uses"
3448 " the deprecated method signature, which takes one parameter",
3449 (RCLASS_SINGLETON_P(klass) ? obj : klass),
3450 (RCLASS_SINGLETON_P(klass) ? '.' : '#'),
3451 QUOTE_ID(id));
3452 if (!NIL_P(location)) {
3453 VALUE path = RARRAY_AREF(location, 0);
3454 VALUE line = RARRAY_AREF(location, 1);
3455 if (!NIL_P(path)) {
3457 RSTRING_PTR(path), NUM2INT(line),
3458 "respond_to? is defined here");
3459 }
3460 }
3461 }
3462 }
3463 result = call_method_entry(ec, defined_class, obj, resid, cme, argc, args, RB_NO_KEYWORDS);
3464 return RTEST(result);
3465 }
3466}
3467
3468int
3469rb_obj_respond_to(VALUE obj, ID id, int priv)
3470{
3471 rb_execution_context_t *ec = GET_EC();
3472 return rb_ec_obj_respond_to(ec, obj, id, priv);
3473}
3474
3475int
3476rb_ec_obj_respond_to(rb_execution_context_t *ec, VALUE obj, ID id, int priv)
3477{
3478 VALUE klass = CLASS_OF(obj);
3479 int ret = vm_respond_to(ec, klass, obj, id, priv);
3480 if (ret == -1) ret = basic_obj_respond_to(ec, obj, id, !priv);
3481 return ret;
3482}
3483
3484int
3486{
3487 return rb_obj_respond_to(obj, id, FALSE);
3488}
3489
3490
3491/*
3492 * call-seq:
3493 * obj.respond_to?(symbol, include_all=false) -> true or false
3494 * obj.respond_to?(string, include_all=false) -> true or false
3495 *
3496 * Returns +true+ if _obj_ responds to the given method. Private and
3497 * protected methods are included in the search only if the optional
3498 * second parameter evaluates to +true+.
3499 *
3500 * If the method is not implemented,
3501 * as Process.fork on Windows, File.lchmod on GNU/Linux, etc.,
3502 * false is returned.
3503 *
3504 * If the method is not defined, <code>respond_to_missing?</code>
3505 * method is called and the result is returned.
3506 *
3507 * When the method name parameter is given as a string, the string is
3508 * converted to a symbol.
3509 */
3510
3511static VALUE
3512obj_respond_to(int argc, VALUE *argv, VALUE obj)
3513{
3514 VALUE mid, priv;
3515 ID id;
3516 rb_execution_context_t *ec = GET_EC();
3517
3518 rb_scan_args(argc, argv, "11", &mid, &priv);
3519 if (!(id = rb_check_id(&mid))) {
3520 VALUE ret = basic_obj_respond_to_missing(ec, CLASS_OF(obj), obj,
3521 rb_to_symbol(mid), priv);
3522 if (UNDEF_P(ret)) ret = Qfalse;
3523 return ret;
3524 }
3525 return RBOOL(basic_obj_respond_to(ec, obj, id, !RTEST(priv)));
3526}
3527
3528/*
3529 * call-seq:
3530 * obj.respond_to_missing?(symbol, include_all) -> true or false
3531 * obj.respond_to_missing?(string, include_all) -> true or false
3532 *
3533 * DO NOT USE THIS DIRECTLY.
3534 *
3535 * Hook method to return whether the _obj_ can respond to _id_ method
3536 * or not.
3537 *
3538 * When the method name parameter is given as a string, the string is
3539 * converted to a symbol.
3540 *
3541 * See #respond_to?, and the example of BasicObject.
3542 */
3543static VALUE
3544obj_respond_to_missing(VALUE obj, VALUE mid, VALUE priv)
3545{
3546 return Qfalse;
3547}
3548
3549void
3550Init_eval_method(void)
3551{
3552 rb_define_method(rb_mKernel, "respond_to?", obj_respond_to, -1);
3553 rb_define_method(rb_mKernel, "respond_to_missing?", obj_respond_to_missing, 2);
3554
3555 rb_define_method(rb_cModule, "remove_method", rb_mod_remove_method, -1);
3556 rb_define_method(rb_cModule, "undef_method", rb_mod_undef_method, -1);
3557 rb_define_method(rb_cModule, "alias_method", rb_mod_alias_method, 2);
3558 rb_define_private_method(rb_cModule, "public", rb_mod_public, -1);
3559 rb_define_private_method(rb_cModule, "protected", rb_mod_protected, -1);
3560 rb_define_private_method(rb_cModule, "private", rb_mod_private, -1);
3561 rb_define_private_method(rb_cModule, "module_function", rb_mod_modfunc, -1);
3562 rb_define_private_method(rb_cModule, "ruby2_keywords", rb_mod_ruby2_keywords, -1);
3563
3564 rb_define_method(rb_cModule, "method_defined?", rb_mod_method_defined, -1);
3565 rb_define_method(rb_cModule, "public_method_defined?", rb_mod_public_method_defined, -1);
3566 rb_define_method(rb_cModule, "private_method_defined?", rb_mod_private_method_defined, -1);
3567 rb_define_method(rb_cModule, "protected_method_defined?", rb_mod_protected_method_defined, -1);
3568 rb_define_method(rb_cModule, "public_class_method", rb_mod_public_method, -1);
3569 rb_define_method(rb_cModule, "private_class_method", rb_mod_private_method, -1);
3570
3572 "public", top_public, -1);
3574 "private", top_private, -1);
3576 "ruby2_keywords", top_ruby2_keywords, -1);
3577
3578 {
3579#define REPLICATE_METHOD(klass, id) do { \
3580 const rb_method_entry_t *me = rb_method_entry((klass), (id)); \
3581 rb_method_entry_set((klass), (id), me, METHOD_ENTRY_VISI(me)); \
3582 } while (0)
3583
3584 REPLICATE_METHOD(rb_eException, idMethodMissing);
3585 REPLICATE_METHOD(rb_eException, idRespond_to);
3586 REPLICATE_METHOD(rb_eException, idRespond_to_missing);
3587 }
3588}
#define RUBY_ASSERT_ALWAYS(expr,...)
A variant of RUBY_ASSERT that does not interface with RUBY_DEBUG.
Definition assert.h:199
#define RUBY_ASSERT(...)
Asserts that the given expression is truthy if and only if RUBY_DEBUG is truthy.
Definition assert.h:219
std::atomic< unsigned > rb_atomic_t
Type that is eligible for atomic operations.
Definition atomic.h:69
#define RUBY_ATOMIC_FETCH_ADD(var, val)
Atomically replaces the value pointed by var with the result of addition of val to the old value of v...
Definition atomic.h:118
#define RUBY_ATOMIC_FETCH_SUB(var, val)
Atomically replaces the value pointed by var with the result of subtraction of val to the old value o...
Definition atomic.h:129
#define rb_define_method(klass, mid, func, arity)
Defines klass#mid.
#define rb_define_private_method(klass, mid, func, arity)
Defines klass#mid and makes it private.
VALUE rb_singleton_class(VALUE obj)
Finds or creates the singleton class of the passed object.
Definition class.c:2728
void rb_class_modify_check(VALUE klass)
Asserts that klass is not a frozen class.
Definition eval.c:429
int rb_scan_args(int argc, const VALUE *argv, const char *fmt,...)
Retrieves argument from argc and argv to given VALUE references according to the format string.
Definition class.c:3061
#define xfree
Old name of ruby_xfree.
Definition xmalloc.h:58
#define Qundef
Old name of RUBY_Qundef.
#define INT2FIX
Old name of RB_INT2FIX.
Definition long.h:48
#define ID2SYM
Old name of RB_ID2SYM.
Definition symbol.h:44
#define UNREACHABLE_RETURN
Old name of RBIMPL_UNREACHABLE_RETURN.
Definition assume.h:29
#define ZALLOC
Old name of RB_ZALLOC.
Definition memory.h:402
#define CLASS_OF
Old name of rb_class_of.
Definition globals.h:205
#define T_MODULE
Old name of RUBY_T_MODULE.
Definition value_type.h:70
#define T_ICLASS
Old name of RUBY_T_ICLASS.
Definition value_type.h:66
#define FL_TEST_RAW
Old name of RB_FL_TEST_RAW.
Definition fl_type.h:128
#define rb_ary_new3
Old name of rb_ary_new_from_args.
Definition array.h:658
#define Qtrue
Old name of RUBY_Qtrue.
#define NUM2INT
Old name of RB_NUM2INT.
Definition int.h:44
#define Qnil
Old name of RUBY_Qnil.
#define Qfalse
Old name of RUBY_Qfalse.
#define NIL_P
Old name of RB_NIL_P.
#define T_CLASS
Old name of RUBY_T_CLASS.
Definition value_type.h:58
#define BUILTIN_TYPE
Old name of RB_BUILTIN_TYPE.
Definition value_type.h:85
#define FL_TEST
Old name of RB_FL_TEST.
Definition fl_type.h:127
#define FL_SET_RAW
Old name of RB_FL_SET_RAW.
Definition fl_type.h:126
void rb_notimplement(void)
Definition error.c:3898
void rb_category_warn(rb_warning_category_t category, const char *fmt,...)
Identical to rb_category_warning(), except it reports unless $VERBOSE is nil.
Definition error.c:477
#define ruby_verbose
This variable controls whether the interpreter is in debug mode.
Definition error.h:476
VALUE rb_eTypeError
TypeError exception.
Definition error.c:1427
void rb_category_compile_warn(rb_warning_category_t category, const char *file, int line, const char *fmt,...)
Identical to rb_compile_warn(), except it also accepts category.
Definition error.c:440
void rb_warn(const char *fmt,...)
Identical to rb_warning(), except it reports unless $VERBOSE is nil.
Definition error.c:467
VALUE rb_eException
Mother of all exceptions.
Definition error.c:1419
void rb_warning(const char *fmt,...)
Issues a warning.
Definition error.c:498
@ RB_WARN_CATEGORY_DEPRECATED
Warning is for deprecated features.
Definition error.h:48
VALUE rb_mKernel
Kernel module.
Definition object.c:60
VALUE rb_cObject
Object class.
Definition object.c:61
VALUE rb_cModule
Module class.
Definition object.c:62
VALUE rb_equal(VALUE lhs, VALUE rhs)
This function is an optimised version of calling #==.
Definition object.c:141
#define RB_OBJ_WRITTEN(old, oldv, young)
Identical to RB_OBJ_WRITE(), except it doesn't write any values, but only a WB declaration.
Definition gc.h:468
#define RB_OBJ_WRITE(old, slot, young)
Declaration of a "back" pointer.
Definition gc.h:456
VALUE rb_call_super_kw(int argc, const VALUE *argv, int kw_splat)
Identical to rb_call_super(), except you can specify how to handle the last element of the given arra...
Definition vm_eval.c:354
VALUE rb_ary_new_from_values(long n, const VALUE *elts)
Identical to rb_ary_new_from_args(), except how objects are passed.
VALUE rb_check_array_type(VALUE obj)
Try converting an object to its array representation using its to_ary method, if any.
VALUE rb_ary_freeze(VALUE obj)
Freeze an array, preventing further modifications.
void rb_undef(VALUE mod, ID mid)
Inserts a method entry that hides previous method definition of the given name.
Definition vm_method.c:2397
#define UNLIMITED_ARGUMENTS
This macro is used in conjunction with rb_check_arity().
Definition error.h:35
static int rb_check_arity(int argc, int min, int max)
Ensures that the passed integer is in the passed range.
Definition error.h:284
#define rb_hash_uint(h, i)
Just another name of st_hash_uint.
Definition string.h:943
st_index_t rb_hash_start(st_index_t i)
Starts a series of hashing.
Definition random.c:1785
VALUE rb_mod_name(VALUE mod)
Queries the name of a module.
Definition variable.c:136
int rb_respond_to(VALUE obj, ID mid)
Queries if the object responds to the method.
Definition vm_method.c:3485
VALUE(* rb_alloc_func_t)(VALUE klass)
This is the type of functions that ruby calls when trying to allocate an object.
Definition vm.h:219
void rb_undef_alloc_func(VALUE klass)
Deletes the allocator function of a class.
Definition vm_method.c:1742
void rb_alias(VALUE klass, ID dst, ID src)
Resembles alias.
Definition vm_method.c:2780
void rb_attr(VALUE klass, ID name, int need_reader, int need_writer, int honour_visibility)
This function resembles now-deprecated Module#attr.
Definition vm_method.c:2360
void rb_remove_method(VALUE klass, const char *name)
Removes a method.
Definition vm_method.c:2209
rb_alloc_func_t rb_get_alloc_func(VALUE klass)
Queries the allocator function of a class.
Definition vm_method.c:1751
void rb_clear_constant_cache_for_id(ID id)
Clears the inline constant caches associated with a particular ID.
Definition vm_method.c:329
void rb_remove_method_id(VALUE klass, ID mid)
Identical to rb_remove_method(), except it accepts the method name as ID.
Definition vm_method.c:2203
void rb_define_alloc_func(VALUE klass, rb_alloc_func_t func)
Sets the allocator function of a class.
VALUE rb_f_notimplement(int argc, const VALUE *argv, VALUE obj, VALUE marker)
Raises rb_eNotImpError.
Definition vm_method.c:859
int rb_method_boundp(VALUE klass, ID id, int ex)
Queries if the klass has this method.
Definition vm_method.c:2321
int rb_obj_respond_to(VALUE obj, ID mid, int private_p)
Identical to rb_respond_to(), except it additionally takes the visibility parameter.
Definition vm_method.c:3469
ID rb_check_id(volatile VALUE *namep)
Detects if the given name is already interned or not.
Definition symbol.c:1164
VALUE rb_to_symbol(VALUE name)
Identical to rb_intern_str(), except it generates a dynamic symbol if necessary.
Definition string.c:12708
ID rb_to_id(VALUE str)
Identical to rb_intern_str(), except it tries to convert the parameter object to an instance of rb_cS...
Definition string.c:12698
int capa
Designed capacity of the buffer.
Definition io.h:11
VALUE type(ANYARGS)
ANYARGS-ed function type.
#define RARRAY_LEN
Just another name of rb_array_len.
Definition rarray.h:51
#define RARRAY_AREF(a, i)
Definition rarray.h:403
#define RBASIC(obj)
Convenient casting macro.
Definition rbasic.h:40
#define RCLASS_SUPER
Just another name of rb_class_get_superclass.
Definition rclass.h:44
#define RUBY_TYPED_FREE_IMMEDIATELY
Macros to see if each corresponding flag is defined.
Definition rtypeddata.h:122
#define TypedData_Make_Struct(klass, type, data_type, sval)
Identical to TypedData_Wrap_Struct, except it allocates a new data region internally instead of takin...
Definition rtypeddata.h:578
#define RB_PASS_CALLED_KEYWORDS
Pass keywords if current method is called with keywords, useful for argument delegation.
Definition scan_args.h:78
#define RB_NO_KEYWORDS
Do not pass keywords.
Definition scan_args.h:69
#define RTEST
This is an old name of RB_TEST.
#define _(args)
This was a transition path from K&R to ANSI.
Definition stdarg.h:35
#define ANYARGS
Functions declared using this macro take arbitrary arguments, including void.
Definition stdarg.h:64
Definition vm_method.c:388
Definition method.h:63
CREF (Class REFerence)
Definition method.h:45
This is the struct that holds necessary info for a struct.
Definition rtypeddata.h:229
struct rb_data_type_struct::@62 function
Function pointers.
size_t(* dsize)(const void *)
This function is to query the size of the underlying memory regions.
Definition rtypeddata.h:269
RUBY_DATA_FUNC dfree
This function is called when the object is no longer used.
Definition rtypeddata.h:259
const char * wrap_struct_name
Name of structs of this kind.
Definition rtypeddata.h:236
VALUE flags
Type-specific behavioural characteristics.
Definition rtypeddata.h:343
Definition method.h:55
rb_cref_t * cref
class reference, should be marked
Definition method.h:144
const rb_iseq_t * iseqptr
iseq pointer, should be separated from iseqval
Definition method.h:143
Definition st.h:79
uintptr_t ID
Type that represents a Ruby identifier such as a variable name.
Definition value.h:52
uintptr_t VALUE
Type that represents a Ruby object.
Definition value.h:40
static void Check_Type(VALUE v, enum ruby_value_type t)
Identical to RB_TYPE_P(), except it raises exceptions on predication failure.
Definition value_type.h:433
static bool RB_TYPE_P(VALUE obj, enum ruby_value_type t)
Queries if the given object is of given type.
Definition value_type.h:376