Ruby 4.1.0dev (2026-05-15 revision a8bcae043f931d9b79f1cb1fe2c021985d07b984)
enumerator.c (a8bcae043f931d9b79f1cb1fe2c021985d07b984)
1/************************************************
2
3 enumerator.c - provides Enumerator class
4
5 $Author$
6
7 Copyright (C) 2001-2003 Akinori MUSHA
8
9 $Idaemons: /home/cvs/rb/enumerator/enumerator.c,v 1.1.1.1 2001/07/15 10:12:48 knu Exp $
10 $RoughId: enumerator.c,v 1.6 2003/07/27 11:03:24 nobu Exp $
11 $Id$
12
13************************************************/
14
15#include "ruby/internal/config.h"
16
17#ifdef HAVE_FLOAT_H
18#include <float.h>
19#endif
20
21#include <limits.h>
22#include "id.h"
23#include "internal.h"
24#include "internal/class.h"
25#include "internal/enumerator.h"
26#include "internal/error.h"
27#include "internal/hash.h"
28#include "internal/imemo.h"
29#include "internal/numeric.h"
30#include "internal/range.h"
31#include "internal/rational.h"
32#include "ruby/ruby.h"
33
34/*
35 * Document-class: Enumerator
36 *
37 * \Class \Enumerator supports:
38 *
39 * - {External iteration}[rdoc-ref:Enumerator@External+Iteration].
40 * - {Internal iteration}[rdoc-ref:Enumerator@Internal+Iteration].
41 *
42 * An \Enumerator may be created by the following methods:
43 *
44 * - Object#to_enum.
45 * - Object#enum_for.
46 * - Enumerator.new.
47 *
48 * In addition, certain Ruby methods return \Enumerator objects:
49 * a Ruby iterator method that accepts a block
50 * may return an \Enumerator if no block is given.
51 * There are many such methods, for example, in classes Array and Hash.
52 * (In the documentation for those classes, search for `new_enumerator`.)
53 *
54 * == Internal Iteration
55 *
56 * In _internal iteration_, an iterator method drives the iteration
57 * and the caller's block handles the processing;
58 * this example uses method #each_with_index:
59 *
60 * words = %w[foo bar baz] # => ["foo", "bar", "baz"]
61 * enumerator = words.each # => #<Enumerator: ...>
62 * enumerator.each_with_index {|word, i| puts "#{i}: #{word}" }
63 * 0: foo
64 * 1: bar
65 * 2: baz
66 *
67 * Iterator methods in class \Enumerator include:
68 *
69 * - #each:
70 * passes each item to the block.
71 * - #each_with_index:
72 * passes each item and its index to the block.
73 * - #each_with_object (aliased as #with_object):
74 * passes each item and a given object to the block.
75 * - #with_index:
76 * like #each_with_index, but starting at a given offset (instead of zero).
77 *
78 * \Class \Enumerator includes module Enumerable,
79 * which provides many more iterator methods.
80 *
81 * == External Iteration
82 *
83 * In _external iteration_, the user's program both drives the iteration
84 * and handles the processing in stream-like fashion;
85 * this example uses method #next:
86 *
87 * words = %w[foo bar baz]
88 * enumerator = words.each
89 * enumerator.next # => "foo"
90 * enumerator.next # => "bar"
91 * enumerator.next # => "baz"
92 * enumerator.next # Raises StopIteration: iteration reached an end
93 *
94 * External iteration methods in class \Enumerator include:
95 *
96 * - #feed:
97 * sets the value that is next to be returned.
98 * - #next:
99 * returns the next value and increments the position.
100 * - #next_values:
101 * returns the next value in a 1-element array and increments the position.
102 * - #peek:
103 * returns the next value but does not increment the position.
104 * - #peek_values:
105 * returns the next value in a 1-element array but does not increment the position.
106 * - #rewind:
107 * sets the position to zero.
108 *
109 * Each of these methods raises FrozenError if called from a frozen \Enumerator.
110 *
111 * == External Iteration and \Fiber
112 *
113 * External iteration that uses Fiber differs *significantly* from internal iteration:
114 *
115 * - Using \Fiber adds some overhead compared to internal enumeration.
116 * - The stacktrace will only include the stack from the \Enumerator, not above.
117 * - \Fiber-local variables are *not* inherited inside the \Enumerator \Fiber,
118 * which instead starts with no \Fiber-local variables.
119 * - \Fiber storage variables *are* inherited and are designed
120 * to handle \Enumerator Fibers. Assigning to a \Fiber storage variable
121 * only affects the current \Fiber, so if you want to change state
122 * in the caller \Fiber of the \Enumerator \Fiber, you need to use an
123 * extra indirection (e.g., use some object in the \Fiber storage
124 * variable and mutate some ivar of it).
125 *
126 * Concretely:
127 *
128 * Thread.current[:fiber_local] = 1
129 * Fiber[:storage_var] = 1
130 * e = Enumerator.new do |y|
131 * p Thread.current[:fiber_local] # for external iteration: nil, for internal iteration: 1
132 * p Fiber[:storage_var] # => 1, inherited
133 * Fiber[:storage_var] += 1
134 * y << 42
135 * end
136 *
137 * p e.next # => 42
138 * p Fiber[:storage_var] # => 1 (it ran in a different Fiber)
139 *
140 * e.each { p _1 }
141 * p Fiber[:storage_var] # => 2 (it ran in the same Fiber/"stack" as the current Fiber)
142 *
143 * == Converting External Iteration to Internal Iteration
144 *
145 * You can use an external iterator to implement an internal iterator as follows:
146 *
147 * def ext_each(e)
148 * while true
149 * begin
150 * vs = e.next_values
151 * rescue StopIteration
152 * return $!.result
153 * end
154 * y = yield(*vs)
155 * e.feed y
156 * end
157 * end
158 *
159 * o = Object.new
160 *
161 * def o.each
162 * puts yield
163 * puts yield(1)
164 * puts yield(1, 2)
165 * 3
166 * end
167 *
168 * # use o.each as an internal iterator directly.
169 * puts o.each {|*x| puts x; [:b, *x] }
170 * # => [], [:b], [1], [:b, 1], [1, 2], [:b, 1, 2], 3
171 *
172 * # convert o.each to an external iterator for
173 * # implementing an internal iterator.
174 * puts ext_each(o.to_enum) {|*x| puts x; [:b, *x] }
175 * # => [], [:b], [1], [:b, 1], [1, 2], [:b, 1, 2], 3
176 *
177 */
/* Lazily-initialized class and ID caches for this file (set up in Init_Enumerator). */
static VALUE rb_cLazy;
static ID id_rewind, id_to_enum, id_each_entry;
static ID id_next, id_result, id_receiver, id_arguments, id_memo, id_method, id_force;
static VALUE sym_each, sym_yield;

/* Controls whether Lazy methods dispatch through super (see lazy implementation). */
static VALUE lazy_use_super_method;

extern ID ruby_static_id_cause;

/* Aliases onto the VM's pre-interned static IDs. */
#define id_call idCall
#define id_cause ruby_static_id_cause
#define id_each idEach
#define id_eqq idEqq
#define id_initialize idInitialize
#define id_size idSize
194
196
198 VALUE obj;
199 ID meth;
200 VALUE args;
201 VALUE fib;
202 VALUE dst;
203 VALUE lookahead;
204 VALUE feedvalue;
205 VALUE stop_exc;
206 VALUE size;
207 VALUE procs;
209 int kw_splat;
210};
211
/* Declarative list of the GC-visible VALUE edges in struct enumerator;
 * consumed via RUBY_REFS_LIST_PTR with RUBY_TYPED_DECL_MARKING below. */
RUBY_REFERENCES(enumerator_refs) = {
    RUBY_REF_EDGE(struct enumerator, obj),
    RUBY_REF_EDGE(struct enumerator, args),
    RUBY_REF_EDGE(struct enumerator, fib),
    RUBY_REF_EDGE(struct enumerator, dst),
    RUBY_REF_EDGE(struct enumerator, lookahead),
    RUBY_REF_EDGE(struct enumerator, feedvalue),
    RUBY_REF_EDGE(struct enumerator, stop_exc),
    RUBY_REF_EDGE(struct enumerator, size),
    RUBY_REF_EDGE(struct enumerator, procs),
    RUBY_REF_END
};
224
/* Classes defined by this file: Enumerator::Generator, Enumerator::Yielder,
 * and Enumerator::Producer. */
static VALUE rb_cGenerator, rb_cYielder, rb_cEnumProducer;

/* Backing state for Enumerator::Generator (the block given to Enumerator.new). */
struct generator {
    VALUE proc;
    VALUE obj;
};

/* Backing state for Enumerator::Yielder (the object passed into that block). */
struct yielder {
    VALUE proc;
};

/* Backing state for Enumerator.produce. */
struct producer {
    VALUE init;
    VALUE proc;
    VALUE size;
};
241
242typedef struct MEMO *lazyenum_proc_func(VALUE, struct MEMO *, VALUE, long);
243typedef VALUE lazyenum_size_func(VALUE, VALUE);
244typedef int lazyenum_precheck_func(VALUE proc_entry);
245typedef struct {
246 lazyenum_proc_func *proc;
247 lazyenum_size_func *size;
248 lazyenum_precheck_func *precheck;
250
252 VALUE proc;
253 VALUE memo;
254 const lazyenum_funcs *fn;
255};
256
257static VALUE generator_allocate(VALUE klass);
258static VALUE generator_init(VALUE obj, VALUE proc);
259
260static VALUE rb_cEnumChain;
261
263 VALUE enums;
264 long pos;
265};
266
267static VALUE rb_cEnumProduct;
268
270 VALUE enums;
271};
272
273VALUE rb_cArithSeq;
274
275static const rb_data_type_t enumerator_data_type = {
276 "enumerator",
277 {
278 RUBY_REFS_LIST_PTR(enumerator_refs),
280 NULL, // Nothing allocated externally, so don't need a memsize function
281 NULL,
282 },
283 0, NULL, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_DECL_MARKING | RUBY_TYPED_EMBEDDABLE
284};
285
286static struct enumerator *
287enumerator_ptr(VALUE obj)
288{
289 struct enumerator *ptr;
290
291 TypedData_Get_Struct(obj, struct enumerator, &enumerator_data_type, ptr);
292 if (!ptr || UNDEF_P(ptr->obj)) {
293 rb_raise(rb_eArgError, "uninitialized enumerator");
294 }
295 return ptr;
296}
297
298static void
299proc_entry_mark_and_move(void *p)
300{
301 struct proc_entry *ptr = p;
302 rb_gc_mark_and_move(&ptr->proc);
303 rb_gc_mark_and_move(&ptr->memo);
304}
305
306static const rb_data_type_t proc_entry_data_type = {
307 "proc_entry",
308 {
309 proc_entry_mark_and_move,
311 NULL, // Nothing allocated externally, so don't need a memsize function
312 proc_entry_mark_and_move,
313 },
314 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
315};
316
317static struct proc_entry *
318proc_entry_ptr(VALUE proc_entry)
319{
320 struct proc_entry *ptr;
321
322 TypedData_Get_Struct(proc_entry, struct proc_entry, &proc_entry_data_type, ptr);
323
324 return ptr;
325}
326
327/*
328 * call-seq:
329 * obj.to_enum(method = :each, *args) -> enum
330 * obj.enum_for(method = :each, *args) -> enum
331 * obj.to_enum(method = :each, *args) {|*args| block} -> enum
332 * obj.enum_for(method = :each, *args){|*args| block} -> enum
333 *
334 * Creates a new Enumerator which will enumerate by calling +method+ on
335 * +obj+, passing +args+ if any. What was _yielded_ by method becomes
336 * values of enumerator.
337 *
338 * If a block is given, it will be used to calculate the size of
339 * the enumerator without the need to iterate it (see Enumerator#size).
340 *
341 * === Examples
342 *
343 * str = "xyz"
344 *
345 * enum = str.enum_for(:each_byte)
346 * enum.each { |b| puts b }
347 * # => 120
348 * # => 121
349 * # => 122
350 *
351 * # protect an array from being modified by some_method
352 * a = [1, 2, 3]
353 * some_method(a.to_enum)
354 *
355 * # String#split in block form is more memory-effective:
356 * very_large_string.split("|") { |chunk| return chunk if chunk.include?('DATE') }
357 * # This could be rewritten more idiomatically with to_enum:
358 * very_large_string.to_enum(:split, "|").lazy.grep(/DATE/).first
359 *
360 * It is typical to call to_enum when defining methods for
361 * a generic Enumerable, in case no block is passed.
362 *
363 * Here is such an example, with parameter passing and a sizing block:
364 *
365 * module Enumerable
366 * # a generic method to repeat the values of any enumerable
367 * def repeat(n)
368 * raise ArgumentError, "#{n} is negative!" if n < 0
369 * unless block_given?
370 * return to_enum(__method__, n) do # __method__ is :repeat here
371 * sz = size # Call size and multiply by n...
372 * sz * n if sz # but return nil if size itself is nil
373 * end
374 * end
375 * each do |*val|
376 * n.times { yield *val }
377 * end
378 * end
379 * end
380 *
381 * %i[hello world].repeat(2) { |w| puts w }
382 * # => Prints 'hello', 'hello', 'world', 'world'
383 * enum = (1..14).repeat(3)
384 * # => returns an Enumerator when called without a block
385 * enum.first(4) # => [1, 1, 1, 2]
386 * enum.size # => 42
387 */
388static VALUE
389obj_to_enum(int argc, VALUE *argv, VALUE obj)
390{
391 VALUE enumerator, meth = sym_each;
392
393 if (argc > 0) {
394 --argc;
395 meth = *argv++;
396 }
397 enumerator = rb_enumeratorize_with_size(obj, meth, argc, argv, 0);
398 if (rb_block_given_p()) {
399 RB_OBJ_WRITE(enumerator, &enumerator_ptr(enumerator)->size, rb_block_proc());
400 }
401 return enumerator;
402}
403
404static VALUE
405enumerator_allocate(VALUE klass)
406{
407 struct enumerator *ptr;
408 VALUE enum_obj;
409
410 enum_obj = TypedData_Make_Struct(klass, struct enumerator, &enumerator_data_type, ptr);
411 ptr->obj = Qundef;
412
413 return enum_obj;
414}
415
416static VALUE
417enumerator_init(VALUE enum_obj, VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, VALUE size, int kw_splat)
418{
419 struct enumerator *ptr;
420
421 rb_check_frozen(enum_obj);
422 TypedData_Get_Struct(enum_obj, struct enumerator, &enumerator_data_type, ptr);
423
424 if (!ptr) {
425 rb_raise(rb_eArgError, "unallocated enumerator");
426 }
427
428 RB_OBJ_WRITE(enum_obj, &ptr->obj, obj);
429 ptr->meth = rb_to_id(meth);
430 if (argc) RB_OBJ_WRITE(enum_obj, &ptr->args, rb_ary_new4(argc, argv));
431 ptr->fib = 0;
432 ptr->dst = Qnil;
433 ptr->lookahead = Qundef;
434 ptr->feedvalue = Qundef;
435 ptr->stop_exc = Qfalse;
436 RB_OBJ_WRITE(enum_obj, &ptr->size, size);
437 ptr->size_fn = size_fn;
438 ptr->kw_splat = kw_splat;
439
440 return enum_obj;
441}
442
443static VALUE
444convert_to_feasible_size_value(VALUE obj)
445{
446 if (NIL_P(obj)) {
447 return obj;
448 }
449 else if (rb_respond_to(obj, id_call)) {
450 return obj;
451 }
452 else if (RB_FLOAT_TYPE_P(obj) && RFLOAT_VALUE(obj) == HUGE_VAL) {
453 return obj;
454 }
455 else {
456 return rb_to_int(obj);
457 }
458}
459
460/*
461 * call-seq:
462 * Enumerator.new(size = nil) {|yielder| ... }
463 *
464 * Returns a new \Enumerator object that can be used for iteration.
465 *
466 * The given block defines the iteration;
467 * it is called with a "yielder" object that can yield an object
468 * via a call to method <tt>yielder.yield</tt>:
469 *
470 * fib = Enumerator.new do |yielder|
471 * n = next_n = 1
472 * while true do
473 * yielder.yield(n)
474 * n, next_n = next_n, n + next_n
475 * end
476 * end
477 *
478 * fib.take(10) # => [1, 1, 2, 3, 5, 8, 13, 21, 34, 55]
479 *
480 * Parameter +size+ specifies how the size is to be calculated (see #size);
481 * it can either be a value or a callable object:
482 *
483 * Enumerator.new{}.size # => nil
484 * Enumerator.new(42){}.size # => 42
485 * Enumerator.new(-> {42}){}.size # => 42
486 *
487 */
488static VALUE
489enumerator_initialize(int argc, VALUE *argv, VALUE obj)
490{
491 VALUE iter = rb_block_proc();
492 VALUE recv = generator_init(generator_allocate(rb_cGenerator), iter);
493 VALUE arg0 = rb_check_arity(argc, 0, 1) ? argv[0] : Qnil;
494 VALUE size = convert_to_feasible_size_value(arg0);
495
496 return enumerator_init(obj, recv, sym_each, 0, 0, 0, size, false);
497}
498
499/* :nodoc: */
500static VALUE
501enumerator_init_copy(VALUE obj, VALUE orig)
502{
503 struct enumerator *ptr0, *ptr1;
504
505 if (!OBJ_INIT_COPY(obj, orig)) return obj;
506 ptr0 = enumerator_ptr(orig);
507 if (ptr0->fib) {
508 /* Fibers cannot be copied */
509 rb_raise(rb_eTypeError, "can't copy execution context");
510 }
511
512 TypedData_Get_Struct(obj, struct enumerator, &enumerator_data_type, ptr1);
513
514 if (!ptr1) {
515 rb_raise(rb_eArgError, "unallocated enumerator");
516 }
517
518 RB_OBJ_WRITE(obj, &ptr1->obj, ptr0->obj);
519 ptr1->meth = ptr0->meth;
520 RB_OBJ_WRITE(obj, &ptr1->args, ptr0->args);
521 ptr1->fib = 0;
522 ptr1->lookahead = Qundef;
523 ptr1->feedvalue = Qundef;
524 RB_OBJ_WRITE(obj, &ptr1->size, ptr0->size);
525 ptr1->size_fn = ptr0->size_fn;
526
527 return obj;
528}
529
/*
 * For backwards compatibility; use rb_enumeratorize_with_size.
 * Equivalent to passing a NULL size function (size will be nil).
 */
VALUE
rb_enumeratorize(VALUE obj, VALUE meth, int argc, const VALUE *argv)
{
    return rb_enumeratorize_with_size(obj, meth, argc, argv, 0);
}
538
539static VALUE lazy_to_enum_i(VALUE self, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, int kw_splat);
540static int lazy_precheck(VALUE procs);
541
542VALUE
543rb_enumeratorize_with_size_kw(VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, int kw_splat)
544{
545 VALUE base_class = rb_cEnumerator;
546
547 if (RTEST(rb_obj_is_kind_of(obj, rb_cLazy))) {
548 base_class = rb_cLazy;
549 }
550 else if (RTEST(rb_obj_is_kind_of(obj, rb_cEnumChain))) {
551 obj = enumerator_init(enumerator_allocate(rb_cEnumerator), obj, sym_each, 0, 0, 0, Qnil, false);
552 }
553
554 return enumerator_init(enumerator_allocate(base_class),
555 obj, meth, argc, argv, size_fn, Qnil, kw_splat);
556}
557
VALUE
rb_enumeratorize_with_size(VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn)
{
    /* Propagate whether the current method call received keyword arguments. */
    return rb_enumeratorize_with_size_kw(obj, meth, argc, argv, size_fn, rb_keyword_given_p());
}
563
564static VALUE
565enumerator_block_call(VALUE obj, rb_block_call_func *func, VALUE arg)
566{
567 int argc = 0;
568 const VALUE *argv = 0;
569 const struct enumerator *e = enumerator_ptr(obj);
570 ID meth = e->meth;
571
572 VALUE args = e->args;
573 if (args) {
574 argc = RARRAY_LENINT(args);
575 argv = RARRAY_CONST_PTR(args);
576 }
577
578 VALUE ret = rb_block_call_kw(e->obj, meth, argc, argv, func, arg, e->kw_splat);
579
580 RB_GC_GUARD(args);
581
582 return ret;
583}
584
585/*
586 * call-seq:
587 * enum.each { |elm| block } -> obj
588 * enum.each -> enum
589 * enum.each(*appending_args) { |elm| block } -> obj
590 * enum.each(*appending_args) -> an_enumerator
591 *
592 * Iterates over the block according to how this Enumerator was constructed.
593 * If no block and no arguments are given, returns self.
594 *
595 * === Examples
596 *
597 * "Hello, world!".scan(/\w+/) #=> ["Hello", "world"]
598 * "Hello, world!".to_enum(:scan, /\w+/).to_a #=> ["Hello", "world"]
599 * "Hello, world!".to_enum(:scan).each(/\w+/).to_a #=> ["Hello", "world"]
600 *
601 * obj = Object.new
602 *
603 * def obj.each_arg(a, b=:b, *rest)
604 * yield a
605 * yield b
606 * yield rest
607 * :method_returned
608 * end
609 *
610 * enum = obj.to_enum :each_arg, :a, :x
611 *
612 * enum.each.to_a #=> [:a, :x, []]
613 * enum.each.equal?(enum) #=> true
614 * enum.each { |elm| elm } #=> :method_returned
615 *
616 * enum.each(:y, :z).to_a #=> [:a, :x, [:y, :z]]
617 * enum.each(:y, :z).equal?(enum) #=> false
618 * enum.each(:y, :z) { |elm| elm } #=> :method_returned
619 *
620 */
621static VALUE
622enumerator_each(int argc, VALUE *argv, VALUE obj)
623{
624 struct enumerator *e = enumerator_ptr(obj);
625
626 if (argc > 0) {
627 VALUE args = (e = enumerator_ptr(obj = rb_obj_dup(obj)))->args;
628 if (args) {
629#if SIZEOF_INT < SIZEOF_LONG
630 /* check int range overflow */
631 rb_long2int(RARRAY_LEN(args) + argc);
632#endif
633 args = rb_ary_dup(args);
634 rb_ary_cat(args, argv, argc);
635 }
636 else {
637 args = rb_ary_new4(argc, argv);
638 }
639 RB_OBJ_WRITE(obj, &e->args, args);
640 e->size = Qnil;
641 e->size_fn = 0;
642 }
643 if (!rb_block_given_p()) return obj;
644
645 if (!lazy_precheck(e->procs)) return Qnil;
646
647 return enumerator_block_call(obj, 0, obj);
648}
649
650static VALUE
651enumerator_with_index_i(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
652{
653 struct MEMO *memo = (struct MEMO *)m;
654 VALUE idx = memo->v1;
655 MEMO_V1_SET(memo, rb_int_succ(idx));
656
657 if (argc <= 1)
658 return rb_yield_values(2, val, idx);
659
660 return rb_yield_values(2, rb_ary_new4(argc, argv), idx);
661}
662
663static VALUE
664enumerator_size(VALUE obj);
665
/* Size callback for RETURN_SIZED_ENUMERATOR: delegates to the receiver's
 * own #size computation. */
static VALUE
enumerator_enum_size(VALUE obj, VALUE args, VALUE eobj)
{
    return enumerator_size(obj);
}
671
672/*
673 * call-seq:
674 * e.with_index(offset = 0) {|(*args), idx| ... }
675 * e.with_index(offset = 0)
676 *
677 * Iterates the given block for each element with an index, which
678 * starts from +offset+. If no block is given, returns a new Enumerator
679 * that includes the index, starting from +offset+
680 *
681 * +offset+:: the starting index to use
682 *
683 */
684static VALUE
685enumerator_with_index(int argc, VALUE *argv, VALUE obj)
686{
687 VALUE memo;
688
689 rb_check_arity(argc, 0, 1);
690 RETURN_SIZED_ENUMERATOR(obj, argc, argv, enumerator_enum_size);
691 memo = (!argc || NIL_P(memo = argv[0])) ? INT2FIX(0) : rb_to_int(memo);
692 return enumerator_block_call(obj, enumerator_with_index_i, (VALUE)rb_imemo_memo_new(memo, 0, 0));
693}
694
695/*
696 * call-seq:
697 * e.each_with_index {|(*args), idx| ... }
698 * e.each_with_index
699 *
700 * Same as Enumerator#with_index(0), i.e. there is no starting offset.
701 *
702 * If no block is given, a new Enumerator is returned that includes the index.
703 *
704 */
static VALUE
enumerator_each_with_index(VALUE obj)
{
    /* Identical to #with_index with its default offset of 0. */
    return enumerator_with_index(0, NULL, obj);
}
710
711static VALUE
712enumerator_with_object_i(RB_BLOCK_CALL_FUNC_ARGLIST(val, memo))
713{
714 if (argc <= 1)
715 return rb_yield_values(2, val, memo);
716
717 return rb_yield_values(2, rb_ary_new4(argc, argv), memo);
718}
719
720/*
721 * call-seq:
722 * e.each_with_object(obj) {|(*args), obj| ... }
723 * e.each_with_object(obj)
724 * e.with_object(obj) {|(*args), obj| ... }
725 * e.with_object(obj)
726 *
727 * Iterates the given block for each element with an arbitrary object, +obj+,
728 * and returns +obj+
729 *
730 * If no block is given, returns a new Enumerator.
731 *
732 * === Example
733 *
734 * to_three = Enumerator.new do |y|
735 * 3.times do |x|
736 * y << x
737 * end
738 * end
739 *
740 * to_three_with_string = to_three.with_object("foo")
741 * to_three_with_string.each do |x,string|
742 * puts "#{string}: #{x}"
743 * end
744 *
745 * # => foo: 0
746 * # => foo: 1
747 * # => foo: 2
748 */
static VALUE
enumerator_with_object(VALUE obj, VALUE memo)
{
    /* Without a block, return a sized enumerator over [element, memo] pairs. */
    RETURN_SIZED_ENUMERATOR(obj, 1, &memo, enumerator_enum_size);
    enumerator_block_call(obj, enumerator_with_object_i, memo);

    /* #with_object returns the memo object, not the iteration result. */
    return memo;
}
757
758static VALUE
759next_ii(RB_BLOCK_CALL_FUNC_ARGLIST(i, obj))
760{
761 struct enumerator *e = enumerator_ptr(obj);
762 VALUE feedvalue = Qnil;
763 VALUE args = rb_ary_new4(argc, argv);
764 rb_fiber_yield(1, &args);
765 if (!UNDEF_P(e->feedvalue)) {
766 feedvalue = e->feedvalue;
767 e->feedvalue = Qundef;
768 }
769 return feedvalue;
770}
771
/* Body of the external-iteration fiber: drives the whole underlying
 * enumeration (each yield passes through next_ii), then records a
 * StopIteration carrying the enumeration's return value as its "result"
 * and parks the fiber by yielding nil. */
static VALUE
next_i(RB_BLOCK_CALL_FUNC_ARGLIST(_, obj))
{
    struct enumerator *e = enumerator_ptr(obj);
    VALUE nil = Qnil;
    VALUE result;

    result = rb_block_call(obj, id_each, 0, 0, next_ii, obj);
    RB_OBJ_WRITE(obj, &e->stop_exc, rb_exc_new2(rb_eStopIteration, "iteration reached an end"));
    rb_ivar_set(e->stop_exc, id_result, result);
    return rb_fiber_yield(1, &nil);
}
784
785static void
786next_init(VALUE obj, struct enumerator *e)
787{
788 VALUE curr = rb_fiber_current();
789 RB_OBJ_WRITE(obj, &e->dst, curr);
790 RB_OBJ_WRITE(obj, &e->fib, rb_fiber_new(next_i, obj));
791 e->lookahead = Qundef;
792}
793
/* Core of external iteration: resumes (creating if needed) the iteration
 * fiber and returns the next batch of yielded values.  Once the
 * enumeration is exhausted, raises StopIteration; subsequent calls raise
 * a fresh StopIteration chained (via cause) to the original, carrying the
 * same result. */
static VALUE
get_next_values(VALUE obj, struct enumerator *e)
{
    VALUE curr, vs;

    if (e->stop_exc) {
        /* Already finished: re-raise with a new exception object so each
         * caller gets a clean backtrace, preserving result and cause. */
        VALUE exc = e->stop_exc;
        VALUE result = rb_attr_get(exc, id_result);
        VALUE mesg = rb_attr_get(exc, idMesg);
        if (!NIL_P(mesg)) mesg = rb_str_dup(mesg);
        VALUE stop_exc = rb_exc_new_str(rb_eStopIteration, mesg);
        rb_ivar_set(stop_exc, id_cause, exc);
        rb_ivar_set(stop_exc, id_result, result);
        rb_exc_raise(stop_exc);
    }

    curr = rb_fiber_current();

    if (!e->fib || !rb_fiber_alive_p(e->fib)) {
        next_init(obj, e);
    }

    vs = rb_fiber_resume(e->fib, 1, &curr);
    if (e->stop_exc) {
        /* The fiber finished during this resume: drop all external
         * iteration state before raising. */
        e->fib = 0;
        e->dst = Qnil;
        e->lookahead = Qundef;
        e->feedvalue = Qundef;
        rb_exc_raise(e->stop_exc);
    }
    return vs;
}
826
827/*
828 * call-seq:
829 * e.next_values -> array
830 *
831 * Returns the next object as an array in the enumerator, and move the
832 * internal position forward. When the position reached at the end,
833 * StopIteration is raised.
834 *
835 * See class-level notes about external iterators.
836 *
837 * This method can be used to distinguish <code>yield</code> and <code>yield
838 * nil</code>.
839 *
840 * === Example
841 *
842 * o = Object.new
843 * def o.each
844 * yield
845 * yield 1
846 * yield 1, 2
847 * yield nil
848 * yield [1, 2]
849 * end
850 * e = o.to_enum
851 * p e.next_values
852 * p e.next_values
853 * p e.next_values
854 * p e.next_values
855 * p e.next_values
856 * e = o.to_enum
857 * p e.next
858 * p e.next
859 * p e.next
860 * p e.next
861 * p e.next
862 *
863 * ## yield args next_values next
864 * # yield [] nil
865 * # yield 1 [1] 1
866 * # yield 1, 2 [1, 2] [1, 2]
867 * # yield nil [nil] nil
868 * # yield [1, 2] [[1, 2]] [1, 2]
869 *
870 */
871
872static VALUE
873enumerator_next_values(VALUE obj)
874{
875 struct enumerator *e = enumerator_ptr(obj);
876 VALUE vs;
877
878 rb_check_frozen(obj);
879
880 if (!UNDEF_P(e->lookahead)) {
881 vs = e->lookahead;
882 e->lookahead = Qundef;
883 return vs;
884 }
885
886 return get_next_values(obj, e);
887}
888
889static VALUE
890ary2sv(VALUE args, int dup)
891{
892 if (!RB_TYPE_P(args, T_ARRAY))
893 return args;
894
895 switch (RARRAY_LEN(args)) {
896 case 0:
897 return Qnil;
898
899 case 1:
900 return RARRAY_AREF(args, 0);
901
902 default:
903 if (dup)
904 return rb_ary_dup(args);
905 return args;
906 }
907}
908
909/*
910 * call-seq:
911 * e.next -> object
912 *
913 * Returns the next object in the enumerator, and move the internal position
914 * forward. When the position reached at the end, StopIteration is raised.
915 *
916 * === Example
917 *
918 * a = [1,2,3]
919 * e = a.to_enum
920 * p e.next #=> 1
921 * p e.next #=> 2
922 * p e.next #=> 3
923 * p e.next #raises StopIteration
924 *
925 * See class-level notes about external iterators.
926 *
927 */
928
929static VALUE
930enumerator_next(VALUE obj)
931{
932 VALUE vs = enumerator_next_values(obj);
933 return ary2sv(vs, 0);
934}
935
936static VALUE
937enumerator_peek_values(VALUE obj)
938{
939 struct enumerator *e = enumerator_ptr(obj);
940
941 rb_check_frozen(obj);
942
943 if (UNDEF_P(e->lookahead)) {
944 RB_OBJ_WRITE(obj, &e->lookahead, get_next_values(obj, e));
945 }
946
947 return e->lookahead;
948}
949
950/*
951 * call-seq:
952 * e.peek_values -> array
953 *
954 * Returns the next object as an array, similar to Enumerator#next_values, but
955 * doesn't move the internal position forward. If the position is already at
956 * the end, StopIteration is raised.
957 *
958 * See class-level notes about external iterators.
959 *
960 * === Example
961 *
962 * o = Object.new
963 * def o.each
964 * yield
965 * yield 1
966 * yield 1, 2
967 * end
968 * e = o.to_enum
969 * p e.peek_values #=> []
970 * e.next
971 * p e.peek_values #=> [1]
972 * p e.peek_values #=> [1]
973 * e.next
974 * p e.peek_values #=> [1, 2]
975 * e.next
976 * p e.peek_values # raises StopIteration
977 *
978 */
979
980static VALUE
981enumerator_peek_values_m(VALUE obj)
982{
983 return rb_ary_dup(enumerator_peek_values(obj));
984}
985
986/*
987 * call-seq:
988 * e.peek -> object
989 *
990 * Returns the next object in the enumerator, but doesn't move the internal
991 * position forward. If the position is already at the end, StopIteration
992 * is raised.
993 *
994 * See class-level notes about external iterators.
995 *
996 * === Example
997 *
998 * a = [1,2,3]
999 * e = a.to_enum
1000 * p e.next #=> 1
1001 * p e.peek #=> 2
1002 * p e.peek #=> 2
1003 * p e.peek #=> 2
1004 * p e.next #=> 2
1005 * p e.next #=> 3
1006 * p e.peek #raises StopIteration
1007 *
1008 */
1009
1010static VALUE
1011enumerator_peek(VALUE obj)
1012{
1013 VALUE vs = enumerator_peek_values(obj);
1014 return ary2sv(vs, 1);
1015}
1016
1017/*
1018 * call-seq:
1019 * e.feed obj -> nil
1020 *
1021 * Sets the value to be returned by the next yield inside +e+.
1022 *
1023 * If the value is not set, the yield returns nil.
1024 *
1025 * This value is cleared after being yielded.
1026 *
1027 * # Array#map passes the array's elements to "yield" and collects the
1028 * # results of "yield" as an array.
1029 * # Following example shows that "next" returns the passed elements and
1030 * # values passed to "feed" are collected as an array which can be
1031 * # obtained by StopIteration#result.
1032 * e = [1,2,3].map
1033 * p e.next #=> 1
1034 * e.feed "a"
1035 * p e.next #=> 2
1036 * e.feed "b"
1037 * p e.next #=> 3
1038 * e.feed "c"
1039 * begin
1040 * e.next
1041 * rescue StopIteration
1042 * p $!.result #=> ["a", "b", "c"]
1043 * end
1044 *
1045 * o = Object.new
1046 * def o.each
1047 * x = yield # (2) blocks
1048 * p x # (5) => "foo"
1049 * x = yield # (6) blocks
1050 * p x # (8) => nil
1051 * x = yield # (9) blocks
1052 * p x # not reached w/o another e.next
1053 * end
1054 *
1055 * e = o.to_enum
1056 * e.next # (1)
1057 * e.feed "foo" # (3)
1058 * e.next # (4)
1059 * e.next # (7)
1060 * # (10)
1061 */
1062
1063static VALUE
1064enumerator_feed(VALUE obj, VALUE v)
1065{
1066 struct enumerator *e = enumerator_ptr(obj);
1067
1068 rb_check_frozen(obj);
1069
1070 if (!UNDEF_P(e->feedvalue)) {
1071 rb_raise(rb_eTypeError, "feed value already set");
1072 }
1073 RB_OBJ_WRITE(obj, &e->feedvalue, v);
1074
1075 return Qnil;
1076}
1077
1078/*
1079 * call-seq:
1080 * e.rewind -> e
1081 *
1082 * Rewinds the enumeration sequence to the beginning.
1083 *
1084 * If the enclosed object responds to a "rewind" method, it is called.
1085 */
1086
1087static VALUE
1088enumerator_rewind(VALUE obj)
1089{
1090 struct enumerator *e = enumerator_ptr(obj);
1091
1092 rb_check_frozen(obj);
1093
1094 rb_check_funcall(e->obj, id_rewind, 0, 0);
1095
1096 e->fib = 0;
1097 e->dst = Qnil;
1098 e->lookahead = Qundef;
1099 e->feedvalue = Qundef;
1100 e->stop_exc = Qfalse;
1101 return obj;
1102}
1103
1104static struct generator *generator_ptr(VALUE obj);
1105static VALUE append_method(VALUE obj, VALUE str, ID default_method, VALUE default_args);
1106static VALUE append_method_args(VALUE obj, VALUE str, VALUE default_args);
1107
/* Recursive worker behind Enumerator#inspect (invoked via
 * rb_exec_recursive; recur is nonzero on a self-referential cycle).
 * Produces strings like "#<Enumerator: 1..100:each_cons(2)>"; for lazy
 * enumerators, wraps one layer of output per proc-chain entry. */
static VALUE
inspect_enumerator(VALUE obj, VALUE dummy, int recur)
{
    struct enumerator *e;
    VALUE eobj, str, cname;

    TypedData_Get_Struct(obj, struct enumerator, &enumerator_data_type, e);

    cname = rb_obj_class(obj);

    /* Accessed directly (not via enumerator_ptr) so uninitialized
     * instances inspect cleanly instead of raising. */
    if (!e || UNDEF_P(e->obj)) {
        return rb_sprintf("#<%"PRIsVALUE": uninitialized>", rb_class_path(cname));
    }

    if (recur) {
        str = rb_sprintf("#<%"PRIsVALUE": ...>", rb_class_path(cname));
        return str;
    }

    if (e->procs) {
        long i;

        eobj = generator_ptr(e->obj)->obj;
        /* In case procs chained enumerator traversing all proc entries manually */
        if (rb_obj_class(eobj) == cname) {
            str = rb_inspect(eobj);
        }
        else {
            str = rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE">", rb_class_path(cname), eobj);
        }
        /* One nesting level of output per lazy proc entry. */
        for (i = 0; i < RARRAY_LEN(e->procs); i++) {
            str = rb_sprintf("#<%"PRIsVALUE": %"PRIsVALUE, cname, str);
            append_method(RARRAY_AREF(e->procs, i), str, e->meth, e->args);
            rb_str_buf_cat2(str, ">");
        }
        return str;
    }

    eobj = rb_attr_get(obj, id_receiver);
    if (NIL_P(eobj)) {
        eobj = e->obj;
    }

    /* (1..100).each_cons(2) => "#<Enumerator: 1..100:each_cons(2)>" */
    str = rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE, rb_class_path(cname), eobj);
    append_method(obj, str, e->meth, e->args);

    rb_str_buf_cat2(str, ">");

    return str;
}
1159
1160static int
1161key_symbol_p(VALUE key, VALUE val, VALUE arg)
1162{
1163 if (SYMBOL_P(key)) return ST_CONTINUE;
1164 *(int *)arg = FALSE;
1165 return ST_STOP;
1166}
1167
1168static int
1169kwd_append(VALUE key, VALUE val, VALUE str)
1170{
1171 if (!SYMBOL_P(key)) rb_raise(rb_eRuntimeError, "non-symbol key inserted");
1172 rb_str_catf(str, "% "PRIsVALUE": %"PRIsVALUE", ", key, val);
1173 return ST_CONTINUE;
1174}
1175
1176static VALUE
1177append_method(VALUE obj, VALUE str, ID default_method, VALUE default_args)
1178{
1179 VALUE method;
1180
1181 method = rb_attr_get(obj, id_method);
1182 if (method != Qfalse) {
1183 if (!NIL_P(method)) {
1184 Check_Type(method, T_SYMBOL);
1185 method = rb_sym2str(method);
1186 }
1187 else {
1188 method = rb_id2str(default_method);
1189 }
1190 rb_str_buf_cat2(str, ":");
1191 rb_str_buf_append(str, method);
1192 }
1193 return append_method_args(obj, str, default_args);
1194}
1195
/* Appends "(arg1, arg2, key: val)" to the inspect buffer, using the
 * @arguments ivar or default_args.  A trailing all-Symbol-keyed hash is
 * rendered in keyword style.  An eargs of false suppresses output. */
static VALUE
append_method_args(VALUE obj, VALUE str, VALUE default_args)
{
    VALUE eargs;

    eargs = rb_attr_get(obj, id_arguments);
    if (NIL_P(eargs)) {
        eargs = default_args;
    }
    if (eargs != Qfalse) {
        long argc = RARRAY_LEN(eargs);
        const VALUE *argv = RARRAY_CONST_PTR(eargs); /* WB: no new reference */

        if (argc > 0) {
            VALUE kwds = Qnil;

            rb_str_buf_cat2(str, "(");

            /* Peel off a trailing hash for keyword-style display only if
             * every key is a Symbol. */
            if (RB_TYPE_P(argv[argc-1], T_HASH) && !RHASH_EMPTY_P(argv[argc-1])) {
                int all_key = TRUE;
                rb_hash_foreach(argv[argc-1], key_symbol_p, (VALUE)&all_key);
                if (all_key) kwds = argv[--argc];
            }

            while (argc--) {
                VALUE arg = *argv++;

                rb_str_append(str, rb_inspect(arg));
                rb_str_buf_cat2(str, ", ");
            }
            if (!NIL_P(kwds)) {
                rb_hash_foreach(kwds, kwd_append, str);
            }
            rb_str_set_len(str, RSTRING_LEN(str)-2); /* drop the last ", " */
            rb_str_buf_cat2(str, ")");
        }
    }
    RB_GC_GUARD(eargs);

    return str;
}
1237
1238/*
1239 * call-seq:
1240 * e.inspect -> string
1241 *
1242 * Creates a printable version of <i>e</i>.
1243 */
1244
static VALUE
enumerator_inspect(VALUE obj)
{
    /* rb_exec_recursive guards against infinite recursion when an
     * enumerator (directly or indirectly) contains itself. */
    return rb_exec_recursive(inspect_enumerator, obj, 0);
}
1250
1251/*
1252 * call-seq:
1253 * e.size -> int, Float::INFINITY or nil
1254 *
1255 * Returns the size of the enumerator, or +nil+ if it can't be calculated lazily.
1256 *
1257 * (1..100).to_a.permutation(4).size # => 94109400
1258 * loop.size # => Float::INFINITY
1259 * (1..100).drop_while.size # => nil
1260 *
1261 * Note that enumerator size might be inaccurate, and should be rather treated as a hint.
1262 * For example, there is no check that the size provided to ::new is accurate:
1263 *
1264 * e = Enumerator.new(5) { |y| 2.times { y << it} }
1265 * e.size # => 5
1266 * e.to_a.size # => 2
1267 *
1268 * Another example is an enumerator created by ::produce without a +size+ argument.
1269 * Such enumerators return +Infinity+ for size, but this is inaccurate if the passed
1270 * block raises StopIteration:
1271 *
1272 * e = Enumerator.produce(1) { it + 1 }
1273 * e.size # => Infinity
1274 *
1275 * e = Enumerator.produce(1) { it > 3 ? raise(StopIteration) : it + 1 }
1276 * e.size # => Infinity
1277 * e.to_a.size # => 4
1278 */
1279
/*
 * Implementation of Enumerator#size.  Resolution order:
 *   1. lazy proc chain: start from the source object's #size and let
 *      each lazy step's size function transform it;
 *   2. a C-level size function registered at creation time;
 *   3. e->size, called if it responds to #call, otherwise returned
 *      as a plain value (possibly nil).
 */
static VALUE
enumerator_size(VALUE obj)
{
    struct enumerator *e = enumerator_ptr(obj);
    int argc = 0;
    const VALUE *argv = NULL;
    VALUE size;

    if (e->procs) {
        struct generator *g = generator_ptr(e->obj);
        VALUE receiver = rb_check_funcall(g->obj, id_size, 0, 0);
        long i = 0;

        for (i = 0; i < RARRAY_LEN(e->procs); i++) {
            VALUE proc = RARRAY_AREF(e->procs, i);
            struct proc_entry *entry = proc_entry_ptr(proc);
            lazyenum_size_func *size_fn = entry->fn->size;
            if (!size_fn) {
                /* this lazy step (e.g. select) makes the size unknowable */
                return Qnil;
            }
            receiver = (*size_fn)(proc, receiver);
        }
        return receiver;
    }

    if (e->size_fn) {
        return (*e->size_fn)(e->obj, e->args, obj);
    }
    if (e->args) {
        argc = (int)RARRAY_LEN(e->args);
        argv = RARRAY_CONST_PTR(e->args);
    }
    size = rb_check_funcall_kw(e->size, id_call, argc, argv, e->kw_splat);
    if (!UNDEF_P(size)) return size;
    return e->size;
}
1316
1317/*
1318 * Yielder
1319 */
1320static void
1321yielder_mark_and_move(void *p)
1322{
1323 struct yielder *ptr = p;
1324 rb_gc_mark_and_move(&ptr->proc);
1325}
1326
1327static const rb_data_type_t yielder_data_type = {
1328 "yielder",
1329 {
1330 yielder_mark_and_move,
1332 NULL,
1333 yielder_mark_and_move,
1334 },
1335 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
1336};
1337
1338static struct yielder *
1339yielder_ptr(VALUE obj)
1340{
1341 struct yielder *ptr;
1342
1343 TypedData_Get_Struct(obj, struct yielder, &yielder_data_type, ptr);
1344 if (!ptr || UNDEF_P(ptr->proc)) {
1345 rb_raise(rb_eArgError, "uninitialized yielder");
1346 }
1347 return ptr;
1348}
1349
1350/* :nodoc: */
1351static VALUE
1352yielder_allocate(VALUE klass)
1353{
1354 struct yielder *ptr;
1355 VALUE obj;
1356
1357 obj = TypedData_Make_Struct(klass, struct yielder, &yielder_data_type, ptr);
1358 ptr->proc = Qundef;
1359
1360 return obj;
1361}
1362
1363static VALUE
1364yielder_init(VALUE obj, VALUE proc)
1365{
1366 struct yielder *ptr;
1367
1368 TypedData_Get_Struct(obj, struct yielder, &yielder_data_type, ptr);
1369
1370 if (!ptr) {
1371 rb_raise(rb_eArgError, "unallocated yielder");
1372 }
1373
1374 RB_OBJ_WRITE(obj, &ptr->proc, proc);
1375
1376 return obj;
1377}
1378
1379/* :nodoc: */
1380static VALUE
1381yielder_initialize(VALUE obj)
1382{
1383 rb_need_block();
1384
1385 return yielder_init(obj, rb_block_proc());
1386}
1387
1388/* :nodoc: */
1389static VALUE
1390yielder_yield(VALUE obj, VALUE args)
1391{
1392 struct yielder *ptr = yielder_ptr(obj);
1393
1394 return rb_proc_call_kw(ptr->proc, args, RB_PASS_CALLED_KEYWORDS);
1395}
1396
1397/* :nodoc: */
1398static VALUE
1399yielder_yield_push(VALUE obj, VALUE arg)
1400{
1401 struct yielder *ptr = yielder_ptr(obj);
1402
1403 rb_proc_call_with_block(ptr->proc, 1, &arg, Qnil);
1404
1405 return obj;
1406}
1407
1408/*
1409 * Returns a Proc object that takes arguments and yields them.
1410 *
1411 * This method is implemented so that a Yielder object can be directly
1412 * passed to another method as a block argument.
1413 *
1414 * enum = Enumerator.new { |y|
1415 * Dir.glob("*.rb") { |file|
1416 * File.open(file) { |f| f.each_line(&y) }
1417 * }
1418 * }
1419 */
1420static VALUE
1421yielder_to_proc(VALUE obj)
1422{
1423 VALUE method = rb_obj_method(obj, sym_yield);
1424
1425 return rb_funcall(method, idTo_proc, 0);
1426}
1427
/* Proc body used by yielder_new: forwards whatever is pushed into the
 * yielder straight to the block currently being iterated. */
static VALUE
yielder_yield_i(RB_BLOCK_CALL_FUNC_ARGLIST(obj, memo))
{
    return rb_yield_values_kw(argc, argv, RB_PASS_CALLED_KEYWORDS);
}
1433
1434static VALUE
1435yielder_new(void)
1436{
1437 return yielder_init(yielder_allocate(rb_cYielder), rb_proc_new(yielder_yield_i, 0));
1438}
1439
1440/*
1441 * Generator
1442 */
1443static void
1444generator_mark_and_move(void *p)
1445{
1446 struct generator *ptr = p;
1447 rb_gc_mark_and_move(&ptr->proc);
1448 rb_gc_mark_and_move(&ptr->obj);
1449}
1450
1451static const rb_data_type_t generator_data_type = {
1452 "generator",
1453 {
1454 generator_mark_and_move,
1456 NULL,
1457 generator_mark_and_move,
1458 },
1459 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
1460};
1461
1462static struct generator *
1463generator_ptr(VALUE obj)
1464{
1465 struct generator *ptr;
1466
1467 TypedData_Get_Struct(obj, struct generator, &generator_data_type, ptr);
1468 if (!ptr || UNDEF_P(ptr->proc)) {
1469 rb_raise(rb_eArgError, "uninitialized generator");
1470 }
1471 return ptr;
1472}
1473
1474/* :nodoc: */
1475static VALUE
1476generator_allocate(VALUE klass)
1477{
1478 struct generator *ptr;
1479 VALUE obj;
1480
1481 obj = TypedData_Make_Struct(klass, struct generator, &generator_data_type, ptr);
1482 ptr->proc = Qundef;
1483
1484 return obj;
1485}
1486
1487static VALUE
1488generator_init(VALUE obj, VALUE proc)
1489{
1490 struct generator *ptr;
1491
1492 rb_check_frozen(obj);
1493 TypedData_Get_Struct(obj, struct generator, &generator_data_type, ptr);
1494
1495 if (!ptr) {
1496 rb_raise(rb_eArgError, "unallocated generator");
1497 }
1498
1499 RB_OBJ_WRITE(obj, &ptr->proc, proc);
1500
1501 return obj;
1502}
1503
1504/* :nodoc: */
1505static VALUE
1506generator_initialize(int argc, VALUE *argv, VALUE obj)
1507{
1508 VALUE proc;
1509
1510 if (argc == 0) {
1511 rb_need_block();
1512
1513 proc = rb_block_proc();
1514 }
1515 else {
1516 rb_scan_args(argc, argv, "1", &proc);
1517
1518 if (!rb_obj_is_proc(proc))
1519 rb_raise(rb_eTypeError,
1520 "wrong argument type %"PRIsVALUE" (expected Proc)",
1521 rb_obj_class(proc));
1522
1523 if (rb_block_given_p()) {
1524 rb_warn("given block not used");
1525 }
1526 }
1527
1528 return generator_init(obj, proc);
1529}
1530
1531/* :nodoc: */
1532static VALUE
1533generator_init_copy(VALUE obj, VALUE orig)
1534{
1535 struct generator *ptr0, *ptr1;
1536
1537 if (!OBJ_INIT_COPY(obj, orig)) return obj;
1538
1539 ptr0 = generator_ptr(orig);
1540
1541 TypedData_Get_Struct(obj, struct generator, &generator_data_type, ptr1);
1542
1543 if (!ptr1) {
1544 rb_raise(rb_eArgError, "unallocated generator");
1545 }
1546
1547 RB_OBJ_WRITE(obj, &ptr1->proc, ptr0->proc);
1548
1549 return obj;
1550}
1551
1552/* :nodoc: */
1553static VALUE
1554generator_each(int argc, VALUE *argv, VALUE obj)
1555{
1556 struct generator *ptr = generator_ptr(obj);
1557 VALUE args = rb_ary_new2(argc + 1);
1558
1559 rb_ary_push(args, yielder_new());
1560 if (argc > 0) {
1561 rb_ary_cat(args, argv, argc);
1562 }
1563
1564 return rb_proc_call_kw(ptr->proc, args, RB_PASS_CALLED_KEYWORDS);
1565}
1566
1567/* Lazy Enumerator methods */
1568static VALUE
1569enum_size(VALUE self)
1570{
1571 VALUE r = rb_check_funcall(self, id_size, 0, 0);
1572 return UNDEF_P(r) ? Qnil : r;
1573}
1574
/* rb_enumerator_size_func adapter that just asks the receiver's #size. */
static VALUE
lazyenum_size(VALUE self, VALUE args, VALUE eobj)
{
    return enum_size(self);
}
1580
1581#define lazy_receiver_size lazy_map_size
1582
/*
 * Iterator used by Lazy.new's wrapped #each: yields the memo object `m`
 * (the yielder) followed by the element's values to the user block.
 * An undefined result from the yield requests loop termination.
 */
static VALUE
lazy_init_iterator(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
{
    VALUE result;
    if (argc == 1) {
        /* common single-value case: avoid heap allocation */
        VALUE args[2];
        args[0] = m;
        args[1] = val;
        result = rb_yield_values2(2, args);
    }
    else {
        /* multi-value case: build [m, *argv] in a temporary buffer */
        VALUE args;
        int len = rb_long2int((long)argc + 1);
        VALUE *nargv = ALLOCV_N(VALUE, args, len);

        nargv[0] = m;
        if (argc > 0) {
            MEMCPY(nargv + 1, argv, VALUE, argc);
        }
        result = rb_yield_values2(len, nargv);
        ALLOCV_END(args);
    }
    if (UNDEF_P(result)) rb_iter_break();
    return Qnil;
}
1608
/* Generator block for Lazy.new: iterates the wrapped object (argv[0]
 * beyond the yielder) and funnels each element, together with the
 * yielder `val`, through lazy_init_iterator. */
static VALUE
lazy_init_block_i(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
{
    rb_block_call(m, id_each, argc-1, argv+1, lazy_init_iterator, val);
    return Qnil;
}
1615
1616#define memo_value v2
1617#define memo_flags u3.state
1618#define LAZY_MEMO_BREAK 1
1619#define LAZY_MEMO_PACKED 2
1620#define LAZY_MEMO_BREAK_P(memo) ((memo)->memo_flags & LAZY_MEMO_BREAK)
1621#define LAZY_MEMO_PACKED_P(memo) ((memo)->memo_flags & LAZY_MEMO_PACKED)
1622#define LAZY_MEMO_SET_BREAK(memo) ((memo)->memo_flags |= LAZY_MEMO_BREAK)
1623#define LAZY_MEMO_RESET_BREAK(memo) ((memo)->memo_flags &= ~LAZY_MEMO_BREAK)
1624#define LAZY_MEMO_SET_VALUE(memo, value) MEMO_V2_SET(memo, value)
1625#define LAZY_MEMO_SET_PACKED(memo) ((memo)->memo_flags |= LAZY_MEMO_PACKED)
1626#define LAZY_MEMO_RESET_PACKED(memo) ((memo)->memo_flags &= ~LAZY_MEMO_PACKED)
1627
1628#define LAZY_NEED_BLOCK(func) \
1629 if (!rb_block_given_p()) { \
1630 rb_raise(rb_eArgError, "tried to call lazy " #func " without a block"); \
1631 }
1632
1633static VALUE lazy_yielder_result(struct MEMO *result, VALUE yielder, VALUE procs_array, VALUE memos, long i);
1634
/* Entry point of the lazy pipeline: packs the yielded values into a
 * MEMO and sends it through the whole proc chain (index 0). */
static VALUE
lazy_init_yielder(RB_BLOCK_CALL_FUNC_ARGLIST(_, m))
{
    VALUE yielder = RARRAY_AREF(m, 0);
    VALUE procs_array = RARRAY_AREF(m, 1);
    VALUE memos = rb_attr_get(yielder, id_memo);
    struct MEMO *result;

    /* PACKED flag records that multiple values were folded into one array */
    result = rb_imemo_memo_new(m, rb_enum_values_pack(argc, argv),
                               argc > 1 ? LAZY_MEMO_PACKED : 0);
    return lazy_yielder_result(result, yielder, procs_array, memos, 0);
}
1647
/* Re-enters the lazy pipeline at `memo_index` with a new value — used
 * by steps like flat_map that emit several values per input. */
static VALUE
lazy_yielder_yield(struct MEMO *result, long memo_index, int argc, const VALUE *argv)
{
    VALUE m = result->v1;
    VALUE yielder = RARRAY_AREF(m, 0);
    VALUE procs_array = RARRAY_AREF(m, 1);
    VALUE memos = rb_attr_get(yielder, id_memo);
    LAZY_MEMO_SET_VALUE(result, rb_enum_values_pack(argc, argv));
    if (argc > 1)
        LAZY_MEMO_SET_PACKED(result);
    else
        LAZY_MEMO_RESET_PACKED(result);
    return lazy_yielder_result(result, yielder, procs_array, memos, memo_index);
}
1662
/*
 * Pushes `result` through the lazy proc entries starting at index i.
 * When every entry keeps the value, it is finally emitted to the
 * yielder via #<<.  An entry may set the BREAK flag to stop the whole
 * iteration (rb_iter_break).
 */
static VALUE
lazy_yielder_result(struct MEMO *result, VALUE yielder, VALUE procs_array, VALUE memos, long i)
{
    int cont = 1;

    for (; i < RARRAY_LEN(procs_array); i++) {
        VALUE proc = RARRAY_AREF(procs_array, i);
        struct proc_entry *entry = proc_entry_ptr(proc);
        if (!(*entry->fn->proc)(proc, result, memos, i)) {
            /* the entry filtered the value out (or emitted it itself) */
            cont = 0;
            break;
        }
    }

    if (cont) {
        rb_funcall2(yielder, idLTLT, 1, &(result->memo_value));
    }
    if (LAZY_MEMO_BREAK_P(result)) {
        rb_iter_break();
    }
    return result->memo_value;
}
1685
/* Generator block backing a chained lazy enumerator: prepares per-step
 * memo storage on the yielder, then iterates the source through
 * lazy_init_yielder. */
static VALUE
lazy_init_block(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
{
    VALUE procs = RARRAY_AREF(m, 1);

    rb_ivar_set(val, id_memo, rb_ary_new2(RARRAY_LEN(procs)));
    rb_block_call(RARRAY_AREF(m, 0), id_each, 0, 0,
                  lazy_init_yielder, rb_ary_new3(2, val, procs));
    return Qnil;
}
1696
1697static VALUE
1698lazy_generator_init(VALUE enumerator, VALUE procs)
1699{
1701 VALUE obj;
1702 struct generator *gen_ptr;
1703 struct enumerator *e = enumerator_ptr(enumerator);
1704
1705 if (RARRAY_LEN(procs) > 0) {
1706 struct generator *old_gen_ptr = generator_ptr(e->obj);
1707 obj = old_gen_ptr->obj;
1708 }
1709 else {
1710 obj = enumerator;
1711 }
1712
1713 generator = generator_allocate(rb_cGenerator);
1714
1715 rb_block_call(generator, id_initialize, 0, 0,
1716 lazy_init_block, rb_ary_new3(2, obj, procs));
1717
1718 gen_ptr = generator_ptr(generator);
1719 RB_OBJ_WRITE(generator, &gen_ptr->obj, obj);
1720
1721 return generator;
1722}
1723
1724static int
1725lazy_precheck(VALUE procs)
1726{
1727 if (RTEST(procs)) {
1728 long num_procs = RARRAY_LEN(procs), i = num_procs;
1729 while (i-- > 0) {
1730 VALUE proc = RARRAY_AREF(procs, i);
1731 struct proc_entry *entry = proc_entry_ptr(proc);
1732 lazyenum_precheck_func *precheck = entry->fn->precheck;
1733 if (precheck && !precheck(proc)) return FALSE;
1734 }
1735 }
1736
1737 return TRUE;
1738}
1739
1740/*
1741 * Document-class: Enumerator::Lazy
1742 *
1743 * Enumerator::Lazy is a special type of Enumerator, that allows constructing
1744 * chains of operations without evaluating them immediately, and evaluating
1745 * values on as-needed basis. In order to do so it redefines most of Enumerable
1746 * methods so that they just construct another lazy enumerator.
1747 *
1748 * Enumerator::Lazy can be constructed from any Enumerable with the
1749 * Enumerable#lazy method.
1750 *
1751 * lazy = (1..Float::INFINITY).lazy.select(&:odd?).drop(10).take_while { |i| i < 30 }
1752 * # => #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: 1..Infinity>:select>:drop(10)>:take_while>
1753 *
1754 * The real enumeration is performed when any non-redefined Enumerable method
1755 * is called, like Enumerable#first or Enumerable#to_a (the latter is aliased
1756 * as #force for more semantic code):
1757 *
1758 * lazy.first(2)
1759 * #=> [21, 23]
1760 *
1761 * lazy.force
1762 * #=> [21, 23, 25, 27, 29]
1763 *
1764 * Note that most Enumerable methods that could be called with or without
1765 * a block, on Enumerator::Lazy will always require a block:
1766 *
1767 * [1, 2, 3].map #=> #<Enumerator: [1, 2, 3]:map>
1768 * [1, 2, 3].lazy.map # ArgumentError: tried to call lazy map without a block
1769 *
1770 * This class allows idiomatic calculations on long or infinite sequences, as well
1771 * as chaining of calculations without constructing intermediate arrays.
1772 *
1773 * Example for working with a slowly calculated sequence:
1774 *
1775 * require 'open-uri'
1776 *
1777 * # This will fetch all URLs before selecting
1778 * # necessary data
1779 * URLS.map { |u| JSON.parse(URI.open(u).read) }
1780 * .select { |data| data.key?('stats') }
1781 * .first(5)
1782 *
1783 * # This will fetch URLs one-by-one, only till
1784 * # there is enough data to satisfy the condition
1785 * URLS.lazy.map { |u| JSON.parse(URI.open(u).read) }
1786 * .select { |data| data.key?('stats') }
1787 * .first(5)
1788 *
1789 * Ending a chain with ".eager" generates a non-lazy enumerator, which
1790 * is suitable for returning or passing to another method that expects
1791 * a normal enumerator.
1792 *
1793 * def active_items
1794 * groups
1795 * .lazy
1796 * .flat_map(&:items)
1797 * .reject(&:disabled)
1798 * .eager
1799 * end
1800 *
1801 * # This works lazily; if a checked item is found, it stops
1802 * # iteration and does not look into remaining groups.
1803 * first_checked = active_items.find(&:checked)
1804 *
1805 * # This returns an array of items like a normal enumerator does.
1806 * all_checked = active_items.select(&:checked)
1807 *
1808 */
1809
1810/*
1811 * call-seq:
1812 * Lazy.new(obj, size=nil) { |yielder, *values| block }
1813 *
1814 * Creates a new Lazy enumerator. When the enumerator is actually enumerated
1815 * (e.g. by calling #force), +obj+ will be enumerated and each value passed
1816 * to the given block. The block can yield values back using +yielder+.
1817 * For example, to create a "filter+map" enumerator:
1818 *
1819 * def filter_map(sequence)
1820 * Lazy.new(sequence) do |yielder, *values|
1821 * result = yield *values
1822 * yielder << result if result
1823 * end
1824 * end
1825 *
1826 * filter_map(1..Float::INFINITY) {|i| i*i if i.even?}.first(5)
1827 * #=> [4, 16, 36, 64, 100]
1828 */
1829static VALUE
1830lazy_initialize(int argc, VALUE *argv, VALUE self)
1831{
1832 VALUE obj, size = Qnil;
1834
1835 rb_check_arity(argc, 1, 2);
1836 LAZY_NEED_BLOCK(new);
1837 obj = argv[0];
1838 if (argc > 1) {
1839 size = argv[1];
1840 }
1841 generator = generator_allocate(rb_cGenerator);
1842 rb_block_call(generator, id_initialize, 0, 0, lazy_init_block_i, obj);
1843 enumerator_init(self, generator, sym_each, 0, 0, 0, size, 0);
1844 rb_ivar_set(self, id_receiver, obj);
1845
1846 return self;
1847}
1848
1849#if 0 /* for RDoc */
1850/*
1851 * call-seq:
1852 * lazy.to_a -> array
1853 * lazy.force -> array
1854 *
1855 * Expands +lazy+ enumerator to an array.
1856 * See Enumerable#to_a.
1857 */
1858static VALUE
1859lazy_to_a(VALUE self)
1860{
1861}
1862#endif
1863
1864static void
1865lazy_set_args(VALUE lazy, VALUE args)
1866{
1867 ID id = rb_frame_this_func();
1868 rb_ivar_set(lazy, id_method, ID2SYM(id));
1869 if (NIL_P(args)) {
1870 /* Qfalse indicates that the arguments are empty */
1871 rb_ivar_set(lazy, id_arguments, Qfalse);
1872 }
1873 else {
1874 rb_ivar_set(lazy, id_arguments, args);
1875 }
1876}
1877
1878#if 0
1879static VALUE
1880lazy_set_method(VALUE lazy, VALUE args, rb_enumerator_size_func *size_fn)
1881{
1882 struct enumerator *e = enumerator_ptr(lazy);
1883 lazy_set_args(lazy, args);
1884 e->size_fn = size_fn;
1885 return lazy;
1886}
1887#endif
1888
1889static VALUE
1890lazy_add_method(VALUE obj, int argc, VALUE *argv, VALUE args, VALUE memo,
1891 const lazyenum_funcs *fn)
1892{
1893 struct enumerator *new_e;
1894 VALUE new_obj;
1895 VALUE new_generator;
1896 VALUE new_procs;
1897 struct enumerator *e = enumerator_ptr(obj);
1898 struct proc_entry *entry;
1900 &proc_entry_data_type, entry);
1901 if (rb_block_given_p()) {
1902 RB_OBJ_WRITE(entry_obj, &entry->proc, rb_block_proc());
1903 }
1904 entry->fn = fn;
1905 RB_OBJ_WRITE(entry_obj, &entry->memo, args);
1906
1907 lazy_set_args(entry_obj, memo);
1908
1909 new_procs = RTEST(e->procs) ? rb_ary_dup(e->procs) : rb_ary_new();
1910 new_generator = lazy_generator_init(obj, new_procs);
1911 rb_ary_push(new_procs, entry_obj);
1912
1913 new_obj = enumerator_init_copy(enumerator_allocate(rb_cLazy), obj);
1914 new_e = RTYPEDDATA_GET_DATA(new_obj);
1915 RB_OBJ_WRITE(new_obj, &new_e->obj, new_generator);
1916 RB_OBJ_WRITE(new_obj, &new_e->procs, new_procs);
1917
1918 if (argc > 0) {
1919 new_e->meth = rb_to_id(*argv++);
1920 --argc;
1921 }
1922 else {
1923 new_e->meth = id_each;
1924 }
1925
1926 RB_OBJ_WRITE(new_obj, &new_e->args, rb_ary_new4(argc, argv));
1927
1928 return new_obj;
1929}
1930
1931/*
1932 * call-seq:
1933 * e.lazy -> lazy_enumerator
1934 *
1935 * Returns an Enumerator::Lazy, which redefines most Enumerable
1936 * methods to postpone enumeration and enumerate values only on an
1937 * as-needed basis.
1938 *
1939 * === Example
1940 *
1941 * The following program finds pythagorean triples:
1942 *
1943 * def pythagorean_triples
1944 * (1..Float::INFINITY).lazy.flat_map {|z|
1945 * (1..z).flat_map {|x|
1946 * (x..z).select {|y|
1947 * x**2 + y**2 == z**2
1948 * }.map {|y|
1949 * [x, y, z]
1950 * }
1951 * }
1952 * }
1953 * end
1954 * # show first ten pythagorean triples
1955 * p pythagorean_triples.take(10).force # take is lazy, so force is needed
1956 * p pythagorean_triples.first(10) # first is eager
1957 * # show pythagorean triples less than 100
1958 * p pythagorean_triples.take_while { |*, z| z < 100 }.force
1959 */
1960static VALUE
1961enumerable_lazy(VALUE obj)
1962{
1963 VALUE result = lazy_to_enum_i(obj, sym_each, 0, 0, lazyenum_size, rb_keyword_given_p());
1964 /* Qfalse indicates that the Enumerator::Lazy has no method name */
1965 rb_ivar_set(result, id_method, Qfalse);
1966 return result;
1967}
1968
/* Builds an Enumerator::Lazy over obj.meth(*argv); size is either
 * computed by size_fn or left unknown (Qnil). */
static VALUE
lazy_to_enum_i(VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, int kw_splat)
{
    return enumerator_init(enumerator_allocate(rb_cLazy),
                           obj, meth, argc, argv, size_fn, Qnil, kw_splat);
}
1975
1976/*
1977 * call-seq:
1978 * lzy.to_enum(method = :each, *args) -> lazy_enum
1979 * lzy.enum_for(method = :each, *args) -> lazy_enum
1980 * lzy.to_enum(method = :each, *args) {|*args| block } -> lazy_enum
1981 * lzy.enum_for(method = :each, *args) {|*args| block } -> lazy_enum
1982 *
1983 * Similar to Object#to_enum, except it returns a lazy enumerator.
1984 * This makes it easy to define Enumerable methods that will
1985 * naturally remain lazy if called from a lazy enumerator.
1986 *
1987 * For example, continuing from the example in Object#to_enum:
1988 *
1989 * # See Object#to_enum for the definition of repeat
1990 * r = 1..Float::INFINITY
1991 * r.repeat(2).first(5) # => [1, 1, 2, 2, 3]
1992 * r.repeat(2).class # => Enumerator
1993 * r.repeat(2).map{|n| n ** 2}.first(5) # => endless loop!
1994 * # works naturally on lazy enumerator:
1995 * r.lazy.repeat(2).class # => Enumerator::Lazy
1996 * r.lazy.repeat(2).map{|n| n ** 2}.first(5) # => [1, 1, 4, 4, 9]
1997 */
1998
1999static VALUE
2000lazy_to_enum(int argc, VALUE *argv, VALUE self)
2001{
2002 VALUE lazy, meth = sym_each, super_meth;
2003
2004 if (argc > 0) {
2005 --argc;
2006 meth = *argv++;
2007 }
2008 if (RTEST((super_meth = rb_hash_aref(lazy_use_super_method, meth)))) {
2009 meth = super_meth;
2010 }
2011 lazy = lazy_to_enum_i(self, meth, argc, argv, 0, rb_keyword_given_p());
2012 if (rb_block_given_p()) {
2013 RB_OBJ_WRITE(lazy, &enumerator_ptr(lazy)->size, rb_block_proc());
2014 }
2015 return lazy;
2016}
2017
/* Size function for #eager: same as the underlying lazy enumerator's. */
static VALUE
lazy_eager_size(VALUE self, VALUE args, VALUE eobj)
{
    return enum_size(self);
}
2023
2024/*
2025 * call-seq:
2026 * lzy.eager -> enum
2027 *
2028 * Returns a non-lazy Enumerator converted from the lazy enumerator.
2029 */
2030
2031static VALUE
2032lazy_eager(VALUE self)
2033{
2034 return enumerator_init(enumerator_allocate(rb_cEnumerator),
2035 self, sym_each, 0, 0, lazy_eager_size, Qnil, 0);
2036}
2037
/* Calls the entry's proc with the current value as a single argument
 * (packed array stays packed). */
static VALUE
lazyenum_yield(VALUE proc_entry, struct MEMO *result)
{
    struct proc_entry *entry = proc_entry_ptr(proc_entry);
    return rb_proc_call_with_block(entry->proc, 1, &result->memo_value, Qnil);
}
2044
/* Calls the entry's proc with the current value; when the value is a
 * packed multi-value array, it is splatted back into separate
 * arguments first. */
static VALUE
lazyenum_yield_values(VALUE proc_entry, struct MEMO *result)
{
    struct proc_entry *entry = proc_entry_ptr(proc_entry);
    int argc = 1;
    const VALUE *argv = &result->memo_value;
    if (LAZY_MEMO_PACKED_P(result)) {
        const VALUE args = *argv;
        argc = RARRAY_LENINT(args);
        argv = RARRAY_CONST_PTR(args);
    }
    return rb_proc_call_with_block(entry->proc, argc, argv, Qnil);
}
2058
2059static struct MEMO *
2060lazy_map_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2061{
2062 VALUE value = lazyenum_yield_values(proc_entry, result);
2063 LAZY_MEMO_SET_VALUE(result, value);
2064 LAZY_MEMO_RESET_PACKED(result);
2065 return result;
2066}
2067
/* map is 1:1, so the upstream size passes through unchanged. */
static VALUE
lazy_map_size(VALUE entry, VALUE receiver)
{
    return receiver;
}
2073
/* Lazy step table for #map: element handler and size calculator. */
static const lazyenum_funcs lazy_map_funcs = {
    lazy_map_proc, lazy_map_size,
};
2077
2078/*
2079 * call-seq:
2080 * lazy.collect { |obj| block } -> lazy_enumerator
2081 * lazy.map { |obj| block } -> lazy_enumerator
2082 *
2083 * Like Enumerable#map, but chains operation to be lazy-evaluated.
2084 *
2085 * (1..Float::INFINITY).lazy.map {|i| i**2 }
2086 * #=> #<Enumerator::Lazy: #<Enumerator::Lazy: 1..Infinity>:map>
2087 * (1..Float::INFINITY).lazy.map {|i| i**2 }.first(3)
2088 * #=> [1, 4, 9]
2089 */
2090
static VALUE
lazy_map(VALUE obj)
{
    LAZY_NEED_BLOCK(map);
    return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_map_funcs);
}
2097
/* Carries the current MEMO and the next proc-chain index into
 * lazy_flat_map_i while a sub-enumerator is iterated.
 * Fix: the struct header line (`struct flat_map_i_arg {`) had been
 * lost, leaving dangling member declarations. */
struct flat_map_i_arg {
    struct MEMO *result;
    long index;
};
2102
/* Block used when flat_map's result is itself enumerable: feeds each of
 * its elements back into the lazy pipeline at the next step. */
static VALUE
lazy_flat_map_i(RB_BLOCK_CALL_FUNC_ARGLIST(i, y))
{
    struct flat_map_i_arg *arg = (struct flat_map_i_arg *)y;

    return lazy_yielder_yield(arg->result, arg->index, argc, argv);
}
2110
/*
 * Per-element handler for lazy #flat_map.  The block's result is
 * decomposed when it is an Array, converts via to_ary, or looks like a
 * lazy enumerator (responds to both #force and #each); otherwise it is
 * passed through unchanged.
 */
static struct MEMO *
lazy_flat_map_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    VALUE value = lazyenum_yield_values(proc_entry, result);
    VALUE ary = 0;
    const long proc_index = memo_index + 1;
    int break_p = LAZY_MEMO_BREAK_P(result);

    if (RB_TYPE_P(value, T_ARRAY)) {
        ary = value;
    }
    else if (rb_respond_to(value, id_force) && rb_respond_to(value, id_each)) {
        /* lazy-enumerator-like: stream its elements down the chain */
        struct flat_map_i_arg arg = {.result = result, .index = proc_index};
        LAZY_MEMO_RESET_BREAK(result);
        rb_block_call(value, id_each, 0, 0, lazy_flat_map_i, (VALUE)&arg);
        if (break_p) LAZY_MEMO_SET_BREAK(result);
        return 0;
    }

    if (ary || !NIL_P(ary = rb_check_array_type(value))) {
        long i;
        LAZY_MEMO_RESET_BREAK(result);
        /* yield all but the last element here; the last one falls
         * through below so the caller emits it */
        for (i = 0; i + 1 < RARRAY_LEN(ary); i++) {
            const VALUE argv = RARRAY_AREF(ary, i);
            lazy_yielder_yield(result, proc_index, 1, &argv);
        }
        if (break_p) LAZY_MEMO_SET_BREAK(result);
        if (i >= RARRAY_LEN(ary)) return 0; /* empty array: nothing left */
        value = RARRAY_AREF(ary, i);
    }
    LAZY_MEMO_SET_VALUE(result, value);
    LAZY_MEMO_RESET_PACKED(result);
    return result;
}
2145
/* Lazy step table for #flat_map; size is unknowable (NULL). */
static const lazyenum_funcs lazy_flat_map_funcs = {
    lazy_flat_map_proc, 0,
};
2149
2150/*
2151 * call-seq:
2152 * lazy.collect_concat { |obj| block } -> a_lazy_enumerator
2153 * lazy.flat_map { |obj| block } -> a_lazy_enumerator
2154 *
2155 * Returns a new lazy enumerator with the concatenated results of running
2156 * +block+ once for every element in the lazy enumerator.
2157 *
2158 * ["foo", "bar"].lazy.flat_map {|i| i.each_char.lazy}.force
2159 * #=> ["f", "o", "o", "b", "a", "r"]
2160 *
2161 * A value +x+ returned by +block+ is decomposed if either of
2162 * the following conditions is true:
2163 *
2164 * * +x+ responds to both each and force, which means that
2165 * +x+ is a lazy enumerator.
2166 * * +x+ is an array or responds to to_ary.
2167 *
2168 * Otherwise, +x+ is contained as-is in the return value.
2169 *
2170 * [{a:1}, {b:2}].lazy.flat_map {|i| i}.force
2171 * #=> [{:a=>1}, {:b=>2}]
2172 */
static VALUE
lazy_flat_map(VALUE obj)
{
    LAZY_NEED_BLOCK(flat_map);
    return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_flat_map_funcs);
}
2179
2180static struct MEMO *
2181lazy_select_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2182{
2183 VALUE chain = lazyenum_yield(proc_entry, result);
2184 if (!RTEST(chain)) return 0;
2185 return result;
2186}
2187
/* Lazy step table for #select; size is unknowable (NULL). */
static const lazyenum_funcs lazy_select_funcs = {
    lazy_select_proc, 0,
};
2191
2192/*
2193 * call-seq:
2194 * lazy.find_all { |obj| block } -> lazy_enumerator
2195 * lazy.select { |obj| block } -> lazy_enumerator
2196 * lazy.filter { |obj| block } -> lazy_enumerator
2197 *
2198 * Like Enumerable#select, but chains operation to be lazy-evaluated.
2199 */
static VALUE
lazy_select(VALUE obj)
{
    LAZY_NEED_BLOCK(select);
    return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_select_funcs);
}
2206
2207static struct MEMO *
2208lazy_filter_map_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2209{
2210 VALUE value = lazyenum_yield_values(proc_entry, result);
2211 if (!RTEST(value)) return 0;
2212 LAZY_MEMO_SET_VALUE(result, value);
2213 LAZY_MEMO_RESET_PACKED(result);
2214 return result;
2215}
2216
/* Lazy step table for #filter_map; size is unknowable (NULL). */
static const lazyenum_funcs lazy_filter_map_funcs = {
    lazy_filter_map_proc, 0,
};
2220
2221/*
2222 * call-seq:
2223 * lazy.filter_map { |obj| block } -> lazy_enumerator
2224 *
2225 * Like Enumerable#filter_map, but chains operation to be lazy-evaluated.
2226 *
2227 * (1..).lazy.filter_map { |i| i * 2 if i.even? }.first(5)
2228 * #=> [4, 8, 12, 16, 20]
2229 */
2230
static VALUE
lazy_filter_map(VALUE obj)
{
    LAZY_NEED_BLOCK(filter_map);
    return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_filter_map_funcs);
}
2237
2238static struct MEMO *
2239lazy_reject_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2240{
2241 VALUE chain = lazyenum_yield(proc_entry, result);
2242 if (RTEST(chain)) return 0;
2243 return result;
2244}
2245
/* Lazy step table for #reject; size is unknowable (NULL). */
static const lazyenum_funcs lazy_reject_funcs = {
    lazy_reject_proc, 0,
};
2249
2250/*
2251 * call-seq:
2252 * lazy.reject { |obj| block } -> lazy_enumerator
2253 *
2254 * Like Enumerable#reject, but chains operation to be lazy-evaluated.
2255 */
2256
static VALUE
lazy_reject(VALUE obj)
{
    LAZY_NEED_BLOCK(reject);
    return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_reject_funcs);
}
2263
2264static struct MEMO *
2265lazy_grep_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2266{
2267 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2268 VALUE chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
2269 if (!RTEST(chain)) return 0;
2270 return result;
2271}
2272
2273static struct MEMO *
2274lazy_grep_iter_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2275{
2276 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2277 VALUE value, chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
2278
2279 if (!RTEST(chain)) return 0;
2280 value = rb_proc_call_with_block(entry->proc, 1, &(result->memo_value), Qnil);
2281 LAZY_MEMO_SET_VALUE(result, value);
2282 LAZY_MEMO_RESET_PACKED(result);
2283
2284 return result;
2285}
2286
/* Lazy step table for #grep with a block; size is unknowable (NULL). */
static const lazyenum_funcs lazy_grep_iter_funcs = {
    lazy_grep_iter_proc, 0,
};
2290
/* Lazy step table for blockless #grep; size is unknowable (NULL). */
static const lazyenum_funcs lazy_grep_funcs = {
    lazy_grep_proc, 0,
};
2294
2295/*
2296 * call-seq:
2297 * lazy.grep(pattern) -> lazy_enumerator
2298 * lazy.grep(pattern) { |obj| block } -> lazy_enumerator
2299 *
2300 * Like Enumerable#grep, but chains operation to be lazy-evaluated.
2301 */
2302
static VALUE
lazy_grep(VALUE obj, VALUE pattern)
{
    /* the with-block variant also maps matching elements */
    const lazyenum_funcs *const funcs = rb_block_given_p() ?
        &lazy_grep_iter_funcs : &lazy_grep_funcs;
    return lazy_add_method(obj, 0, 0, pattern, rb_ary_new3(1, pattern), funcs);
}
2310
2311static struct MEMO *
2312lazy_grep_v_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2313{
2314 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2315 VALUE chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
2316 if (RTEST(chain)) return 0;
2317 return result;
2318}
2319
2320static struct MEMO *
2321lazy_grep_v_iter_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2322{
2323 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2324 VALUE value, chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
2325
2326 if (RTEST(chain)) return 0;
2327 value = rb_proc_call_with_block(entry->proc, 1, &(result->memo_value), Qnil);
2328 LAZY_MEMO_SET_VALUE(result, value);
2329 LAZY_MEMO_RESET_PACKED(result);
2330
2331 return result;
2332}
2333
/* Dispatch table for lazy.grep_v with a block: reject by #===, then map. */
static const lazyenum_funcs lazy_grep_v_iter_funcs = {
    lazy_grep_v_iter_proc, 0,
};

/* Dispatch table for lazy.grep_v without a block: reject by #=== only. */
static const lazyenum_funcs lazy_grep_v_funcs = {
    lazy_grep_v_proc, 0,
};
2341
2342/*
2343 * call-seq:
2344 * lazy.grep_v(pattern) -> lazy_enumerator
2345 * lazy.grep_v(pattern) { |obj| block } -> lazy_enumerator
2346 *
2347 * Like Enumerable#grep_v, but chains operation to be lazy-evaluated.
2348 */
2349
2350static VALUE
2351lazy_grep_v(VALUE obj, VALUE pattern)
2352{
2353 const lazyenum_funcs *const funcs = rb_block_given_p() ?
2354 &lazy_grep_v_iter_funcs : &lazy_grep_v_funcs;
2355 return lazy_add_method(obj, 0, 0, pattern, rb_ary_new3(1, pattern), funcs);
2356}
2357
/* rb_rescue2 body: pull the next value from an external enumerator. */
static VALUE
call_next(VALUE obj)
{
    return rb_funcall(obj, id_next, 0);
}

/* rb_rescue2 handler: an exhausted enumerator contributes nil. */
static VALUE
next_stopped(VALUE obj, VALUE _)
{
    return Qnil;
}
2369
2370static struct MEMO *
2371lazy_zip_arrays_func(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2372{
2373 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2374 VALUE ary, arrays = entry->memo;
2375 VALUE memo = rb_ary_entry(memos, memo_index);
2376 long i, count = NIL_P(memo) ? 0 : NUM2LONG(memo);
2377
2378 ary = rb_ary_new2(RARRAY_LEN(arrays) + 1);
2379 rb_ary_push(ary, result->memo_value);
2380 for (i = 0; i < RARRAY_LEN(arrays); i++) {
2381 rb_ary_push(ary, rb_ary_entry(RARRAY_AREF(arrays, i), count));
2382 }
2383 LAZY_MEMO_SET_VALUE(result, ary);
2384 rb_ary_store(memos, memo_index, LONG2NUM(++count));
2385 return result;
2386}
2387
2388static struct MEMO *
2389lazy_zip_func(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2390{
2391 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2392 VALUE arg = rb_ary_entry(memos, memo_index);
2393 VALUE zip_args = entry->memo;
2394 VALUE ary, v;
2395 long i;
2396
2397 if (NIL_P(arg)) {
2398 arg = rb_ary_new2(RARRAY_LEN(zip_args));
2399 for (i = 0; i < RARRAY_LEN(zip_args); i++) {
2400 rb_ary_push(arg, rb_funcall(RARRAY_AREF(zip_args, i), id_to_enum, 0));
2401 }
2402 rb_ary_store(memos, memo_index, arg);
2403 }
2404
2405 ary = rb_ary_new2(RARRAY_LEN(arg) + 1);
2406 rb_ary_push(ary, result->memo_value);
2407 for (i = 0; i < RARRAY_LEN(arg); i++) {
2408 v = rb_rescue2(call_next, RARRAY_AREF(arg, i), next_stopped, 0,
2410 rb_ary_push(ary, v);
2411 }
2412 LAZY_MEMO_SET_VALUE(result, ary);
2413 return result;
2414}
2415
/* [0]: general enumerable path; [1]: all-Array fast path.
 * Both report the receiver's size as the chain size. */
static const lazyenum_funcs lazy_zip_funcs[] = {
    {lazy_zip_func, lazy_receiver_size,},
    {lazy_zip_arrays_func, lazy_receiver_size,},
};
2420
2421/*
2422 * call-seq:
2423 * lazy.zip(arg, ...) -> lazy_enumerator
2424 * lazy.zip(arg, ...) { |arr| block } -> nil
2425 *
2426 * Like Enumerable#zip, but chains operation to be lazy-evaluated.
2427 * However, if a block is given to zip, values are enumerated immediately.
2428 */
/* Entry point for Enumerator::Lazy#zip.  Tries to convert every
 * argument to an Array (fast path, lazy_zip_funcs[1]); on the first
 * non-array argument it falls back to the general enumerable path
 * (lazy_zip_funcs[0]) after verifying the remaining arguments respond
 * to #each.  With a block, zip is eager: defer to super. */
static VALUE
lazy_zip(int argc, VALUE *argv, VALUE obj)
{
    VALUE ary, v;
    long i;
    const lazyenum_funcs *funcs = &lazy_zip_funcs[1];

    if (rb_block_given_p()) {
        return rb_call_super(argc, argv);
    }

    ary = rb_ary_new2(argc);
    for (i = 0; i < argc; i++) {
        v = rb_check_array_type(argv[i]);
        if (NIL_P(v)) {
            /* not array-convertible: validate the rest, then switch to
             * the general path with the raw arguments */
            for (; i < argc; i++) {
                if (!rb_respond_to(argv[i], id_each)) {
                    rb_raise(rb_eTypeError, "wrong argument type %"PRIsVALUE" (must respond to :each)",
                             rb_obj_class(argv[i]));
                }
            }
            ary = rb_ary_new4(argc, argv);
            funcs = &lazy_zip_funcs[0];
            break;
        }
        rb_ary_push(ary, v);
    }

    return lazy_add_method(obj, 0, 0, ary, ary, funcs);
}
2459
2460static struct MEMO *
2461lazy_take_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2462{
2463 long remain;
2464 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2465 VALUE memo = rb_ary_entry(memos, memo_index);
2466
2467 if (NIL_P(memo)) {
2468 memo = entry->memo;
2469 }
2470
2471 remain = NUM2LONG(memo);
2472 if (--remain == 0) LAZY_MEMO_SET_BREAK(result);
2473 rb_ary_store(memos, memo_index, LONG2NUM(remain));
2474 return result;
2475}
2476
/* Size callback for lazy.take: the chain size is the smaller of the
 * receiver's size and the take count (nil/unknown passes through). */
static VALUE
lazy_take_size(VALUE entry, VALUE receiver)
{
    long len = NUM2LONG(RARRAY_AREF(rb_ivar_get(entry, id_arguments), 0));
    if (NIL_P(receiver) || (FIXNUM_P(receiver) && FIX2LONG(receiver) < len))
        return receiver;
    return LONG2NUM(len);
}
2485
/* Precheck for lazy.take: take(0) yields nothing, so the chain can
 * skip enumerating the source entirely. */
static int
lazy_take_precheck(VALUE proc_entry)
{
    struct proc_entry *entry = proc_entry_ptr(proc_entry);
    return entry->memo != INT2FIX(0);
}

static const lazyenum_funcs lazy_take_funcs = {
    lazy_take_proc, lazy_take_size, lazy_take_precheck,
};
2496
2497/*
2498 * call-seq:
2499 * lazy.take(n) -> lazy_enumerator
2500 *
2501 * Like Enumerable#take, but chains operation to be lazy-evaluated.
2502 */
2503
2504static VALUE
2505lazy_take(VALUE obj, VALUE n)
2506{
2507 long len = NUM2LONG(n);
2508
2509 if (len < 0) {
2510 rb_raise(rb_eArgError, "attempt to take negative size");
2511 }
2512
2513 n = LONG2NUM(len); /* no more conversion */
2514
2515 return lazy_add_method(obj, 0, 0, n, rb_ary_new3(1, n), &lazy_take_funcs);
2516}
2517
2518static struct MEMO *
2519lazy_take_while_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2520{
2521 VALUE take = lazyenum_yield_values(proc_entry, result);
2522 if (!RTEST(take)) {
2523 LAZY_MEMO_SET_BREAK(result);
2524 return 0;
2525 }
2526 return result;
2527}
2528
/* Dispatch table for lazy.take_while (size is unknown in advance). */
static const lazyenum_funcs lazy_take_while_funcs = {
    lazy_take_while_proc, 0,
};
2532
2533/*
2534 * call-seq:
2535 * lazy.take_while { |obj| block } -> lazy_enumerator
2536 *
2537 * Like Enumerable#take_while, but chains operation to be lazy-evaluated.
2538 */
2539
/* Entry point for Enumerator::Lazy#take_while: a block is mandatory. */
static VALUE
lazy_take_while(VALUE obj)
{
    LAZY_NEED_BLOCK(take_while);
    return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_take_while_funcs);
}
2546
2547static VALUE
2548lazy_drop_size(VALUE proc_entry, VALUE receiver)
2549{
2550 long len = NUM2LONG(RARRAY_AREF(rb_ivar_get(proc_entry, id_arguments), 0));
2551 if (NIL_P(receiver))
2552 return receiver;
2553 if (FIXNUM_P(receiver)) {
2554 len = FIX2LONG(receiver) - len;
2555 return LONG2FIX(len < 0 ? 0 : len);
2556 }
2557 return rb_funcall(receiver, '-', 1, LONG2NUM(len));
2558}
2559
2560static struct MEMO *
2561lazy_drop_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2562{
2563 long remain;
2564 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2565 VALUE memo = rb_ary_entry(memos, memo_index);
2566
2567 if (NIL_P(memo)) {
2568 memo = entry->memo;
2569 }
2570 remain = NUM2LONG(memo);
2571 if (remain > 0) {
2572 --remain;
2573 rb_ary_store(memos, memo_index, LONG2NUM(remain));
2574 return 0;
2575 }
2576
2577 return result;
2578}
2579
/* Dispatch table for lazy.drop. */
static const lazyenum_funcs lazy_drop_funcs = {
    lazy_drop_proc, lazy_drop_size,
};
2583
2584/*
2585 * call-seq:
2586 * lazy.drop(n) -> lazy_enumerator
2587 *
2588 * Like Enumerable#drop, but chains operation to be lazy-evaluated.
2589 */
2590
2591static VALUE
2592lazy_drop(VALUE obj, VALUE n)
2593{
2594 long len = NUM2LONG(n);
2595 VALUE argv[2];
2596 argv[0] = sym_each;
2597 argv[1] = n;
2598
2599 if (len < 0) {
2600 rb_raise(rb_eArgError, "attempt to drop negative size");
2601 }
2602
2603 return lazy_add_method(obj, 2, argv, n, rb_ary_new3(1, n), &lazy_drop_funcs);
2604}
2605
2606static struct MEMO *
2607lazy_drop_while_proc(VALUE proc_entry, struct MEMO* result, VALUE memos, long memo_index)
2608{
2609 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2610 VALUE memo = rb_ary_entry(memos, memo_index);
2611
2612 if (NIL_P(memo)) {
2613 memo = entry->memo;
2614 }
2615
2616 if (!RTEST(memo)) {
2617 VALUE drop = lazyenum_yield_values(proc_entry, result);
2618 if (RTEST(drop)) return 0;
2619 rb_ary_store(memos, memo_index, Qtrue);
2620 }
2621 return result;
2622}
2623
/* Dispatch table for lazy.drop_while (size is unknown in advance). */
static const lazyenum_funcs lazy_drop_while_funcs = {
    lazy_drop_while_proc, 0,
};
2627
2628/*
2629 * call-seq:
2630 * lazy.drop_while { |obj| block } -> lazy_enumerator
2631 *
2632 * Like Enumerable#drop_while, but chains operation to be lazy-evaluated.
2633 */
2634
/* Entry point for Enumerator::Lazy#drop_while: a block is mandatory.
 * Qfalse seeds the "done dropping" flag used by lazy_drop_while_proc. */
static VALUE
lazy_drop_while(VALUE obj)
{
    LAZY_NEED_BLOCK(drop_while);
    return lazy_add_method(obj, 0, 0, Qfalse, Qnil, &lazy_drop_while_funcs);
}
2641
2642static int
2643lazy_uniq_check(VALUE chain, VALUE memos, long memo_index)
2644{
2645 VALUE hash = rb_ary_entry(memos, memo_index);
2646
2647 if (NIL_P(hash)) {
2648 hash = rb_obj_hide(rb_hash_new());
2649 rb_ary_store(memos, memo_index, hash);
2650 }
2651
2652 return rb_hash_add_new_element(hash, chain, Qfalse);
2653}
2654
/* Step for lazy.uniq without a block: dedupe on the element itself. */
static struct MEMO *
lazy_uniq_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    if (lazy_uniq_check(result->memo_value, memos, memo_index)) return 0;
    return result;
}

/* Step for lazy.uniq with a block: dedupe on the block's return value. */
static struct MEMO *
lazy_uniq_iter_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    VALUE chain = lazyenum_yield(proc_entry, result);

    if (lazy_uniq_check(chain, memos, memo_index)) return 0;
    return result;
}

static const lazyenum_funcs lazy_uniq_iter_funcs = {
    lazy_uniq_iter_proc, 0,
};

static const lazyenum_funcs lazy_uniq_funcs = {
    lazy_uniq_proc, 0,
};
2678
2679/*
2680 * call-seq:
2681 * lazy.uniq -> lazy_enumerator
2682 * lazy.uniq { |item| block } -> lazy_enumerator
2683 *
2684 * Like Enumerable#uniq, but chains operation to be lazy-evaluated.
2685 */
2686
2687static VALUE
2688lazy_uniq(VALUE obj)
2689{
2690 const lazyenum_funcs *const funcs =
2691 rb_block_given_p() ? &lazy_uniq_iter_funcs : &lazy_uniq_funcs;
2692 return lazy_add_method(obj, 0, 0, Qnil, Qnil, funcs);
2693}
2694
/* Step for lazy.compact: drop nil elements. */
static struct MEMO *
lazy_compact_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    if (NIL_P(result->memo_value)) return 0;
    return result;
}

static const lazyenum_funcs lazy_compact_funcs = {
    lazy_compact_proc, 0,
};
2705
2706/*
2707 * call-seq:
2708 * lazy.compact -> lazy_enumerator
2709 *
2710 * Like Enumerable#compact, but chains operation to be lazy-evaluated.
2711 */
2712
/* Entry point for Enumerator::Lazy#compact. */
static VALUE
lazy_compact(VALUE obj)
{
    return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_compact_funcs);
}
2718
/* Step for lazy.with_index: the memo slot carries the next index.
 * With a block, the block is called with (value, index) and the value
 * passes through unchanged; without a block, the value is replaced by
 * the pair [value, index]. */
static struct MEMO *
lazy_with_index_proc(VALUE proc_entry, struct MEMO* result, VALUE memos, long memo_index)
{
    struct proc_entry *entry = proc_entry_ptr(proc_entry);
    VALUE memo = rb_ary_entry(memos, memo_index);
    VALUE argv[2];

    if (NIL_P(memo)) {
        memo = entry->memo;    /* first element: start from the given offset */
    }

    argv[0] = result->memo_value;
    argv[1] = memo;
    if (entry->proc) {
        rb_proc_call_with_block(entry->proc, 2, argv, Qnil);
        LAZY_MEMO_RESET_PACKED(result);
    }
    else {
        LAZY_MEMO_SET_VALUE(result, rb_ary_new_from_values(2, argv));
        LAZY_MEMO_SET_PACKED(result);
    }
    rb_ary_store(memos, memo_index, LONG2NUM(NUM2LONG(memo) + 1));
    return result;
}

/* with_index does not change the number of elements. */
static VALUE
lazy_with_index_size(VALUE proc, VALUE receiver)
{
    return receiver;
}

static const lazyenum_funcs lazy_with_index_funcs = {
    lazy_with_index_proc, lazy_with_index_size,
};
2753
2754/*
2755 * call-seq:
2756 * lazy.with_index(offset = 0) {|(*args), idx| block }
2757 * lazy.with_index(offset = 0)
2758 *
2759 * If a block is given, returns a lazy enumerator that will
2760 * iterate over the given block for each element
2761 * with an index, which starts from +offset+, and returns a
2762 * lazy enumerator that yields the same values (without the index).
2763 *
2764 * If a block is not given, returns a new lazy enumerator that
2765 * includes the index, starting from +offset+.
2766 *
2767 * +offset+:: the starting index to use
2768 *
2769 * See Enumerator#with_index.
2770 */
2771static VALUE
2772lazy_with_index(int argc, VALUE *argv, VALUE obj)
2773{
2774 VALUE memo;
2775
2776 rb_scan_args(argc, argv, "01", &memo);
2777 if (NIL_P(memo))
2778 memo = LONG2NUM(0);
2779
2780 return lazy_add_method(obj, 0, 0, memo, rb_ary_new_from_values(1, &memo), &lazy_with_index_funcs);
2781}
2782
/* Step for lazy.tap_each: call the block for side effects only; the
 * element passes through unmodified. */
static struct MEMO *
lazy_tap_each_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    struct proc_entry *entry = proc_entry_ptr(proc_entry);

    rb_proc_call_with_block(entry->proc, 1, &result->memo_value, Qnil);

    return result;
}

static const lazyenum_funcs lazy_tap_each_funcs = {
    lazy_tap_each_proc, 0,
};
2796
2797/*
2798 * call-seq:
2799 * lazy.tap_each { |item| ... } -> lazy_enumerator
2800 *
2801 * Passes each element through to the block for side effects only,
2802 * without modifying the element or affecting the enumeration.
2803 * Returns a new lazy enumerator.
2804 *
2805 * This is useful for debugging or logging inside lazy chains,
2806 * without breaking laziness or misusing +map+.
2807 *
2808 * (1..).lazy
2809 * .tap_each { |x| puts "got #{x}" }
2810 * .select(&:even?)
2811 * .first(3)
2812 * # prints: got 1, got 2, ..., got 6
2813 * # returns: [2, 4, 6]
2814 *
2815 * Similar in intent to Java's Stream#peek.
2816 */
2817
2818static VALUE
2819lazy_tap_each(VALUE obj)
2820{
2821 if (!rb_block_given_p())
2822 {
2823 rb_raise(rb_eArgError, "tried to call lazy tap_each without a block");
2824 }
2825
2826 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_tap_each_funcs);
2827}
2828
2829#if 0 /* for RDoc */
2830
2831/*
2832 * call-seq:
2833 * lazy.chunk { |elt| ... } -> lazy_enumerator
2834 *
2835 * Like Enumerable#chunk, but chains operation to be lazy-evaluated.
2836 */
2837static VALUE
2838lazy_chunk(VALUE self)
2839{
2840}
2841
2842/*
2843 * call-seq:
2844 * lazy.chunk_while {|elt_before, elt_after| bool } -> lazy_enumerator
2845 *
2846 * Like Enumerable#chunk_while, but chains operation to be lazy-evaluated.
2847 */
2848static VALUE
2849lazy_chunk_while(VALUE self)
2850{
2851}
2852
2853/*
2854 * call-seq:
2855 * lazy.slice_after(pattern) -> lazy_enumerator
2856 * lazy.slice_after { |elt| bool } -> lazy_enumerator
2857 *
2858 * Like Enumerable#slice_after, but chains operation to be lazy-evaluated.
2859 */
2860static VALUE
2861lazy_slice_after(VALUE self)
2862{
2863}
2864
2865/*
2866 * call-seq:
2867 * lazy.slice_before(pattern) -> lazy_enumerator
2868 * lazy.slice_before { |elt| bool } -> lazy_enumerator
2869 *
2870 * Like Enumerable#slice_before, but chains operation to be lazy-evaluated.
2871 */
2872static VALUE
2873lazy_slice_before(VALUE self)
2874{
2875}
2876
2877/*
2878 * call-seq:
2879 * lazy.slice_when {|elt_before, elt_after| bool } -> lazy_enumerator
2880 *
2881 * Like Enumerable#slice_when, but chains operation to be lazy-evaluated.
2882 */
2883static VALUE
2884lazy_slice_when(VALUE self)
2885{
2886}
2887# endif
2888
/* Calls the eager implementation via super, then wraps its result back
 * into a lazy enumerator. */
static VALUE
lazy_super(int argc, VALUE *argv, VALUE lazy)
{
    return enumerable_lazy(rb_call_super(argc, argv));
}

/*
 * call-seq:
 *   enum.lazy -> lazy_enumerator
 *
 * Returns self.
 */

static VALUE
lazy_lazy(VALUE obj)
{
    return obj;
}
2907
2908/*
2909 * Document-class: StopIteration
2910 *
2911 * Raised to stop the iteration, in particular by Enumerator#next. It is
2912 * rescued by Kernel#loop.
2913 *
2914 * loop do
2915 * puts "Hello"
2916 * raise StopIteration
2917 * puts "World"
2918 * end
2919 * puts "Done!"
2920 *
2921 * <em>produces:</em>
2922 *
2923 * Hello
2924 * Done!
2925 */
2926
2927/*
2928 * call-seq:
2929 * result -> value
2930 *
2931 * Returns the return value of the iterator.
2932 *
2933 * o = Object.new
2934 * def o.each
2935 * yield 1
2936 * yield 2
2937 * yield 3
2938 * 100
2939 * end
2940 *
2941 * e = o.to_enum
2942 *
2943 * puts e.next #=> 1
2944 * puts e.next #=> 2
2945 * puts e.next #=> 3
2946 *
2947 * begin
2948 * e.next
2949 * rescue StopIteration => ex
2950 * puts ex.result #=> 100
2951 * end
2952 *
2953 */
2954
/* StopIteration#result: the iterator's return value stored on the
 * exception (nil when never set). */
static VALUE
stop_result(VALUE self)
{
    return rb_attr_get(self, id_result);
}
2960
2961/*
2962 * Producer
2963 */
2964
/* GC mark/compaction callback: visits the producer's VALUE fields. */
static void
producer_mark_and_move(void *p)
{
    struct producer *ptr = p;
    rb_gc_mark_and_move(&ptr->init);
    rb_gc_mark_and_move(&ptr->proc);
    rb_gc_mark_and_move(&ptr->size);
}

#define producer_free RUBY_TYPED_DEFAULT_FREE

/* GC accounting: the producer struct has no out-of-line data. */
static size_t
producer_memsize(const void *p)
{
    return sizeof(struct producer);
}

static const rb_data_type_t producer_data_type = {
    "producer",
    {
        producer_mark_and_move,
        producer_free,
        producer_memsize,
        producer_mark_and_move,
    },
    0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
};
2992
/* Fetches the producer struct, raising ArgumentError if the object was
 * allocated but never initialized (proc still Qundef). */
static struct producer *
producer_ptr(VALUE obj)
{
    struct producer *ptr;

    TypedData_Get_Struct(obj, struct producer, &producer_data_type, ptr);
    if (!ptr || UNDEF_P(ptr->proc)) {
        rb_raise(rb_eArgError, "uninitialized producer");
    }
    return ptr;
}

/* :nodoc: */
static VALUE
producer_allocate(VALUE klass)
{
    struct producer *ptr;
    VALUE obj;

    obj = TypedData_Make_Struct(klass, struct producer, &producer_data_type, ptr);
    ptr->init = Qundef;
    ptr->proc = Qundef;
    ptr->size = Qnil;

    return obj;
}
3019
/* Stores init/proc/size into an allocated producer, with GC write
 * barriers. */
static VALUE
producer_init(VALUE obj, VALUE init, VALUE proc, VALUE size)
{
    struct producer *ptr;

    TypedData_Get_Struct(obj, struct producer, &producer_data_type, ptr);

    if (!ptr) {
        rb_raise(rb_eArgError, "unallocated producer");
    }

    RB_OBJ_WRITE(obj, &ptr->init, init);
    RB_OBJ_WRITE(obj, &ptr->proc, proc);
    RB_OBJ_WRITE(obj, &ptr->size, size);

    return obj;
}

/* rb_rescue2 handler for StopIteration: yields the exception's result
 * as the return value of the enumeration. */
static VALUE
producer_each_stop(VALUE dummy, VALUE exc)
{
    return rb_attr_get(exc, id_result);
}
3043
3044NORETURN(static VALUE producer_each_i(VALUE obj));
3045
3046static VALUE
3047producer_each_i(VALUE obj)
3048{
3049 struct producer *ptr;
3050 VALUE init, proc, curr;
3051
3052 ptr = producer_ptr(obj);
3053 init = ptr->init;
3054 proc = ptr->proc;
3055
3056 if (UNDEF_P(init)) {
3057 curr = Qnil;
3058 }
3059 else {
3060 rb_yield(init);
3061 curr = init;
3062 }
3063
3064 for (;;) {
3065 curr = rb_funcall(proc, id_call, 1, curr);
3066 rb_yield(curr);
3067 }
3068
3070}
3071
/* :nodoc: */
/* Runs the infinite production loop, returning cleanly when the proc
 * or the consumer raises StopIteration. */
static VALUE
producer_each(VALUE obj)
{
    rb_need_block();

    return rb_rescue2(producer_each_i, obj, producer_each_stop, (VALUE)0, rb_eStopIteration, (VALUE)0);
}
3080
3081static VALUE
3082producer_size(VALUE obj, VALUE args, VALUE eobj)
3083{
3084 struct producer *ptr = producer_ptr(obj);
3085 VALUE size = ptr->size;
3086
3087 if (NIL_P(size)) return Qnil;
3088 if (RB_INTEGER_TYPE_P(size) || RB_FLOAT_TYPE_P(size)) return size;
3089
3090 return rb_funcall(size, id_call, 0);
3091}
3092
3093/*
3094 * call-seq:
3095 * Enumerator.produce(initial = nil, size: nil) { |prev| block } -> enumerator
3096 *
3097 * Creates an infinite enumerator from any block, just called over and
3098 * over. The result of the previous iteration is passed to the next one.
3099 * If +initial+ is provided, it is passed to the first iteration, and
3100 * becomes the first element of the enumerator; if it is not provided,
3101 * the first iteration receives +nil+, and its result becomes the first
3102 * element of the iterator.
3103 *
3104 * Raising StopIteration from the block stops an iteration.
3105 *
3106 * Enumerator.produce(1, &:succ) # => enumerator of 1, 2, 3, 4, ....
3107 *
3108 * Enumerator.produce { rand(10) } # => infinite random number sequence
3109 *
3110 * ancestors = Enumerator.produce(node) { |prev| node = prev.parent or raise StopIteration }
3111 * enclosing_section = ancestors.find { |n| n.type == :section }
3112 *
3113 * Using ::produce together with Enumerable methods like Enumerable#detect,
3114 * Enumerable#slice_after, Enumerable#take_while can provide Enumerator-based alternatives
3115 * for +while+ and +until+ cycles:
3116 *
3117 * # Find next Tuesday
3118 * require "date"
3119 * Enumerator.produce(Date.today, &:succ).detect(&:tuesday?)
3120 *
3121 * # Simple lexer:
3122 * require "strscan"
3123 * scanner = StringScanner.new("7+38/6")
3124 * PATTERN = %r{\d+|[-/+*]}
3125 * Enumerator.produce { scanner.scan(PATTERN) }.slice_after { scanner.eos? }.first
3126 * # => ["7", "+", "38", "/", "6"]
3127 *
3128 * The optional +size+ keyword argument specifies the size of the enumerator,
3129 * which can be retrieved by Enumerator#size. It can be an integer,
3130 * +Float::INFINITY+, a callable object (such as a lambda), or +nil+ to
3131 * indicate unknown size. When not specified, the size defaults to
3132 * +Float::INFINITY+.
3133 *
3134 * # Infinite enumerator
3135 * enum = Enumerator.produce(1, size: Float::INFINITY, &:succ)
3136 * enum.size # => Float::INFINITY
3137 *
3138 * # Finite enumerator with known/computable size
3139 * abs_dir = File.expand_path("./baz") # => "/foo/bar/baz"
3140 * traverser = Enumerator.produce(abs_dir, size: -> { abs_dir.count("/") + 1 }) {
3141 * raise StopIteration if it == "/"
3142 * File.dirname(it)
3143 * }
3144 * traverser.size # => 4
3145 *
3146 * # Finite enumerator with unknown size
3147 * calendar = Enumerator.produce(Date.today, size: nil) {
3148 * it.monday? ? raise(StopIteration) : it + 1
3149 * }
3150 * calendar.size # => nil
3151 */
/* Implements Enumerator.produce(initial = nil, size: nil).  Builds a
 * Producer object and wraps it in a sized enumerator. */
static VALUE
enumerator_s_produce(int argc, VALUE *argv, VALUE klass)
{
    VALUE init, producer, opts, size;
    ID keyword_ids[1];

    if (!rb_block_given_p()) rb_raise(rb_eArgError, "no block given");

    keyword_ids[0] = rb_intern("size");
    rb_scan_args_kw(RB_SCAN_ARGS_LAST_HASH_KEYWORDS, argc, argv, "01:", &init, &opts);
    rb_get_kwargs(opts, keyword_ids, 0, 1, &size);

    /* size defaults to Float::INFINITY when the keyword is absent */
    size = UNDEF_P(size) ? DBL2NUM(HUGE_VAL) : convert_to_feasible_size_value(size);

    /* distinguish "no initial value" from an explicit nil: with no
     * positional argument (or only the keyword hash), use Qundef */
    if (argc == 0 || (argc == 1 && !NIL_P(opts))) {
        init = Qundef;
    }

    producer = producer_init(producer_allocate(rb_cEnumProducer), init, rb_block_proc(), size);

    return rb_enumeratorize_with_size_kw(producer, sym_each, 0, 0, producer_size, RB_NO_KEYWORDS);
}
3174
3175/*
3176 * Document-class: Enumerator::Chain
3177 *
3178 * Enumerator::Chain is a subclass of Enumerator, which represents a
3179 * chain of enumerables that works as a single enumerator.
3180 *
3181 * This type of objects can be created by Enumerable#chain and
3182 * Enumerator#+.
3183 */
3184
/* GC mark/compaction callback: visits the chained enumerables array. */
static void
enum_chain_mark_and_move(void *p)
{
    struct enum_chain *ptr = p;
    rb_gc_mark_and_move(&ptr->enums);
}

#define enum_chain_free RUBY_TYPED_DEFAULT_FREE

/* GC accounting: the chain struct has no out-of-line data. */
static size_t
enum_chain_memsize(const void *p)
{
    return sizeof(struct enum_chain);
}

static const rb_data_type_t enum_chain_data_type = {
    "chain",
    {
        enum_chain_mark_and_move,
        enum_chain_free,
        enum_chain_memsize,
        enum_chain_mark_and_move,
    },
    0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED
};
3210
/* Fetches the chain struct, raising ArgumentError if the object was
 * allocated but never initialized (enums still Qundef). */
static struct enum_chain *
enum_chain_ptr(VALUE obj)
{
    struct enum_chain *ptr;

    TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr);
    if (!ptr || UNDEF_P(ptr->enums)) {
        rb_raise(rb_eArgError, "uninitialized chain");
    }
    return ptr;
}

/* :nodoc: */
static VALUE
enum_chain_allocate(VALUE klass)
{
    struct enum_chain *ptr;
    VALUE obj;

    obj = TypedData_Make_Struct(klass, struct enum_chain, &enum_chain_data_type, ptr);
    ptr->enums = Qundef;
    ptr->pos = -1;

    return obj;
}
3236
3237/*
3238 * call-seq:
3239 * Enumerator::Chain.new(*enums) -> enum
3240 *
3241 * Generates a new enumerator object that iterates over the elements
3242 * of given enumerable objects in sequence.
3243 *
3244 * e = Enumerator::Chain.new(1..3, [4, 5])
3245 * e.to_a #=> [1, 2, 3, 4, 5]
3246 * e.size #=> 5
3247 */
/* Freezes and stores the list of enumerables, and resets the iteration
 * position (-1 means "not started"). */
static VALUE
enum_chain_initialize(VALUE obj, VALUE enums)
{
    struct enum_chain *ptr;

    rb_check_frozen(obj);
    TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr);

    if (!ptr) rb_raise(rb_eArgError, "unallocated chain");

    RB_OBJ_WRITE(obj, &ptr->enums, rb_ary_freeze(enums));
    ptr->pos = -1;

    return obj;
}
3263
3264static VALUE
3265new_enum_chain(VALUE enums)
3266{
3267 long i;
3268 VALUE obj = enum_chain_initialize(enum_chain_allocate(rb_cEnumChain), enums);
3269
3270 for (i = 0; i < RARRAY_LEN(enums); i++) {
3271 if (RTEST(rb_obj_is_kind_of(RARRAY_AREF(enums, i), rb_cLazy))) {
3272 return enumerable_lazy(obj);
3273 }
3274 }
3275
3276 return obj;
3277}
3278
3279/* :nodoc: */
/* Copies the enumerable list and the iteration position for dup/clone. */
static VALUE
enum_chain_init_copy(VALUE obj, VALUE orig)
{
    struct enum_chain *ptr0, *ptr1;

    if (!OBJ_INIT_COPY(obj, orig)) return obj;
    ptr0 = enum_chain_ptr(orig);

    TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr1);

    if (!ptr1) rb_raise(rb_eArgError, "unallocated chain");

    RB_OBJ_WRITE(obj, &ptr1->enums, ptr0->enums);
    ptr1->pos = ptr0->pos;

    return obj;
}
3297
3298static VALUE
3299enum_chain_total_size(VALUE enums)
3300{
3301 VALUE total = INT2FIX(0);
3302 long i;
3303
3304 for (i = 0; i < RARRAY_LEN(enums); i++) {
3305 VALUE size = enum_size(RARRAY_AREF(enums, i));
3306
3307 if (NIL_P(size) || (RB_FLOAT_TYPE_P(size) && isinf(NUM2DBL(size)))) {
3308 return size;
3309 }
3310 if (!RB_INTEGER_TYPE_P(size)) {
3311 return Qnil;
3312 }
3313
3314 total = rb_funcall(total, '+', 1, size);
3315 }
3316
3317 return total;
3318}
3319
3320/*
3321 * call-seq:
3322 * obj.size -> int, Float::INFINITY or nil
3323 *
3324 * Returns the total size of the enumerator chain calculated by
3325 * summing up the size of each enumerable in the chain. If any of the
3326 * enumerables reports its size as nil or Float::INFINITY, that value
3327 * is returned as the total size.
3328 */
static VALUE
enum_chain_size(VALUE obj)
{
    return enum_chain_total_size(enum_chain_ptr(obj)->enums);
}

/* Size callback used when #each is called without arguments. */
static VALUE
enum_chain_enum_size(VALUE obj, VALUE args, VALUE eobj)
{
    return enum_chain_size(obj);
}

/* Size callback when #each receives arguments: size is unknown. */
static VALUE
enum_chain_enum_no_size(VALUE obj, VALUE args, VALUE eobj)
{
    return Qnil;
}
3346
3347/*
3348 * call-seq:
3349 * obj.each(*args) { |...| ... } -> obj
3350 * obj.each(*args) -> enumerator
3351 *
3352 * Iterates over the elements of the first enumerable by calling the
3353 * "each" method on it with the given arguments, then proceeds to the
3354 * following enumerables in sequence until all of the enumerables are
3355 * exhausted.
3356 *
3357 * If no block is given, returns an enumerator.
3358 */
static VALUE
enum_chain_each(int argc, VALUE *argv, VALUE obj)
{
    VALUE enums, block;
    struct enum_chain *objptr;
    long i;

    RETURN_SIZED_ENUMERATOR(obj, argc, argv, argc > 0 ? enum_chain_enum_no_size : enum_chain_enum_size);

    objptr = enum_chain_ptr(obj);
    enums = objptr->enums;
    block = rb_block_proc();

    for (i = 0; i < RARRAY_LEN(enums); i++) {
        objptr->pos = i;    /* remember position so #rewind knows how far we got */
        rb_funcall_with_block(RARRAY_AREF(enums, i), id_each, argc, argv, block);
    }

    return obj;
}
3379
3380/*
3381 * call-seq:
3382 * obj.rewind -> obj
3383 *
3384 * Rewinds the enumerator chain by calling the "rewind" method on each
3385 * enumerable in reverse order. Each call is performed only if the
3386 * enumerable responds to the method.
3387 */
static VALUE
enum_chain_rewind(VALUE obj)
{
    struct enum_chain *objptr = enum_chain_ptr(obj);
    VALUE enums = objptr->enums;
    long i;

    /* walk backwards from the last-visited enumerable down to index 0;
     * rb_check_funcall only calls #rewind on objects that respond to it */
    for (i = objptr->pos; 0 <= i && i < RARRAY_LEN(enums); objptr->pos = --i) {
        rb_check_funcall(RARRAY_AREF(enums, i), id_rewind, 0, 0);
    }

    return obj;
}
3401
/* rb_exec_recursive callback for Enumerator::Chain#inspect; `recur` is
 * set when the chain (directly or indirectly) contains itself. */
static VALUE
inspect_enum_chain(VALUE obj, VALUE dummy, int recur)
{
    VALUE klass = rb_obj_class(obj);
    struct enum_chain *ptr;

    TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr);

    if (!ptr || UNDEF_P(ptr->enums)) {
        return rb_sprintf("#<%"PRIsVALUE": uninitialized>", rb_class_path(klass));
    }

    if (recur) {
        return rb_sprintf("#<%"PRIsVALUE": ...>", rb_class_path(klass));
    }

    return rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE">", rb_class_path(klass), ptr->enums);
}
3420
3421/*
3422 * call-seq:
3423 * obj.inspect -> string
3424 *
3425 * Returns a printable version of the enumerator chain.
3426 */
static VALUE
enum_chain_inspect(VALUE obj)
{
    /* guard against self-referential chains */
    return rb_exec_recursive(inspect_enum_chain, obj, 0);
}
3432
3433/*
3434 * call-seq:
3435 * e.chain(*enums) -> enumerator
3436 *
3437 * Returns an enumerator object generated from this enumerator and
3438 * given enumerables.
3439 *
3440 * e = (1..3).chain([4, 5])
3441 * e.to_a #=> [1, 2, 3, 4, 5]
3442 */
3443static VALUE
3444enum_chain(int argc, VALUE *argv, VALUE obj)
3445{
3446 VALUE enums = rb_ary_new_from_values(1, &obj);
3447 rb_ary_cat(enums, argv, argc);
3448 return new_enum_chain(enums);
3449}
3450
3451/*
3452 * call-seq:
3453 * e + enum -> enumerator
3454 *
3455 * Returns an enumerator object generated from this enumerator and a
3456 * given enumerable.
3457 *
3458 * e = (1..3).each + [4, 5]
3459 * e.to_a #=> [1, 2, 3, 4, 5]
3460 */
static VALUE
enumerator_plus(VALUE obj, VALUE eobj)
{
    return new_enum_chain(rb_ary_new_from_args(2, obj, eobj));
}
3466
3467/*
3468 * Document-class: Enumerator::Product
3469 *
3470 * Enumerator::Product generates a Cartesian product of any number of
3471 * enumerable objects. Iterating over the product of enumerable
3472 * objects is roughly equivalent to nested each_entry loops where the
3473 * loop for the rightmost object is put innermost.
3474 *
3475 * innings = Enumerator::Product.new(1..9, ['top', 'bottom'])
3476 *
3477 * innings.each do |i, h|
3478 * p [i, h]
3479 * end
3480 * # [1, "top"]
3481 * # [1, "bottom"]
3482 * # [2, "top"]
3483 * # [2, "bottom"]
3484 * # [3, "top"]
3485 * # [3, "bottom"]
3486 * # ...
3487 * # [9, "top"]
3488 * # [9, "bottom"]
3489 *
3490 * The method used against each enumerable object is `each_entry`
3491 * instead of `each` so that the product of N enumerable objects
3492 * yields an array of exactly N elements in each iteration.
3493 *
3494 * When no enumerator is given, it calls a given block once yielding
3495 * an empty argument list.
3496 *
3497 * This type of objects can be created by Enumerator.product.
3498 */
3499
/* GC mark/compaction callback: visits the product's enumerables array. */
static void
enum_product_mark_and_move(void *p)
{
    struct enum_product *ptr = p;
    rb_gc_mark_and_move(&ptr->enums);
}

#define enum_product_free RUBY_TYPED_DEFAULT_FREE

/* GC accounting: the product struct has no out-of-line data. */
static size_t
enum_product_memsize(const void *p)
{
    return sizeof(struct enum_product);
}

static const rb_data_type_t enum_product_data_type = {
    "product",
    {
        enum_product_mark_and_move,
        enum_product_free,
        enum_product_memsize,
        enum_product_mark_and_move,
    },
    0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED
};
3525
/* Fetches the product struct, raising ArgumentError if the object was
 * allocated but never initialized (enums still Qundef). */
static struct enum_product *
enum_product_ptr(VALUE obj)
{
    struct enum_product *ptr;

    TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr);
    if (!ptr || UNDEF_P(ptr->enums)) {
        rb_raise(rb_eArgError, "uninitialized product");
    }
    return ptr;
}

/* :nodoc: */
static VALUE
enum_product_allocate(VALUE klass)
{
    struct enum_product *ptr;
    VALUE obj;

    obj = TypedData_Make_Struct(klass, struct enum_product, &enum_product_data_type, ptr);
    ptr->enums = Qundef;

    return obj;
}
3550
3551/*
3552 * call-seq:
3553 * Enumerator::Product.new(*enums) -> enum
3554 *
3555 * Generates a new enumerator object that generates a Cartesian
3556 * product of given enumerable objects.
3557 *
3558 * e = Enumerator::Product.new(1..3, [4, 5])
3559 * e.to_a #=> [[1, 4], [1, 5], [2, 4], [2, 5], [3, 4], [3, 5]]
3560 * e.size #=> 6
3561 */
3562static VALUE
3563enum_product_initialize(int argc, VALUE *argv, VALUE obj)
3564{
3565 struct enum_product *ptr;
3566 VALUE enums = Qnil, options = Qnil;
3567
3568 rb_scan_args(argc, argv, "*:", &enums, &options);
3569
3570 if (!NIL_P(options) && !RHASH_EMPTY_P(options)) {
3571 rb_exc_raise(rb_keyword_error_new("unknown", rb_hash_keys(options)));
3572 }
3573
3574 rb_check_frozen(obj);
3575 TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr);
3576
3577 if (!ptr) rb_raise(rb_eArgError, "unallocated product");
3578
3579 RB_OBJ_WRITE(obj, &ptr->enums, rb_ary_freeze(enums));
3580
3581 return obj;
3582}
3583
3584/* :nodoc: */
3585static VALUE
3586enum_product_init_copy(VALUE obj, VALUE orig)
3587{
3588 struct enum_product *ptr0, *ptr1;
3589
3590 if (!OBJ_INIT_COPY(obj, orig)) return obj;
3591 ptr0 = enum_product_ptr(orig);
3592
3593 TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr1);
3594
3595 if (!ptr1) rb_raise(rb_eArgError, "unallocated product");
3596
3597 RB_OBJ_WRITE(obj, &ptr1->enums, ptr0->enums);
3598
3599 return obj;
3600}
3601
/* Computes the product of the sizes of all enumerables in +enums+.
 * Returns 0 as soon as any enumerable is empty, propagates nil or
 * Float::INFINITY when a size is unknown or infinite, and otherwise
 * returns the Integer product. */
static VALUE
enum_product_total_size(VALUE enums)
{
    VALUE total = INT2FIX(1);
    VALUE sizes = rb_ary_hidden_new(RARRAY_LEN(enums));
    long i;

    /* First pass: collect each size; an empty enumerable short-circuits
     * the whole product to 0 regardless of the other entries. */
    for (i = 0; i < RARRAY_LEN(enums); i++) {
        VALUE size = enum_size(RARRAY_AREF(enums, i));
        if (size == INT2FIX(0)) {
            rb_ary_resize(sizes, 0);
            return size;
        }
        rb_ary_push(sizes, size);
    }
    /* Second pass: multiply, letting nil/infinite sizes win over counts. */
    for (i = 0; i < RARRAY_LEN(sizes); i++) {
        VALUE size = RARRAY_AREF(sizes, i);

        if (NIL_P(size) || (RB_TYPE_P(size, T_FLOAT) && isinf(NUM2DBL(size)))) {
            return size;
        }
        if (!RB_INTEGER_TYPE_P(size)) {
            return Qnil;
        }

        total = rb_funcall(total, '*', 1, size);
    }

    return total;
}
3632
3633/*
3634 * call-seq:
3635 * obj.size -> int, Float::INFINITY or nil
3636 *
3637 * Returns the total size of the enumerator product calculated by
3638 * multiplying the sizes of enumerables in the product. If any of the
3639 * enumerables reports its size as nil or Float::INFINITY, that value
3640 * is returned as the size.
3641 */
3642static VALUE
3643enum_product_size(VALUE obj)
3644{
3645 return enum_product_total_size(enum_product_ptr(obj)->enums);
3646}
3647
/* Size function passed to RETURN_SIZED_ENUMERATOR; args and eobj are unused. */
static VALUE
enum_product_enum_size(VALUE obj, VALUE args, VALUE eobj)
{
    return enum_product_size(obj);
}
3653
3655 VALUE obj;
3656 VALUE block;
3657 int index;
3658 int argc;
3659 VALUE *argv;
3660};
3661
3662static VALUE product_each(VALUE, struct product_state *);
3663
3664static VALUE
3665product_each_i(RB_BLOCK_CALL_FUNC_ARGLIST(value, state))
3666{
3667 struct product_state *pstate = (struct product_state *)state;
3668 pstate->argv[pstate->index++] = value;
3669
3670 VALUE val = product_each(pstate->obj, pstate);
3671 pstate->index--;
3672 return val;
3673}
3674
3675static VALUE
3676product_each(VALUE obj, struct product_state *pstate)
3677{
3678 struct enum_product *ptr = enum_product_ptr(obj);
3679 VALUE enums = ptr->enums;
3680
3681 if (pstate->index < pstate->argc) {
3682 VALUE eobj = RARRAY_AREF(enums, pstate->index);
3683
3684 rb_block_call(eobj, id_each_entry, 0, NULL, product_each_i, (VALUE)pstate);
3685 }
3686 else {
3687 rb_funcall(pstate->block, id_call, 1, rb_ary_new_from_values(pstate->argc, pstate->argv));
3688 }
3689
3690 return obj;
3691}
3692
3693static VALUE
3694enum_product_run(VALUE obj, VALUE block)
3695{
3696 struct enum_product *ptr = enum_product_ptr(obj);
3697 int argc = RARRAY_LENINT(ptr->enums);
3698 if (argc == 0) { /* no need to allocate state.argv */
3699 rb_funcall(block, id_call, 1, rb_ary_new());
3700 return obj;
3701 }
3702
3703 VALUE argsbuf = 0;
3704 struct product_state state = {
3705 .obj = obj,
3706 .block = block,
3707 .index = 0,
3708 .argc = argc,
3709 .argv = ALLOCV_N(VALUE, argsbuf, argc),
3710 };
3711
3712 VALUE ret = product_each(obj, &state);
3713 ALLOCV_END(argsbuf);
3714 return ret;
3715}
3716
3717/*
3718 * call-seq:
3719 * obj.each { |...| ... } -> obj
3720 * obj.each -> enumerator
3721 *
3722 * Iterates over the elements of the first enumerable by calling the
3723 * "each_entry" method on it with the given arguments, then proceeds
3724 * to the following enumerables in sequence until all of the
3725 * enumerables are exhausted.
3726 *
3727 * If no block is given, returns an enumerator. Otherwise, returns self.
3728 */
3729static VALUE
3730enum_product_each(VALUE obj)
3731{
3732 RETURN_SIZED_ENUMERATOR(obj, 0, 0, enum_product_enum_size);
3733
3734 return enum_product_run(obj, rb_block_proc());
3735}
3736
3737/*
3738 * call-seq:
3739 * obj.rewind -> obj
3740 *
3741 * Rewinds the product enumerator by calling the "rewind" method on
3742 * each enumerable in reverse order. Each call is performed only if
3743 * the enumerable responds to the method.
3744 */
3745static VALUE
3746enum_product_rewind(VALUE obj)
3747{
3748 struct enum_product *ptr = enum_product_ptr(obj);
3749 VALUE enums = ptr->enums;
3750 long i;
3751
3752 for (i = 0; i < RARRAY_LEN(enums); i++) {
3753 rb_check_funcall(RARRAY_AREF(enums, i), id_rewind, 0, 0);
3754 }
3755
3756 return obj;
3757}
3758
/* Body of #inspect, run under rb_exec_recursive to break cycles.
 * +recur+ is nonzero when this object is already being inspected
 * further up the call stack. */
static VALUE
inspect_enum_product(VALUE obj, VALUE dummy, int recur)
{
    VALUE klass = rb_obj_class(obj);
    struct enum_product *ptr;

    TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr);

    /* Unlike enum_product_ptr(), tolerate an uninitialized object so
     * #inspect never raises. */
    if (!ptr || UNDEF_P(ptr->enums)) {
        return rb_sprintf("#<%"PRIsVALUE": uninitialized>", rb_class_path(klass));
    }

    if (recur) {
        return rb_sprintf("#<%"PRIsVALUE": ...>", rb_class_path(klass));
    }

    return rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE">", rb_class_path(klass), ptr->enums);
}
3777
/*
 * call-seq:
 *   obj.inspect -> string
 *
 * Returns a printable version of the product enumerator.
 */
static VALUE
enum_product_inspect(VALUE obj)
{
    /* rb_exec_recursive guards against self-referencing enum lists. */
    return rb_exec_recursive(inspect_enum_product, obj, 0);
}
3789
/*
 * call-seq:
 *   Enumerator.product(*enums) -> enumerator
 *   Enumerator.product(*enums) { |elts| ... } -> nil
 *
 * Generates a new enumerator object that generates a Cartesian
 * product of given enumerable objects. This is equivalent to
 * Enumerator::Product.new.
 *
 *   e = Enumerator.product(1..3, [4, 5])
 *   e.to_a #=> [[1, 4], [1, 5], [2, 4], [2, 5], [3, 4], [3, 5]]
 *   e.size #=> 6
 *
 * When a block is given, calls the block with each N-element array
 * generated and returns +nil+.
 */
static VALUE
enumerator_s_product(int argc, VALUE *argv, VALUE klass)
{
    VALUE enums = Qnil, options = Qnil, block = Qnil;

    rb_scan_args(argc, argv, "*:&", &enums, &options, &block);

    /* Reject keyword arguments before allocating anything. */
    if (!NIL_P(options) && !RHASH_EMPTY_P(options)) {
        rb_exc_raise(rb_keyword_error_new("unknown", rb_hash_keys(options)));
    }

    /* argv is re-scanned by enum_product_initialize with the same "*:"
     * spec; the keyword hash (if any) has already been vetted above. */
    VALUE obj = enum_product_initialize(argc, argv, enum_product_allocate(rb_cEnumProduct));

    if (!NIL_P(block)) {
        /* With a block, iterate immediately and return nil. */
        enum_product_run(obj, block);
        return Qnil;
    }

    return obj;
}
3826
3828 struct enumerator enumerator;
3829 VALUE begin;
3830 VALUE end;
3831 VALUE step;
3832 bool exclude_end;
3833};
3834
/* Declarative GC edge list for struct arith_seq: every VALUE slot,
 * including those inherited from the embedded struct enumerator, so the
 * GC can mark and update them without a hand-written callback. */
RUBY_REFERENCES(arith_seq_refs) = {
    RUBY_REF_EDGE(struct enumerator, obj),
    RUBY_REF_EDGE(struct enumerator, args),
    RUBY_REF_EDGE(struct enumerator, fib),
    RUBY_REF_EDGE(struct enumerator, dst),
    RUBY_REF_EDGE(struct enumerator, lookahead),
    RUBY_REF_EDGE(struct enumerator, feedvalue),
    RUBY_REF_EDGE(struct enumerator, stop_exc),
    RUBY_REF_EDGE(struct enumerator, size),
    RUBY_REF_EDGE(struct enumerator, procs),

    RUBY_REF_EDGE(struct arith_seq, begin),
    RUBY_REF_EDGE(struct arith_seq, end),
    RUBY_REF_EDGE(struct arith_seq, step),
    RUBY_REF_END
};
3851
3852static const rb_data_type_t arith_seq_data_type = {
3853 "arithmetic_sequence",
3854 {
3855 RUBY_REFS_LIST_PTR(arith_seq_refs),
3857 NULL, // Nothing allocated externally, so don't need a memsize function
3858 NULL,
3859 },
3860 .parent = &enumerator_data_type,
3861 .flags = RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_DECL_MARKING | RUBY_TYPED_EMBEDDABLE
3862};
3863
3864static VALUE
3865arith_seq_allocate(VALUE klass)
3866{
3867 struct arith_seq *ptr;
3868 VALUE enum_obj;
3869
3870 enum_obj = TypedData_Make_Struct(klass, struct arith_seq, &arith_seq_data_type, ptr);
3871 ptr->enumerator.obj = Qundef;
3872
3873 return enum_obj;
3874}
3875
3876/*
3877 * Document-class: Enumerator::ArithmeticSequence
3878 *
 * Enumerator::ArithmeticSequence is a subclass of Enumerator
 * that represents a sequence of numbers with a common difference.
3881 * Instances of this class can be generated by the Range#step and Numeric#step
3882 * methods.
3883 *
3884 * The class can be used for slicing Array (see Array#slice) or custom
3885 * collections.
3886 */
3887
/*
 * Creates an Enumerator::ArithmeticSequence wrapping +obj+/+meth+ with
 * the given begin/end/step components; +excl+ is nonzero when the end
 * value is excluded.  Core entry point used by Range#step and
 * Numeric#step (per the class documentation above).
 */
VALUE
rb_arith_seq_new(VALUE obj, VALUE meth, int argc, VALUE const *argv,
                 rb_enumerator_size_func *size_fn,
                 VALUE beg, VALUE end, VALUE step, int excl)
{
    VALUE aseq = enumerator_init(arith_seq_allocate(rb_cArithSeq),
                                 obj, meth, argc, argv, size_fn, Qnil, rb_keyword_given_p());
    struct arith_seq *ptr;
    /* enumerator_data_type is the declared parent of arith_seq_data_type,
     * so fetching through it is valid for an arith_seq instance. */
    TypedData_Get_Struct(aseq, struct arith_seq, &enumerator_data_type, ptr);

    RB_OBJ_WRITE(aseq, &ptr->begin, beg);
    RB_OBJ_WRITE(aseq, &ptr->end, end);
    RB_OBJ_WRITE(aseq, &ptr->step, step);
    ptr->exclude_end = excl;

    return aseq;
}
3905
3906/*
3907 * call-seq: aseq.begin -> num or nil
3908 *
3909 * Returns the number that defines the first element of this arithmetic
3910 * sequence.
3911 */
3912static inline VALUE
3913arith_seq_begin(VALUE self)
3914{
3915 struct arith_seq *ptr;
3916 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3917 return ptr->begin;
3918}
3919
3920/*
3921 * call-seq: aseq.end -> num or nil
3922 *
3923 * Returns the number that defines the end of this arithmetic sequence.
3924 */
3925static inline VALUE
3926arith_seq_end(VALUE self)
3927{
3928 struct arith_seq *ptr;
3929 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3930 return ptr->end;
3931}
3932
3933/*
3934 * call-seq: aseq.step -> num
3935 *
3936 * Returns the number that defines the common difference between
3937 * two adjacent elements in this arithmetic sequence.
3938 */
3939static inline VALUE
3940arith_seq_step(VALUE self)
3941{
3942 struct arith_seq *ptr;
3943 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3944 return ptr->step;
3945}
3946
3947/*
3948 * call-seq: aseq.exclude_end? -> true or false
3949 *
3950 * Returns <code>true</code> if this arithmetic sequence excludes its end value.
3951 */
3952static inline VALUE
3953arith_seq_exclude_end(VALUE self)
3954{
3955 struct arith_seq *ptr;
3956 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3957 return RBOOL(ptr->exclude_end);
3958}
3959
3960static inline int
3961arith_seq_exclude_end_p(VALUE self)
3962{
3963 struct arith_seq *ptr;
3964 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3965 return ptr->exclude_end;
3966}
3967
/*
 * Decomposes +obj+ into begin/end/step/exclude_end components.
 * Accepts an Enumerator::ArithmeticSequence or anything accepted by
 * rb_range_values() (in which case the step defaults to 1).
 * Returns 1 on success, 0 when obj is neither.
 */
int
rb_arithmetic_sequence_extract(VALUE obj, rb_arithmetic_sequence_components_t *component)
{
    if (rb_obj_is_kind_of(obj, rb_cArithSeq)) {
        component->begin = arith_seq_begin(obj);
        component->end = arith_seq_end(obj);
        component->step = arith_seq_step(obj);
        component->exclude_end = arith_seq_exclude_end_p(obj);
        return 1;
    }
    else if (rb_range_values(obj, &component->begin, &component->end, &component->exclude_end)) {
        component->step = INT2FIX(1);
        return 1;
    }

    return 0;
}
3985
3986VALUE
3987rb_arithmetic_sequence_beg_len_step(VALUE obj, long *begp, long *lenp, long *stepp, long len, int err)
3988{
3989 RBIMPL_NONNULL_ARG(begp);
3990 RBIMPL_NONNULL_ARG(lenp);
3991 RBIMPL_NONNULL_ARG(stepp);
3992
3994 if (!rb_arithmetic_sequence_extract(obj, &aseq)) {
3995 return Qfalse;
3996 }
3997
3998 long step = NIL_P(aseq.step) ? 1 : NUM2LONG(aseq.step);
3999 *stepp = step;
4000
4001 if (step < 0) {
4002 if (aseq.exclude_end && !NIL_P(aseq.end)) {
4003 /* Handle exclusion before range reversal */
4004 aseq.end = LONG2NUM(NUM2LONG(aseq.end) + 1);
4005
4006 /* Don't exclude the previous beginning */
4007 aseq.exclude_end = 0;
4008 }
4009 VALUE tmp = aseq.begin;
4010 aseq.begin = aseq.end;
4011 aseq.end = tmp;
4012 }
4013
4014 if (err == 0 && (step < -1 || step > 1)) {
4015 if (rb_range_component_beg_len(aseq.begin, aseq.end, aseq.exclude_end, begp, lenp, len, 1) == Qtrue) {
4016 if (*begp > len)
4017 goto out_of_range;
4018 if (*lenp > len)
4019 goto out_of_range;
4020 return Qtrue;
4021 }
4022 }
4023 else {
4024 return rb_range_component_beg_len(aseq.begin, aseq.end, aseq.exclude_end, begp, lenp, len, err);
4025 }
4026
4027 out_of_range:
4028 rb_raise(rb_eRangeError, "%+"PRIsVALUE" out of range", obj);
4029 return Qnil;
4030}
4031
/* Common implementation behind #first(n): returns an array of at most
 * +num+ leading elements.  Fast paths cover all-Fixnum and Float
 * sequences; other numeric types fall back to Enumerable via super. */
static VALUE
arith_seq_take(VALUE self, VALUE num)
{
    VALUE b, e, s, ary;
    long n;
    int x;

    n = NUM2LONG(num);
    if (n < 0) {
        rb_raise(rb_eArgError, "attempt to take negative size");
    }
    if (n == 0) {
        return rb_ary_new_capa(0);
    }

    b = arith_seq_begin(self);
    e = arith_seq_end(self);
    s = arith_seq_step(self);
    x = arith_seq_exclude_end_p(self);

    /* Endless Fixnum sequence: stay in long arithmetic while the values
     * remain FIXABLE, then continue with Bignum addition. */
    if (FIXNUM_P(b) && NIL_P(e) && FIXNUM_P(s)) {
        long i = FIX2LONG(b), unit = FIX2LONG(s);
        ary = rb_ary_new_capa(n);
        while (n > 0 && FIXABLE(i)) {
            rb_ary_push(ary, LONG2FIX(i));
            i += unit; // FIXABLE + FIXABLE never overflows a long
            --n;
        }
        if (n > 0) {
            b = LONG2NUM(i);
            while (n > 0) {
                rb_ary_push(ary, b);
                b = rb_big_plus(b, s);
                --n;
            }
        }
        return ary;
    }
    else if (FIXNUM_P(b) && FIXNUM_P(e) && FIXNUM_P(s)) {
        /* Bounded Fixnum sequence: iterate in plain longs, guarding
         * against overflow before each increment. */
        long i = FIX2LONG(b);
        long end = FIX2LONG(e);
        long unit = FIX2LONG(s);
        long len;

        if (unit >= 0) {
            /* Make the bound exclusive-style for the `i < end` test. */
            if (!x) end += 1;

            len = end - i;
            if (len < 0) len = 0;
            ary = rb_ary_new_capa((n < len) ? n : len);
            while (n > 0 && i < end) {
                rb_ary_push(ary, LONG2FIX(i));
                if (i > LONG_MAX - unit) break;
                i += unit;
                --n;
            }
        }
        else {
            if (!x) end -= 1;

            len = i - end;
            if (len < 0) len = 0;
            ary = rb_ary_new_capa((n < len) ? n : len);
            while (n > 0 && i > end) {
                rb_ary_push(ary, LONG2FIX(i));
                if (i < LONG_MIN - unit) break;
                i += unit;
                --n;
            }
        }
        return ary;
    }
    else if (RB_FLOAT_TYPE_P(b) || RB_FLOAT_TYPE_P(e) || RB_FLOAT_TYPE_P(s)) {
        /* generate values like ruby_float_step */

        double unit = NUM2DBL(s);
        double beg = NUM2DBL(b);
        double end = NIL_P(e) ? (unit < 0 ? -1 : 1)*HUGE_VAL : NUM2DBL(e);
        double len = ruby_float_step_size(beg, end, unit, x);
        long i;

        if (n > len)
            n = (long)len;

        if (isinf(unit)) {
            /* An infinite step leaves at most the first value in range. */
            if (len > 0) {
                ary = rb_ary_new_capa(1);
                rb_ary_push(ary, DBL2NUM(beg));
            }
            else {
                ary = rb_ary_new_capa(0);
            }
        }
        else if (unit == 0) {
            /* Zero step: the first value repeats len times. */
            VALUE val = DBL2NUM(beg);
            ary = rb_ary_new_capa(n);
            for (i = 0; i < len; ++i) {
                rb_ary_push(ary, val);
            }
        }
        else {
            ary = rb_ary_new_capa(n);
            for (i = 0; i < n; ++i) {
                /* Each element is computed from the origin (i*unit+beg)
                 * rather than by repeated addition, then clamped to end. */
                double d = i*unit+beg;
                if (unit >= 0 ? end < d : d < end) d = end;
                rb_ary_push(ary, DBL2NUM(d));
            }
        }

        return ary;
    }

    /* Generic numeric types: defer to the superclass implementation. */
    {
        VALUE argv[1];
        argv[0] = num;
        return rb_call_super(1, argv);
    }
}
4150
/*
 * call-seq:
 *   aseq.first -> num or nil
 *   aseq.first(n) -> an_array
 *
 * Returns the first number in this arithmetic sequence,
 * or an array of the first +n+ elements.
 */
static VALUE
arith_seq_first(int argc, VALUE *argv, VALUE self)
{
    VALUE b, e, s;

    rb_check_arity(argc, 0, 1);

    b = arith_seq_begin(self);
    e = arith_seq_end(self);
    s = arith_seq_step(self);
    if (argc == 0) {
        if (NIL_P(b)) {
            return Qnil;
        }
        if (!NIL_P(e)) {
            /* Empty sequence: begin already lies past end in the
             * direction of the step, so there is no first element. */
            VALUE zero = INT2FIX(0);
            int r = rb_cmpint(rb_num_coerce_cmp(s, zero, idCmp), s, zero);
            if (r > 0 && RTEST(rb_funcall(b, '>', 1, e))) {
                return Qnil;
            }
            if (r < 0 && RTEST(rb_funcall(b, '<', 1, e))) {
                return Qnil;
            }
        }
        return b;
    }

    return arith_seq_take(self, argv[0]);
}
4188
4189static inline VALUE
4190num_plus(VALUE a, VALUE b)
4191{
4192 if (RB_INTEGER_TYPE_P(a)) {
4193 return rb_int_plus(a, b);
4194 }
4195 else if (RB_FLOAT_TYPE_P(a)) {
4196 return rb_float_plus(a, b);
4197 }
4198 else if (RB_TYPE_P(a, T_RATIONAL)) {
4199 return rb_rational_plus(a, b);
4200 }
4201 else {
4202 return rb_funcallv(a, '+', 1, &b);
4203 }
4204}
4205
4206static inline VALUE
4207num_minus(VALUE a, VALUE b)
4208{
4209 if (RB_INTEGER_TYPE_P(a)) {
4210 return rb_int_minus(a, b);
4211 }
4212 else if (RB_FLOAT_TYPE_P(a)) {
4213 return rb_float_minus(a, b);
4214 }
4215 else if (RB_TYPE_P(a, T_RATIONAL)) {
4216 return rb_rational_minus(a, b);
4217 }
4218 else {
4219 return rb_funcallv(a, '-', 1, &b);
4220 }
4221}
4222
4223static inline VALUE
4224num_mul(VALUE a, VALUE b)
4225{
4226 if (RB_INTEGER_TYPE_P(a)) {
4227 return rb_int_mul(a, b);
4228 }
4229 else if (RB_FLOAT_TYPE_P(a)) {
4230 return rb_float_mul(a, b);
4231 }
4232 else if (RB_TYPE_P(a, T_RATIONAL)) {
4233 return rb_rational_mul(a, b);
4234 }
4235 else {
4236 return rb_funcallv(a, '*', 1, &b);
4237 }
4238}
4239
4240static inline VALUE
4241num_idiv(VALUE a, VALUE b)
4242{
4243 VALUE q;
4244 if (RB_INTEGER_TYPE_P(a)) {
4245 q = rb_int_idiv(a, b);
4246 }
4247 else if (RB_FLOAT_TYPE_P(a)) {
4248 q = rb_float_div(a, b);
4249 }
4250 else if (RB_TYPE_P(a, T_RATIONAL)) {
4251 q = rb_rational_div(a, b);
4252 }
4253 else {
4254 q = rb_funcallv(a, idDiv, 1, &b);
4255 }
4256
4257 if (RB_INTEGER_TYPE_P(q)) {
4258 return q;
4259 }
4260 else if (RB_FLOAT_TYPE_P(q)) {
4261 return rb_float_floor(q, 0);
4262 }
4263 else if (RB_TYPE_P(q, T_RATIONAL)) {
4264 return rb_rational_floor(q, 0);
4265 }
4266 else {
4267 return rb_funcall(q, rb_intern("floor"), 0);
4268 }
4269}
4270
/*
 * call-seq:
 *   aseq.last -> num or nil
 *   aseq.last(n) -> an_array
 *
 * Returns the last number in this arithmetic sequence,
 * or an array of the last +n+ elements.
 */
static VALUE
arith_seq_last(int argc, VALUE *argv, VALUE self)
{
    VALUE b, e, s, len_1, last, nv, ary;
    int last_is_adjusted;
    long n;
    VALUE len;

    e = arith_seq_end(self);
    if (NIL_P(e)) {
        rb_raise(rb_eRangeError,
                 "cannot get the last element of endless arithmetic sequence");
    }

    b = arith_seq_begin(self);
    s = arith_seq_step(self);

    /* len_1 = floor((e - b) / s): index of the last grid point. */
    len_1 = num_idiv(num_minus(e, b), s);
    if (rb_num_negative_int_p(len_1)) {
        /* Empty sequence: begin already lies past end. */
        if (argc == 0) {
            return Qnil;
        }
        return rb_ary_new_capa(0);
    }

    last = num_plus(b, num_mul(s, len_1));
    /* Step back once when the end is excluded and falls exactly on it. */
    if ((last_is_adjusted = arith_seq_exclude_end_p(self) && rb_equal(last, e))) {
        last = num_minus(last, s);
    }

    if (argc == 0) {
        return last;
    }

    if (last_is_adjusted) {
        len = len_1;
    }
    else {
        len = rb_int_plus(len_1, INT2FIX(1));
    }

    rb_scan_args(argc, argv, "1", &nv);
    if (!RB_INTEGER_TYPE_P(nv)) {
        nv = rb_to_int(nv);
    }
    /* Clamp the request to the sequence length. */
    if (RTEST(rb_int_gt(nv, len))) {
        nv = len;
    }
    n = NUM2LONG(nv);
    if (n < 0) {
        rb_raise(rb_eArgError, "negative array size");
    }

    /* Walk forward from (last - n*s) so the array ends at +last+. */
    ary = rb_ary_new_capa(n);
    b = rb_int_minus(last, rb_int_mul(s, nv));
    while (n) {
        b = rb_int_plus(b, s);
        rb_ary_push(ary, b);
        --n;
    }

    return ary;
}
4341
/*
 * call-seq:
 *   aseq.inspect -> string
 *
 * Convert this arithmetic sequence to a printable form.
 */
static VALUE
arith_seq_inspect(VALUE self)
{
    struct enumerator *e;
    VALUE eobj, str;
    int range_p;

    TypedData_Get_Struct(self, struct enumerator, &enumerator_data_type, e);

    eobj = rb_attr_get(self, id_receiver);
    if (NIL_P(eobj)) {
        eobj = e->obj;
    }

    /* Parenthesize Range receivers so the output reads "((1..9).step(2))"
     * rather than the ambiguous "(1..9.step(2))". */
    range_p = RTEST(rb_obj_is_kind_of(eobj, rb_cRange));
    str = rb_sprintf("(%s%"PRIsVALUE"%s.", range_p ? "(" : "", eobj, range_p ? ")" : "");

    rb_str_buf_append(str, rb_id2str(e->meth));
    append_method_args(eobj, str, e->args);

    rb_str_buf_cat2(str, ")");

    return str;
}
4372
4373/*
4374 * call-seq:
4375 * aseq == obj -> true or false
4376 *
4377 * Returns <code>true</code> only if +obj+ is an Enumerator::ArithmeticSequence,
4378 * has equivalent begin, end, step, and exclude_end? settings.
4379 */
4380static VALUE
4381arith_seq_eq(VALUE self, VALUE other)
4382{
4383 if (!RTEST(rb_obj_is_kind_of(other, rb_cArithSeq))) {
4384 return Qfalse;
4385 }
4386
4387 if (!rb_equal(arith_seq_begin(self), arith_seq_begin(other))) {
4388 return Qfalse;
4389 }
4390
4391 if (!rb_equal(arith_seq_end(self), arith_seq_end(other))) {
4392 return Qfalse;
4393 }
4394
4395 if (!rb_equal(arith_seq_step(self), arith_seq_step(other))) {
4396 return Qfalse;
4397 }
4398
4399 if (arith_seq_exclude_end_p(self) != arith_seq_exclude_end_p(other)) {
4400 return Qfalse;
4401 }
4402
4403 return Qtrue;
4404}
4405
4406/*
4407 * call-seq:
4408 * aseq.hash -> integer
4409 *
4410 * Compute a hash-value for this arithmetic sequence.
4411 * Two arithmetic sequences with same begin, end, step, and exclude_end?
4412 * values will generate the same hash-value.
4413 *
4414 * See also Object#hash.
4415 */
4416static VALUE
4417arith_seq_hash(VALUE self)
4418{
4419 st_index_t hash;
4420 VALUE v;
4421
4422 hash = rb_hash_start(arith_seq_exclude_end_p(self));
4423 v = rb_hash(arith_seq_begin(self));
4424 hash = rb_hash_uint(hash, NUM2LONG(v));
4425 v = rb_hash(arith_seq_end(self));
4426 hash = rb_hash_uint(hash, NUM2LONG(v));
4427 v = rb_hash(arith_seq_step(self));
4428 hash = rb_hash_uint(hash, NUM2LONG(v));
4429 hash = rb_hash_end(hash);
4430
4431 return ST2FIX(hash);
4432}
4433
4434#define NUM_GE(x, y) RTEST(rb_num_coerce_relop((x), (y), idGE))
4435
4437 VALUE current;
4438 VALUE end;
4439 VALUE step;
4440 int excl;
4441};
4442
/*
 * call-seq:
 *   aseq.each {|i| block } -> aseq
 *   aseq.each -> aseq
 *
 * Iterates over the elements of this arithmetic sequence,
 * yielding each to the block.  Without a block, returns self.
 */
static VALUE
arith_seq_each(VALUE self)
{
    VALUE c, e, s, len_1, last;
    int x;

    if (!rb_block_given_p()) return self;

    c = arith_seq_begin(self);
    e = arith_seq_end(self);
    s = arith_seq_step(self);
    x = arith_seq_exclude_end_p(self);

    /* Float components are delegated to ruby_float_step(), which yields
     * the values itself and returns nonzero when it handled the loop. */
    if (!RB_TYPE_P(s, T_COMPLEX) && ruby_float_step(c, e, s, x, TRUE)) {
        return self;
    }

    /* Endless sequence: yield forever. */
    if (NIL_P(e)) {
        while (1) {
            rb_yield(c);
            c = rb_int_plus(c, s);
        }

        return self;
    }

    /* Zero step: the same value repeats forever. */
    if (rb_equal(s, INT2FIX(0))) {
        while (1) {
            rb_yield(c);
        }

        return self;
    }

    /* Find the final grid point; step back once when the end is
     * excluded and lies exactly on the grid. */
    len_1 = num_idiv(num_minus(e, c), s);
    last = num_plus(c, num_mul(s, len_1));
    if (x && rb_equal(last, e)) {
        last = num_minus(last, s);
    }

    if (rb_num_negative_int_p(s)) {
        while (NUM_GE(c, last)) {
            rb_yield(c);
            c = num_plus(c, s);
        }
    }
    else {
        while (NUM_GE(last, c)) {
            rb_yield(c);
            c = num_plus(c, s);
        }
    }

    return self;
}
4503
/*
 * call-seq:
 *   aseq.size -> num or nil
 *
 * Returns the number of elements in this arithmetic sequence if it is a finite
 * sequence.  Otherwise, returns <code>nil</code>.
 */
static VALUE
arith_seq_size(VALUE self)
{
    VALUE b, e, s, len_1, len, last;
    int x;

    b = arith_seq_begin(self);
    e = arith_seq_end(self);
    s = arith_seq_step(self);
    x = arith_seq_exclude_end_p(self);

    /* Any Float component: count the steps the same way float stepping
     * does, so the size matches what #each would yield. */
    if (RB_FLOAT_TYPE_P(b) || RB_FLOAT_TYPE_P(e) || RB_FLOAT_TYPE_P(s)) {
        double ee, n;

        if (NIL_P(e)) {
            if (rb_num_negative_int_p(s)) {
                ee = -HUGE_VAL;
            }
            else {
                ee = HUGE_VAL;
            }
        }
        else {
            ee = NUM2DBL(e);
        }

        n = ruby_float_step_size(NUM2DBL(b), ee, NUM2DBL(s), x);
        if (isinf(n)) return DBL2NUM(n);
        if (POSFIXABLE(n)) return LONG2FIX((long)n);
        return rb_dbl2big(n);
    }

    /* Endless sequence. */
    if (NIL_P(e)) {
        return DBL2NUM(HUGE_VAL);
    }

    if (!rb_obj_is_kind_of(s, rb_cNumeric)) {
        s = rb_to_int(s);
    }

    /* A zero step repeats the same value forever. */
    if (rb_equal(s, INT2FIX(0))) {
        return DBL2NUM(HUGE_VAL);
    }

    len_1 = rb_int_idiv(rb_int_minus(e, b), s);
    if (rb_num_negative_int_p(len_1)) {
        return INT2FIX(0);
    }

    /* Drop the final grid point when the end is excluded and lies on it. */
    last = rb_int_plus(b, rb_int_mul(s, len_1));
    if (x && rb_equal(last, e)) {
        len = len_1;
    }
    else {
        len = rb_int_plus(len_1, INT2FIX(1));
    }

    return len;
}
4570
4571#define sym(name) ID2SYM(rb_intern_const(name))
4572void
4573InitVM_Enumerator(void)
4574{
4575 ID id_private = rb_intern_const("private");
4576
4577 rb_define_method(rb_mKernel, "to_enum", obj_to_enum, -1);
4578 rb_define_method(rb_mKernel, "enum_for", obj_to_enum, -1);
4579
4580 rb_cEnumerator = rb_define_class("Enumerator", rb_cObject);
4582
4583 rb_define_alloc_func(rb_cEnumerator, enumerator_allocate);
4584 rb_define_method(rb_cEnumerator, "initialize", enumerator_initialize, -1);
4585 rb_define_method(rb_cEnumerator, "initialize_copy", enumerator_init_copy, 1);
4586 rb_define_method(rb_cEnumerator, "each", enumerator_each, -1);
4587 rb_define_method(rb_cEnumerator, "each_with_index", enumerator_each_with_index, 0);
4588 rb_define_method(rb_cEnumerator, "each_with_object", enumerator_with_object, 1);
4589 rb_define_method(rb_cEnumerator, "with_index", enumerator_with_index, -1);
4590 rb_define_method(rb_cEnumerator, "with_object", enumerator_with_object, 1);
4591 rb_define_method(rb_cEnumerator, "next_values", enumerator_next_values, 0);
4592 rb_define_method(rb_cEnumerator, "peek_values", enumerator_peek_values_m, 0);
4593 rb_define_method(rb_cEnumerator, "next", enumerator_next, 0);
4594 rb_define_method(rb_cEnumerator, "peek", enumerator_peek, 0);
4595 rb_define_method(rb_cEnumerator, "feed", enumerator_feed, 1);
4596 rb_define_method(rb_cEnumerator, "rewind", enumerator_rewind, 0);
4597 rb_define_method(rb_cEnumerator, "inspect", enumerator_inspect, 0);
4598 rb_define_method(rb_cEnumerator, "size", enumerator_size, 0);
4599 rb_define_method(rb_cEnumerator, "+", enumerator_plus, 1);
4601
4602 /* Lazy */
4604 rb_define_method(rb_mEnumerable, "lazy", enumerable_lazy, 0);
4605
4606 rb_define_alias(rb_cLazy, "_enumerable_map", "map");
4607 rb_define_alias(rb_cLazy, "_enumerable_collect", "collect");
4608 rb_define_alias(rb_cLazy, "_enumerable_flat_map", "flat_map");
4609 rb_define_alias(rb_cLazy, "_enumerable_collect_concat", "collect_concat");
4610 rb_define_alias(rb_cLazy, "_enumerable_select", "select");
4611 rb_define_alias(rb_cLazy, "_enumerable_find_all", "find_all");
4612 rb_define_alias(rb_cLazy, "_enumerable_filter", "filter");
4613 rb_define_alias(rb_cLazy, "_enumerable_filter_map", "filter_map");
4614 rb_define_alias(rb_cLazy, "_enumerable_reject", "reject");
4615 rb_define_alias(rb_cLazy, "_enumerable_grep", "grep");
4616 rb_define_alias(rb_cLazy, "_enumerable_grep_v", "grep_v");
4617 rb_define_alias(rb_cLazy, "_enumerable_zip", "zip");
4618 rb_define_alias(rb_cLazy, "_enumerable_take", "take");
4619 rb_define_alias(rb_cLazy, "_enumerable_take_while", "take_while");
4620 rb_define_alias(rb_cLazy, "_enumerable_drop", "drop");
4621 rb_define_alias(rb_cLazy, "_enumerable_drop_while", "drop_while");
4622 rb_define_alias(rb_cLazy, "_enumerable_uniq", "uniq");
4623 rb_define_private_method(rb_cLazy, "_enumerable_with_index", enumerator_with_index, -1);
4624
4625 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_map"));
4626 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_collect"));
4627 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_flat_map"));
4628 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_collect_concat"));
4629 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_select"));
4630 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_find_all"));
4631 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_filter"));
4632 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_filter_map"));
4633 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_reject"));
4634 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_grep"));
4635 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_grep_v"));
4636 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_zip"));
4637 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_take"));
4638 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_take_while"));
4639 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_drop"));
4640 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_drop_while"));
4641 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_uniq"));
4642
4643 rb_define_method(rb_cLazy, "initialize", lazy_initialize, -1);
4644 rb_define_method(rb_cLazy, "to_enum", lazy_to_enum, -1);
4645 rb_define_method(rb_cLazy, "enum_for", lazy_to_enum, -1);
4646 rb_define_method(rb_cLazy, "eager", lazy_eager, 0);
4647 rb_define_method(rb_cLazy, "map", lazy_map, 0);
4648 rb_define_method(rb_cLazy, "collect", lazy_map, 0);
4649 rb_define_method(rb_cLazy, "flat_map", lazy_flat_map, 0);
4650 rb_define_method(rb_cLazy, "collect_concat", lazy_flat_map, 0);
4651 rb_define_method(rb_cLazy, "select", lazy_select, 0);
4652 rb_define_method(rb_cLazy, "find_all", lazy_select, 0);
4653 rb_define_method(rb_cLazy, "filter", lazy_select, 0);
4654 rb_define_method(rb_cLazy, "filter_map", lazy_filter_map, 0);
4655 rb_define_method(rb_cLazy, "reject", lazy_reject, 0);
4656 rb_define_method(rb_cLazy, "grep", lazy_grep, 1);
4657 rb_define_method(rb_cLazy, "grep_v", lazy_grep_v, 1);
4658 rb_define_method(rb_cLazy, "zip", lazy_zip, -1);
4659 rb_define_method(rb_cLazy, "take", lazy_take, 1);
4660 rb_define_method(rb_cLazy, "take_while", lazy_take_while, 0);
4661 rb_define_method(rb_cLazy, "drop", lazy_drop, 1);
4662 rb_define_method(rb_cLazy, "drop_while", lazy_drop_while, 0);
4663 rb_define_method(rb_cLazy, "lazy", lazy_lazy, 0);
4664 rb_define_method(rb_cLazy, "chunk", lazy_super, -1);
4665 rb_define_method(rb_cLazy, "slice_before", lazy_super, -1);
4666 rb_define_method(rb_cLazy, "slice_after", lazy_super, -1);
4667 rb_define_method(rb_cLazy, "slice_when", lazy_super, -1);
4668 rb_define_method(rb_cLazy, "chunk_while", lazy_super, -1);
4669 rb_define_method(rb_cLazy, "uniq", lazy_uniq, 0);
4670 rb_define_method(rb_cLazy, "compact", lazy_compact, 0);
4671 rb_define_method(rb_cLazy, "with_index", lazy_with_index, -1);
4672 rb_define_method(rb_cLazy, "tap_each", lazy_tap_each, 0);
4673
4674 lazy_use_super_method = rb_hash_new_with_size(18);
4675 rb_hash_aset(lazy_use_super_method, sym("map"), sym("_enumerable_map"));
4676 rb_hash_aset(lazy_use_super_method, sym("collect"), sym("_enumerable_collect"));
4677 rb_hash_aset(lazy_use_super_method, sym("flat_map"), sym("_enumerable_flat_map"));
4678 rb_hash_aset(lazy_use_super_method, sym("collect_concat"), sym("_enumerable_collect_concat"));
4679 rb_hash_aset(lazy_use_super_method, sym("select"), sym("_enumerable_select"));
4680 rb_hash_aset(lazy_use_super_method, sym("find_all"), sym("_enumerable_find_all"));
4681 rb_hash_aset(lazy_use_super_method, sym("filter"), sym("_enumerable_filter"));
4682 rb_hash_aset(lazy_use_super_method, sym("filter_map"), sym("_enumerable_filter_map"));
4683 rb_hash_aset(lazy_use_super_method, sym("reject"), sym("_enumerable_reject"));
4684 rb_hash_aset(lazy_use_super_method, sym("grep"), sym("_enumerable_grep"));
4685 rb_hash_aset(lazy_use_super_method, sym("grep_v"), sym("_enumerable_grep_v"));
4686 rb_hash_aset(lazy_use_super_method, sym("zip"), sym("_enumerable_zip"));
4687 rb_hash_aset(lazy_use_super_method, sym("take"), sym("_enumerable_take"));
4688 rb_hash_aset(lazy_use_super_method, sym("take_while"), sym("_enumerable_take_while"));
4689 rb_hash_aset(lazy_use_super_method, sym("drop"), sym("_enumerable_drop"));
4690 rb_hash_aset(lazy_use_super_method, sym("drop_while"), sym("_enumerable_drop_while"));
4691 rb_hash_aset(lazy_use_super_method, sym("uniq"), sym("_enumerable_uniq"));
4692 rb_hash_aset(lazy_use_super_method, sym("with_index"), sym("_enumerable_with_index"));
4693 rb_obj_freeze(lazy_use_super_method);
4694 rb_vm_register_global_object(lazy_use_super_method);
4695
4696#if 0 /* for RDoc */
4697 rb_define_method(rb_cLazy, "to_a", lazy_to_a, 0);
4698 rb_define_method(rb_cLazy, "chunk", lazy_chunk, 0);
4699 rb_define_method(rb_cLazy, "chunk_while", lazy_chunk_while, 0);
4700 rb_define_method(rb_cLazy, "slice_after", lazy_slice_after, 0);
4701 rb_define_method(rb_cLazy, "slice_before", lazy_slice_before, 0);
4702 rb_define_method(rb_cLazy, "slice_when", lazy_slice_when, 0);
4703#endif
4704 rb_define_alias(rb_cLazy, "force", "to_a");
4705
4707 rb_define_method(rb_eStopIteration, "result", stop_result, 0);
4708
4709 /* Generator */
4710 rb_cGenerator = rb_define_class_under(rb_cEnumerator, "Generator", rb_cObject);
4711 rb_include_module(rb_cGenerator, rb_mEnumerable);
4712 rb_define_alloc_func(rb_cGenerator, generator_allocate);
4713 rb_define_method(rb_cGenerator, "initialize", generator_initialize, -1);
4714 rb_define_method(rb_cGenerator, "initialize_copy", generator_init_copy, 1);
4715 rb_define_method(rb_cGenerator, "each", generator_each, -1);
4716
4717 /* Yielder */
4718 rb_cYielder = rb_define_class_under(rb_cEnumerator, "Yielder", rb_cObject);
4719 rb_define_alloc_func(rb_cYielder, yielder_allocate);
4720 rb_define_method(rb_cYielder, "initialize", yielder_initialize, 0);
4721 rb_define_method(rb_cYielder, "yield", yielder_yield, -2);
4722 rb_define_method(rb_cYielder, "<<", yielder_yield_push, 1);
4723 rb_define_method(rb_cYielder, "to_proc", yielder_to_proc, 0);
4724
4725 /* Producer */
4726 rb_cEnumProducer = rb_define_class_under(rb_cEnumerator, "Producer", rb_cObject);
4727 rb_define_alloc_func(rb_cEnumProducer, producer_allocate);
4728 rb_define_method(rb_cEnumProducer, "each", producer_each, 0);
4729 rb_define_singleton_method(rb_cEnumerator, "produce", enumerator_s_produce, -1);
4730
4731 /* Chain */
4732 rb_cEnumChain = rb_define_class_under(rb_cEnumerator, "Chain", rb_cEnumerator);
4733 rb_define_alloc_func(rb_cEnumChain, enum_chain_allocate);
4734 rb_define_method(rb_cEnumChain, "initialize", enum_chain_initialize, -2);
4735 rb_define_method(rb_cEnumChain, "initialize_copy", enum_chain_init_copy, 1);
4736 rb_define_method(rb_cEnumChain, "each", enum_chain_each, -1);
4737 rb_define_method(rb_cEnumChain, "size", enum_chain_size, 0);
4738 rb_define_method(rb_cEnumChain, "rewind", enum_chain_rewind, 0);
4739 rb_define_method(rb_cEnumChain, "inspect", enum_chain_inspect, 0);
4740 rb_undef_method(rb_cEnumChain, "feed");
4741 rb_undef_method(rb_cEnumChain, "next");
4742 rb_undef_method(rb_cEnumChain, "next_values");
4743 rb_undef_method(rb_cEnumChain, "peek");
4744 rb_undef_method(rb_cEnumChain, "peek_values");
4745
4746 /* Product */
4747 rb_cEnumProduct = rb_define_class_under(rb_cEnumerator, "Product", rb_cEnumerator);
4748 rb_define_alloc_func(rb_cEnumProduct, enum_product_allocate);
4749 rb_define_method(rb_cEnumProduct, "initialize", enum_product_initialize, -1);
4750 rb_define_method(rb_cEnumProduct, "initialize_copy", enum_product_init_copy, 1);
4751 rb_define_method(rb_cEnumProduct, "each", enum_product_each, 0);
4752 rb_define_method(rb_cEnumProduct, "size", enum_product_size, 0);
4753 rb_define_method(rb_cEnumProduct, "rewind", enum_product_rewind, 0);
4754 rb_define_method(rb_cEnumProduct, "inspect", enum_product_inspect, 0);
4755 rb_undef_method(rb_cEnumProduct, "feed");
4756 rb_undef_method(rb_cEnumProduct, "next");
4757 rb_undef_method(rb_cEnumProduct, "next_values");
4758 rb_undef_method(rb_cEnumProduct, "peek");
4759 rb_undef_method(rb_cEnumProduct, "peek_values");
4760 rb_define_singleton_method(rb_cEnumerator, "product", enumerator_s_product, -1);
4761
4762 /* ArithmeticSequence */
4763 rb_cArithSeq = rb_define_class_under(rb_cEnumerator, "ArithmeticSequence", rb_cEnumerator);
4764 rb_undef_alloc_func(rb_cArithSeq);
4765 rb_undef_method(CLASS_OF(rb_cArithSeq), "new");
4766 rb_define_method(rb_cArithSeq, "begin", arith_seq_begin, 0);
4767 rb_define_method(rb_cArithSeq, "end", arith_seq_end, 0);
4768 rb_define_method(rb_cArithSeq, "exclude_end?", arith_seq_exclude_end, 0);
4769 rb_define_method(rb_cArithSeq, "step", arith_seq_step, 0);
4770 rb_define_method(rb_cArithSeq, "first", arith_seq_first, -1);
4771 rb_define_method(rb_cArithSeq, "last", arith_seq_last, -1);
4772 rb_define_method(rb_cArithSeq, "inspect", arith_seq_inspect, 0);
4773 rb_define_method(rb_cArithSeq, "==", arith_seq_eq, 1);
4774 rb_define_method(rb_cArithSeq, "===", arith_seq_eq, 1);
4775 rb_define_method(rb_cArithSeq, "eql?", arith_seq_eq, 1);
4776 rb_define_method(rb_cArithSeq, "hash", arith_seq_hash, 0);
4777 rb_define_method(rb_cArithSeq, "each", arith_seq_each, 0);
4778 rb_define_method(rb_cArithSeq, "size", arith_seq_size, 0);
4779
4780 rb_provide("enumerator.so"); /* for backward compatibility */
4781}
4782#undef sym
4783
void
Init_Enumerator(void)
{
    /* One-time, per-process setup for enumerator.c: intern the method-name
     * IDs and Symbols used throughout this file, then run the per-VM class
     * and method definitions. */
    id_rewind = rb_intern_const("rewind");
    id_next = rb_intern_const("next");
    id_result = rb_intern_const("result");
    id_receiver = rb_intern_const("receiver");
    id_arguments = rb_intern_const("arguments");
    id_memo = rb_intern_const("memo");
    id_method = rb_intern_const("method");
    id_force = rb_intern_const("force");
    id_to_enum = rb_intern_const("to_enum");
    id_each_entry = rb_intern_const("each_entry");
    /* id_each is predefined in id.h, so it only needs wrapping in a Symbol. */
    sym_each = ID2SYM(id_each);
    sym_yield = ID2SYM(rb_intern_const("yield"));

    /* Expands to InitVM_Enumerator(): defines Enumerator, Enumerator::Lazy,
     * Generator, Yielder, Producer, Chain, Product, ArithmeticSequence, etc.
     * for the current VM. */
    InitVM(Enumerator);
}
#define rb_define_method(klass, mid, func, arity)
Defines klass#mid.
#define rb_define_singleton_method(klass, mid, func, arity)
Defines klass.mid.
#define rb_define_private_method(klass, mid, func, arity)
Defines klass#mid and makes it private.
void rb_include_module(VALUE klass, VALUE module)
Includes a module to a class.
Definition class.c:1603
VALUE rb_define_class(const char *name, VALUE super)
Defines a top-level class.
Definition class.c:1396
VALUE rb_define_class_under(VALUE outer, const char *name, VALUE super)
Defines a class under the namespace of outer.
Definition class.c:1427
void rb_define_alias(VALUE klass, const char *name1, const char *name2)
Defines an alias of a method.
Definition class.c:2771
void rb_need_block(void)
Declares that the current method needs a block.
Definition eval.c:1039
void rb_undef_method(VALUE klass, const char *name)
Undefines a method: marks the named method of the class as explicitly undefined, so calls to it fail even if a superclass defines it.
Definition class.c:2581
int rb_scan_args_kw(int kw_flag, int argc, const VALUE *argv, const char *fmt,...)
Identical to rb_scan_args(), except it also accepts kw_splat.
Definition class.c:3074
int rb_scan_args(int argc, const VALUE *argv, const char *fmt,...)
Retrieves argument from argc and argv to given VALUE references according to the format string.
Definition class.c:3061
int rb_keyword_given_p(void)
Determines if the current method is given a keyword argument.
Definition eval.c:1031
int rb_block_given_p(void)
Determines if the current method is given a block.
Definition eval.c:1018
int rb_get_kwargs(VALUE keyword_hash, const ID *table, int required, int optional, VALUE *values)
Keyword argument deconstructor.
Definition class.c:2850
#define T_COMPLEX
Old name of RUBY_T_COMPLEX.
Definition value_type.h:59
#define RB_INTEGER_TYPE_P
Old name of rb_integer_type_p.
Definition value_type.h:87
#define rb_str_buf_cat2
Old name of rb_usascii_str_new_cstr.
Definition string.h:1683
#define OBJ_INIT_COPY(obj, orig)
Old name of RB_OBJ_INIT_COPY.
Definition object.h:41
#define RFLOAT_VALUE
Old name of rb_float_value.
Definition double.h:28
#define Qundef
Old name of RUBY_Qundef.
#define INT2FIX
Old name of RB_INT2FIX.
Definition long.h:48
#define T_FLOAT
Old name of RUBY_T_FLOAT.
Definition value_type.h:64
#define ID2SYM
Old name of RB_ID2SYM.
Definition symbol.h:44
#define UNREACHABLE_RETURN
Old name of RBIMPL_UNREACHABLE_RETURN.
Definition assume.h:29
#define CLASS_OF
Old name of rb_class_of.
Definition globals.h:205
#define rb_ary_new4
Old name of rb_ary_new_from_values.
Definition array.h:659
#define FIXABLE
Old name of RB_FIXABLE.
Definition fixnum.h:25
#define rb_exc_new2
Old name of rb_exc_new_cstr.
Definition error.h:37
#define LONG2FIX
Old name of RB_INT2FIX.
Definition long.h:49
#define T_RATIONAL
Old name of RUBY_T_RATIONAL.
Definition value_type.h:76
#define T_HASH
Old name of RUBY_T_HASH.
Definition value_type.h:65
#define NUM2DBL
Old name of rb_num2dbl.
Definition double.h:27
#define rb_ary_new3
Old name of rb_ary_new_from_args.
Definition array.h:658
#define LONG2NUM
Old name of RB_LONG2NUM.
Definition long.h:50
#define Qtrue
Old name of RUBY_Qtrue.
#define ST2FIX
Old name of RB_ST2FIX.
Definition st_data_t.h:33
#define Qnil
Old name of RUBY_Qnil.
#define Qfalse
Old name of RUBY_Qfalse.
#define FIX2LONG
Old name of RB_FIX2LONG.
Definition long.h:46
#define T_ARRAY
Old name of RUBY_T_ARRAY.
Definition value_type.h:56
#define NIL_P
Old name of RB_NIL_P.
#define ALLOCV_N
Old name of RB_ALLOCV_N.
Definition memory.h:405
#define POSFIXABLE
Old name of RB_POSFIXABLE.
Definition fixnum.h:29
#define T_SYMBOL
Old name of RUBY_T_SYMBOL.
Definition value_type.h:80
#define DBL2NUM
Old name of rb_float_new.
Definition double.h:29
#define NUM2LONG
Old name of RB_NUM2LONG.
Definition long.h:51
#define FIXNUM_P
Old name of RB_FIXNUM_P.
#define rb_ary_new2
Old name of rb_ary_new_capa.
Definition array.h:657
#define ALLOCV_END
Old name of RB_ALLOCV_END.
Definition memory.h:406
#define SYMBOL_P
Old name of RB_SYMBOL_P.
Definition value_type.h:88
void rb_exc_raise(VALUE mesg)
Raises an exception in the current thread.
Definition eval.c:661
void rb_iter_break(void)
Breaks from a block.
Definition vm.c:2287
VALUE rb_eRangeError
RangeError exception.
Definition error.c:1431
VALUE rb_eTypeError
TypeError exception.
Definition error.c:1427
VALUE rb_eRuntimeError
RuntimeError exception.
Definition error.c:1425
VALUE rb_eStopIteration
StopIteration exception.
Definition enumerator.c:195
void rb_warn(const char *fmt,...)
Identical to rb_warning(), except it reports unless $VERBOSE is nil.
Definition error.c:467
VALUE rb_exc_new_str(VALUE etype, VALUE str)
Identical to rb_exc_new_cstr(), except it takes a Ruby's string instead of C's.
Definition error.c:1478
VALUE rb_eIndexError
IndexError exception.
Definition error.c:1429
VALUE rb_mKernel
Kernel module.
Definition object.c:60
VALUE rb_cObject
Object class.
Definition object.c:61
VALUE rb_mEnumerable
Enumerable module.
Definition enum.c:27
VALUE rb_cEnumerator
Enumerator class.
Definition enumerator.c:178
VALUE rb_obj_hide(VALUE obj)
Make the object invisible from Ruby code.
Definition object.c:95
VALUE rb_cNumeric
Numeric class.
Definition numeric.c:197
VALUE rb_obj_class(VALUE obj)
Queries the class of an object.
Definition object.c:235
VALUE rb_obj_dup(VALUE obj)
Duplicates the given object.
Definition object.c:553
VALUE rb_inspect(VALUE obj)
Generates a human-readable textual representation of the given object.
Definition object.c:657
VALUE rb_cRange
Range class.
Definition range.c:31
VALUE rb_equal(VALUE lhs, VALUE rhs)
This function is an optimised version of calling #==.
Definition object.c:141
VALUE rb_obj_is_kind_of(VALUE obj, VALUE klass)
Queries if the given object is an instance (of possibly descendants) of the given class.
Definition object.c:894
VALUE rb_obj_freeze(VALUE obj)
Just calls rb_obj_freeze_inline() inside.
Definition object.c:1313
VALUE rb_to_int(VALUE val)
Identical to rb_check_to_int(), except it raises in case of conversion mismatch.
Definition object.c:3335
#define RB_OBJ_WRITE(old, slot, young)
Declaration of a "back" pointer.
Definition gc.h:456
VALUE rb_funcall(VALUE recv, ID mid, int n,...)
Calls a method.
Definition vm_eval.c:1121
VALUE rb_funcall_with_block(VALUE recv, ID mid, int argc, const VALUE *argv, VALUE procval)
Identical to rb_funcallv_public(), except you can pass a block.
Definition vm_eval.c:1198
#define rb_funcall2
Definition eval.h:207
VALUE rb_call_super(int argc, const VALUE *argv)
This resembles ruby's super.
Definition vm_eval.c:362
VALUE rb_ary_new_from_values(long n, const VALUE *elts)
Identical to rb_ary_new_from_args(), except how objects are passed.
VALUE rb_ary_dup(VALUE ary)
Duplicates an array.
VALUE rb_ary_cat(VALUE ary, const VALUE *train, long len)
Destructively appends multiple elements at the end of the array.
VALUE rb_check_array_type(VALUE obj)
Try converting an object to its array representation using its to_ary method, if any.
VALUE rb_ary_new(void)
Allocates a new, empty array.
VALUE rb_ary_new_capa(long capa)
Identical to rb_ary_new(), except it additionally specifies how many rooms of objects it should alloc...
VALUE rb_ary_resize(VALUE ary, long len)
Expands or shrinks the passed array to the passed length.
VALUE rb_ary_hidden_new(long capa)
Allocates a hidden (no class) empty array.
VALUE rb_ary_push(VALUE ary, VALUE elem)
Special case of rb_ary_cat() that it adds only one element.
VALUE rb_ary_freeze(VALUE obj)
Freeze an array, preventing further modifications.
VALUE rb_ary_entry(VALUE ary, long off)
Queries an element of an array.
void rb_ary_store(VALUE ary, long key, VALUE val)
Destructively stores the passed value to the passed array's passed index.
#define RETURN_SIZED_ENUMERATOR(obj, argc, argv, size_fn)
This roughly resembles return enum_for(__callee__) unless block_given?.
Definition enumerator.h:208
VALUE rb_enumerator_size_func(VALUE recv, VALUE argv, VALUE eobj)
This is the type of functions that rb_enumeratorize_with_size() expects.
Definition enumerator.h:45
static int rb_check_arity(int argc, int min, int max)
Ensures that the passed integer is in the passed range.
Definition error.h:284
void rb_provide(const char *feature)
Declares that the given feature is already provided by someone else.
Definition load.c:695
VALUE rb_num_coerce_cmp(VALUE lhs, VALUE rhs, ID op)
Identical to rb_num_coerce_bin(), except for return values.
Definition numeric.c:485
VALUE rb_obj_method(VALUE recv, VALUE mid)
Creates a method object.
Definition proc.c:2266
VALUE rb_block_proc(void)
Constructs a Proc object from implicitly passed components.
Definition proc.c:988
VALUE rb_proc_call_with_block(VALUE recv, int argc, const VALUE *argv, VALUE proc)
Identical to rb_proc_call(), except you can additionally pass another proc object,...
Definition proc.c:1174
VALUE rb_proc_call_kw(VALUE recv, VALUE args, int kw_splat)
Identical to rb_proc_call(), except you can specify how to handle the last element of the given array...
Definition proc.c:1130
VALUE rb_obj_is_proc(VALUE recv)
Queries if the given object is a proc.
Definition proc.c:122
int rb_range_values(VALUE range, VALUE *begp, VALUE *endp, int *exclp)
Deconstructs a range into its components.
Definition range.c:1861
#define rb_hash_uint(h, i)
Just another name of st_hash_uint.
Definition string.h:943
#define rb_hash_end(h)
Just another name of st_hash_end.
Definition string.h:946
VALUE rb_str_append(VALUE dst, VALUE src)
Identical to rb_str_buf_append(), except it converts the right hand side before concatenating.
Definition string.c:3836
VALUE rb_str_dup(VALUE str)
Duplicates a string.
Definition string.c:1996
VALUE rb_str_buf_append(VALUE dst, VALUE src)
Identical to rb_str_cat_cstr(), except it takes Ruby's string instead of C's.
Definition string.c:3802
void rb_str_set_len(VALUE str, long len)
Overwrites the length of the string.
Definition string.c:3423
st_index_t rb_hash_start(st_index_t i)
Starts a series of hashing.
Definition random.c:1785
VALUE rb_exec_recursive(VALUE(*f)(VALUE g, VALUE h, int r), VALUE g, VALUE h)
"Recursion" API entry point.
VALUE rb_ivar_set(VALUE obj, ID name, VALUE val)
Identical to rb_iv_set(), except it accepts the name as an ID instead of a C string.
Definition variable.c:2064
VALUE rb_ivar_get(VALUE obj, ID name)
Identical to rb_iv_get(), except it accepts the name as an ID instead of a C string.
Definition variable.c:1515
VALUE rb_class_path(VALUE mod)
Identical to rb_mod_name(), except it returns #<Class: ...> style inspection for anonymous modules.
Definition variable.c:380
int rb_respond_to(VALUE obj, ID mid)
Queries if the object responds to the method.
Definition vm_method.c:3485
void rb_undef_alloc_func(VALUE klass)
Deletes the allocator function of a class.
Definition vm_method.c:1742
VALUE rb_check_funcall(VALUE recv, ID mid, int argc, const VALUE *argv)
Identical to rb_funcallv(), except it returns RUBY_Qundef instead of raising rb_eNoMethodError.
Definition vm_eval.c:690
VALUE rb_check_funcall_kw(VALUE recv, ID mid, int argc, const VALUE *argv, int kw_splat)
Identical to rb_check_funcall(), except you can specify how to handle the last element of the given a...
Definition vm_eval.c:684
void rb_define_alloc_func(VALUE klass, rb_alloc_func_t func)
Sets the allocator function of a class.
static ID rb_intern_const(const char *str)
This is a "tiny optimisation" over rb_intern().
Definition symbol.h:285
VALUE rb_sym2str(VALUE symbol)
Obtain a frozen string representation of a symbol (not including the leading colon).
Definition symbol.c:1024
ID rb_to_id(VALUE str)
Identical to rb_intern_str(), except it tries to convert the parameter object to an instance of rb_cS...
Definition string.c:12698
int len
Length of the buffer.
Definition io.h:8
#define RB_BLOCK_CALL_FUNC_ARGLIST(yielded_arg, callback_arg)
Shim for block function parameters.
Definition iterator.h:58
VALUE rb_yield_values(int n,...)
Identical to rb_yield(), except it takes variadic number of parameters and pass them to the block.
Definition vm_eval.c:1399
VALUE rb_yield_values2(int n, const VALUE *argv)
Identical to rb_yield_values(), except it takes the parameters as a C array instead of variadic argum...
Definition vm_eval.c:1421
VALUE rb_yield(VALUE val)
Yields the block.
Definition vm_eval.c:1376
VALUE rb_yield_values_kw(int n, const VALUE *argv, int kw_splat)
Identical to rb_yield_values2(), except you can specify how to handle the last element of the given a...
Definition vm_eval.c:1427
VALUE rb_block_call_func(RB_BLOCK_CALL_FUNC_ARGLIST(yielded_arg, callback_arg))
This is the type of a function that the interpreter expect for C-backended blocks.
Definition iterator.h:83
VALUE rb_block_call_kw(VALUE obj, ID mid, int argc, const VALUE *argv, rb_block_call_func_t proc, VALUE data2, int kw_splat)
Identical to rb_funcallv_kw(), except it additionally passes a function as a block.
Definition vm_eval.c:1567
#define rb_long2int
Just another name of rb_long2int_inline.
Definition long.h:62
#define MEMCPY(p1, p2, type, n)
Handy macro to call memcpy.
Definition memory.h:372
#define RB_GC_GUARD(v)
Prevents premature destruction of local objects.
Definition memory.h:167
VALUE rb_block_call(VALUE q, ID w, int e, const VALUE *r, type *t, VALUE y)
Call a method with a block.
VALUE rb_proc_new(type *q, VALUE w)
Creates a rb_cProc instance.
VALUE rb_fiber_new(type *q, VALUE w)
Creates a rb_cFiber instance.
void rb_hash_foreach(VALUE q, int_type *w, VALUE e)
Iteration over the given hash.
VALUE rb_rescue2(type *q, VALUE w, type *e, VALUE r,...)
An equivalent of rescue clause.
#define RARRAY_LEN
Just another name of rb_array_len.
Definition rarray.h:51
static int RARRAY_LENINT(VALUE ary)
Identical to rb_array_len(), except it differs for the return type.
Definition rarray.h:281
#define RARRAY_AREF(a, i)
Definition rarray.h:403
#define RARRAY_CONST_PTR
Just another name of rb_array_const_ptr.
Definition rarray.h:52
#define RHASH_EMPTY_P(h)
Checks if the hash is empty.
Definition rhash.h:79
#define RUBY_TYPED_DEFAULT_FREE
This is a value you can set to rb_data_type_struct::dfree.
Definition rtypeddata.h:81
#define RUBY_TYPED_FREE_IMMEDIATELY
Macros to see if each corresponding flag is defined.
Definition rtypeddata.h:122
#define TypedData_Get_Struct(obj, type, data_type, sval)
Obtains a C struct from inside of a wrapper Ruby object.
Definition rtypeddata.h:769
#define TypedData_Make_Struct(klass, type, data_type, sval)
Identical to TypedData_Wrap_Struct, except it allocates a new data region internally instead of takin...
Definition rtypeddata.h:578
#define InitVM(ext)
This macro is for internal use.
Definition ruby.h:231
#define RB_SCAN_ARGS_LAST_HASH_KEYWORDS
Treat a final argument as keywords if it is a hash, and not as keywords otherwise.
Definition scan_args.h:59
#define RB_PASS_CALLED_KEYWORDS
Pass keywords if current method is called with keywords, useful for argument delegation.
Definition scan_args.h:78
#define RB_NO_KEYWORDS
Do not pass keywords.
Definition scan_args.h:69
#define RTEST
This is an old name of RB_TEST.
#define _(args)
This was a transition path from K&R to ANSI.
Definition stdarg.h:35
MEMO.
Definition imemo.h:109
Definition enumerator.c:251
Decomposed Enumerator::ArithmeticSequence.
Definition enumerator.h:53
int exclude_end
Whether the endpoint is open or closed.
Definition enumerator.h:57
VALUE end
"Right" or "highest" endpoint of the sequence.
Definition enumerator.h:55
VALUE step
Step between a sequence.
Definition enumerator.h:56
VALUE begin
"Left" or "lowest" endpoint of the sequence.
Definition enumerator.h:54
This is the struct that holds the type metadata needed to wrap a C struct as a Ruby typed-data object.
Definition rtypeddata.h:229
VALUE flags
Type-specific behavioural characteristics.
Definition rtypeddata.h:343
uintptr_t ID
Type that represents a Ruby identifier such as a variable name.
Definition value.h:52
uintptr_t VALUE
Type that represents a Ruby object.
Definition value.h:40
static bool RB_FLOAT_TYPE_P(VALUE obj)
Queries if the object is an instance of rb_cFloat.
Definition value_type.h:264
static void Check_Type(VALUE v, enum ruby_value_type t)
Identical to RB_TYPE_P(), except it raises exceptions on predication failure.
Definition value_type.h:433
static bool RB_TYPE_P(VALUE obj, enum ruby_value_type t)
Queries if the given object is of given type.
Definition value_type.h:376