Ruby 3.2.4p170 (2024-04-23 revision af471c0e0127eea0cafa6f308c0425bbfab0acf5)
enumerator.c
1/************************************************
2
3 enumerator.c - provides Enumerator class
4
5 $Author$
6
7 Copyright (C) 2001-2003 Akinori MUSHA
8
9 $Idaemons: /home/cvs/rb/enumerator/enumerator.c,v 1.1.1.1 2001/07/15 10:12:48 knu Exp $
10 $RoughId: enumerator.c,v 1.6 2003/07/27 11:03:24 nobu Exp $
11 $Id$
12
13************************************************/
14
15#include "ruby/internal/config.h"
16
17#ifdef HAVE_FLOAT_H
18#include <float.h>
19#endif
20
21#include "id.h"
22#include "internal.h"
23#include "internal/class.h"
24#include "internal/enumerator.h"
25#include "internal/error.h"
26#include "internal/hash.h"
27#include "internal/imemo.h"
28#include "internal/numeric.h"
29#include "internal/range.h"
30#include "internal/rational.h"
31#include "ruby/ruby.h"
32
33/*
34 * Document-class: Enumerator
35 *
36 * A class which allows both internal and external iteration.
37 *
38 * An Enumerator can be created by the following methods.
39 * - Object#to_enum
40 * - Object#enum_for
41 * - Enumerator.new
42 *
43 * Most methods have two forms: a block form where the contents
44 * are evaluated for each item in the enumeration, and a non-block form
45 * which returns a new Enumerator wrapping the iteration.
46 *
47 * enumerator = %w(one two three).each
48 * puts enumerator.class # => Enumerator
49 *
50 * enumerator.each_with_object("foo") do |item, obj|
51 * puts "#{obj}: #{item}"
52 * end
53 *
54 * # foo: one
55 * # foo: two
56 * # foo: three
57 *
58 * enum_with_obj = enumerator.each_with_object("foo")
59 * puts enum_with_obj.class # => Enumerator
60 *
61 * enum_with_obj.each do |item, obj|
62 * puts "#{obj}: #{item}"
63 * end
64 *
65 * # foo: one
66 * # foo: two
67 * # foo: three
68 *
69 * This allows you to chain Enumerators together. For example, you
70 * can map a list's elements to strings containing the index
71 * and the element as a string via:
72 *
73 * puts %w[foo bar baz].map.with_index { |w, i| "#{i}:#{w}" }
74 * # => ["0:foo", "1:bar", "2:baz"]
75 *
76 * == External Iteration
77 *
78 * An Enumerator can also be used as an external iterator.
79 * For example, Enumerator#next returns the next value of the iterator
80 * or raises StopIteration if the Enumerator is at the end.
81 *
82 * e = [1,2,3].each # returns an enumerator object.
83 * puts e.next # => 1
84 * puts e.next # => 2
85 * puts e.next # => 3
86 * puts e.next # raises StopIteration
87 *
88 * +next+, +next_values+, +peek+ and +peek_values+ are the only methods
89 * which use external iteration (and Array#zip(Enumerable-not-Array) which uses +next+).
90 *
91 * These methods do not affect other internal enumeration methods,
92 * unless the underlying iteration method itself has side-effects, e.g. IO#each_line.
93 *
94 * External iteration differs *significantly* from internal iteration
95 * due to using a Fiber:
96 * - The Fiber adds some overhead compared to internal enumeration.
97 * - The stacktrace will only include the stack from the Enumerator, not above.
98 * - Fiber-local variables are *not* inherited inside the Enumerator Fiber,
99 * which instead starts with no Fiber-local variables.
100 * - Fiber storage variables *are* inherited and are designed
101 * to handle Enumerator Fibers. Assigning to a Fiber storage variable
102 * only affects the current Fiber, so if you want to change state
103 * in the caller Fiber of the Enumerator Fiber, you need to use an
104 * extra indirection (e.g., use some object in the Fiber storage
105 * variable and mutate some ivar of it).
106 *
107 * Concretely:
108 * Thread.current[:fiber_local] = 1
109 * Fiber[:storage_var] = 1
110 * e = Enumerator.new do |y|
111 * p Thread.current[:fiber_local] # for external iteration: nil, for internal iteration: 1
112 * p Fiber[:storage_var] # => 1, inherited
113 * Fiber[:storage_var] += 1
114 * y << 42
115 * end
116 *
117 * p e.next # => 42
118 * p Fiber[:storage_var] # => 1 (it ran in a different Fiber)
119 *
120 * e.each { p _1 }
121 * p Fiber[:storage_var] # => 2 (it ran in the same Fiber/"stack" as the current Fiber)
122 *
123 * == Convert External Iteration to Internal Iteration
124 *
125 * You can use an external iterator to implement an internal iterator as follows:
126 *
127 * def ext_each(e)
128 * while true
129 * begin
130 * vs = e.next_values
131 * rescue StopIteration
132 * return $!.result
133 * end
134 * y = yield(*vs)
135 * e.feed y
136 * end
137 * end
138 *
139 * o = Object.new
140 *
141 * def o.each
142 * puts yield
143 * puts yield(1)
144 * puts yield(1, 2)
145 * 3
146 * end
147 *
148 * # use o.each as an internal iterator directly.
149 * puts o.each {|*x| puts x; [:b, *x] }
150 * # => [], [:b], [1], [:b, 1], [1, 2], [:b, 1, 2], 3
151 *
152 * # convert o.each to an external iterator for
153 * # implementing an internal iterator.
154 * puts ext_each(o.to_enum) {|*x| puts x; [:b, *x] }
155 * # => [], [:b], [1], [:b, 1], [1, 2], [:b, 1, 2], 3
156 *
157 */
158VALUE rb_cEnumerator;
159static VALUE rb_cLazy;
160static ID id_rewind, id_new, id_to_enum, id_each_entry;
161static ID id_next, id_result, id_receiver, id_arguments, id_memo, id_method, id_force;
162static ID id_begin, id_end, id_step, id_exclude_end;
163static VALUE sym_each, sym_cycle, sym_yield;
164
165static VALUE lazy_use_super_method;
166
167extern ID ruby_static_id_cause;
168
169#define id_call idCall
170#define id_cause ruby_static_id_cause
171#define id_each idEach
172#define id_eqq idEqq
173#define id_initialize idInitialize
174#define id_size idSize
175
176VALUE rb_eStopIteration;
177
178struct enumerator {
179 VALUE obj;
180 ID meth;
181 VALUE args;
182 VALUE fib;
183 VALUE dst;
184 VALUE lookahead;
185 VALUE feedvalue;
186 VALUE stop_exc;
187 VALUE size;
188 VALUE procs;
189 rb_enumerator_size_func *size_fn;
190 int kw_splat;
191};
192
193static VALUE rb_cGenerator, rb_cYielder, rb_cEnumProducer;
194
195struct generator {
196 VALUE proc;
197 VALUE obj;
198};
199
200struct yielder {
201 VALUE proc;
202};
203
204struct producer {
205 VALUE init;
206 VALUE proc;
207};
208
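/*
 * Each chained operation of an Enumerator::Lazy is stored as a proc_entry
 * in the enumerator's procs array.  The entry's lazyenum_funcs table
 * supplies up to three callbacks:
 * - proc:     applies the step to one value (returning NULL drops the value),
 * - size:     maps the receiver's size to the size after this step (optional),
 * - precheck: reports whether the step can yield anything at all, so that
 *             the whole enumeration may be skipped (see lazy_precheck).
 */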
209typedef struct MEMO *lazyenum_proc_func(VALUE, struct MEMO *, VALUE, long);
210typedef VALUE lazyenum_size_func(VALUE, VALUE);
211typedef int lazyenum_precheck_func(VALUE proc_entry);
212typedef struct {
213 lazyenum_proc_func *proc;
214 lazyenum_size_func *size;
215 lazyenum_precheck_func *precheck;
216} lazyenum_funcs;
217
218struct proc_entry {
219 VALUE proc;
220 VALUE memo;
221 const lazyenum_funcs *fn;
222};
223
224static VALUE generator_allocate(VALUE klass);
225static VALUE generator_init(VALUE obj, VALUE proc);
226
227static VALUE rb_cEnumChain;
228
229struct enum_chain {
230 VALUE enums;
231 long pos;
232};
233
234static VALUE rb_cEnumProduct;
235
236struct enum_product {
237 VALUE enums;
238};
239
240VALUE rb_cArithSeq;
241
242/*
243 * Enumerator
244 */
245static void
246enumerator_mark(void *p)
247{
248 struct enumerator *ptr = p;
249 rb_gc_mark_movable(ptr->obj);
250 rb_gc_mark_movable(ptr->args);
251 rb_gc_mark_movable(ptr->fib);
252 rb_gc_mark_movable(ptr->dst);
253 rb_gc_mark_movable(ptr->lookahead);
254 rb_gc_mark_movable(ptr->feedvalue);
255 rb_gc_mark_movable(ptr->stop_exc);
256 rb_gc_mark_movable(ptr->size);
257 rb_gc_mark_movable(ptr->procs);
258}
259
260static void
261enumerator_compact(void *p)
262{
263 struct enumerator *ptr = p;
264 ptr->obj = rb_gc_location(ptr->obj);
265 ptr->args = rb_gc_location(ptr->args);
266 ptr->fib = rb_gc_location(ptr->fib);
267 ptr->dst = rb_gc_location(ptr->dst);
268 ptr->lookahead = rb_gc_location(ptr->lookahead);
269 ptr->feedvalue = rb_gc_location(ptr->feedvalue);
270 ptr->stop_exc = rb_gc_location(ptr->stop_exc);
271 ptr->size = rb_gc_location(ptr->size);
272 ptr->procs = rb_gc_location(ptr->procs);
273}
274
275#define enumerator_free RUBY_TYPED_DEFAULT_FREE
276
277static size_t
278enumerator_memsize(const void *p)
279{
280 return sizeof(struct enumerator);
281}
282
283static const rb_data_type_t enumerator_data_type = {
284 "enumerator",
285 {
286 enumerator_mark,
287 enumerator_free,
288 enumerator_memsize,
289 enumerator_compact,
290 },
291 0, 0, RUBY_TYPED_FREE_IMMEDIATELY
292};
293
294static struct enumerator *
295enumerator_ptr(VALUE obj)
296{
297 struct enumerator *ptr;
298
299 TypedData_Get_Struct(obj, struct enumerator, &enumerator_data_type, ptr);
300 if (!ptr || UNDEF_P(ptr->obj)) {
301 rb_raise(rb_eArgError, "uninitialized enumerator");
302 }
303 return ptr;
304}
305
306static void
307proc_entry_mark(void *p)
308{
309 struct proc_entry *ptr = p;
310 rb_gc_mark_movable(ptr->proc);
311 rb_gc_mark_movable(ptr->memo);
312}
313
314static void
315proc_entry_compact(void *p)
316{
317 struct proc_entry *ptr = p;
318 ptr->proc = rb_gc_location(ptr->proc);
319 ptr->memo = rb_gc_location(ptr->memo);
320}
321
322#define proc_entry_free RUBY_TYPED_DEFAULT_FREE
323
324static size_t
325proc_entry_memsize(const void *p)
326{
327 return p ? sizeof(struct proc_entry) : 0;
328}
329
330static const rb_data_type_t proc_entry_data_type = {
331 "proc_entry",
332 {
333 proc_entry_mark,
334 proc_entry_free,
335 proc_entry_memsize,
336 proc_entry_compact,
337 },
338};
339
340static struct proc_entry *
341proc_entry_ptr(VALUE proc_entry)
342{
343 struct proc_entry *ptr;
344
345 TypedData_Get_Struct(proc_entry, struct proc_entry, &proc_entry_data_type, ptr);
346
347 return ptr;
348}
349
350/*
351 * call-seq:
352 * obj.to_enum(method = :each, *args) -> enum
353 * obj.enum_for(method = :each, *args) -> enum
354 * obj.to_enum(method = :each, *args) {|*args| block} -> enum
355 * obj.enum_for(method = :each, *args){|*args| block} -> enum
356 *
357 * Creates a new Enumerator which will enumerate by calling +method+ on
358 * +obj+, passing +args+ if any. Whatever the method _yields_ becomes
359 * the values of the enumerator.
360 *
361 * If a block is given, it will be used to calculate the size of
362 * the enumerator without the need to iterate it (see Enumerator#size).
363 *
364 * === Examples
365 *
366 * str = "xyz"
367 *
368 * enum = str.enum_for(:each_byte)
369 * enum.each { |b| puts b }
370 * # => 120
371 * # => 121
372 * # => 122
373 *
374 * # protect an array from being modified by some_method
375 * a = [1, 2, 3]
376 * some_method(a.to_enum)
377 *
378 * # String#split in block form is more memory-efficient:
379 * very_large_string.split("|") { |chunk| return chunk if chunk.include?('DATE') }
380 * # This could be rewritten more idiomatically with to_enum:
381 * very_large_string.to_enum(:split, "|").lazy.grep(/DATE/).first
382 *
383 * It is typical to call to_enum when defining methods for
384 * a generic Enumerable, in case no block is passed.
385 *
386 * Here is such an example, with parameter passing and a sizing block:
387 *
388 * module Enumerable
389 * # a generic method to repeat the values of any enumerable
390 * def repeat(n)
391 * raise ArgumentError, "#{n} is negative!" if n < 0
392 * unless block_given?
393 * return to_enum(__method__, n) do # __method__ is :repeat here
394 * sz = size # Call size and multiply by n...
395 * sz * n if sz # but return nil if size itself is nil
396 * end
397 * end
398 * each do |*val|
399 * n.times { yield *val }
400 * end
401 * end
402 * end
403 *
404 * %i[hello world].repeat(2) { |w| puts w }
405 * # => Prints 'hello', 'hello', 'world', 'world'
406 * enum = (1..14).repeat(3)
407 * # => returns an Enumerator when called without a block
408 * enum.first(4) # => [1, 1, 1, 2]
409 * enum.size # => 42
410 */
411static VALUE
412obj_to_enum(int argc, VALUE *argv, VALUE obj)
413{
414 VALUE enumerator, meth = sym_each;
415
416 if (argc > 0) {
417 --argc;
418 meth = *argv++;
419 }
420 enumerator = rb_enumeratorize_with_size(obj, meth, argc, argv, 0);
421 if (rb_block_given_p()) {
422 enumerator_ptr(enumerator)->size = rb_block_proc();
423 }
424 return enumerator;
425}
426
427static VALUE
428enumerator_allocate(VALUE klass)
429{
430 struct enumerator *ptr;
431 VALUE enum_obj;
432
433 enum_obj = TypedData_Make_Struct(klass, struct enumerator, &enumerator_data_type, ptr);
434 ptr->obj = Qundef;
435
436 return enum_obj;
437}
438
439static VALUE
440enumerator_init(VALUE enum_obj, VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, VALUE size, int kw_splat)
441{
442 struct enumerator *ptr;
443
444 rb_check_frozen(enum_obj);
445 TypedData_Get_Struct(enum_obj, struct enumerator, &enumerator_data_type, ptr);
446
447 if (!ptr) {
448 rb_raise(rb_eArgError, "unallocated enumerator");
449 }
450
451 ptr->obj = obj;
452 ptr->meth = rb_to_id(meth);
453 if (argc) ptr->args = rb_ary_new4(argc, argv);
454 ptr->fib = 0;
455 ptr->dst = Qnil;
456 ptr->lookahead = Qundef;
457 ptr->feedvalue = Qundef;
458 ptr->stop_exc = Qfalse;
459 ptr->size = size;
460 ptr->size_fn = size_fn;
461 ptr->kw_splat = kw_splat;
462
463 return enum_obj;
464}
465
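/* Normalize the size argument given to Enumerator.new: nil, a callable
 * object, and an infinite Float (Float::INFINITY) are kept as-is; anything
 * else must convert to an Integer. */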
466static VALUE
467convert_to_feasible_size_value(VALUE obj)
468{
469 if (NIL_P(obj)) {
470 return obj;
471 }
472 else if (rb_respond_to(obj, id_call)) {
473 return obj;
474 }
475 else if (RB_FLOAT_TYPE_P(obj) && RFLOAT_VALUE(obj) == HUGE_VAL) {
476 return obj;
477 }
478 else {
479 return rb_to_int(obj);
480 }
481}
482
483/*
484 * call-seq:
485 * Enumerator.new(size = nil) { |yielder| ... }
486 *
487 * Creates a new Enumerator object, which can be used as an
488 * Enumerable.
489 *
490 * Iteration is defined by the given block, in
491 * which a "yielder" object, given as block parameter, can be used to
492 * yield a value by calling the +yield+ method (aliased as <code><<</code>):
493 *
494 * fib = Enumerator.new do |y|
495 * a = b = 1
496 * loop do
497 * y << a
498 * a, b = b, a + b
499 * end
500 * end
501 *
502 * fib.take(10) # => [1, 1, 2, 3, 5, 8, 13, 21, 34, 55]
503 *
504 * The optional parameter can be used to specify how to calculate the size
505 * in a lazy fashion (see Enumerator#size). It can either be a value or
506 * a callable object.
507 */
508static VALUE
509enumerator_initialize(int argc, VALUE *argv, VALUE obj)
510{
511 VALUE iter = rb_block_proc();
512 VALUE recv = generator_init(generator_allocate(rb_cGenerator), iter);
513 VALUE arg0 = rb_check_arity(argc, 0, 1) ? argv[0] : Qnil;
514 VALUE size = convert_to_feasible_size_value(arg0);
515
516 return enumerator_init(obj, recv, sym_each, 0, 0, 0, size, false);
517}
518
519/* :nodoc: */
520static VALUE
521enumerator_init_copy(VALUE obj, VALUE orig)
522{
523 struct enumerator *ptr0, *ptr1;
524
525 if (!OBJ_INIT_COPY(obj, orig)) return obj;
526 ptr0 = enumerator_ptr(orig);
527 if (ptr0->fib) {
528 /* Fibers cannot be copied */
529 rb_raise(rb_eTypeError, "can't copy execution context");
530 }
531
532 TypedData_Get_Struct(obj, struct enumerator, &enumerator_data_type, ptr1);
533
534 if (!ptr1) {
535 rb_raise(rb_eArgError, "unallocated enumerator");
536 }
537
538 ptr1->obj = ptr0->obj;
539 ptr1->meth = ptr0->meth;
540 ptr1->args = ptr0->args;
541 ptr1->fib = 0;
542 ptr1->lookahead = Qundef;
543 ptr1->feedvalue = Qundef;
544 ptr1->size = ptr0->size;
545 ptr1->size_fn = ptr0->size_fn;
546
547 return obj;
548}
549
550/*
551 * For backwards compatibility; use rb_enumeratorize_with_size
552 */
553VALUE
554rb_enumeratorize(VALUE obj, VALUE meth, int argc, const VALUE *argv)
555{
556 return rb_enumeratorize_with_size(obj, meth, argc, argv, 0);
557}
558
559static VALUE lazy_to_enum_i(VALUE self, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, int kw_splat);
560static int lazy_precheck(VALUE procs);
561
562VALUE
563rb_enumeratorize_with_size_kw(VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, int kw_splat)
564{
565 VALUE base_class = rb_cEnumerator;
566
567 if (RTEST(rb_obj_is_kind_of(obj, rb_cLazy))) {
568 base_class = rb_cLazy;
569 }
570 else if (RTEST(rb_obj_is_kind_of(obj, rb_cEnumChain))) {
571 obj = enumerator_init(enumerator_allocate(rb_cEnumerator), obj, sym_each, 0, 0, 0, Qnil, false);
572 }
573
574 return enumerator_init(enumerator_allocate(base_class),
575 obj, meth, argc, argv, size_fn, Qnil, kw_splat);
576}
577
578VALUE
579rb_enumeratorize_with_size(VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn)
580{
581 return rb_enumeratorize_with_size_kw(obj, meth, argc, argv, size_fn, rb_keyword_given_p());
582}
583
584static VALUE
585enumerator_block_call(VALUE obj, rb_block_call_func *func, VALUE arg)
586{
587 int argc = 0;
588 const VALUE *argv = 0;
589 const struct enumerator *e = enumerator_ptr(obj);
590 ID meth = e->meth;
591
592 if (e->args) {
593 argc = RARRAY_LENINT(e->args);
594 argv = RARRAY_CONST_PTR(e->args);
595 }
596 return rb_block_call_kw(e->obj, meth, argc, argv, func, arg, e->kw_splat);
597}
598
599/*
600 * call-seq:
601 * enum.each { |elm| block } -> obj
602 * enum.each -> enum
603 * enum.each(*appending_args) { |elm| block } -> obj
604 * enum.each(*appending_args) -> an_enumerator
605 *
606 * Iterates over the block according to how this Enumerator was constructed.
607 * If no block and no arguments are given, returns self.
608 *
609 * === Examples
610 *
611 * "Hello, world!".scan(/\w+/) #=> ["Hello", "world"]
612 * "Hello, world!".to_enum(:scan, /\w+/).to_a #=> ["Hello", "world"]
613 * "Hello, world!".to_enum(:scan).each(/\w+/).to_a #=> ["Hello", "world"]
614 *
615 * obj = Object.new
616 *
617 * def obj.each_arg(a, b=:b, *rest)
618 * yield a
619 * yield b
620 * yield rest
621 * :method_returned
622 * end
623 *
624 * enum = obj.to_enum :each_arg, :a, :x
625 *
626 * enum.each.to_a #=> [:a, :x, []]
627 * enum.each.equal?(enum) #=> true
628 * enum.each { |elm| elm } #=> :method_returned
629 *
630 * enum.each(:y, :z).to_a #=> [:a, :x, [:y, :z]]
631 * enum.each(:y, :z).equal?(enum) #=> false
632 * enum.each(:y, :z) { |elm| elm } #=> :method_returned
633 *
634 */
635static VALUE
636enumerator_each(int argc, VALUE *argv, VALUE obj)
637{
638 struct enumerator *e = enumerator_ptr(obj);
639
640 if (argc > 0) {
641 VALUE args = (e = enumerator_ptr(obj = rb_obj_dup(obj)))->args;
642 if (args) {
643#if SIZEOF_INT < SIZEOF_LONG
644 /* check int range overflow */
645 rb_long2int(RARRAY_LEN(args) + argc);
646#endif
647 args = rb_ary_dup(args);
648 rb_ary_cat(args, argv, argc);
649 }
650 else {
651 args = rb_ary_new4(argc, argv);
652 }
653 e->args = args;
654 e->size = Qnil;
655 e->size_fn = 0;
656 }
657 if (!rb_block_given_p()) return obj;
658
659 if (!lazy_precheck(e->procs)) return Qnil;
660
661 return enumerator_block_call(obj, 0, obj);
662}
663
664static VALUE
665enumerator_with_index_i(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
666{
667 struct MEMO *memo = (struct MEMO *)m;
668 VALUE idx = memo->v1;
669 MEMO_V1_SET(memo, rb_int_succ(idx));
670
671 if (argc <= 1)
672 return rb_yield_values(2, val, idx);
673
674 return rb_yield_values(2, rb_ary_new4(argc, argv), idx);
675}
676
677static VALUE
678enumerator_size(VALUE obj);
679
680static VALUE
681enumerator_enum_size(VALUE obj, VALUE args, VALUE eobj)
682{
683 return enumerator_size(obj);
684}
685
686/*
687 * call-seq:
688 * e.with_index(offset = 0) {|(*args), idx| ... }
689 * e.with_index(offset = 0)
690 *
691 * Iterates the given block for each element with an index, which
692 * starts from +offset+. If no block is given, returns a new Enumerator
693 * that includes the index, starting from +offset+.
694 *
695 * +offset+:: the starting index to use
696 *
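 * === Example
 *
 * Indexes start at the given +offset+:
 *
 *   ('a'..'c').each.with_index(10) { |elm, i| puts "#{i}: #{elm}" }
 *   # => 10: a
 *   # => 11: b
 *   # => 12: c
 *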
697 */
698static VALUE
699enumerator_with_index(int argc, VALUE *argv, VALUE obj)
700{
701 VALUE memo;
702
703 rb_check_arity(argc, 0, 1);
704 RETURN_SIZED_ENUMERATOR(obj, argc, argv, enumerator_enum_size);
705 memo = (!argc || NIL_P(memo = argv[0])) ? INT2FIX(0) : rb_to_int(memo);
706 return enumerator_block_call(obj, enumerator_with_index_i, (VALUE)MEMO_NEW(memo, 0, 0));
707}
708
709/*
710 * call-seq:
711 * e.each_with_index {|(*args), idx| ... }
712 * e.each_with_index
713 *
714 * Same as Enumerator#with_index(0), i.e. there is no starting offset.
715 *
716 * If no block is given, a new Enumerator is returned that includes the index.
717 *
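 * === Example
 *
 * Each element is paired with its zero-based index:
 *
 *   [1, 2, 3].each.each_with_index.to_a  #=> [[1, 0], [2, 1], [3, 2]]
 *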
718 */
719static VALUE
720enumerator_each_with_index(VALUE obj)
721{
722 return enumerator_with_index(0, NULL, obj);
723}
724
725static VALUE
726enumerator_with_object_i(RB_BLOCK_CALL_FUNC_ARGLIST(val, memo))
727{
728 if (argc <= 1)
729 return rb_yield_values(2, val, memo);
730
731 return rb_yield_values(2, rb_ary_new4(argc, argv), memo);
732}
733
734/*
735 * call-seq:
736 * e.each_with_object(obj) {|(*args), obj| ... }
737 * e.each_with_object(obj)
738 * e.with_object(obj) {|(*args), obj| ... }
739 * e.with_object(obj)
740 *
741 * Iterates the given block for each element with an arbitrary object, +obj+,
742 * and returns +obj+.
743 *
744 * If no block is given, returns a new Enumerator.
745 *
746 * === Example
747 *
748 * to_three = Enumerator.new do |y|
749 * 3.times do |x|
750 * y << x
751 * end
752 * end
753 *
754 * to_three_with_string = to_three.with_object("foo")
755 * to_three_with_string.each do |x,string|
756 * puts "#{string}: #{x}"
757 * end
758 *
759 * # => foo: 0
760 * # => foo: 1
761 * # => foo: 2
762 */
763static VALUE
764enumerator_with_object(VALUE obj, VALUE memo)
765{
766 RETURN_SIZED_ENUMERATOR(obj, 1, &memo, enumerator_enum_size);
767 enumerator_block_call(obj, enumerator_with_object_i, memo);
768
769 return memo;
770}
771
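/*
 * External iteration (Enumerator#next, #peek, ...) is driven by a Fiber:
 * next_i runs the underlying each loop inside e->fib, and next_ii hands
 * each packed value back to the caller with rb_fiber_yield, returning any
 * value supplied via Enumerator#feed.  When the loop finishes, next_i
 * records a StopIteration exception (carrying the return value) in
 * e->stop_exc, which get_next_values then raises.
 */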
772static VALUE
773next_ii(RB_BLOCK_CALL_FUNC_ARGLIST(i, obj))
774{
775 struct enumerator *e = enumerator_ptr(obj);
776 VALUE feedvalue = Qnil;
777 VALUE args = rb_ary_new4(argc, argv);
778 rb_fiber_yield(1, &args);
779 if (!UNDEF_P(e->feedvalue)) {
780 feedvalue = e->feedvalue;
781 e->feedvalue = Qundef;
782 }
783 return feedvalue;
784}
785
786static VALUE
787next_i(RB_BLOCK_CALL_FUNC_ARGLIST(_, obj))
788{
789 struct enumerator *e = enumerator_ptr(obj);
790 VALUE nil = Qnil;
791 VALUE result;
792
793 result = rb_block_call(obj, id_each, 0, 0, next_ii, obj);
794 e->stop_exc = rb_exc_new2(rb_eStopIteration, "iteration reached an end");
795 rb_ivar_set(e->stop_exc, id_result, result);
796 return rb_fiber_yield(1, &nil);
797}
798
799static void
800next_init(VALUE obj, struct enumerator *e)
801{
802 VALUE curr = rb_fiber_current();
803 e->dst = curr;
804 e->fib = rb_fiber_new(next_i, obj);
805 e->lookahead = Qundef;
806}
807
808static VALUE
809get_next_values(VALUE obj, struct enumerator *e)
810{
811 VALUE curr, vs;
812
813 if (e->stop_exc) {
814 VALUE exc = e->stop_exc;
815 VALUE result = rb_attr_get(exc, id_result);
816 VALUE mesg = rb_attr_get(exc, idMesg);
817 if (!NIL_P(mesg)) mesg = rb_str_dup(mesg);
818 VALUE stop_exc = rb_exc_new_str(rb_eStopIteration, mesg);
819 rb_ivar_set(stop_exc, id_cause, exc);
820 rb_ivar_set(stop_exc, id_result, result);
821 rb_exc_raise(stop_exc);
822 }
823
824 curr = rb_fiber_current();
825
826 if (!e->fib || !rb_fiber_alive_p(e->fib)) {
827 next_init(obj, e);
828 }
829
830 vs = rb_fiber_resume(e->fib, 1, &curr);
831 if (e->stop_exc) {
832 e->fib = 0;
833 e->dst = Qnil;
834 e->lookahead = Qundef;
835 e->feedvalue = Qundef;
836 rb_exc_raise(e->stop_exc);
837 }
838 return vs;
839}
840
841/*
842 * call-seq:
843 * e.next_values -> array
844 *
845 * Returns the next object in the enumerator as an array, and moves the
846 * internal position forward. When the position reaches the end,
847 * StopIteration is raised.
848 *
849 * See class-level notes about external iterators.
850 *
851 * This method can be used to distinguish <code>yield</code> and <code>yield
852 * nil</code>.
853 *
854 * === Example
855 *
856 * o = Object.new
857 * def o.each
858 * yield
859 * yield 1
860 * yield 1, 2
861 * yield nil
862 * yield [1, 2]
863 * end
864 * e = o.to_enum
865 * p e.next_values
866 * p e.next_values
867 * p e.next_values
868 * p e.next_values
869 * p e.next_values
870 * e = o.to_enum
871 * p e.next
872 * p e.next
873 * p e.next
874 * p e.next
875 * p e.next
876 *
877 * ## yield args next_values next
878 * # yield [] nil
879 * # yield 1 [1] 1
880 * # yield 1, 2 [1, 2] [1, 2]
881 * # yield nil [nil] nil
882 * # yield [1, 2] [[1, 2]] [1, 2]
883 *
884 */
885
886static VALUE
887enumerator_next_values(VALUE obj)
888{
889 struct enumerator *e = enumerator_ptr(obj);
890 VALUE vs;
891
892 if (!UNDEF_P(e->lookahead)) {
893 vs = e->lookahead;
894 e->lookahead = Qundef;
895 return vs;
896 }
897
898 return get_next_values(obj, e);
899}
900
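/* Convert a packed values array into the single-value form used by
 * Enumerator#next/#peek: [] becomes nil, [x] becomes x, and longer arrays
 * are returned as-is (duplicated when dup is set, as for #peek). */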
901static VALUE
902ary2sv(VALUE args, int dup)
903{
904 if (!RB_TYPE_P(args, T_ARRAY))
905 return args;
906
907 switch (RARRAY_LEN(args)) {
908 case 0:
909 return Qnil;
910
911 case 1:
912 return RARRAY_AREF(args, 0);
913
914 default:
915 if (dup)
916 return rb_ary_dup(args);
917 return args;
918 }
919}
920
921/*
922 * call-seq:
923 * e.next -> object
924 *
925 * Returns the next object in the enumerator, and moves the internal position
926 * forward. When the position reaches the end, StopIteration is raised.
927 *
928 * === Example
929 *
930 * a = [1,2,3]
931 * e = a.to_enum
932 * p e.next #=> 1
933 * p e.next #=> 2
934 * p e.next #=> 3
935 * p e.next #raises StopIteration
936 *
937 * See class-level notes about external iterators.
938 *
939 */
940
941static VALUE
942enumerator_next(VALUE obj)
943{
944 VALUE vs = enumerator_next_values(obj);
945 return ary2sv(vs, 0);
946}
947
948static VALUE
949enumerator_peek_values(VALUE obj)
950{
951 struct enumerator *e = enumerator_ptr(obj);
952
953 if (UNDEF_P(e->lookahead)) {
954 e->lookahead = get_next_values(obj, e);
955 }
956 return e->lookahead;
957}
958
959/*
960 * call-seq:
961 * e.peek_values -> array
962 *
963 * Returns the next object as an array, similar to Enumerator#next_values, but
964 * doesn't move the internal position forward. If the position is already at
965 * the end, StopIteration is raised.
966 *
967 * See class-level notes about external iterators.
968 *
969 * === Example
970 *
971 * o = Object.new
972 * def o.each
973 * yield
974 * yield 1
975 * yield 1, 2
976 * end
977 * e = o.to_enum
978 * p e.peek_values #=> []
979 * e.next
980 * p e.peek_values #=> [1]
981 * p e.peek_values #=> [1]
982 * e.next
983 * p e.peek_values #=> [1, 2]
984 * e.next
985 * p e.peek_values # raises StopIteration
986 *
987 */
988
989static VALUE
990enumerator_peek_values_m(VALUE obj)
991{
992 return rb_ary_dup(enumerator_peek_values(obj));
993}
994
995/*
996 * call-seq:
997 * e.peek -> object
998 *
999 * Returns the next object in the enumerator, but doesn't move the internal
1000 * position forward. If the position is already at the end, StopIteration
1001 * is raised.
1002 *
1003 * See class-level notes about external iterators.
1004 *
1005 * === Example
1006 *
1007 * a = [1,2,3]
1008 * e = a.to_enum
1009 * p e.next #=> 1
1010 * p e.peek #=> 2
1011 * p e.peek #=> 2
1012 * p e.peek #=> 2
1013 * p e.next #=> 2
1014 * p e.next #=> 3
1015 * p e.peek #raises StopIteration
1016 *
1017 */
1018
1019static VALUE
1020enumerator_peek(VALUE obj)
1021{
1022 VALUE vs = enumerator_peek_values(obj);
1023 return ary2sv(vs, 1);
1024}
1025
1026/*
1027 * call-seq:
1028 * e.feed obj -> nil
1029 *
1030 * Sets the value to be returned by the next yield inside +e+.
1031 *
1032 * If the value is not set, the yield returns nil.
1033 *
1034 * This value is cleared after being yielded.
1035 *
1036 * # Array#map passes the array's elements to "yield" and collects the
1037 * # results of "yield" as an array.
1038 * # Following example shows that "next" returns the passed elements and
1039 * # values passed to "feed" are collected as an array which can be
1040 * # obtained by StopIteration#result.
1041 * e = [1,2,3].map
1042 * p e.next #=> 1
1043 * e.feed "a"
1044 * p e.next #=> 2
1045 * e.feed "b"
1046 * p e.next #=> 3
1047 * e.feed "c"
1048 * begin
1049 * e.next
1050 * rescue StopIteration
1051 * p $!.result #=> ["a", "b", "c"]
1052 * end
1053 *
1054 * o = Object.new
1055 * def o.each
1056 * x = yield # (2) blocks
1057 * p x # (5) => "foo"
1058 * x = yield # (6) blocks
1059 * p x # (8) => nil
1060 * x = yield # (9) blocks
1061 * p x # not reached w/o another e.next
1062 * end
1063 *
1064 * e = o.to_enum
1065 * e.next # (1)
1066 * e.feed "foo" # (3)
1067 * e.next # (4)
1068 * e.next # (7)
1069 * # (10)
1070 */
1071
1072static VALUE
1073enumerator_feed(VALUE obj, VALUE v)
1074{
1075 struct enumerator *e = enumerator_ptr(obj);
1076
1077 if (!UNDEF_P(e->feedvalue)) {
1078 rb_raise(rb_eTypeError, "feed value already set");
1079 }
1080 e->feedvalue = v;
1081
1082 return Qnil;
1083}
1084
1085/*
1086 * call-seq:
1087 * e.rewind -> e
1088 *
1089 * Rewinds the enumeration sequence to the beginning.
1090 *
1091 * If the enclosed object responds to a "rewind" method, it is called.
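 *
 * === Example
 *
 * A rewound enumerator starts over from its first element:
 *
 *   e = [1, 2, 3].each
 *   p e.next    #=> 1
 *   p e.next    #=> 2
 *   e.rewind
 *   p e.next    #=> 1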
1092 */
1093
1094static VALUE
1095enumerator_rewind(VALUE obj)
1096{
1097 struct enumerator *e = enumerator_ptr(obj);
1098
1099 rb_check_funcall(e->obj, id_rewind, 0, 0);
1100
1101 e->fib = 0;
1102 e->dst = Qnil;
1103 e->lookahead = Qundef;
1104 e->feedvalue = Qundef;
1105 e->stop_exc = Qfalse;
1106 return obj;
1107}
1108
1109static struct generator *generator_ptr(VALUE obj);
1110static VALUE append_method(VALUE obj, VALUE str, ID default_method, VALUE default_args);
1111
1112static VALUE
1113inspect_enumerator(VALUE obj, VALUE dummy, int recur)
1114{
1115 struct enumerator *e;
1116 VALUE eobj, str, cname;
1117
1118 TypedData_Get_Struct(obj, struct enumerator, &enumerator_data_type, e);
1119
1120 cname = rb_obj_class(obj);
1121
1122 if (!e || UNDEF_P(e->obj)) {
1123 return rb_sprintf("#<%"PRIsVALUE": uninitialized>", rb_class_path(cname));
1124 }
1125
1126 if (recur) {
1127 str = rb_sprintf("#<%"PRIsVALUE": ...>", rb_class_path(cname));
1128 return str;
1129 }
1130
1131 if (e->procs) {
1132 long i;
1133
1134 eobj = generator_ptr(e->obj)->obj;
1135 /* For an enumerator with chained procs, traverse all proc entries manually */
1136 if (rb_obj_class(eobj) == cname) {
1137 str = rb_inspect(eobj);
1138 }
1139 else {
1140 str = rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE">", rb_class_path(cname), eobj);
1141 }
1142 for (i = 0; i < RARRAY_LEN(e->procs); i++) {
1143 str = rb_sprintf("#<%"PRIsVALUE": %"PRIsVALUE, cname, str);
1144 append_method(RARRAY_AREF(e->procs, i), str, e->meth, e->args);
1145 rb_str_buf_cat2(str, ">");
1146 }
1147 return str;
1148 }
1149
1150 eobj = rb_attr_get(obj, id_receiver);
1151 if (NIL_P(eobj)) {
1152 eobj = e->obj;
1153 }
1154
1155 /* (1..100).each_cons(2) => "#<Enumerator: 1..100:each_cons(2)>" */
1156 str = rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE, rb_class_path(cname), eobj);
1157 append_method(obj, str, e->meth, e->args);
1158
1159 rb_str_buf_cat2(str, ">");
1160
1161 return str;
1162}
1163
1164static int
1165key_symbol_p(VALUE key, VALUE val, VALUE arg)
1166{
1167 if (SYMBOL_P(key)) return ST_CONTINUE;
1168 *(int *)arg = FALSE;
1169 return ST_STOP;
1170}
1171
1172static int
1173kwd_append(VALUE key, VALUE val, VALUE str)
1174{
1175 if (!SYMBOL_P(key)) rb_raise(rb_eRuntimeError, "non-symbol key inserted");
1176 rb_str_catf(str, "% "PRIsVALUE": %"PRIsVALUE", ", key, val);
1177 return ST_CONTINUE;
1178}
1179
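/* Append ":method(arg, ..., key: value)" to str for #inspect, using the
 * method name and arguments stored on obj, or the given defaults. */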
1180static VALUE
1181append_method(VALUE obj, VALUE str, ID default_method, VALUE default_args)
1182{
1183 VALUE method, eargs;
1184
1185 method = rb_attr_get(obj, id_method);
1186 if (method != Qfalse) {
1187 if (!NIL_P(method)) {
1188 Check_Type(method, T_SYMBOL);
1189 method = rb_sym2str(method);
1190 }
1191 else {
1192 method = rb_id2str(default_method);
1193 }
1194 rb_str_buf_cat2(str, ":");
1195 rb_str_buf_append(str, method);
1196 }
1197
1198 eargs = rb_attr_get(obj, id_arguments);
1199 if (NIL_P(eargs)) {
1200 eargs = default_args;
1201 }
1202 if (eargs != Qfalse) {
1203 long argc = RARRAY_LEN(eargs);
1204 const VALUE *argv = RARRAY_CONST_PTR(eargs); /* WB: no new reference */
1205
1206 if (argc > 0) {
1207 VALUE kwds = Qnil;
1208
1209 rb_str_buf_cat2(str, "(");
1210
1211 if (RB_TYPE_P(argv[argc-1], T_HASH) && !RHASH_EMPTY_P(argv[argc-1])) {
1212 int all_key = TRUE;
1213 rb_hash_foreach(argv[argc-1], key_symbol_p, (VALUE)&all_key);
1214 if (all_key) kwds = argv[--argc];
1215 }
1216
1217 while (argc--) {
1218 VALUE arg = *argv++;
1219
1220 rb_str_append(str, rb_inspect(arg));
1221 rb_str_buf_cat2(str, ", ");
1222 }
1223 if (!NIL_P(kwds)) {
1224 rb_hash_foreach(kwds, kwd_append, str);
1225 }
1226 rb_str_set_len(str, RSTRING_LEN(str)-2);
1227 rb_str_buf_cat2(str, ")");
1228 }
1229 }
1230
1231 return str;
1232}
1233
1234/*
1235 * call-seq:
1236 * e.inspect -> string
1237 *
1238 * Creates a printable version of <i>e</i>.
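 *
 * For example:
 *
 *   (1..100).each_cons(2).inspect  #=> "#<Enumerator: 1..100:each_cons(2)>"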
1239 */
1240
1241static VALUE
1242enumerator_inspect(VALUE obj)
1243{
1244 return rb_exec_recursive(inspect_enumerator, obj, 0);
1245}
1246
1247/*
1248 * call-seq:
1249 * e.size -> int, Float::INFINITY or nil
1250 *
1251 * Returns the size of the enumerator, or +nil+ if it can't be calculated lazily.
1252 *
1253 * (1..100).to_a.permutation(4).size # => 94109400
1254 * loop.size # => Float::INFINITY
1255 * (1..100).drop_while.size # => nil
1256 */
1257
1258static VALUE
1259enumerator_size(VALUE obj)
1260{
1261 struct enumerator *e = enumerator_ptr(obj);
1262 int argc = 0;
1263 const VALUE *argv = NULL;
1264 VALUE size;
1265
1266 if (e->procs) {
1267 struct generator *g = generator_ptr(e->obj);
1268 VALUE receiver = rb_check_funcall(g->obj, id_size, 0, 0);
1269 long i = 0;
1270
1271 for (i = 0; i < RARRAY_LEN(e->procs); i++) {
1272 VALUE proc = RARRAY_AREF(e->procs, i);
1273 struct proc_entry *entry = proc_entry_ptr(proc);
1274 lazyenum_size_func *size_fn = entry->fn->size;
1275 if (!size_fn) {
1276 return Qnil;
1277 }
1278 receiver = (*size_fn)(proc, receiver);
1279 }
1280 return receiver;
1281 }
1282
1283 if (e->size_fn) {
1284 return (*e->size_fn)(e->obj, e->args, obj);
1285 }
1286 if (e->args) {
1287 argc = (int)RARRAY_LEN(e->args);
1288 argv = RARRAY_CONST_PTR(e->args);
1289 }
1290 size = rb_check_funcall_kw(e->size, id_call, argc, argv, e->kw_splat);
1291 if (!UNDEF_P(size)) return size;
1292 return e->size;
1293}
1294
1295/*
1296 * Yielder
1297 */
1298static void
1299yielder_mark(void *p)
1300{
1301 struct yielder *ptr = p;
1302 rb_gc_mark_movable(ptr->proc);
1303}
1304
1305static void
1306yielder_compact(void *p)
1307{
1308 struct yielder *ptr = p;
1309 ptr->proc = rb_gc_location(ptr->proc);
1310}
1311
1312#define yielder_free RUBY_TYPED_DEFAULT_FREE
1313
1314static size_t
1315yielder_memsize(const void *p)
1316{
1317 return sizeof(struct yielder);
1318}
1319
1320static const rb_data_type_t yielder_data_type = {
1321 "yielder",
1322 {
1323 yielder_mark,
1324 yielder_free,
1325 yielder_memsize,
1326 yielder_compact,
1327 },
1328 0, 0, RUBY_TYPED_FREE_IMMEDIATELY
1329};
1330
1331static struct yielder *
1332yielder_ptr(VALUE obj)
1333{
1334 struct yielder *ptr;
1335
1336 TypedData_Get_Struct(obj, struct yielder, &yielder_data_type, ptr);
1337 if (!ptr || UNDEF_P(ptr->proc)) {
1338 rb_raise(rb_eArgError, "uninitialized yielder");
1339 }
1340 return ptr;
1341}
1342
1343/* :nodoc: */
1344static VALUE
1345yielder_allocate(VALUE klass)
1346{
1347 struct yielder *ptr;
1348 VALUE obj;
1349
1350 obj = TypedData_Make_Struct(klass, struct yielder, &yielder_data_type, ptr);
1351 ptr->proc = Qundef;
1352
1353 return obj;
1354}
1355
1356static VALUE
1357yielder_init(VALUE obj, VALUE proc)
1358{
1359 struct yielder *ptr;
1360
1361 TypedData_Get_Struct(obj, struct yielder, &yielder_data_type, ptr);
1362
1363 if (!ptr) {
1364 rb_raise(rb_eArgError, "unallocated yielder");
1365 }
1366
1367 ptr->proc = proc;
1368
1369 return obj;
1370}
1371
1372/* :nodoc: */
1373static VALUE
1374yielder_initialize(VALUE obj)
1375{
1376 rb_need_block();
1377
1378 return yielder_init(obj, rb_block_proc());
1379}
1380
1381/* :nodoc: */
1382static VALUE
1383yielder_yield(VALUE obj, VALUE args)
1384{
1385 struct yielder *ptr = yielder_ptr(obj);
1386
1387 return rb_proc_call_kw(ptr->proc, args, RB_PASS_CALLED_KEYWORDS);
1388}
1389
1390/* :nodoc: */
1391static VALUE
1392yielder_yield_push(VALUE obj, VALUE arg)
1393{
1394 struct yielder *ptr = yielder_ptr(obj);
1395
1396 rb_proc_call_with_block(ptr->proc, 1, &arg, Qnil);
1397
1398 return obj;
1399}
1400
1401/*
1402 * Returns a Proc object that takes arguments and yields them.
1403 *
1404 * This method is implemented so that a Yielder object can be directly
1405 * passed to another method as a block argument.
1406 *
1407 * enum = Enumerator.new { |y|
1408 * Dir.glob("*.rb") { |file|
1409 * File.open(file) { |f| f.each_line(&y) }
1410 * }
1411 * }
1412 */
1413static VALUE
1414yielder_to_proc(VALUE obj)
1415{
1416 VALUE method = rb_obj_method(obj, sym_yield);
1417
1418 return rb_funcall(method, idTo_proc, 0);
1419}
1420
1421static VALUE
1422yielder_yield_i(RB_BLOCK_CALL_FUNC_ARGLIST(obj, memo))
1423{
1424 return rb_yield_values_kw(argc, argv, RB_PASS_CALLED_KEYWORDS);
1425}
1426
1427static VALUE
1428yielder_new(void)
1429{
1430 return yielder_init(yielder_allocate(rb_cYielder), rb_proc_new(yielder_yield_i, 0));
1431}
1432
1433/*
1434 * Generator
1435 */
1436static void
1437generator_mark(void *p)
1438{
1439 struct generator *ptr = p;
1440 rb_gc_mark_movable(ptr->proc);
1441 rb_gc_mark_movable(ptr->obj);
1442}
1443
1444static void
1445generator_compact(void *p)
1446{
1447 struct generator *ptr = p;
1448 ptr->proc = rb_gc_location(ptr->proc);
1449 ptr->obj = rb_gc_location(ptr->obj);
1450}
1451
1452#define generator_free RUBY_TYPED_DEFAULT_FREE
1453
1454static size_t
1455generator_memsize(const void *p)
1456{
1457 return sizeof(struct generator);
1458}
1459
1460static const rb_data_type_t generator_data_type = {
1461 "generator",
1462 {
1463 generator_mark,
1464 generator_free,
1465 generator_memsize,
1466 generator_compact,
1467 },
1468 0, 0, RUBY_TYPED_FREE_IMMEDIATELY
1469};
1470
1471static struct generator *
1472generator_ptr(VALUE obj)
1473{
1474 struct generator *ptr;
1475
1476 TypedData_Get_Struct(obj, struct generator, &generator_data_type, ptr);
1477 if (!ptr || UNDEF_P(ptr->proc)) {
1478 rb_raise(rb_eArgError, "uninitialized generator");
1479 }
1480 return ptr;
1481}
1482
1483/* :nodoc: */
1484static VALUE
1485generator_allocate(VALUE klass)
1486{
1487 struct generator *ptr;
1488 VALUE obj;
1489
1490 obj = TypedData_Make_Struct(klass, struct generator, &generator_data_type, ptr);
1491 ptr->proc = Qundef;
1492
1493 return obj;
1494}
1495
1496static VALUE
1497generator_init(VALUE obj, VALUE proc)
1498{
1499 struct generator *ptr;
1500
1501 rb_check_frozen(obj);
1502 TypedData_Get_Struct(obj, struct generator, &generator_data_type, ptr);
1503
1504 if (!ptr) {
1505 rb_raise(rb_eArgError, "unallocated generator");
1506 }
1507
1508 ptr->proc = proc;
1509
1510 return obj;
1511}
1512
1513/* :nodoc: */
1514static VALUE
1515generator_initialize(int argc, VALUE *argv, VALUE obj)
1516{
1517 VALUE proc;
1518
1519 if (argc == 0) {
1520 rb_need_block();
1521
1522 proc = rb_block_proc();
1523 }
1524 else {
1525 rb_scan_args(argc, argv, "1", &proc);
1526
1527 if (!rb_obj_is_proc(proc))
1528 rb_raise(rb_eTypeError,
1529 "wrong argument type %"PRIsVALUE" (expected Proc)",
1530 rb_obj_class(proc));
1531
1532 if (rb_block_given_p()) {
1533 rb_warn("given block not used");
1534 }
1535 }
1536
1537 return generator_init(obj, proc);
1538}
1539
1540/* :nodoc: */
1541static VALUE
1542generator_init_copy(VALUE obj, VALUE orig)
1543{
1544 struct generator *ptr0, *ptr1;
1545
1546 if (!OBJ_INIT_COPY(obj, orig)) return obj;
1547
1548 ptr0 = generator_ptr(orig);
1549
1550 TypedData_Get_Struct(obj, struct generator, &generator_data_type, ptr1);
1551
1552 if (!ptr1) {
1553 rb_raise(rb_eArgError, "unallocated generator");
1554 }
1555
1556 ptr1->proc = ptr0->proc;
1557
1558 return obj;
1559}
1560
1561/* :nodoc: */
1562static VALUE
1563generator_each(int argc, VALUE *argv, VALUE obj)
1564{
1565 struct generator *ptr = generator_ptr(obj);
1566 VALUE args = rb_ary_new2(argc + 1);
1567
1568 rb_ary_push(args, yielder_new());
1569 if (argc > 0) {
1570 rb_ary_cat(args, argv, argc);
1571 }
1572
1573 return rb_proc_call_kw(ptr->proc, args, RB_PASS_CALLED_KEYWORDS);
1574}
1575
1576/* Lazy Enumerator methods */
1577static VALUE
1578enum_size(VALUE self)
1579{
1580 VALUE r = rb_check_funcall(self, id_size, 0, 0);
1581 return UNDEF_P(r) ? Qnil : r;
1582}
1583
1584static VALUE
1585lazyenum_size(VALUE self, VALUE args, VALUE eobj)
1586{
1587 return enum_size(self);
1588}
1589
1590#define lazy_receiver_size lazy_map_size
1591
1592static VALUE
1593lazy_init_iterator(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
1594{
1595 VALUE result;
1596 if (argc == 1) {
1597 VALUE args[2];
1598 args[0] = m;
1599 args[1] = val;
1600 result = rb_yield_values2(2, args);
1601 }
1602 else {
1603 VALUE args;
1604 int len = rb_long2int((long)argc + 1);
1605 VALUE *nargv = ALLOCV_N(VALUE, args, len);
1606
1607 nargv[0] = m;
1608 if (argc > 0) {
1609 MEMCPY(nargv + 1, argv, VALUE, argc);
1610 }
1611 result = rb_yield_values2(len, nargv);
1612 ALLOCV_END(args);
1613 }
1614 if (UNDEF_P(result)) rb_iter_break();
1615 return Qnil;
1616}
1617
1618static VALUE
1619lazy_init_block_i(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
1620{
1621 rb_block_call(m, id_each, argc-1, argv+1, lazy_init_iterator, val);
1622 return Qnil;
1623}
1624
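/*
 * Lazy evaluation threads a struct MEMO through the proc chain: v2 holds
 * the current value and u3.state holds flags.  LAZY_MEMO_PACKED marks that
 * the value is an Array packing multiple yielded values; LAZY_MEMO_BREAK
 * requests that the enclosing iteration stop (lazy_yielder_result calls
 * rb_iter_break when it is set).
 */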
1625#define memo_value v2
1626#define memo_flags u3.state
1627#define LAZY_MEMO_BREAK 1
1628#define LAZY_MEMO_PACKED 2
1629#define LAZY_MEMO_BREAK_P(memo) ((memo)->memo_flags & LAZY_MEMO_BREAK)
1630#define LAZY_MEMO_PACKED_P(memo) ((memo)->memo_flags & LAZY_MEMO_PACKED)
1631#define LAZY_MEMO_SET_BREAK(memo) ((memo)->memo_flags |= LAZY_MEMO_BREAK)
1632#define LAZY_MEMO_RESET_BREAK(memo) ((memo)->memo_flags &= ~LAZY_MEMO_BREAK)
1633#define LAZY_MEMO_SET_VALUE(memo, value) MEMO_V2_SET(memo, value)
1634#define LAZY_MEMO_SET_PACKED(memo) ((memo)->memo_flags |= LAZY_MEMO_PACKED)
1635#define LAZY_MEMO_RESET_PACKED(memo) ((memo)->memo_flags &= ~LAZY_MEMO_PACKED)
1636
1637static VALUE lazy_yielder_result(struct MEMO *result, VALUE yielder, VALUE procs_array, VALUE memos, long i);
1638
1639static VALUE
1640lazy_init_yielder(RB_BLOCK_CALL_FUNC_ARGLIST(_, m))
1641{
1642 VALUE yielder = RARRAY_AREF(m, 0);
1643 VALUE procs_array = RARRAY_AREF(m, 1);
1644 VALUE memos = rb_attr_get(yielder, id_memo);
1645 struct MEMO *result;
1646
1647 result = MEMO_NEW(m, rb_enum_values_pack(argc, argv),
1648 argc > 1 ? LAZY_MEMO_PACKED : 0);
1649 return lazy_yielder_result(result, yielder, procs_array, memos, 0);
1650}
1651
1652static VALUE
1653lazy_yielder_yield(struct MEMO *result, long memo_index, int argc, const VALUE *argv)
1654{
1655 VALUE m = result->v1;
1656 VALUE yielder = RARRAY_AREF(m, 0);
1657 VALUE procs_array = RARRAY_AREF(m, 1);
1658 VALUE memos = rb_attr_get(yielder, id_memo);
1659 LAZY_MEMO_SET_VALUE(result, rb_enum_values_pack(argc, argv));
1660 if (argc > 1)
1661 LAZY_MEMO_SET_PACKED(result);
1662 else
1663 LAZY_MEMO_RESET_PACKED(result);
1664 return lazy_yielder_result(result, yielder, procs_array, memos, memo_index);
1665}
1666
1667static VALUE
1668lazy_yielder_result(struct MEMO *result, VALUE yielder, VALUE procs_array, VALUE memos, long i)
1669{
1670 int cont = 1;
1671
1672 for (; i < RARRAY_LEN(procs_array); i++) {
1673 VALUE proc = RARRAY_AREF(procs_array, i);
1674 struct proc_entry *entry = proc_entry_ptr(proc);
1675 if (!(*entry->fn->proc)(proc, result, memos, i)) {
1676 cont = 0;
1677 break;
1678 }
1679 }
1680
1681 if (cont) {
1682 rb_funcall2(yielder, idLTLT, 1, &(result->memo_value));
1683 }
1684 if (LAZY_MEMO_BREAK_P(result)) {
1685 rb_iter_break();
1686 }
1687 return result->memo_value;
1688}
1689
1690static VALUE
1691lazy_init_block(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
1692{
1693 VALUE procs = RARRAY_AREF(m, 1);
1694
1695 rb_ivar_set(val, id_memo, rb_ary_new2(RARRAY_LEN(procs)));
1696 rb_block_call(RARRAY_AREF(m, 0), id_each, 0, 0,
1697 lazy_init_yielder, rb_ary_new3(2, val, procs));
1698 return Qnil;
1699}
1700
1701static VALUE
1702lazy_generator_init(VALUE enumerator, VALUE procs)
1703{
1704 VALUE generator;
1705 VALUE obj;
1706 struct generator *gen_ptr;
1707 struct enumerator *e = enumerator_ptr(enumerator);
1708
1709 if (RARRAY_LEN(procs) > 0) {
1710 struct generator *old_gen_ptr = generator_ptr(e->obj);
1711 obj = old_gen_ptr->obj;
1712 }
1713 else {
1714 obj = enumerator;
1715 }
1716
1717 generator = generator_allocate(rb_cGenerator);
1718
1719 rb_block_call(generator, id_initialize, 0, 0,
1720 lazy_init_block, rb_ary_new3(2, obj, procs));
1721
1722 gen_ptr = generator_ptr(generator);
1723 gen_ptr->obj = obj;
1724
1725 return generator;
1726}
1727
1728static int
1729lazy_precheck(VALUE procs)
1730{
1731 if (RTEST(procs)) {
1732 long num_procs = RARRAY_LEN(procs), i = num_procs;
1733 while (i-- > 0) {
1734 VALUE proc = RARRAY_AREF(procs, i);
1735 struct proc_entry *entry = proc_entry_ptr(proc);
1736 lazyenum_precheck_func *precheck = entry->fn->precheck;
1737 if (precheck && !precheck(proc)) return FALSE;
1738 }
1739 }
1740
1741 return TRUE;
1742}
1743
1744/*
1745 * Document-class: Enumerator::Lazy
1746 *
1747 * Enumerator::Lazy is a special type of Enumerator that allows constructing
1748 * chains of operations without evaluating them immediately, and evaluating
1749 * values on an as-needed basis. In order to do so, it redefines most of the
1750 * Enumerable methods so that they just construct another lazy enumerator.
1751 *
1752 * Enumerator::Lazy can be constructed from any Enumerable with the
1753 * Enumerable#lazy method.
1754 *
1755 * lazy = (1..Float::INFINITY).lazy.select(&:odd?).drop(10).take_while { |i| i < 30 }
1756 * # => #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: 1..Infinity>:select>:drop(10)>:take_while>
1757 *
1758 * The real enumeration is performed when any non-redefined Enumerable method
1759 * is called, like Enumerable#first or Enumerable#to_a (the latter is aliased
1760 * as #force for more semantic code):
1761 *
1762 * lazy.first(2)
1763 * #=> [21, 23]
1764 *
1765 * lazy.force
1766 * #=> [21, 23, 25, 27, 29]
1767 *
1768 * Note that most Enumerable methods that could be called with or without
1769 * a block will, on Enumerator::Lazy, always require a block:
1770 *
1771 * [1, 2, 3].map #=> #<Enumerator: [1, 2, 3]:map>
1772 * [1, 2, 3].lazy.map # ArgumentError: tried to call lazy map without a block
1773 *
1774 * This class allows idiomatic calculations on long or infinite sequences, as well
1775 * as chaining of calculations without constructing intermediate arrays.
1776 *
1777 * Example for working with a slowly calculated sequence:
1778 *
1779 * require 'open-uri'
1780 *
1781 * # This will fetch all URLs before selecting
1782 * # necessary data
1783 * URLS.map { |u| JSON.parse(URI.open(u).read) }
1784 * .select { |data| data.key?('stats') }
1785 * .first(5)
1786 *
1787 * # This will fetch URLs one-by-one, only till
1788 * # there is enough data to satisfy the condition
1789 * URLS.lazy.map { |u| JSON.parse(URI.open(u).read) }
1790 * .select { |data| data.key?('stats') }
1791 * .first(5)
1792 *
1793 * Ending a chain with ".eager" generates a non-lazy enumerator, which
1794 * is suitable for returning or passing to another method that expects
1795 * a normal enumerator.
1796 *
1797 * def active_items
1798 * groups
1799 * .lazy
1800 * .flat_map(&:items)
1801 * .reject(&:disabled)
1802 * .eager
1803 * end
1804 *
1805 * # This works lazily; if a checked item is found, it stops
1806 * # iteration and does not look into remaining groups.
1807 * first_checked = active_items.find(&:checked)
1808 *
1809 * # This returns an array of items like a normal enumerator does.
1810 * all_checked = active_items.select(&:checked)
1811 *
1812 */
1813
1814/*
1815 * call-seq:
1816 * Lazy.new(obj, size=nil) { |yielder, *values| block }
1817 *
1818 * Creates a new Lazy enumerator. When the enumerator is actually enumerated
1819 * (e.g. by calling #force), +obj+ will be enumerated and each value passed
1820 * to the given block. The block can yield values back using +yielder+.
1821 * For example, to create a "filter+map" enumerator:
1822 *
1823 * def filter_map(sequence)
1824 * Lazy.new(sequence) do |yielder, *values|
1825 * result = yield *values
1826 * yielder << result if result
1827 * end
1828 * end
1829 *
1830 * filter_map(1..Float::INFINITY) {|i| i*i if i.even?}.first(5)
1831 * #=> [4, 16, 36, 64, 100]
1832 */
1833static VALUE
1834lazy_initialize(int argc, VALUE *argv, VALUE self)
1835{
1836 VALUE obj, size = Qnil;
1837 VALUE generator;
1838
1839 rb_check_arity(argc, 1, 2);
1840 if (!rb_block_given_p()) {
1841 rb_raise(rb_eArgError, "tried to call lazy new without a block");
1842 }
1843 obj = argv[0];
1844 if (argc > 1) {
1845 size = argv[1];
1846 }
1847 generator = generator_allocate(rb_cGenerator);
1848 rb_block_call(generator, id_initialize, 0, 0, lazy_init_block_i, obj);
1849 enumerator_init(self, generator, sym_each, 0, 0, 0, size, 0);
1850 rb_ivar_set(self, id_receiver, obj);
1851
1852 return self;
1853}
1854
1855#if 0 /* for RDoc */
1856/*
1857 * call-seq:
1858 * lazy.to_a -> array
1859 * lazy.force -> array
1860 *
1861 * Expands +lazy+ enumerator to an array.
1862 * See Enumerable#to_a.
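 *
 * For example:
 *
 *   (1..10).lazy.map { |i| i * 2 }.force
 *   #=> [2, 4, 6, 8, 10, 12, 14, 16, 18, 20]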
1863 */
1864static VALUE
1865lazy_to_a(VALUE self)
1866{
1867}
1868#endif
1869
1870static void
1871lazy_set_args(VALUE lazy, VALUE args)
1872{
1873 ID id = rb_frame_this_func();
1874 rb_ivar_set(lazy, id_method, ID2SYM(id));
1875 if (NIL_P(args)) {
1876 /* Qfalse indicates that the arguments are empty */
1877 rb_ivar_set(lazy, id_arguments, Qfalse);
1878 }
1879 else {
1880 rb_ivar_set(lazy, id_arguments, args);
1881 }
1882}
1883
1884#if 0
1885static VALUE
1886lazy_set_method(VALUE lazy, VALUE args, rb_enumerator_size_func *size_fn)
1887{
1888 struct enumerator *e = enumerator_ptr(lazy);
1889 lazy_set_args(lazy, args);
1890 e->size_fn = size_fn;
1891 return lazy;
1892}
1893#endif
1894
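/*
 * Append one more chained operation to a lazy enumerator: wrap the given
 * block, arguments and lazyenum_funcs table in a proc_entry, add it to a
 * duplicate of the current procs array, and return a copy of the
 * enumerator whose generator drives the extended chain.
 */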
1895static VALUE
1896lazy_add_method(VALUE obj, int argc, VALUE *argv, VALUE args, VALUE memo,
1897 const lazyenum_funcs *fn)
1898{
1899 struct enumerator *new_e;
1900 VALUE new_obj;
1901 VALUE new_generator;
1902 VALUE new_procs;
1903 struct enumerator *e = enumerator_ptr(obj);
1904 struct proc_entry *entry;
1905 VALUE entry_obj = TypedData_Make_Struct(rb_cObject, struct proc_entry,
1906 &proc_entry_data_type, entry);
1907 if (rb_block_given_p()) {
1908 entry->proc = rb_block_proc();
1909 }
1910 entry->fn = fn;
1911 entry->memo = args;
1912
1913 lazy_set_args(entry_obj, memo);
1914
1915 new_procs = RTEST(e->procs) ? rb_ary_dup(e->procs) : rb_ary_new();
1916 new_generator = lazy_generator_init(obj, new_procs);
1917 rb_ary_push(new_procs, entry_obj);
1918
1919 new_obj = enumerator_init_copy(enumerator_allocate(rb_cLazy), obj);
1920 new_e = DATA_PTR(new_obj);
1921 new_e->obj = new_generator;
1922 new_e->procs = new_procs;
1923
1924 if (argc > 0) {
1925 new_e->meth = rb_to_id(*argv++);
1926 --argc;
1927 }
1928 else {
1929 new_e->meth = id_each;
1930 }
1931 new_e->args = rb_ary_new4(argc, argv);
1932 return new_obj;
1933}
1934
1935/*
1936 * call-seq:
1937 * e.lazy -> lazy_enumerator
1938 *
1939 * Returns an Enumerator::Lazy, which redefines most Enumerable
1940 * methods to postpone enumeration and enumerate values only on an
1941 * as-needed basis.
1942 *
1943 * === Example
1944 *
1945 * The following program finds pythagorean triples:
1946 *
1947 * def pythagorean_triples
1948 * (1..Float::INFINITY).lazy.flat_map {|z|
1949 * (1..z).flat_map {|x|
1950 * (x..z).select {|y|
1951 * x**2 + y**2 == z**2
1952 * }.map {|y|
1953 * [x, y, z]
1954 * }
1955 * }
1956 * }
1957 * end
1958 * # show first ten pythagorean triples
1959 * p pythagorean_triples.take(10).force # take is lazy, so force is needed
1960 * p pythagorean_triples.first(10) # first is eager
1961 * # show pythagorean triples less than 100
1962 * p pythagorean_triples.take_while { |*, z| z < 100 }.force
1963 */
1964static VALUE
1965enumerable_lazy(VALUE obj)
1966{
1967 VALUE result = lazy_to_enum_i(obj, sym_each, 0, 0, lazyenum_size, rb_keyword_given_p());
1968 /* Qfalse indicates that the Enumerator::Lazy has no method name */
1969 rb_ivar_set(result, id_method, Qfalse);
1970 return result;
1971}
1972
1973static VALUE
1974lazy_to_enum_i(VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, int kw_splat)
1975{
1976 return enumerator_init(enumerator_allocate(rb_cLazy),
1977 obj, meth, argc, argv, size_fn, Qnil, kw_splat);
1978}
1979
1980/*
1981 * call-seq:
1982 * lzy.to_enum(method = :each, *args) -> lazy_enum
1983 * lzy.enum_for(method = :each, *args) -> lazy_enum
1984 * lzy.to_enum(method = :each, *args) {|*args| block } -> lazy_enum
1985 * lzy.enum_for(method = :each, *args) {|*args| block } -> lazy_enum
1986 *
1987 * Similar to Object#to_enum, except it returns a lazy enumerator.
1988 * This makes it easy to define Enumerable methods that will
1989 * naturally remain lazy if called from a lazy enumerator.
1990 *
1991 * For example, continuing from the example in Object#to_enum:
1992 *
1993 * # See Object#to_enum for the definition of repeat
1994 * r = 1..Float::INFINITY
1995 * r.repeat(2).first(5) # => [1, 1, 2, 2, 3]
1996 * r.repeat(2).class # => Enumerator
1997 * r.repeat(2).map{|n| n ** 2}.first(5) # => endless loop!
1998 * # works naturally on lazy enumerator:
1999 * r.lazy.repeat(2).class # => Enumerator::Lazy
2000 * r.lazy.repeat(2).map{|n| n ** 2}.first(5) # => [1, 1, 4, 4, 9]
2001 */
2002
2003static VALUE
2004lazy_to_enum(int argc, VALUE *argv, VALUE self)
2005{
2006 VALUE lazy, meth = sym_each, super_meth;
2007
2008 if (argc > 0) {
2009 --argc;
2010 meth = *argv++;
2011 }
2012 if (RTEST((super_meth = rb_hash_aref(lazy_use_super_method, meth)))) {
2013 meth = super_meth;
2014 }
2015 lazy = lazy_to_enum_i(self, meth, argc, argv, 0, rb_keyword_given_p());
2016 if (rb_block_given_p()) {
2017 enumerator_ptr(lazy)->size = rb_block_proc();
2018 }
2019 return lazy;
2020}
2021
2022static VALUE
2023lazy_eager_size(VALUE self, VALUE args, VALUE eobj)
2024{
2025 return enum_size(self);
2026}
2027
2028/*
2029 * call-seq:
2030 * lzy.eager -> enum
2031 *
2032 * Returns a non-lazy Enumerator converted from the lazy enumerator.
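 *
 * === Example
 *
 *   lazy_enum = (1..Float::INFINITY).lazy.map { |i| i * 2 }
 *   enum = lazy_enum.eager
 *   enum.class     #=> Enumerator
 *   enum.first(3)  #=> [2, 4, 6]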
2033 */
2034
2035static VALUE
2036lazy_eager(VALUE self)
2037{
2038 return enumerator_init(enumerator_allocate(rb_cEnumerator),
2039 self, sym_each, 0, 0, lazy_eager_size, Qnil, 0);
2040}
2041
2042static VALUE
2043lazyenum_yield(VALUE proc_entry, struct MEMO *result)
2044{
2045 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2046 return rb_proc_call_with_block(entry->proc, 1, &result->memo_value, Qnil);
2047}
2048
2049static VALUE
2050lazyenum_yield_values(VALUE proc_entry, struct MEMO *result)
2051{
2052 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2053 int argc = 1;
2054 const VALUE *argv = &result->memo_value;
2055 if (LAZY_MEMO_PACKED_P(result)) {
2056 const VALUE args = *argv;
2057 argc = RARRAY_LENINT(args);
2058 argv = RARRAY_CONST_PTR(args);
2059 }
2060 return rb_proc_call_with_block(entry->proc, argc, argv, Qnil);
2061}
2062
2063static struct MEMO *
2064lazy_map_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2065{
2066 VALUE value = lazyenum_yield_values(proc_entry, result);
2067 LAZY_MEMO_SET_VALUE(result, value);
2068 LAZY_MEMO_RESET_PACKED(result);
2069 return result;
2070}
2071
2072static VALUE
2073lazy_map_size(VALUE entry, VALUE receiver)
2074{
2075 return receiver;
2076}
2077
2078static const lazyenum_funcs lazy_map_funcs = {
2079 lazy_map_proc, lazy_map_size,
2080};
2081
2082/*
2083 * call-seq:
2084 * lazy.collect { |obj| block } -> lazy_enumerator
2085 * lazy.map { |obj| block } -> lazy_enumerator
2086 *
2087 * Like Enumerable#map, but chains operation to be lazy-evaluated.
2088 *
2089 * (1..Float::INFINITY).lazy.map {|i| i**2 }
2090 * #=> #<Enumerator::Lazy: #<Enumerator::Lazy: 1..Infinity>:map>
2091 * (1..Float::INFINITY).lazy.map {|i| i**2 }.first(3)
2092 * #=> [1, 4, 9]
2093 */
2094
2095static VALUE
2096lazy_map(VALUE obj)
2097{
2098 if (!rb_block_given_p()) {
2099 rb_raise(rb_eArgError, "tried to call lazy map without a block");
2100 }
2101
2102 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_map_funcs);
2103}
2104
2105struct flat_map_i_arg {
2106 struct MEMO *result;
2107 long index;
2108};
2109
2110static VALUE
2111lazy_flat_map_i(RB_BLOCK_CALL_FUNC_ARGLIST(i, y))
2112{
2113 struct flat_map_i_arg *arg = (struct flat_map_i_arg *)y;
2114
2115 return lazy_yielder_yield(arg->result, arg->index, argc, argv);
2116}
2117
2118static struct MEMO *
2119lazy_flat_map_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2120{
2121 VALUE value = lazyenum_yield_values(proc_entry, result);
2122 VALUE ary = 0;
2123 const long proc_index = memo_index + 1;
2124 int break_p = LAZY_MEMO_BREAK_P(result);
2125
2126 if (RB_TYPE_P(value, T_ARRAY)) {
2127 ary = value;
2128 }
2129 else if (rb_respond_to(value, id_force) && rb_respond_to(value, id_each)) {
2130 struct flat_map_i_arg arg = {.result = result, .index = proc_index};
2131 LAZY_MEMO_RESET_BREAK(result);
2132 rb_block_call(value, id_each, 0, 0, lazy_flat_map_i, (VALUE)&arg);
2133 if (break_p) LAZY_MEMO_SET_BREAK(result);
2134 return 0;
2135 }
2136
2137 if (ary || !NIL_P(ary = rb_check_array_type(value))) {
2138 long i;
2139 LAZY_MEMO_RESET_BREAK(result);
2140 for (i = 0; i + 1 < RARRAY_LEN(ary); i++) {
2141 const VALUE argv = RARRAY_AREF(ary, i);
2142 lazy_yielder_yield(result, proc_index, 1, &argv);
2143 }
2144 if (break_p) LAZY_MEMO_SET_BREAK(result);
2145 if (i >= RARRAY_LEN(ary)) return 0;
2146 value = RARRAY_AREF(ary, i);
2147 }
2148 LAZY_MEMO_SET_VALUE(result, value);
2149 LAZY_MEMO_RESET_PACKED(result);
2150 return result;
2151}
2152
2153static const lazyenum_funcs lazy_flat_map_funcs = {
2154 lazy_flat_map_proc, 0,
2155};
2156
2157/*
2158 * call-seq:
2159 * lazy.collect_concat { |obj| block } -> a_lazy_enumerator
2160 * lazy.flat_map { |obj| block } -> a_lazy_enumerator
2161 *
2162 * Returns a new lazy enumerator with the concatenated results of running
2163 * +block+ once for every element in the lazy enumerator.
2164 *
2165 * ["foo", "bar"].lazy.flat_map {|i| i.each_char.lazy}.force
2166 * #=> ["f", "o", "o", "b", "a", "r"]
2167 *
2168 * A value +x+ returned by +block+ is decomposed if either of
2169 * the following conditions is true:
2170 *
2171 * * +x+ responds to both each and force, which means that
2172 * +x+ is a lazy enumerator.
2173 * * +x+ is an array or responds to to_ary.
2174 *
2175 * Otherwise, +x+ is contained as-is in the return value.
2176 *
2177 * [{a:1}, {b:2}].lazy.flat_map {|i| i}.force
2178 * #=> [{:a=>1}, {:b=>2}]
2179 */
2180static VALUE
2181lazy_flat_map(VALUE obj)
2182{
2183 if (!rb_block_given_p()) {
2184 rb_raise(rb_eArgError, "tried to call lazy flat_map without a block");
2185 }
2186
2187 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_flat_map_funcs);
2188}
2189
2190static struct MEMO *
2191lazy_select_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2192{
2193 VALUE chain = lazyenum_yield(proc_entry, result);
2194 if (!RTEST(chain)) return 0;
2195 return result;
2196}
2197
2198static const lazyenum_funcs lazy_select_funcs = {
2199 lazy_select_proc, 0,
2200};
2201
2202/*
2203 * call-seq:
2204 * lazy.find_all { |obj| block } -> lazy_enumerator
2205 * lazy.select { |obj| block } -> lazy_enumerator
2206 * lazy.filter { |obj| block } -> lazy_enumerator
2207 *
2208 * Like Enumerable#select, but chains operation to be lazy-evaluated.
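 *
 * For instance, only as many elements as are forced get evaluated,
 * even over an infinite source:
 *
 *     (1..Float::INFINITY).lazy.select { |i| i % 3 == 0 }.first(3)
 *     #=> [3, 6, 9]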
2209 */
2210static VALUE
2211lazy_select(VALUE obj)
2212{
2213 if (!rb_block_given_p()) {
2214 rb_raise(rb_eArgError, "tried to call lazy select without a block");
2215 }
2216
2217 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_select_funcs);
2218}
2219
2220static struct MEMO *
2221lazy_filter_map_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2222{
2223 VALUE value = lazyenum_yield_values(proc_entry, result);
2224 if (!RTEST(value)) return 0;
2225 LAZY_MEMO_SET_VALUE(result, value);
2226 LAZY_MEMO_RESET_PACKED(result);
2227 return result;
2228}
2229
2230static const lazyenum_funcs lazy_filter_map_funcs = {
2231 lazy_filter_map_proc, 0,
2232};
2233
2234/*
2235 * call-seq:
2236 * lazy.filter_map { |obj| block } -> lazy_enumerator
2237 *
2238 * Like Enumerable#filter_map, but chains operation to be lazy-evaluated.
2239 *
2240 * (1..).lazy.filter_map { |i| i * 2 if i.even? }.first(5)
2241 * #=> [4, 8, 12, 16, 20]
2242 */
2243
2244static VALUE
2245lazy_filter_map(VALUE obj)
2246{
2247 if (!rb_block_given_p()) {
2248 rb_raise(rb_eArgError, "tried to call lazy filter_map without a block");
2249 }
2250
2251 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_filter_map_funcs);
2252}
2253
2254static struct MEMO *
2255lazy_reject_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2256{
2257 VALUE chain = lazyenum_yield(proc_entry, result);
2258 if (RTEST(chain)) return 0;
2259 return result;
2260}
2261
2262static const lazyenum_funcs lazy_reject_funcs = {
2263 lazy_reject_proc, 0,
2264};
2265
2266/*
2267 * call-seq:
2268 * lazy.reject { |obj| block } -> lazy_enumerator
2269 *
2270 * Like Enumerable#reject, but chains operation to be lazy-evaluated.
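 *
 * For instance, rejecting odd numbers from an infinite range:
 *
 *     (1..Float::INFINITY).lazy.reject(&:odd?).first(3)
 *     #=> [2, 4, 6]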
2271 */
2272
2273static VALUE
2274lazy_reject(VALUE obj)
2275{
2276 if (!rb_block_given_p()) {
2277 rb_raise(rb_eArgError, "tried to call lazy reject without a block");
2278 }
2279
2280 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_reject_funcs);
2281}
2282
2283static struct MEMO *
2284lazy_grep_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2285{
2286 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2287 VALUE chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
2288 if (!RTEST(chain)) return 0;
2289 return result;
2290}
2291
2292static struct MEMO *
2293lazy_grep_iter_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2294{
2295 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2296 VALUE value, chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
2297
2298 if (!RTEST(chain)) return 0;
2299 value = rb_proc_call_with_block(entry->proc, 1, &(result->memo_value), Qnil);
2300 LAZY_MEMO_SET_VALUE(result, value);
2301 LAZY_MEMO_RESET_PACKED(result);
2302
2303 return result;
2304}
2305
2306static const lazyenum_funcs lazy_grep_iter_funcs = {
2307 lazy_grep_iter_proc, 0,
2308};
2309
2310static const lazyenum_funcs lazy_grep_funcs = {
2311 lazy_grep_proc, 0,
2312};
2313
2314/*
2315 * call-seq:
2316 * lazy.grep(pattern) -> lazy_enumerator
2317 * lazy.grep(pattern) { |obj| block } -> lazy_enumerator
2318 *
2319 * Like Enumerable#grep, but chains operation to be lazy-evaluated.
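 *
 * For instance, the pattern is matched with === and, when given, the
 * block maps the matching elements:
 *
 *     (1..Float::INFINITY).lazy.grep(2..5) { |i| i * 10 }.first(3)
 *     #=> [20, 30, 40]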
2320 */
2321
2322static VALUE
2323lazy_grep(VALUE obj, VALUE pattern)
2324{
2325 const lazyenum_funcs *const funcs = rb_block_given_p() ?
2326 &lazy_grep_iter_funcs : &lazy_grep_funcs;
2327 return lazy_add_method(obj, 0, 0, pattern, rb_ary_new3(1, pattern), funcs);
2328}
2329
2330static struct MEMO *
2331lazy_grep_v_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2332{
2333 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2334 VALUE chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
2335 if (RTEST(chain)) return 0;
2336 return result;
2337}
2338
2339static struct MEMO *
2340lazy_grep_v_iter_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2341{
2342 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2343 VALUE value, chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
2344
2345 if (RTEST(chain)) return 0;
2346 value = rb_proc_call_with_block(entry->proc, 1, &(result->memo_value), Qnil);
2347 LAZY_MEMO_SET_VALUE(result, value);
2348 LAZY_MEMO_RESET_PACKED(result);
2349
2350 return result;
2351}
2352
2353static const lazyenum_funcs lazy_grep_v_iter_funcs = {
2354 lazy_grep_v_iter_proc, 0,
2355};
2356
2357static const lazyenum_funcs lazy_grep_v_funcs = {
2358 lazy_grep_v_proc, 0,
2359};
2360
2361/*
2362 * call-seq:
2363 * lazy.grep_v(pattern) -> lazy_enumerator
2364 * lazy.grep_v(pattern) { |obj| block } -> lazy_enumerator
2365 *
2366 * Like Enumerable#grep_v, but chains operation to be lazy-evaluated.
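 *
 * For instance, keeping only the elements the pattern does not match:
 *
 *     (1..Float::INFINITY).lazy.grep_v(2..4).first(3)
 *     #=> [1, 5, 6]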
2367 */
2368
2369static VALUE
2370lazy_grep_v(VALUE obj, VALUE pattern)
2371{
2372 const lazyenum_funcs *const funcs = rb_block_given_p() ?
2373 &lazy_grep_v_iter_funcs : &lazy_grep_v_funcs;
2374 return lazy_add_method(obj, 0, 0, pattern, rb_ary_new3(1, pattern), funcs);
2375}
2376
2377static VALUE
2378call_next(VALUE obj)
2379{
2380 return rb_funcall(obj, id_next, 0);
2381}
2382
2383static VALUE
2384next_stopped(VALUE obj, VALUE _)
2385{
2386 return Qnil;
2387}
2388
2389static struct MEMO *
2390lazy_zip_arrays_func(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2391{
2392 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2393 VALUE ary, arrays = entry->memo;
2394 VALUE memo = rb_ary_entry(memos, memo_index);
2395 long i, count = NIL_P(memo) ? 0 : NUM2LONG(memo);
2396
2397 ary = rb_ary_new2(RARRAY_LEN(arrays) + 1);
2398 rb_ary_push(ary, result->memo_value);
2399 for (i = 0; i < RARRAY_LEN(arrays); i++) {
2400 rb_ary_push(ary, rb_ary_entry(RARRAY_AREF(arrays, i), count));
2401 }
2402 LAZY_MEMO_SET_VALUE(result, ary);
2403 LAZY_MEMO_SET_PACKED(result);
2404 rb_ary_store(memos, memo_index, LONG2NUM(++count));
2405 return result;
2406}
2407
2408static struct MEMO *
2409lazy_zip_func(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2410{
2411 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2412 VALUE arg = rb_ary_entry(memos, memo_index);
2413 VALUE zip_args = entry->memo;
2414 VALUE ary, v;
2415 long i;
2416
2417 if (NIL_P(arg)) {
2418 arg = rb_ary_new2(RARRAY_LEN(zip_args));
2419 for (i = 0; i < RARRAY_LEN(zip_args); i++) {
2420 rb_ary_push(arg, rb_funcall(RARRAY_AREF(zip_args, i), id_to_enum, 0));
2421 }
2422 rb_ary_store(memos, memo_index, arg);
2423 }
2424
2425 ary = rb_ary_new2(RARRAY_LEN(arg) + 1);
2426 rb_ary_push(ary, result->memo_value);
2427 for (i = 0; i < RARRAY_LEN(arg); i++) {
2428        v = rb_rescue2(call_next, RARRAY_AREF(arg, i), next_stopped, 0,
2429                       rb_eStopIteration, (VALUE)0);
2430 rb_ary_push(ary, v);
2431 }
2432 LAZY_MEMO_SET_VALUE(result, ary);
2433 LAZY_MEMO_SET_PACKED(result);
2434 return result;
2435}
2436
2437static const lazyenum_funcs lazy_zip_funcs[] = {
2438 {lazy_zip_func, lazy_receiver_size,},
2439 {lazy_zip_arrays_func, lazy_receiver_size,},
2440};
2441
2442/*
2443 * call-seq:
2444 * lazy.zip(arg, ...) -> lazy_enumerator
2445 * lazy.zip(arg, ...) { |arr| block } -> nil
2446 *
2447 * Like Enumerable#zip, but chains operation to be lazy-evaluated.
2448 * However, if a block is given to zip, values are enumerated immediately.
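 *
 * For instance, zipping an infinite range with a finite array:
 *
 *     (1..Float::INFINITY).lazy.zip(%w[a b c]).first(3)
 *     #=> [[1, "a"], [2, "b"], [3, "c"]]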
2449 */
2450static VALUE
2451lazy_zip(int argc, VALUE *argv, VALUE obj)
2452{
2453 VALUE ary, v;
2454 long i;
2455 const lazyenum_funcs *funcs = &lazy_zip_funcs[1];
2456
2457 if (rb_block_given_p()) {
2458 return rb_call_super(argc, argv);
2459 }
2460
2461 ary = rb_ary_new2(argc);
2462 for (i = 0; i < argc; i++) {
2463 v = rb_check_array_type(argv[i]);
2464 if (NIL_P(v)) {
2465 for (; i < argc; i++) {
2466 if (!rb_respond_to(argv[i], id_each)) {
2467 rb_raise(rb_eTypeError, "wrong argument type %"PRIsVALUE" (must respond to :each)",
2468 rb_obj_class(argv[i]));
2469 }
2470 }
2471 ary = rb_ary_new4(argc, argv);
2472 funcs = &lazy_zip_funcs[0];
2473 break;
2474 }
2475 rb_ary_push(ary, v);
2476 }
2477
2478 return lazy_add_method(obj, 0, 0, ary, ary, funcs);
2479}
2480
2481static struct MEMO *
2482lazy_take_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2483{
2484 long remain;
2485 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2486 VALUE memo = rb_ary_entry(memos, memo_index);
2487
2488 if (NIL_P(memo)) {
2489 memo = entry->memo;
2490 }
2491
2492 remain = NUM2LONG(memo);
2493 if (--remain == 0) LAZY_MEMO_SET_BREAK(result);
2494 rb_ary_store(memos, memo_index, LONG2NUM(remain));
2495 return result;
2496}
2497
2498static VALUE
2499lazy_take_size(VALUE entry, VALUE receiver)
2500{
2501 long len = NUM2LONG(RARRAY_AREF(rb_ivar_get(entry, id_arguments), 0));
2502 if (NIL_P(receiver) || (FIXNUM_P(receiver) && FIX2LONG(receiver) < len))
2503 return receiver;
2504 return LONG2NUM(len);
2505}
2506
2507static int
2508lazy_take_precheck(VALUE proc_entry)
2509{
2510 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2511 return entry->memo != INT2FIX(0);
2512}
2513
2514static const lazyenum_funcs lazy_take_funcs = {
2515 lazy_take_proc, lazy_take_size, lazy_take_precheck,
2516};
2517
2518/*
2519 * call-seq:
2520 * lazy.take(n) -> lazy_enumerator
2521 *
2522 * Like Enumerable#take, but chains operation to be lazy-evaluated.
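 *
 * For instance, taking the first three elements of an infinite range:
 *
 *     (1..Float::INFINITY).lazy.take(3).force
 *     #=> [1, 2, 3]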
2523 */
2524
2525static VALUE
2526lazy_take(VALUE obj, VALUE n)
2527{
2528 long len = NUM2LONG(n);
2529
2530 if (len < 0) {
2531 rb_raise(rb_eArgError, "attempt to take negative size");
2532 }
2533
2534 n = LONG2NUM(len); /* no more conversion */
2535
2536 return lazy_add_method(obj, 0, 0, n, rb_ary_new3(1, n), &lazy_take_funcs);
2537}
2538
2539static struct MEMO *
2540lazy_take_while_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2541{
2542 VALUE take = lazyenum_yield_values(proc_entry, result);
2543 if (!RTEST(take)) {
2544 LAZY_MEMO_SET_BREAK(result);
2545 return 0;
2546 }
2547 return result;
2548}
2549
2550static const lazyenum_funcs lazy_take_while_funcs = {
2551 lazy_take_while_proc, 0,
2552};
2553
2554/*
2555 * call-seq:
2556 * lazy.take_while { |obj| block } -> lazy_enumerator
2557 *
2558 * Like Enumerable#take_while, but chains operation to be lazy-evaluated.
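 *
 * For instance, enumeration stops at the first element for which the
 * block returns a falsy value:
 *
 *     (1..Float::INFINITY).lazy.take_while { |i| i < 4 }.force
 *     #=> [1, 2, 3]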
2559 */
2560
2561static VALUE
2562lazy_take_while(VALUE obj)
2563{
2564 if (!rb_block_given_p()) {
2565 rb_raise(rb_eArgError, "tried to call lazy take_while without a block");
2566 }
2567
2568 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_take_while_funcs);
2569}
2570
2571static VALUE
2572lazy_drop_size(VALUE proc_entry, VALUE receiver)
2573{
2574 long len = NUM2LONG(RARRAY_AREF(rb_ivar_get(proc_entry, id_arguments), 0));
2575 if (NIL_P(receiver))
2576 return receiver;
2577 if (FIXNUM_P(receiver)) {
2578 len = FIX2LONG(receiver) - len;
2579 return LONG2FIX(len < 0 ? 0 : len);
2580 }
2581 return rb_funcall(receiver, '-', 1, LONG2NUM(len));
2582}
2583
2584static struct MEMO *
2585lazy_drop_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2586{
2587 long remain;
2588 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2589 VALUE memo = rb_ary_entry(memos, memo_index);
2590
2591 if (NIL_P(memo)) {
2592 memo = entry->memo;
2593 }
2594 remain = NUM2LONG(memo);
2595 if (remain > 0) {
2596 --remain;
2597 rb_ary_store(memos, memo_index, LONG2NUM(remain));
2598 return 0;
2599 }
2600
2601 return result;
2602}
2603
2604static const lazyenum_funcs lazy_drop_funcs = {
2605 lazy_drop_proc, lazy_drop_size,
2606};
2607
2608/*
2609 * call-seq:
2610 * lazy.drop(n) -> lazy_enumerator
2611 *
2612 * Like Enumerable#drop, but chains operation to be lazy-evaluated.
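 *
 * For instance, dropping the first two elements of an infinite range:
 *
 *     (1..Float::INFINITY).lazy.drop(2).first(3)
 *     #=> [3, 4, 5]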
2613 */
2614
2615static VALUE
2616lazy_drop(VALUE obj, VALUE n)
2617{
2618 long len = NUM2LONG(n);
2619 VALUE argv[2];
2620 argv[0] = sym_each;
2621 argv[1] = n;
2622
2623 if (len < 0) {
2624 rb_raise(rb_eArgError, "attempt to drop negative size");
2625 }
2626
2627 return lazy_add_method(obj, 2, argv, n, rb_ary_new3(1, n), &lazy_drop_funcs);
2628}
2629
2630static struct MEMO *
2631lazy_drop_while_proc(VALUE proc_entry, struct MEMO* result, VALUE memos, long memo_index)
2632{
2633 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2634 VALUE memo = rb_ary_entry(memos, memo_index);
2635
2636 if (NIL_P(memo)) {
2637 memo = entry->memo;
2638 }
2639
2640 if (!RTEST(memo)) {
2641 VALUE drop = lazyenum_yield_values(proc_entry, result);
2642 if (RTEST(drop)) return 0;
2643 rb_ary_store(memos, memo_index, Qtrue);
2644 }
2645 return result;
2646}
2647
2648static const lazyenum_funcs lazy_drop_while_funcs = {
2649 lazy_drop_while_proc, 0,
2650};
2651
2652/*
2653 * call-seq:
2654 * lazy.drop_while { |obj| block } -> lazy_enumerator
2655 *
2656 * Like Enumerable#drop_while, but chains operation to be lazy-evaluated.
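 *
 * For instance, elements are dropped until the block first returns a
 * falsy value:
 *
 *     (1..Float::INFINITY).lazy.drop_while { |i| i < 3 }.first(3)
 *     #=> [3, 4, 5]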
2657 */
2658
2659static VALUE
2660lazy_drop_while(VALUE obj)
2661{
2662 if (!rb_block_given_p()) {
2663 rb_raise(rb_eArgError, "tried to call lazy drop_while without a block");
2664 }
2665
2666 return lazy_add_method(obj, 0, 0, Qfalse, Qnil, &lazy_drop_while_funcs);
2667}
2668
2669static int
2670lazy_uniq_check(VALUE chain, VALUE memos, long memo_index)
2671{
2672 VALUE hash = rb_ary_entry(memos, memo_index);
2673
2674 if (NIL_P(hash)) {
2675 hash = rb_obj_hide(rb_hash_new());
2676 rb_ary_store(memos, memo_index, hash);
2677 }
2678
2679 return rb_hash_add_new_element(hash, chain, Qfalse);
2680}
2681
2682static struct MEMO *
2683lazy_uniq_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2684{
2685 if (lazy_uniq_check(result->memo_value, memos, memo_index)) return 0;
2686 return result;
2687}
2688
2689static struct MEMO *
2690lazy_uniq_iter_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2691{
2692 VALUE chain = lazyenum_yield(proc_entry, result);
2693
2694 if (lazy_uniq_check(chain, memos, memo_index)) return 0;
2695 return result;
2696}
2697
2698static const lazyenum_funcs lazy_uniq_iter_funcs = {
2699 lazy_uniq_iter_proc, 0,
2700};
2701
2702static const lazyenum_funcs lazy_uniq_funcs = {
2703 lazy_uniq_proc, 0,
2704};
2705
2706/*
2707 * call-seq:
2708 * lazy.uniq -> lazy_enumerator
2709 * lazy.uniq { |item| block } -> lazy_enumerator
2710 *
2711 * Like Enumerable#uniq, but chains operation to be lazy-evaluated.
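 *
 * For instance, duplicates are filtered out as the values are forced:
 *
 *     (1..Float::INFINITY).lazy.map { |i| i % 3 }.uniq.first(3)
 *     #=> [1, 2, 0]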
2712 */
2713
2714static VALUE
2715lazy_uniq(VALUE obj)
2716{
2717 const lazyenum_funcs *const funcs =
2718 rb_block_given_p() ? &lazy_uniq_iter_funcs : &lazy_uniq_funcs;
2719 return lazy_add_method(obj, 0, 0, Qnil, Qnil, funcs);
2720}
2721
2722static struct MEMO *
2723lazy_compact_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2724{
2725 if (NIL_P(result->memo_value)) return 0;
2726 return result;
2727}
2728
2729static const lazyenum_funcs lazy_compact_funcs = {
2730 lazy_compact_proc, 0,
2731};
2732
2733/*
2734 * call-seq:
2735 * lazy.compact -> lazy_enumerator
2736 *
2737 * Like Enumerable#compact, but chains operation to be lazy-evaluated.
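 *
 * For instance, +nil+ elements are skipped:
 *
 *     [1, nil, 2, nil, 3].lazy.compact.force
 *     #=> [1, 2, 3]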
2738 */
2739
2740static VALUE
2741lazy_compact(VALUE obj)
2742{
2743 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_compact_funcs);
2744}
2745
2746static struct MEMO *
2747lazy_with_index_proc(VALUE proc_entry, struct MEMO* result, VALUE memos, long memo_index)
2748{
2749 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2750 VALUE memo = rb_ary_entry(memos, memo_index);
2751 VALUE argv[2];
2752
2753 if (NIL_P(memo)) {
2754 memo = entry->memo;
2755 }
2756
2757 argv[0] = result->memo_value;
2758 argv[1] = memo;
2759 if (entry->proc) {
2760 rb_proc_call_with_block(entry->proc, 2, argv, Qnil);
2761 LAZY_MEMO_RESET_PACKED(result);
2762 }
2763 else {
2764 LAZY_MEMO_SET_VALUE(result, rb_ary_new_from_values(2, argv));
2765 LAZY_MEMO_SET_PACKED(result);
2766 }
2767 rb_ary_store(memos, memo_index, LONG2NUM(NUM2LONG(memo) + 1));
2768 return result;
2769}
2770
2771static VALUE
2772lazy_with_index_size(VALUE proc, VALUE receiver)
2773{
2774 return receiver;
2775}
2776
2777static const lazyenum_funcs lazy_with_index_funcs = {
2778 lazy_with_index_proc, lazy_with_index_size,
2779};
2780
2781/*
2782 * call-seq:
2783 * lazy.with_index(offset = 0) {|(*args), idx| block }
2784 * lazy.with_index(offset = 0)
2785 *
2786 * If a block is given, calls the given block for each element
2787 * together with an index, which starts from +offset+, and
2788 * returns a lazy enumerator that yields the same values
2789 * (without the index).
2790 *
2791 * If a block is not given, returns a new lazy enumerator that
2792 * includes the index, starting from +offset+.
2793 *
2794 * +offset+:: the starting index to use
2795 *
2796 * See Enumerator#with_index.
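 *
 * For instance, without a block each yielded value is paired with its
 * index:
 *
 *     %w[foo bar baz].lazy.with_index(1).first(2)
 *     #=> [["foo", 1], ["bar", 2]]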
2797 */
2798static VALUE
2799lazy_with_index(int argc, VALUE *argv, VALUE obj)
2800{
2801 VALUE memo;
2802
2803 rb_scan_args(argc, argv, "01", &memo);
2804 if (NIL_P(memo))
2805 memo = LONG2NUM(0);
2806
2807 return lazy_add_method(obj, 0, 0, memo, rb_ary_new_from_values(1, &memo), &lazy_with_index_funcs);
2808}
2809
2810#if 0 /* for RDoc */
2811
2812/*
2813 * call-seq:
2814 * lazy.chunk { |elt| ... } -> lazy_enumerator
2815 *
2816 * Like Enumerable#chunk, but chains operation to be lazy-evaluated.
2817 */
2818static VALUE
2819lazy_chunk(VALUE self)
2820{
2821}
2822
2823/*
2824 * call-seq:
2825 * lazy.chunk_while {|elt_before, elt_after| bool } -> lazy_enumerator
2826 *
2827 * Like Enumerable#chunk_while, but chains operation to be lazy-evaluated.
2828 */
2829static VALUE
2830lazy_chunk_while(VALUE self)
2831{
2832}
2833
2834/*
2835 * call-seq:
2836 * lazy.slice_after(pattern) -> lazy_enumerator
2837 * lazy.slice_after { |elt| bool } -> lazy_enumerator
2838 *
2839 * Like Enumerable#slice_after, but chains operation to be lazy-evaluated.
2840 */
2841static VALUE
2842lazy_slice_after(VALUE self)
2843{
2844}
2845
2846/*
2847 * call-seq:
2848 * lazy.slice_before(pattern) -> lazy_enumerator
2849 * lazy.slice_before { |elt| bool } -> lazy_enumerator
2850 *
2851 * Like Enumerable#slice_before, but chains operation to be lazy-evaluated.
2852 */
2853static VALUE
2854lazy_slice_before(VALUE self)
2855{
2856}
2857
2858/*
2859 * call-seq:
2860 * lazy.slice_when {|elt_before, elt_after| bool } -> lazy_enumerator
2861 *
2862 * Like Enumerable#slice_when, but chains operation to be lazy-evaluated.
2863 */
2864static VALUE
2865lazy_slice_when(VALUE self)
2866{
2867}
2868# endif
2869
2870static VALUE
2871lazy_super(int argc, VALUE *argv, VALUE lazy)
2872{
2873 return enumerable_lazy(rb_call_super(argc, argv));
2874}
2875
2876/*
2877 * call-seq:
2878 * enum.lazy -> lazy_enumerator
2879 *
2880 * Returns self.
2881 */
2882
2883static VALUE
2884lazy_lazy(VALUE obj)
2885{
2886 return obj;
2887}
2888
2889/*
2890 * Document-class: StopIteration
2891 *
2892 * Raised to stop the iteration, in particular by Enumerator#next. It is
2893 * rescued by Kernel#loop.
2894 *
2895 * loop do
2896 * puts "Hello"
2897 * raise StopIteration
2898 * puts "World"
2899 * end
2900 * puts "Done!"
2901 *
2902 * <em>produces:</em>
2903 *
2904 * Hello
2905 * Done!
2906 */
2907
2908/*
2909 * call-seq:
2910 * result -> value
2911 *
2912 * Returns the return value of the iterator.
2913 *
2914 * o = Object.new
2915 * def o.each
2916 * yield 1
2917 * yield 2
2918 * yield 3
2919 * 100
2920 * end
2921 *
2922 * e = o.to_enum
2923 *
2924 * puts e.next #=> 1
2925 * puts e.next #=> 2
2926 * puts e.next #=> 3
2927 *
2928 * begin
2929 * e.next
2930 * rescue StopIteration => ex
2931 * puts ex.result #=> 100
2932 * end
2933 *
2934 */
2935
2936static VALUE
2937stop_result(VALUE self)
2938{
2939 return rb_attr_get(self, id_result);
2940}
2941
2942/*
2943 * Producer
2944 */
2945
2946static void
2947producer_mark(void *p)
2948{
2949 struct producer *ptr = p;
2950 rb_gc_mark_movable(ptr->init);
2951 rb_gc_mark_movable(ptr->proc);
2952}
2953
2954static void
2955producer_compact(void *p)
2956{
2957 struct producer *ptr = p;
2958 ptr->init = rb_gc_location(ptr->init);
2959 ptr->proc = rb_gc_location(ptr->proc);
2960}
2961
2962#define producer_free RUBY_TYPED_DEFAULT_FREE
2963
2964static size_t
2965producer_memsize(const void *p)
2966{
2967 return sizeof(struct producer);
2968}
2969
2970static const rb_data_type_t producer_data_type = {
2971 "producer",
2972 {
2973 producer_mark,
2974 producer_free,
2975 producer_memsize,
2976 producer_compact,
2977 },
2978 0, 0, RUBY_TYPED_FREE_IMMEDIATELY
2979};
2980
2981static struct producer *
2982producer_ptr(VALUE obj)
2983{
2984 struct producer *ptr;
2985
2986 TypedData_Get_Struct(obj, struct producer, &producer_data_type, ptr);
2987 if (!ptr || UNDEF_P(ptr->proc)) {
2988 rb_raise(rb_eArgError, "uninitialized producer");
2989 }
2990 return ptr;
2991}
2992
2993/* :nodoc: */
2994static VALUE
2995producer_allocate(VALUE klass)
2996{
2997 struct producer *ptr;
2998 VALUE obj;
2999
3000 obj = TypedData_Make_Struct(klass, struct producer, &producer_data_type, ptr);
3001 ptr->init = Qundef;
3002 ptr->proc = Qundef;
3003
3004 return obj;
3005}
3006
3007static VALUE
3008producer_init(VALUE obj, VALUE init, VALUE proc)
3009{
3010 struct producer *ptr;
3011
3012 TypedData_Get_Struct(obj, struct producer, &producer_data_type, ptr);
3013
3014 if (!ptr) {
3015 rb_raise(rb_eArgError, "unallocated producer");
3016 }
3017
3018 ptr->init = init;
3019 ptr->proc = proc;
3020
3021 return obj;
3022}
3023
3024static VALUE
3025producer_each_stop(VALUE dummy, VALUE exc)
3026{
3027 return rb_attr_get(exc, id_result);
3028}
3029
3030NORETURN(static VALUE producer_each_i(VALUE obj));
3031
3032static VALUE
3033producer_each_i(VALUE obj)
3034{
3035 struct producer *ptr;
3036 VALUE init, proc, curr;
3037
3038 ptr = producer_ptr(obj);
3039 init = ptr->init;
3040 proc = ptr->proc;
3041
3042 if (UNDEF_P(init)) {
3043 curr = Qnil;
3044 }
3045 else {
3046 rb_yield(init);
3047 curr = init;
3048 }
3049
3050 for (;;) {
3051 curr = rb_funcall(proc, id_call, 1, curr);
3052 rb_yield(curr);
3053 }
3054
3055    UNREACHABLE_RETURN(Qnil);
3056}
3057
3058/* :nodoc: */
3059static VALUE
3060producer_each(VALUE obj)
3061{
3062 rb_need_block();
3063
3064 return rb_rescue2(producer_each_i, obj, producer_each_stop, (VALUE)0, rb_eStopIteration, (VALUE)0);
3065}
3066
3067static VALUE
3068producer_size(VALUE obj, VALUE args, VALUE eobj)
3069{
3070 return DBL2NUM(HUGE_VAL);
3071}
3072
3073/*
3074 * call-seq:
3075 * Enumerator.produce(initial = nil) { |prev| block } -> enumerator
3076 *
3077 * Creates an infinite enumerator from any block, just called over and
3078 * over. The result of the previous iteration is passed to the next one.
3079 * If +initial+ is provided, it is passed to the first iteration, and
3080 * becomes the first element of the enumerator; if it is not provided,
3081 * the first iteration receives +nil+, and its result becomes the first
3082 * element of the iterator.
3083 *
3084 * Raising StopIteration from the block stops the iteration.
3085 *
3086 * Enumerator.produce(1, &:succ) # => enumerator of 1, 2, 3, 4, ....
3087 *
3088 * Enumerator.produce { rand(10) } # => infinite random number sequence
3089 *
3090 * ancestors = Enumerator.produce(node) { |prev| node = prev.parent or raise StopIteration }
3091 * enclosing_section = ancestors.find { |n| n.type == :section }
3092 *
3093 * Using ::produce together with Enumerable methods like Enumerable#detect,
3094 * Enumerable#slice_after, Enumerable#take_while can provide Enumerator-based alternatives
3095 * for +while+ and +until+ cycles:
3096 *
3097 * # Find next Tuesday
3098 * require "date"
3099 * Enumerator.produce(Date.today, &:succ).detect(&:tuesday?)
3100 *
3101 * # Simple lexer:
3102 * require "strscan"
3103 * scanner = StringScanner.new("7+38/6")
3104 * PATTERN = %r{\d+|[-/+*]}
3105 * Enumerator.produce { scanner.scan(PATTERN) }.slice_after { scanner.eos? }.first
3106 * # => ["7", "+", "38", "/", "6"]
3107 */
3108static VALUE
3109enumerator_s_produce(int argc, VALUE *argv, VALUE klass)
3110{
3111 VALUE init, producer;
3112
3113 if (!rb_block_given_p()) rb_raise(rb_eArgError, "no block given");
3114
3115 if (rb_scan_args(argc, argv, "01", &init) == 0) {
3116 init = Qundef;
3117 }
3118
3119 producer = producer_init(producer_allocate(rb_cEnumProducer), init, rb_block_proc());
3120
3121 return rb_enumeratorize_with_size_kw(producer, sym_each, 0, 0, producer_size, RB_NO_KEYWORDS);
3122}
3123
3124/*
3125 * Document-class: Enumerator::Chain
3126 *
3127 * Enumerator::Chain is a subclass of Enumerator, which represents a
3128 * chain of enumerables that works as a single enumerator.
3129 *
3130 * Objects of this type can be created by Enumerable#chain and
3131 * Enumerator#+.
3132 */
3133
3134static void
3135enum_chain_mark(void *p)
3136{
3137 struct enum_chain *ptr = p;
3138 rb_gc_mark_movable(ptr->enums);
3139}
3140
3141static void
3142enum_chain_compact(void *p)
3143{
3144 struct enum_chain *ptr = p;
3145 ptr->enums = rb_gc_location(ptr->enums);
3146}
3147
3148#define enum_chain_free RUBY_TYPED_DEFAULT_FREE
3149
3150static size_t
3151enum_chain_memsize(const void *p)
3152{
3153 return sizeof(struct enum_chain);
3154}
3155
3156static const rb_data_type_t enum_chain_data_type = {
3157 "chain",
3158 {
3159 enum_chain_mark,
3160 enum_chain_free,
3161 enum_chain_memsize,
3162 enum_chain_compact,
3163 },
3164 0, 0, RUBY_TYPED_FREE_IMMEDIATELY
3165};
3166
3167static struct enum_chain *
3168enum_chain_ptr(VALUE obj)
3169{
3170 struct enum_chain *ptr;
3171
3172 TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr);
3173 if (!ptr || UNDEF_P(ptr->enums)) {
3174 rb_raise(rb_eArgError, "uninitialized chain");
3175 }
3176 return ptr;
3177}
3178
3179/* :nodoc: */
3180static VALUE
3181enum_chain_allocate(VALUE klass)
3182{
3183 struct enum_chain *ptr;
3184 VALUE obj;
3185
3186 obj = TypedData_Make_Struct(klass, struct enum_chain, &enum_chain_data_type, ptr);
3187 ptr->enums = Qundef;
3188 ptr->pos = -1;
3189
3190 return obj;
3191}
3192
3193/*
3194 * call-seq:
3195 * Enumerator::Chain.new(*enums) -> enum
3196 *
3197 * Generates a new enumerator object that iterates over the elements
3198 * of given enumerable objects in sequence.
3199 *
3200 * e = Enumerator::Chain.new(1..3, [4, 5])
3201 * e.to_a #=> [1, 2, 3, 4, 5]
3202 * e.size #=> 5
3203 */
3204static VALUE
3205enum_chain_initialize(VALUE obj, VALUE enums)
3206{
3207 struct enum_chain *ptr;
3208
3209 rb_check_frozen(obj);
3210 TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr);
3211
3212 if (!ptr) rb_raise(rb_eArgError, "unallocated chain");
3213
3214 ptr->enums = rb_obj_freeze(enums);
3215 ptr->pos = -1;
3216
3217 return obj;
3218}
3219
3220static VALUE
3221new_enum_chain(VALUE enums)
3222{
3223 long i;
3224 VALUE obj = enum_chain_initialize(enum_chain_allocate(rb_cEnumChain), enums);
3225
3226 for (i = 0; i < RARRAY_LEN(enums); i++) {
3227 if (RTEST(rb_obj_is_kind_of(RARRAY_AREF(enums, i), rb_cLazy))) {
3228 return enumerable_lazy(obj);
3229 }
3230 }
3231
3232 return obj;
3233}
3234
3235/* :nodoc: */
3236static VALUE
3237enum_chain_init_copy(VALUE obj, VALUE orig)
3238{
3239 struct enum_chain *ptr0, *ptr1;
3240
3241 if (!OBJ_INIT_COPY(obj, orig)) return obj;
3242 ptr0 = enum_chain_ptr(orig);
3243
3244 TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr1);
3245
3246 if (!ptr1) rb_raise(rb_eArgError, "unallocated chain");
3247
3248 ptr1->enums = ptr0->enums;
3249 ptr1->pos = ptr0->pos;
3250
3251 return obj;
3252}
3253
3254static VALUE
3255enum_chain_total_size(VALUE enums)
3256{
3257 VALUE total = INT2FIX(0);
3258 long i;
3259
3260 for (i = 0; i < RARRAY_LEN(enums); i++) {
3261 VALUE size = enum_size(RARRAY_AREF(enums, i));
3262
3263 if (NIL_P(size) || (RB_FLOAT_TYPE_P(size) && isinf(NUM2DBL(size)))) {
3264 return size;
3265 }
3266 if (!RB_INTEGER_TYPE_P(size)) {
3267 return Qnil;
3268 }
3269
3270 total = rb_funcall(total, '+', 1, size);
3271 }
3272
3273 return total;
3274}
3275
3276/*
3277 * call-seq:
3278 * obj.size -> int, Float::INFINITY or nil
3279 *
3280 * Returns the total size of the enumerator chain calculated by
3281 * summing up the size of each enumerable in the chain. If any of the
3282 * enumerables reports its size as nil or Float::INFINITY, that value
3283 * is returned as the total size.
3284 */
3285static VALUE
3286enum_chain_size(VALUE obj)
3287{
3288 return enum_chain_total_size(enum_chain_ptr(obj)->enums);
3289}
3290
3291static VALUE
3292enum_chain_enum_size(VALUE obj, VALUE args, VALUE eobj)
3293{
3294 return enum_chain_size(obj);
3295}
3296
3297static VALUE
3298enum_chain_enum_no_size(VALUE obj, VALUE args, VALUE eobj)
3299{
3300 return Qnil;
3301}
3302
3303/*
3304 * call-seq:
3305 * obj.each(*args) { |...| ... } -> obj
3306 * obj.each(*args) -> enumerator
3307 *
3308 * Iterates over the elements of the first enumerable by calling the
3309 * "each" method on it with the given arguments, then proceeds to the
3310 * following enumerables in sequence until all of the enumerables are
3311 * exhausted.
3312 *
3313 * If no block is given, returns an enumerator.
3314 */
3315static VALUE
3316enum_chain_each(int argc, VALUE *argv, VALUE obj)
3317{
3318 VALUE enums, block;
3319 struct enum_chain *objptr;
3320 long i;
3321
3322 RETURN_SIZED_ENUMERATOR(obj, argc, argv, argc > 0 ? enum_chain_enum_no_size : enum_chain_enum_size);
3323
3324 objptr = enum_chain_ptr(obj);
3325 enums = objptr->enums;
3326 block = rb_block_proc();
3327
3328 for (i = 0; i < RARRAY_LEN(enums); i++) {
3329 objptr->pos = i;
3330 rb_funcall_with_block(RARRAY_AREF(enums, i), id_each, argc, argv, block);
3331 }
3332
3333 return obj;
3334}
3335
3336/*
3337 * call-seq:
3338 * obj.rewind -> obj
3339 *
3340 * Rewinds the enumerator chain by calling the "rewind" method on each
3341 * enumerable in reverse order. Each call is performed only if the
3342 * enumerable responds to the method.
3343 */
3344static VALUE
3345enum_chain_rewind(VALUE obj)
3346{
3347 struct enum_chain *objptr = enum_chain_ptr(obj);
3348 VALUE enums = objptr->enums;
3349 long i;
3350
3351 for (i = objptr->pos; 0 <= i && i < RARRAY_LEN(enums); objptr->pos = --i) {
3352 rb_check_funcall(RARRAY_AREF(enums, i), id_rewind, 0, 0);
3353 }
3354
3355 return obj;
3356}
3357
3358static VALUE
3359inspect_enum_chain(VALUE obj, VALUE dummy, int recur)
3360{
3361 VALUE klass = rb_obj_class(obj);
3362 struct enum_chain *ptr;
3363
3364 TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr);
3365
3366 if (!ptr || UNDEF_P(ptr->enums)) {
3367 return rb_sprintf("#<%"PRIsVALUE": uninitialized>", rb_class_path(klass));
3368 }
3369
3370 if (recur) {
3371 return rb_sprintf("#<%"PRIsVALUE": ...>", rb_class_path(klass));
3372 }
3373
3374 return rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE">", rb_class_path(klass), ptr->enums);
3375}
3376
3377/*
3378 * call-seq:
3379 * obj.inspect -> string
3380 *
3381 * Returns a printable version of the enumerator chain.
3382 */
3383static VALUE
3384enum_chain_inspect(VALUE obj)
3385{
3386 return rb_exec_recursive(inspect_enum_chain, obj, 0);
3387}
3388
3389/*
3390 * call-seq:
3391 * e.chain(*enums) -> enumerator
3392 *
3393 * Returns an enumerator object generated from this enumerator and
3394 * given enumerables.
3395 *
3396 * e = (1..3).chain([4, 5])
3397 * e.to_a #=> [1, 2, 3, 4, 5]
3398 */
3399static VALUE
3400enum_chain(int argc, VALUE *argv, VALUE obj)
3401{
3402 VALUE enums = rb_ary_new_from_values(1, &obj);
3403 rb_ary_cat(enums, argv, argc);
3404 return new_enum_chain(enums);
3405}
3406
3407/*
3408 * call-seq:
3409 * e + enum -> enumerator
3410 *
3411 * Returns an enumerator object generated from this enumerator and a
3412 * given enumerable.
3413 *
3414 * e = (1..3).each + [4, 5]
3415 * e.to_a #=> [1, 2, 3, 4, 5]
3416 */
3417static VALUE
3418enumerator_plus(VALUE obj, VALUE eobj)
3419{
3420 return new_enum_chain(rb_ary_new_from_args(2, obj, eobj));
3421}
3422
3423/*
3424 * Document-class: Enumerator::Product
3425 *
3426 * Enumerator::Product generates a Cartesian product of any number of
3427 * enumerable objects. Iterating over the product of enumerable
3428 * objects is roughly equivalent to nested each_entry loops where the
3429 * loop for the rightmost object is put innermost.
3430 *
3431 * innings = Enumerator::Product.new(1..9, ['top', 'bottom'])
3432 *
3433 * innings.each do |i, h|
3434 * p [i, h]
3435 * end
3436 * # [1, "top"]
3437 * # [1, "bottom"]
3438 * # [2, "top"]
3439 * # [2, "bottom"]
3440 * # [3, "top"]
3441 * # [3, "bottom"]
3442 * # ...
3443 * # [9, "top"]
3444 * # [9, "bottom"]
3445 *
3446 * The method used against each enumerable object is `each_entry`
3447 * instead of `each` so that the product of N enumerable objects
3448 * yields an array of exactly N elements in each iteration.
3449 *
3450 * When no enumerator is given, it calls a given block once yielding
3451 * an empty argument list.
3452 *
3453 * Objects of this type can be created by Enumerator.product.
3454 */
3455
3456static void
3457enum_product_mark(void *p)
3458{
3459 struct enum_product *ptr = p;
3460 rb_gc_mark_movable(ptr->enums);
3461}
3462
3463static void
3464enum_product_compact(void *p)
3465{
3466 struct enum_product *ptr = p;
3467 ptr->enums = rb_gc_location(ptr->enums);
3468}
3469
3470#define enum_product_free RUBY_TYPED_DEFAULT_FREE
3471
3472static size_t
3473enum_product_memsize(const void *p)
3474{
3475 return sizeof(struct enum_product);
3476}
3477
3478static const rb_data_type_t enum_product_data_type = {
3479 "product",
3480 {
3481 enum_product_mark,
3482 enum_product_free,
3483 enum_product_memsize,
3484 enum_product_compact,
3485 },
3486 0, 0, RUBY_TYPED_FREE_IMMEDIATELY
3487};
3488
3489static struct enum_product *
3490enum_product_ptr(VALUE obj)
3491{
3492 struct enum_product *ptr;
3493
3494 TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr);
3495 if (!ptr || UNDEF_P(ptr->enums)) {
3496 rb_raise(rb_eArgError, "uninitialized product");
3497 }
3498 return ptr;
3499}
3500
3501/* :nodoc: */
3502static VALUE
3503enum_product_allocate(VALUE klass)
3504{
3505 struct enum_product *ptr;
3506 VALUE obj;
3507
3508 obj = TypedData_Make_Struct(klass, struct enum_product, &enum_product_data_type, ptr);
3509 ptr->enums = Qundef;
3510
3511 return obj;
3512}
3513
3514/*
3515 * call-seq:
3516 * Enumerator::Product.new(*enums) -> enum
3517 *
3518 * Generates a new enumerator object that generates a Cartesian
3519 * product of given enumerable objects.
3520 *
3521 * e = Enumerator::Product.new(1..3, [4, 5])
3522 * e.to_a #=> [[1, 4], [1, 5], [2, 4], [2, 5], [3, 4], [3, 5]]
3523 * e.size #=> 6
3524 */
3525static VALUE
3526enum_product_initialize(int argc, VALUE *argv, VALUE obj)
3527{
3528 struct enum_product *ptr;
3529 VALUE enums = Qnil, options = Qnil;
3530
3531 rb_scan_args(argc, argv, "*:", &enums, &options);
3532
3533 if (!NIL_P(options) && !RHASH_EMPTY_P(options)) {
3534 rb_exc_raise(rb_keyword_error_new("unknown", rb_hash_keys(options)));
3535 }
3536
3537 rb_check_frozen(obj);
3538 TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr);
3539
3540 if (!ptr) rb_raise(rb_eArgError, "unallocated product");
3541
3542 ptr->enums = rb_obj_freeze(enums);
3543
3544 return obj;
3545}
3546
3547/* :nodoc: */
3548static VALUE
3549enum_product_init_copy(VALUE obj, VALUE orig)
3550{
3551 struct enum_product *ptr0, *ptr1;
3552
3553 if (!OBJ_INIT_COPY(obj, orig)) return obj;
3554 ptr0 = enum_product_ptr(orig);
3555
3556 TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr1);
3557
3558 if (!ptr1) rb_raise(rb_eArgError, "unallocated product");
3559
3560 ptr1->enums = ptr0->enums;
3561
3562 return obj;
3563}
3564
3565static VALUE
3566enum_product_total_size(VALUE enums)
3567{
3568 VALUE total = INT2FIX(1);
3569 long i;
3570
3571 for (i = 0; i < RARRAY_LEN(enums); i++) {
3572 VALUE size = enum_size(RARRAY_AREF(enums, i));
3573
3574 if (NIL_P(size) || (RB_TYPE_P(size, T_FLOAT) && isinf(NUM2DBL(size)))) {
3575 return size;
3576 }
3577 if (!RB_INTEGER_TYPE_P(size)) {
3578 return Qnil;
3579 }
3580
3581 total = rb_funcall(total, '*', 1, size);
3582 }
3583
3584 return total;
3585}
3586
3587/*
3588 * call-seq:
3589 * obj.size -> int, Float::INFINITY or nil
3590 *
3591 * Returns the total size of the enumerator product calculated by
3592 * multiplying the sizes of enumerables in the product. If any of the
3593 * enumerables reports its size as nil or Float::INFINITY, that value
3594 * is returned as the size.
3595 */
3596static VALUE
3597enum_product_size(VALUE obj)
3598{
3599 return enum_product_total_size(enum_product_ptr(obj)->enums);
3600}
3601
3602static VALUE
3603enum_product_enum_size(VALUE obj, VALUE args, VALUE eobj)
3604{
3605 return enum_product_size(obj);
3606}
3607
3608struct product_state {
3609    VALUE obj;
3610 VALUE block;
3611 int argc;
3612 VALUE *argv;
3613 int index;
3614};
3615
3616static VALUE product_each(VALUE, struct product_state *);
3617
3618static VALUE
3619product_each_i(RB_BLOCK_CALL_FUNC_ARGLIST(value, state))
3620{
3621 struct product_state *pstate = (struct product_state *)state;
3622 pstate->argv[pstate->index++] = value;
3623
3624 VALUE val = product_each(pstate->obj, pstate);
3625 pstate->index--;
3626 return val;
3627}
3628
3629static VALUE
3630product_each(VALUE obj, struct product_state *pstate)
3631{
3632 struct enum_product *ptr = enum_product_ptr(obj);
3633 VALUE enums = ptr->enums;
3634
3635 if (pstate->index < pstate->argc) {
3636 VALUE eobj = RARRAY_AREF(enums, pstate->index);
3637
3638 rb_block_call(eobj, id_each_entry, 0, NULL, product_each_i, (VALUE)pstate);
3639 }
3640 else {
3641 rb_funcall(pstate->block, id_call, 1, rb_ary_new_from_values(pstate->argc, pstate->argv));
3642 }
3643
3644 return obj;
3645}
3646
3647static VALUE
3648enum_product_run(VALUE obj, VALUE block)
3649{
3650 struct enum_product *ptr = enum_product_ptr(obj);
3651 int argc = RARRAY_LENINT(ptr->enums);
3652 struct product_state state = {
3653 .obj = obj,
3654 .block = block,
3655 .index = 0,
3656 .argc = argc,
3657 .argv = ALLOCA_N(VALUE, argc),
3658 };
3659
3660 return product_each(obj, &state);
3661}
3662
3663/*
3664 * call-seq:
3665 * obj.each { |...| ... } -> obj
3666 * obj.each -> enumerator
3667 *
3668 * Iterates over the Cartesian product of the enumerables by calling
3669 * the "each_entry" method on each of them, yielding an array for
3670 * every combination, with one element taken from each of the
3671 * enumerables.
3672 *
3673 * If no block is given, returns an enumerator. Otherwise, returns self.
3674 */
3675static VALUE
3676enum_product_each(VALUE obj)
3677{
3678 RETURN_SIZED_ENUMERATOR(obj, 0, 0, enum_product_enum_size);
3679
3680 return enum_product_run(obj, rb_block_proc());
3681}
3682
3683/*
3684 * call-seq:
3685 * obj.rewind -> obj
3686 *
3687 * Rewinds the product enumerator by calling the "rewind" method on
3688 * each enumerable in reverse order. Each call is performed only if
3689 * the enumerable responds to the method.
3690 */
3691static VALUE
3692enum_product_rewind(VALUE obj)
3693{
3694 struct enum_product *ptr = enum_product_ptr(obj);
3695 VALUE enums = ptr->enums;
3696 long i;
3697
3698 for (i = 0; i < RARRAY_LEN(enums); i++) {
3699 rb_check_funcall(RARRAY_AREF(enums, i), id_rewind, 0, 0);
3700 }
3701
3702 return obj;
3703}
3704
3705static VALUE
3706inspect_enum_product(VALUE obj, VALUE dummy, int recur)
3707{
3708 VALUE klass = rb_obj_class(obj);
3709 struct enum_product *ptr;
3710
3711 TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr);
3712
3713 if (!ptr || UNDEF_P(ptr->enums)) {
3714 return rb_sprintf("#<%"PRIsVALUE": uninitialized>", rb_class_path(klass));
3715 }
3716
3717 if (recur) {
3718 return rb_sprintf("#<%"PRIsVALUE": ...>", rb_class_path(klass));
3719 }
3720
3721 return rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE">", rb_class_path(klass), ptr->enums);
3722}
3723
3724/*
3725 * call-seq:
3726 * obj.inspect -> string
3727 *
3728 * Returns a printable version of the product enumerator.
3729 */
3730static VALUE
3731enum_product_inspect(VALUE obj)
3732{
3733 return rb_exec_recursive(inspect_enum_product, obj, 0);
3734}
3735
3736/*
3737 * call-seq:
3738 * Enumerator.product(*enums) -> enumerator
3739 * Enumerator.product(*enums) { |elts| ... } -> nil
3740 *
3741 * Generates a new enumerator object that generates a Cartesian
3742 * product of given enumerable objects. This is equivalent to
3743 * Enumerator::Product.new.
3744 *
3745 * e = Enumerator.product(1..3, [4, 5])
3746 * e.to_a #=> [[1, 4], [1, 5], [2, 4], [2, 5], [3, 4], [3, 5]]
3747 * e.size #=> 6
3748 *
3749 * When a block is given, calls the block with each N-element array
3750 * generated and returns +nil+.
3751 */
3752static VALUE
3753enumerator_s_product(int argc, VALUE *argv, VALUE klass)
3754{
3755 VALUE enums = Qnil, options = Qnil, block = Qnil;
3756
3757 rb_scan_args(argc, argv, "*:&", &enums, &options, &block);
3758
3759 if (!NIL_P(options) && !RHASH_EMPTY_P(options)) {
3760 rb_exc_raise(rb_keyword_error_new("unknown", rb_hash_keys(options)));
3761 }
3762
3763 VALUE obj = enum_product_initialize(argc, argv, enum_product_allocate(rb_cEnumProduct));
3764
3765 if (!NIL_P(block)) {
3766 enum_product_run(obj, block);
3767 return Qnil;
3768 }
3769
3770 return obj;
3771}
3772
3773/*
3774 * Document-class: Enumerator::ArithmeticSequence
3775 *
3776 * Enumerator::ArithmeticSequence is a subclass of Enumerator
3777 * that represents a sequence of numbers with a common difference.
3778 * Instances of this class can be generated by the Range#step and Numeric#step
3779 * methods.
3780 *
3781 * The class can be used for slicing Array (see Array#slice) or custom
3782 * collections.
3783 */
3784
3785VALUE
3786rb_arith_seq_new(VALUE obj, VALUE meth, int argc, VALUE const *argv,
3787 rb_enumerator_size_func *size_fn,
3788 VALUE beg, VALUE end, VALUE step, int excl)
3789{
3790 VALUE aseq = enumerator_init(enumerator_allocate(rb_cArithSeq),
3791 obj, meth, argc, argv, size_fn, Qnil, rb_keyword_given_p());
3792 rb_ivar_set(aseq, id_begin, beg);
3793 rb_ivar_set(aseq, id_end, end);
3794 rb_ivar_set(aseq, id_step, step);
3795 rb_ivar_set(aseq, id_exclude_end, RBOOL(excl));
3796 return aseq;
3797}
3798
3799/*
3800 * call-seq: aseq.begin -> num or nil
3801 *
3802 * Returns the number that defines the first element of this arithmetic
3803 * sequence.
3804 */
3805static inline VALUE
3806arith_seq_begin(VALUE self)
3807{
3808 return rb_ivar_get(self, id_begin);
3809}
3810
3811/*
3812 * call-seq: aseq.end -> num or nil
3813 *
3814 * Returns the number that defines the end of this arithmetic sequence.
3815 */
3816static inline VALUE
3817arith_seq_end(VALUE self)
3818{
3819 return rb_ivar_get(self, id_end);
3820}
3821
3822/*
3823 * call-seq: aseq.step -> num
3824 *
3825 * Returns the number that defines the common difference between
3826 * two adjacent elements in this arithmetic sequence.
3827 */
3828static inline VALUE
3829arith_seq_step(VALUE self)
3830{
3831 return rb_ivar_get(self, id_step);
3832}
3833
3834/*
3835 * call-seq: aseq.exclude_end? -> true or false
3836 *
3837 * Returns <code>true</code> if this arithmetic sequence excludes its end value.
3838 */
3839static inline VALUE
3840arith_seq_exclude_end(VALUE self)
3841{
3842 return rb_ivar_get(self, id_exclude_end);
3843}
3844
3845static inline int
3846arith_seq_exclude_end_p(VALUE self)
3847{
3848 return RTEST(arith_seq_exclude_end(self));
3849}
3850
3851int
3852rb_arithmetic_sequence_extract(VALUE obj, rb_arithmetic_sequence_components_t *component)
3853{
3854 if (rb_obj_is_kind_of(obj, rb_cArithSeq)) {
3855 component->begin = arith_seq_begin(obj);
3856 component->end = arith_seq_end(obj);
3857 component->step = arith_seq_step(obj);
3858 component->exclude_end = arith_seq_exclude_end_p(obj);
3859 return 1;
3860 }
3861 else if (rb_range_values(obj, &component->begin, &component->end, &component->exclude_end)) {
3862 component->step = INT2FIX(1);
3863 return 1;
3864 }
3865
3866 return 0;
3867}
3868
3869VALUE
3870rb_arithmetic_sequence_beg_len_step(VALUE obj, long *begp, long *lenp, long *stepp, long len, int err)
3871{
3872 RBIMPL_NONNULL_ARG(begp);
3873 RBIMPL_NONNULL_ARG(lenp);
3874 RBIMPL_NONNULL_ARG(stepp);
3875
3876    rb_arithmetic_sequence_components_t aseq;
3877    if (!rb_arithmetic_sequence_extract(obj, &aseq)) {
3878 return Qfalse;
3879 }
3880
3881 long step = NIL_P(aseq.step) ? 1 : NUM2LONG(aseq.step);
3882 *stepp = step;
3883
3884 if (step < 0) {
3885 if (aseq.exclude_end && !NIL_P(aseq.end)) {
3886 /* Handle exclusion before range reversal */
3887 aseq.end = LONG2NUM(NUM2LONG(aseq.end) + 1);
3888
3889 /* Don't exclude the previous beginning */
3890 aseq.exclude_end = 0;
3891 }
3892 VALUE tmp = aseq.begin;
3893 aseq.begin = aseq.end;
3894 aseq.end = tmp;
3895 }
3896
3897 if (err == 0 && (step < -1 || step > 1)) {
3898 if (rb_range_component_beg_len(aseq.begin, aseq.end, aseq.exclude_end, begp, lenp, len, 1) == Qtrue) {
3899 if (*begp > len)
3900 goto out_of_range;
3901 if (*lenp > len)
3902 goto out_of_range;
3903 return Qtrue;
3904 }
3905 }
3906 else {
3907 return rb_range_component_beg_len(aseq.begin, aseq.end, aseq.exclude_end, begp, lenp, len, err);
3908 }
3909
3910 out_of_range:
3911 rb_raise(rb_eRangeError, "%+"PRIsVALUE" out of range", obj);
3912 return Qnil;
3913}
3914
3915/*
3916 * call-seq:
3917 * aseq.first -> num or nil
3918 * aseq.first(n) -> an_array
3919 *
3920 * Returns the first number in this arithmetic sequence,
3921 * or an array of the first +n+ elements.
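 *
 * For instance, with the sequence produced by Range#step:
 *
 *     (1..10).step(2).first     #=> 1
 *     (1..10).step(2).first(3)  #=> [1, 3, 5]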
3922 */
3923static VALUE
3924arith_seq_first(int argc, VALUE *argv, VALUE self)
3925{
3926 VALUE b, e, s, ary;
3927 long n;
3928 int x;
3929
3930 rb_check_arity(argc, 0, 1);
3931
3932 b = arith_seq_begin(self);
3933 e = arith_seq_end(self);
3934 s = arith_seq_step(self);
3935 if (argc == 0) {
3936 if (NIL_P(b)) {
3937 return Qnil;
3938 }
3939 if (!NIL_P(e)) {
3940 VALUE zero = INT2FIX(0);
3941 int r = rb_cmpint(rb_num_coerce_cmp(s, zero, idCmp), s, zero);
3942 if (r > 0 && RTEST(rb_funcall(b, '>', 1, e))) {
3943 return Qnil;
3944 }
3945 if (r < 0 && RTEST(rb_funcall(b, '<', 1, e))) {
3946 return Qnil;
3947 }
3948 }
3949 return b;
3950 }
3951
3952 // TODO: the following code should be extracted as arith_seq_take
3953
3954 n = NUM2LONG(argv[0]);
3955 if (n < 0) {
3956 rb_raise(rb_eArgError, "attempt to take negative size");
3957 }
3958 if (n == 0) {
3959 return rb_ary_new_capa(0);
3960 }
3961
3962 x = arith_seq_exclude_end_p(self);
3963
3964 if (FIXNUM_P(b) && NIL_P(e) && FIXNUM_P(s)) {
3965 long i = FIX2LONG(b), unit = FIX2LONG(s);
3966 ary = rb_ary_new_capa(n);
3967 while (n > 0 && FIXABLE(i)) {
3968 rb_ary_push(ary, LONG2FIX(i));
3969 i += unit; // FIXABLE + FIXABLE never overflow;
3970 --n;
3971 }
3972 if (n > 0) {
3973 b = LONG2NUM(i);
3974 while (n > 0) {
3975 rb_ary_push(ary, b);
3976 b = rb_big_plus(b, s);
3977 --n;
3978 }
3979 }
3980 return ary;
3981 }
3982 else if (FIXNUM_P(b) && FIXNUM_P(e) && FIXNUM_P(s)) {
3983 long i = FIX2LONG(b);
3984 long end = FIX2LONG(e);
3985 long unit = FIX2LONG(s);
3986 long len;
3987
3988 if (unit >= 0) {
3989 if (!x) end += 1;
3990
3991 len = end - i;
3992 if (len < 0) len = 0;
3993 ary = rb_ary_new_capa((n < len) ? n : len);
3994 while (n > 0 && i < end) {
3995 rb_ary_push(ary, LONG2FIX(i));
3996 if (i + unit < i) break;
3997 i += unit;
3998 --n;
3999 }
4000 }
4001 else {
4002 if (!x) end -= 1;
4003
4004 len = i - end;
4005 if (len < 0) len = 0;
4006 ary = rb_ary_new_capa((n < len) ? n : len);
4007 while (n > 0 && i > end) {
4008 rb_ary_push(ary, LONG2FIX(i));
4009 if (i + unit > i) break;
4010 i += unit;
4011 --n;
4012 }
4013 }
4014 return ary;
4015 }
4016 else if (RB_FLOAT_TYPE_P(b) || RB_FLOAT_TYPE_P(e) || RB_FLOAT_TYPE_P(s)) {
4017 /* generate values like ruby_float_step */
4018
4019 double unit = NUM2DBL(s);
4020 double beg = NUM2DBL(b);
4021 double end = NIL_P(e) ? (unit < 0 ? -1 : 1)*HUGE_VAL : NUM2DBL(e);
4022 double len = ruby_float_step_size(beg, end, unit, x);
4023 long i;
4024
4025 if (n > len)
4026 n = (long)len;
4027
4028 if (isinf(unit)) {
4029 if (len > 0) {
4030 ary = rb_ary_new_capa(1);
4031 rb_ary_push(ary, DBL2NUM(beg));
4032 }
4033 else {
4034 ary = rb_ary_new_capa(0);
4035 }
4036 }
4037 else if (unit == 0) {
4038 VALUE val = DBL2NUM(beg);
4039 ary = rb_ary_new_capa(n);
4040 for (i = 0; i < len; ++i) {
4041 rb_ary_push(ary, val);
4042 }
4043 }
4044 else {
4045 ary = rb_ary_new_capa(n);
4046 for (i = 0; i < n; ++i) {
4047 double d = i*unit+beg;
4048 if (unit >= 0 ? end < d : d < end) d = end;
4049 rb_ary_push(ary, DBL2NUM(d));
4050 }
4051 }
4052
4053 return ary;
4054 }
4055
4056 return rb_call_super(argc, argv);
4057}
4058
4059static inline VALUE
4060num_plus(VALUE a, VALUE b)
4061{
4062 if (RB_INTEGER_TYPE_P(a)) {
4063 return rb_int_plus(a, b);
4064 }
4065 else if (RB_FLOAT_TYPE_P(a)) {
4066 return rb_float_plus(a, b);
4067 }
4068 else if (RB_TYPE_P(a, T_RATIONAL)) {
4069 return rb_rational_plus(a, b);
4070 }
4071 else {
4072 return rb_funcallv(a, '+', 1, &b);
4073 }
4074}
4075
4076static inline VALUE
4077num_minus(VALUE a, VALUE b)
4078{
4079 if (RB_INTEGER_TYPE_P(a)) {
4080 return rb_int_minus(a, b);
4081 }
4082 else if (RB_FLOAT_TYPE_P(a)) {
4083 return rb_float_minus(a, b);
4084 }
4085 else if (RB_TYPE_P(a, T_RATIONAL)) {
4086 return rb_rational_minus(a, b);
4087 }
4088 else {
4089 return rb_funcallv(a, '-', 1, &b);
4090 }
4091}
4092
4093static inline VALUE
4094num_mul(VALUE a, VALUE b)
4095{
4096 if (RB_INTEGER_TYPE_P(a)) {
4097 return rb_int_mul(a, b);
4098 }
4099 else if (RB_FLOAT_TYPE_P(a)) {
4100 return rb_float_mul(a, b);
4101 }
4102 else if (RB_TYPE_P(a, T_RATIONAL)) {
4103 return rb_rational_mul(a, b);
4104 }
4105 else {
4106 return rb_funcallv(a, '*', 1, &b);
4107 }
4108}
4109
4110static inline VALUE
4111num_idiv(VALUE a, VALUE b)
4112{
4113 VALUE q;
4114 if (RB_INTEGER_TYPE_P(a)) {
4115 q = rb_int_idiv(a, b);
4116 }
4117 else if (RB_FLOAT_TYPE_P(a)) {
4118 q = rb_float_div(a, b);
4119 }
4120 else if (RB_TYPE_P(a, T_RATIONAL)) {
4121 q = rb_rational_div(a, b);
4122 }
4123 else {
4124 q = rb_funcallv(a, idDiv, 1, &b);
4125 }
4126
4127 if (RB_INTEGER_TYPE_P(q)) {
4128 return q;
4129 }
4130 else if (RB_FLOAT_TYPE_P(q)) {
4131 return rb_float_floor(q, 0);
4132 }
4133 else if (RB_TYPE_P(q, T_RATIONAL)) {
4134 return rb_rational_floor(q, 0);
4135 }
4136 else {
4137 return rb_funcall(q, rb_intern("floor"), 0);
4138 }
4139}
4140
4141/*
4142 * call-seq:
4143 * aseq.last -> num or nil
4144 * aseq.last(n) -> an_array
4145 *
4146 * Returns the last number in this arithmetic sequence,
4147 * or an array of the last +n+ elements.
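 *
 * For instance, with the sequence produced by Range#step:
 *
 *     (1..10).step(2).last     #=> 9
 *     (1..10).step(2).last(2)  #=> [7, 9]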
4148 */
4149static VALUE
4150arith_seq_last(int argc, VALUE *argv, VALUE self)
4151{
4152 VALUE b, e, s, len_1, len, last, nv, ary;
4153 int last_is_adjusted;
4154 long n;
4155
4156 e = arith_seq_end(self);
4157 if (NIL_P(e)) {
4158        rb_raise(rb_eRangeError,
4159                 "cannot get the last element of endless arithmetic sequence");
4160 }
4161
4162 b = arith_seq_begin(self);
4163 s = arith_seq_step(self);
4164
4165 len_1 = num_idiv(num_minus(e, b), s);
4166 if (rb_num_negative_int_p(len_1)) {
4167 if (argc == 0) {
4168 return Qnil;
4169 }
4170 return rb_ary_new_capa(0);
4171 }
4172
4173 last = num_plus(b, num_mul(s, len_1));
4174 if ((last_is_adjusted = arith_seq_exclude_end_p(self) && rb_equal(last, e))) {
4175 last = num_minus(last, s);
4176 }
4177
4178 if (argc == 0) {
4179 return last;
4180 }
4181
4182 if (last_is_adjusted) {
4183 len = len_1;
4184 }
4185 else {
4186 len = rb_int_plus(len_1, INT2FIX(1));
4187 }
4188
4189 rb_scan_args(argc, argv, "1", &nv);
4190 if (!RB_INTEGER_TYPE_P(nv)) {
4191 nv = rb_to_int(nv);
4192 }
4193 if (RTEST(rb_int_gt(nv, len))) {
4194 nv = len;
4195 }
4196 n = NUM2LONG(nv);
4197 if (n < 0) {
4198 rb_raise(rb_eArgError, "negative array size");
4199 }
4200
4201 ary = rb_ary_new_capa(n);
4202 b = rb_int_minus(last, rb_int_mul(s, nv));
4203 while (n) {
4204 b = rb_int_plus(b, s);
4205 rb_ary_push(ary, b);
4206 --n;
4207 }
4208
4209 return ary;
4210}
4211
4212/*
4213 * call-seq:
4214 * aseq.inspect -> string
4215 *
4216 * Convert this arithmetic sequence to a printable form.
4217 */
4218static VALUE
4219arith_seq_inspect(VALUE self)
4220{
4221 struct enumerator *e;
4222 VALUE eobj, str, eargs;
4223 int range_p;
4224
4225 TypedData_Get_Struct(self, struct enumerator, &enumerator_data_type, e);
4226
4227 eobj = rb_attr_get(self, id_receiver);
4228 if (NIL_P(eobj)) {
4229 eobj = e->obj;
4230 }
4231
4232 range_p = RTEST(rb_obj_is_kind_of(eobj, rb_cRange));
4233 str = rb_sprintf("(%s%"PRIsVALUE"%s.", range_p ? "(" : "", eobj, range_p ? ")" : "");
4234
4235 rb_str_buf_append(str, rb_id2str(e->meth));
4236
4237 eargs = rb_attr_get(eobj, id_arguments);
4238 if (NIL_P(eargs)) {
4239 eargs = e->args;
4240 }
4241 if (eargs != Qfalse) {
4242 long argc = RARRAY_LEN(eargs);
4243 const VALUE *argv = RARRAY_CONST_PTR(eargs); /* WB: no new reference */
4244
4245 if (argc > 0) {
4246 VALUE kwds = Qnil;
4247
4248 rb_str_buf_cat2(str, "(");
4249
4250 if (RB_TYPE_P(argv[argc-1], T_HASH)) {
4251 int all_key = TRUE;
4252 rb_hash_foreach(argv[argc-1], key_symbol_p, (VALUE)&all_key);
4253 if (all_key) kwds = argv[--argc];
4254 }
4255
4256 while (argc--) {
4257 VALUE arg = *argv++;
4258
4259 rb_str_append(str, rb_inspect(arg));
4260 rb_str_buf_cat2(str, ", ");
4261 }
4262 if (!NIL_P(kwds)) {
4263 rb_hash_foreach(kwds, kwd_append, str);
4264 }
4265 rb_str_set_len(str, RSTRING_LEN(str)-2); /* drop the last ", " */
4266 rb_str_buf_cat2(str, ")");
4267 }
4268 }
4269
4270 rb_str_buf_cat2(str, ")");
4271
4272 return str;
4273}
4274
4275/*
4276 * call-seq:
4277 * aseq == obj -> true or false
4278 *
4279 * Returns <code>true</code> only if +obj+ is an Enumerator::ArithmeticSequence
4280 * and has equivalent begin, end, step, and exclude_end? settings.
4281 */
4282static VALUE
4283arith_seq_eq(VALUE self, VALUE other)
4284{
4285 if (!RTEST(rb_obj_is_kind_of(other, rb_cArithSeq))) {
4286 return Qfalse;
4287 }
4288
4289 if (!rb_equal(arith_seq_begin(self), arith_seq_begin(other))) {
4290 return Qfalse;
4291 }
4292
4293 if (!rb_equal(arith_seq_end(self), arith_seq_end(other))) {
4294 return Qfalse;
4295 }
4296
4297 if (!rb_equal(arith_seq_step(self), arith_seq_step(other))) {
4298 return Qfalse;
4299 }
4300
4301 if (arith_seq_exclude_end_p(self) != arith_seq_exclude_end_p(other)) {
4302 return Qfalse;
4303 }
4304
4305 return Qtrue;
4306}
4307
4308/*
4309 * call-seq:
4310 * aseq.hash -> integer
4311 *
4312 * Compute a hash-value for this arithmetic sequence.
4313 * Two arithmetic sequences with the same begin, end, step, and exclude_end?
4314 * values will generate the same hash-value.
4315 *
4316 * See also Object#hash.
4317 */
4318static VALUE
4319arith_seq_hash(VALUE self)
4320{
4321 st_index_t hash;
4322 VALUE v;
4323
4324 hash = rb_hash_start(arith_seq_exclude_end_p(self));
4325 v = rb_hash(arith_seq_begin(self));
4326 hash = rb_hash_uint(hash, NUM2LONG(v));
4327 v = rb_hash(arith_seq_end(self));
4328 hash = rb_hash_uint(hash, NUM2LONG(v));
4329 v = rb_hash(arith_seq_step(self));
4330 hash = rb_hash_uint(hash, NUM2LONG(v));
4331 hash = rb_hash_end(hash);
4332
4333 return ST2FIX(hash);
4334}
4335
4336#define NUM_GE(x, y) RTEST(rb_num_coerce_relop((x), (y), idGE))
4337
4338struct arith_seq_gen {
4339    VALUE current;
4340 VALUE end;
4341 VALUE step;
4342 int excl;
4343};
4344
4345/*
4346 * call-seq:
4347 * aseq.each {|i| block } -> aseq
4348 * aseq.each -> aseq
4349 */
4350static VALUE
4351arith_seq_each(VALUE self)
4352{
4353 VALUE c, e, s, len_1, last;
4354 int x;
4355
4356 if (!rb_block_given_p()) return self;
4357
4358 c = arith_seq_begin(self);
4359 e = arith_seq_end(self);
4360 s = arith_seq_step(self);
4361 x = arith_seq_exclude_end_p(self);
4362
4363 if (!RB_TYPE_P(s, T_COMPLEX) && ruby_float_step(c, e, s, x, TRUE)) {
4364 return self;
4365 }
4366
4367 if (NIL_P(e)) {
4368 while (1) {
4369 rb_yield(c);
4370 c = rb_int_plus(c, s);
4371 }
4372
4373 return self;
4374 }
4375
4376 if (rb_equal(s, INT2FIX(0))) {
4377 while (1) {
4378 rb_yield(c);
4379 }
4380
4381 return self;
4382 }
4383
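    /* Finite, non-zero step: compute the last value that should be yielded so
     * the loops below can terminate with a single NUM_GE comparison. */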
4384 len_1 = num_idiv(num_minus(e, c), s);
4385 last = num_plus(c, num_mul(s, len_1));
4386 if (x && rb_equal(last, e)) {
4387 last = num_minus(last, s);
4388 }
4389
4390 if (rb_num_negative_int_p(s)) {
4391 while (NUM_GE(c, last)) {
4392 rb_yield(c);
4393 c = num_plus(c, s);
4394 }
4395 }
4396 else {
4397 while (NUM_GE(last, c)) {
4398 rb_yield(c);
4399 c = num_plus(c, s);
4400 }
4401 }
4402
4403 return self;
4404}
4405
4406/*
4407 * call-seq:
4408 * aseq.size -> num
4409 *
4410 * Returns the number of elements in this arithmetic sequence if it is a finite
4411 * sequence. Otherwise, returns <code>Float::INFINITY</code>.
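 *
 * For example (illustrative):
 *
 *    (1..10).step(2).size   # => 5
 *    (1..).step(2).size     # => Infinity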
4412 */
4413static VALUE
4414arith_seq_size(VALUE self)
4415{
4416 VALUE b, e, s, len_1, len, last;
4417 int x;
4418
4419 b = arith_seq_begin(self);
4420 e = arith_seq_end(self);
4421 s = arith_seq_step(self);
4422 x = arith_seq_exclude_end_p(self);
4423
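    /* If any of begin, end, or step is a Float, delegate to the Float-based
     * step-size computation (ruby_float_step_size). */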
4424 if (RB_FLOAT_TYPE_P(b) || RB_FLOAT_TYPE_P(e) || RB_FLOAT_TYPE_P(s)) {
4425 double ee, n;
4426
4427 if (NIL_P(e)) {
4428 if (rb_num_negative_int_p(s)) {
4429 ee = -HUGE_VAL;
4430 }
4431 else {
4432 ee = HUGE_VAL;
4433 }
4434 }
4435 else {
4436 ee = NUM2DBL(e);
4437 }
4438
4439 n = ruby_float_step_size(NUM2DBL(b), ee, NUM2DBL(s), x);
4440 if (isinf(n)) return DBL2NUM(n);
4441 if (POSFIXABLE(n)) return LONG2FIX((long)n);
4442 return rb_dbl2big(n);
4443 }
4444
4445 if (NIL_P(e)) {
4446 return DBL2NUM(HUGE_VAL);
4447 }
4448
4449 if (!rb_obj_is_kind_of(s, rb_cNumeric)) {
4450 s = rb_to_int(s);
4451 }
4452
4453 if (rb_equal(s, INT2FIX(0))) {
4454 return DBL2NUM(HUGE_VAL);
4455 }
4456
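    /* Integer path: the size is ((e - b) / s) + 1 (or zero for an empty
     * sequence), reduced by one when the end is excluded and falls exactly
     * on the sequence. */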
4457 len_1 = rb_int_idiv(rb_int_minus(e, b), s);
4458 if (rb_num_negative_int_p(len_1)) {
4459 return INT2FIX(0);
4460 }
4461
4462 last = rb_int_plus(b, rb_int_mul(s, len_1));
4463 if (x && rb_equal(last, e)) {
4464 len = len_1;
4465 }
4466 else {
4467 len = rb_int_plus(len_1, INT2FIX(1));
4468 }
4469
4470 return len;
4471}
4472
4473#define sym(name) ID2SYM(rb_intern_const(name))
4474void
4475InitVM_Enumerator(void)
4476{
4477 ID id_private = rb_intern_const("private");
4478
4479 rb_define_method(rb_mKernel, "to_enum", obj_to_enum, -1);
4480 rb_define_method(rb_mKernel, "enum_for", obj_to_enum, -1);
4481
4482 rb_cEnumerator = rb_define_class("Enumerator", rb_cObject);
4483    rb_include_module(rb_cEnumerator, rb_mEnumerable);
4484
4485 rb_define_alloc_func(rb_cEnumerator, enumerator_allocate);
4486 rb_define_method(rb_cEnumerator, "initialize", enumerator_initialize, -1);
4487 rb_define_method(rb_cEnumerator, "initialize_copy", enumerator_init_copy, 1);
4488 rb_define_method(rb_cEnumerator, "each", enumerator_each, -1);
4489 rb_define_method(rb_cEnumerator, "each_with_index", enumerator_each_with_index, 0);
4490 rb_define_method(rb_cEnumerator, "each_with_object", enumerator_with_object, 1);
4491 rb_define_method(rb_cEnumerator, "with_index", enumerator_with_index, -1);
4492 rb_define_method(rb_cEnumerator, "with_object", enumerator_with_object, 1);
4493 rb_define_method(rb_cEnumerator, "next_values", enumerator_next_values, 0);
4494 rb_define_method(rb_cEnumerator, "peek_values", enumerator_peek_values_m, 0);
4495 rb_define_method(rb_cEnumerator, "next", enumerator_next, 0);
4496 rb_define_method(rb_cEnumerator, "peek", enumerator_peek, 0);
4497 rb_define_method(rb_cEnumerator, "feed", enumerator_feed, 1);
4498 rb_define_method(rb_cEnumerator, "rewind", enumerator_rewind, 0);
4499 rb_define_method(rb_cEnumerator, "inspect", enumerator_inspect, 0);
4500 rb_define_method(rb_cEnumerator, "size", enumerator_size, 0);
4501 rb_define_method(rb_cEnumerator, "+", enumerator_plus, 1);
4503
4504 /* Lazy */
4505    rb_cLazy = rb_define_class_under(rb_cEnumerator, "Lazy", rb_cEnumerator);
4506    rb_define_method(rb_mEnumerable, "lazy", enumerable_lazy, 0);
4507
4508 rb_define_alias(rb_cLazy, "_enumerable_map", "map");
4509 rb_define_alias(rb_cLazy, "_enumerable_collect", "collect");
4510 rb_define_alias(rb_cLazy, "_enumerable_flat_map", "flat_map");
4511 rb_define_alias(rb_cLazy, "_enumerable_collect_concat", "collect_concat");
4512 rb_define_alias(rb_cLazy, "_enumerable_select", "select");
4513 rb_define_alias(rb_cLazy, "_enumerable_find_all", "find_all");
4514 rb_define_alias(rb_cLazy, "_enumerable_filter", "filter");
4515 rb_define_alias(rb_cLazy, "_enumerable_filter_map", "filter_map");
4516 rb_define_alias(rb_cLazy, "_enumerable_reject", "reject");
4517 rb_define_alias(rb_cLazy, "_enumerable_grep", "grep");
4518 rb_define_alias(rb_cLazy, "_enumerable_grep_v", "grep_v");
4519 rb_define_alias(rb_cLazy, "_enumerable_zip", "zip");
4520 rb_define_alias(rb_cLazy, "_enumerable_take", "take");
4521 rb_define_alias(rb_cLazy, "_enumerable_take_while", "take_while");
4522 rb_define_alias(rb_cLazy, "_enumerable_drop", "drop");
4523 rb_define_alias(rb_cLazy, "_enumerable_drop_while", "drop_while");
4524 rb_define_alias(rb_cLazy, "_enumerable_uniq", "uniq");
4525 rb_define_private_method(rb_cLazy, "_enumerable_with_index", enumerator_with_index, -1);
4526
4527 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_map"));
4528 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_collect"));
4529 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_flat_map"));
4530 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_collect_concat"));
4531 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_select"));
4532 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_find_all"));
4533 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_filter"));
4534 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_filter_map"));
4535 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_reject"));
4536 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_grep"));
4537 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_grep_v"));
4538 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_zip"));
4539 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_take"));
4540 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_take_while"));
4541 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_drop"));
4542 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_drop_while"));
4543 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_uniq"));
4544
4545 rb_define_method(rb_cLazy, "initialize", lazy_initialize, -1);
4546 rb_define_method(rb_cLazy, "to_enum", lazy_to_enum, -1);
4547 rb_define_method(rb_cLazy, "enum_for", lazy_to_enum, -1);
4548 rb_define_method(rb_cLazy, "eager", lazy_eager, 0);
4549 rb_define_method(rb_cLazy, "map", lazy_map, 0);
4550 rb_define_method(rb_cLazy, "collect", lazy_map, 0);
4551 rb_define_method(rb_cLazy, "flat_map", lazy_flat_map, 0);
4552 rb_define_method(rb_cLazy, "collect_concat", lazy_flat_map, 0);
4553 rb_define_method(rb_cLazy, "select", lazy_select, 0);
4554 rb_define_method(rb_cLazy, "find_all", lazy_select, 0);
4555 rb_define_method(rb_cLazy, "filter", lazy_select, 0);
4556 rb_define_method(rb_cLazy, "filter_map", lazy_filter_map, 0);
4557 rb_define_method(rb_cLazy, "reject", lazy_reject, 0);
4558 rb_define_method(rb_cLazy, "grep", lazy_grep, 1);
4559 rb_define_method(rb_cLazy, "grep_v", lazy_grep_v, 1);
4560 rb_define_method(rb_cLazy, "zip", lazy_zip, -1);
4561 rb_define_method(rb_cLazy, "take", lazy_take, 1);
4562 rb_define_method(rb_cLazy, "take_while", lazy_take_while, 0);
4563 rb_define_method(rb_cLazy, "drop", lazy_drop, 1);
4564 rb_define_method(rb_cLazy, "drop_while", lazy_drop_while, 0);
4565 rb_define_method(rb_cLazy, "lazy", lazy_lazy, 0);
4566 rb_define_method(rb_cLazy, "chunk", lazy_super, -1);
4567 rb_define_method(rb_cLazy, "slice_before", lazy_super, -1);
4568 rb_define_method(rb_cLazy, "slice_after", lazy_super, -1);
4569 rb_define_method(rb_cLazy, "slice_when", lazy_super, -1);
4570 rb_define_method(rb_cLazy, "chunk_while", lazy_super, -1);
4571 rb_define_method(rb_cLazy, "uniq", lazy_uniq, 0);
4572 rb_define_method(rb_cLazy, "compact", lazy_compact, 0);
4573 rb_define_method(rb_cLazy, "with_index", lazy_with_index, -1);
4574
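    /* Map each overridden Lazy method name to its private _enumerable_* alias,
     * presumably so internal re-enumeration can reach the plain Enumerable
     * implementation without producing another Lazy. */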
4575 lazy_use_super_method = rb_hash_new_with_size(18);
4576 rb_hash_aset(lazy_use_super_method, sym("map"), sym("_enumerable_map"));
4577 rb_hash_aset(lazy_use_super_method, sym("collect"), sym("_enumerable_collect"));
4578 rb_hash_aset(lazy_use_super_method, sym("flat_map"), sym("_enumerable_flat_map"));
4579 rb_hash_aset(lazy_use_super_method, sym("collect_concat"), sym("_enumerable_collect_concat"));
4580 rb_hash_aset(lazy_use_super_method, sym("select"), sym("_enumerable_select"));
4581 rb_hash_aset(lazy_use_super_method, sym("find_all"), sym("_enumerable_find_all"));
4582 rb_hash_aset(lazy_use_super_method, sym("filter"), sym("_enumerable_filter"));
4583 rb_hash_aset(lazy_use_super_method, sym("filter_map"), sym("_enumerable_filter_map"));
4584 rb_hash_aset(lazy_use_super_method, sym("reject"), sym("_enumerable_reject"));
4585 rb_hash_aset(lazy_use_super_method, sym("grep"), sym("_enumerable_grep"));
4586 rb_hash_aset(lazy_use_super_method, sym("grep_v"), sym("_enumerable_grep_v"));
4587 rb_hash_aset(lazy_use_super_method, sym("zip"), sym("_enumerable_zip"));
4588 rb_hash_aset(lazy_use_super_method, sym("take"), sym("_enumerable_take"));
4589 rb_hash_aset(lazy_use_super_method, sym("take_while"), sym("_enumerable_take_while"));
4590 rb_hash_aset(lazy_use_super_method, sym("drop"), sym("_enumerable_drop"));
4591 rb_hash_aset(lazy_use_super_method, sym("drop_while"), sym("_enumerable_drop_while"));
4592 rb_hash_aset(lazy_use_super_method, sym("uniq"), sym("_enumerable_uniq"));
4593 rb_hash_aset(lazy_use_super_method, sym("with_index"), sym("_enumerable_with_index"));
4594 rb_obj_freeze(lazy_use_super_method);
4595 rb_gc_register_mark_object(lazy_use_super_method);
4596
4597#if 0 /* for RDoc */
4598 rb_define_method(rb_cLazy, "to_a", lazy_to_a, 0);
4599 rb_define_method(rb_cLazy, "chunk", lazy_chunk, 0);
4600 rb_define_method(rb_cLazy, "chunk_while", lazy_chunk_while, 0);
4601 rb_define_method(rb_cLazy, "slice_after", lazy_slice_after, 0);
4602 rb_define_method(rb_cLazy, "slice_before", lazy_slice_before, 0);
4603 rb_define_method(rb_cLazy, "slice_when", lazy_slice_when, 0);
4604#endif
4605 rb_define_alias(rb_cLazy, "force", "to_a");
4606
4607    rb_eStopIteration = rb_define_class("StopIteration", rb_eIndexError);
4608    rb_define_method(rb_eStopIteration, "result", stop_result, 0);
4609
4610 /* Generator */
4611 rb_cGenerator = rb_define_class_under(rb_cEnumerator, "Generator", rb_cObject);
4612 rb_include_module(rb_cGenerator, rb_mEnumerable);
4613 rb_define_alloc_func(rb_cGenerator, generator_allocate);
4614 rb_define_method(rb_cGenerator, "initialize", generator_initialize, -1);
4615 rb_define_method(rb_cGenerator, "initialize_copy", generator_init_copy, 1);
4616 rb_define_method(rb_cGenerator, "each", generator_each, -1);
4617
4618 /* Yielder */
4619 rb_cYielder = rb_define_class_under(rb_cEnumerator, "Yielder", rb_cObject);
4620 rb_define_alloc_func(rb_cYielder, yielder_allocate);
4621 rb_define_method(rb_cYielder, "initialize", yielder_initialize, 0);
4622 rb_define_method(rb_cYielder, "yield", yielder_yield, -2);
4623 rb_define_method(rb_cYielder, "<<", yielder_yield_push, 1);
4624 rb_define_method(rb_cYielder, "to_proc", yielder_to_proc, 0);
4625
4626 /* Producer */
4627 rb_cEnumProducer = rb_define_class_under(rb_cEnumerator, "Producer", rb_cObject);
4628 rb_define_alloc_func(rb_cEnumProducer, producer_allocate);
4629 rb_define_method(rb_cEnumProducer, "each", producer_each, 0);
4630 rb_define_singleton_method(rb_cEnumerator, "produce", enumerator_s_produce, -1);
4631
4632 /* Chain */
4633 rb_cEnumChain = rb_define_class_under(rb_cEnumerator, "Chain", rb_cEnumerator);
4634 rb_define_alloc_func(rb_cEnumChain, enum_chain_allocate);
4635 rb_define_method(rb_cEnumChain, "initialize", enum_chain_initialize, -2);
4636 rb_define_method(rb_cEnumChain, "initialize_copy", enum_chain_init_copy, 1);
4637 rb_define_method(rb_cEnumChain, "each", enum_chain_each, -1);
4638 rb_define_method(rb_cEnumChain, "size", enum_chain_size, 0);
4639 rb_define_method(rb_cEnumChain, "rewind", enum_chain_rewind, 0);
4640 rb_define_method(rb_cEnumChain, "inspect", enum_chain_inspect, 0);
4641 rb_undef_method(rb_cEnumChain, "feed");
4642 rb_undef_method(rb_cEnumChain, "next");
4643 rb_undef_method(rb_cEnumChain, "next_values");
4644 rb_undef_method(rb_cEnumChain, "peek");
4645 rb_undef_method(rb_cEnumChain, "peek_values");
4646
4647 /* Product */
4648 rb_cEnumProduct = rb_define_class_under(rb_cEnumerator, "Product", rb_cEnumerator);
4649 rb_define_alloc_func(rb_cEnumProduct, enum_product_allocate);
4650 rb_define_method(rb_cEnumProduct, "initialize", enum_product_initialize, -1);
4651 rb_define_method(rb_cEnumProduct, "initialize_copy", enum_product_init_copy, 1);
4652 rb_define_method(rb_cEnumProduct, "each", enum_product_each, 0);
4653 rb_define_method(rb_cEnumProduct, "size", enum_product_size, 0);
4654 rb_define_method(rb_cEnumProduct, "rewind", enum_product_rewind, 0);
4655 rb_define_method(rb_cEnumProduct, "inspect", enum_product_inspect, 0);
4656 rb_undef_method(rb_cEnumProduct, "feed");
4657 rb_undef_method(rb_cEnumProduct, "next");
4658 rb_undef_method(rb_cEnumProduct, "next_values");
4659 rb_undef_method(rb_cEnumProduct, "peek");
4660 rb_undef_method(rb_cEnumProduct, "peek_values");
4661 rb_define_singleton_method(rb_cEnumerator, "product", enumerator_s_product, -1);
4662
4663 /* ArithmeticSequence */
4664 rb_cArithSeq = rb_define_class_under(rb_cEnumerator, "ArithmeticSequence", rb_cEnumerator);
4665 rb_undef_alloc_func(rb_cArithSeq);
4666 rb_undef_method(CLASS_OF(rb_cArithSeq), "new");
4667 rb_define_method(rb_cArithSeq, "begin", arith_seq_begin, 0);
4668 rb_define_method(rb_cArithSeq, "end", arith_seq_end, 0);
4669 rb_define_method(rb_cArithSeq, "exclude_end?", arith_seq_exclude_end, 0);
4670 rb_define_method(rb_cArithSeq, "step", arith_seq_step, 0);
4671 rb_define_method(rb_cArithSeq, "first", arith_seq_first, -1);
4672 rb_define_method(rb_cArithSeq, "last", arith_seq_last, -1);
4673 rb_define_method(rb_cArithSeq, "inspect", arith_seq_inspect, 0);
4674 rb_define_method(rb_cArithSeq, "==", arith_seq_eq, 1);
4675 rb_define_method(rb_cArithSeq, "===", arith_seq_eq, 1);
4676 rb_define_method(rb_cArithSeq, "eql?", arith_seq_eq, 1);
4677 rb_define_method(rb_cArithSeq, "hash", arith_seq_hash, 0);
4678 rb_define_method(rb_cArithSeq, "each", arith_seq_each, 0);
4679 rb_define_method(rb_cArithSeq, "size", arith_seq_size, 0);
4680
4681 rb_provide("enumerator.so"); /* for backward compatibility */
4682}
4683#undef sym
4684
4685void
4686Init_Enumerator(void)
4687{
4688 id_rewind = rb_intern_const("rewind");
4689 id_new = rb_intern_const("new");
4690 id_next = rb_intern_const("next");
4691 id_result = rb_intern_const("result");
4692 id_receiver = rb_intern_const("receiver");
4693 id_arguments = rb_intern_const("arguments");
4694 id_memo = rb_intern_const("memo");
4695 id_method = rb_intern_const("method");
4696 id_force = rb_intern_const("force");
4697 id_to_enum = rb_intern_const("to_enum");
4698 id_each_entry = rb_intern_const("each_entry");
4699 id_begin = rb_intern_const("begin");
4700 id_end = rb_intern_const("end");
4701 id_step = rb_intern_const("step");
4702 id_exclude_end = rb_intern_const("exclude_end");
4703 sym_each = ID2SYM(id_each);
4704 sym_cycle = ID2SYM(rb_intern_const("cycle"));
4705 sym_yield = ID2SYM(rb_intern_const("yield"));
4706
4707 InitVM(Enumerator);
4708}