1 /************************************************
2
3 enumerator.c - provides Enumerator class
4
5 $Author: usa $
6
7 Copyright (C) 2001-2003 Akinori MUSHA
8
9 $Idaemons: /home/cvs/rb/enumerator/enumerator.c,v 1.1.1.1 2001/07/15 10:12:48 knu Exp $
10 $RoughId: enumerator.c,v 1.6 2003/07/27 11:03:24 nobu Exp $
11 $Id: enumerator.c 67936 2021-04-05 01:22:11Z usa $
12
13 ************************************************/
14
15 #include "ruby/ruby.h"
16 #include "internal.h"
17 #include "id.h"
18
19 #ifdef HAVE_FLOAT_H
20 #include <float.h>
21 #endif
22
23 /*
24 * Document-class: Enumerator
25 *
26 * A class which allows both internal and external iteration.
27 *
28 * An Enumerator can be created by the following methods.
29 * - Kernel#to_enum
30 * - Kernel#enum_for
31 * - Enumerator.new
32 *
33 * Most methods have two forms: a block form where the contents
34 * are evaluated for each item in the enumeration, and a non-block form
35 * which returns a new Enumerator wrapping the iteration.
36 *
37 * enumerator = %w(one two three).each
38 * puts enumerator.class # => Enumerator
39 *
40 * enumerator.each_with_object("foo") do |item, obj|
41 * puts "#{obj}: #{item}"
42 * end
43 *
44 * # foo: one
45 * # foo: two
46 * # foo: three
47 *
48 * enum_with_obj = enumerator.each_with_object("foo")
49 * puts enum_with_obj.class # => Enumerator
50 *
51 * enum_with_obj.each do |item, obj|
52 * puts "#{obj}: #{item}"
53 * end
54 *
55 * # foo: one
56 * # foo: two
57 * # foo: three
58 *
59 * This allows you to chain Enumerators together. For example, you
60 * can map a list's elements to strings containing the index
61 * and the element as a string via:
62 *
63 * puts %w[foo bar baz].map.with_index { |w, i| "#{i}:#{w}" }
64 * # => ["0:foo", "1:bar", "2:baz"]
65 *
66 * An Enumerator can also be used as an external iterator.
67 * For example, Enumerator#next returns the next value of the iterator
68 * or raises StopIteration if the Enumerator is at the end.
69 *
70 * e = [1,2,3].each # returns an enumerator object.
71 * puts e.next # => 1
72 * puts e.next # => 2
73 * puts e.next # => 3
74 * puts e.next # raises StopIteration
75 *
76 * You can use this to implement an internal iterator as follows:
77 *
78 * def ext_each(e)
79 * while true
80 * begin
81 * vs = e.next_values
82 * rescue StopIteration
83 * return $!.result
84 * end
85 * y = yield(*vs)
86 * e.feed y
87 * end
88 * end
89 *
90 * o = Object.new
91 *
92 * def o.each
93 * puts yield
94 * puts yield(1)
95 * puts yield(1, 2)
96 * 3
97 * end
98 *
99 * # use o.each as an internal iterator directly.
100 * puts o.each {|*x| puts x; [:b, *x] }
101 * # => [], [:b], [1], [:b, 1], [1, 2], [:b, 1, 2], 3
102 *
103 * # convert o.each to an external iterator for
104 * # implementing an internal iterator.
105 * puts ext_each(o.to_enum) {|*x| puts x; [:b, *x] }
106 * # => [], [:b], [1], [:b, 1], [1, 2], [:b, 1, 2], 3
107 *
108 */
VALUE rb_cEnumerator;   /* Enumerator class */
static VALUE rb_cLazy;  /* Enumerator::Lazy class */
static ID id_rewind, id_new, id_to_enum;
static ID id_next, id_result, id_receiver, id_arguments, id_memo, id_method, id_force;
static ID id_begin, id_end, id_step, id_exclude_end;
static VALUE sym_each, sym_cycle;

/* Aliases to IDs pre-interned in id.h, avoiding re-interning at init. */
#define id_call idCall
#define id_each idEach
#define id_eqq idEqq
#define id_initialize idInitialize
#define id_size idSize

VALUE rb_eStopIteration;  /* StopIteration exception class */
123
/* Internal state of an Enumerator instance. */
struct enumerator {
    VALUE obj;          /* receiver being enumerated */
    ID meth;            /* method invoked on obj to iterate (e.g. each) */
    VALUE args;         /* argument array passed to meth, or 0 */
    VALUE fib;          /* fiber driving external iteration, or 0 */
    VALUE dst;          /* fiber to yield back to (the consumer's fiber) */
    VALUE lookahead;    /* values cached by #peek, or Qundef */
    VALUE feedvalue;    /* value set by #feed for the next yield, or Qundef */
    VALUE stop_exc;     /* StopIteration prepared at end of iteration, or Qfalse */
    VALUE size;         /* size value or callable, or Qnil */
    VALUE procs;        /* chained lazy proc entries, or 0 */
    rb_enumerator_size_func *size_fn;  /* C function computing size lazily */
};
137
static VALUE rb_cGenerator, rb_cYielder;  /* Enumerator::Generator / ::Yielder */

/* Wraps the block given to Enumerator.new. */
struct generator {
    VALUE proc;  /* the generator block */
    VALUE obj;   /* underlying receiver, used for #inspect and #size below */
};

/* Handed to a generator block; its proc consumes each yielded value. */
struct yielder {
    VALUE proc;  /* proc invoked for each set of yielded values */
};
148
/* One lazy-chain step: transforms a MEMO as values flow through. */
typedef struct MEMO *lazyenum_proc_func(VALUE, struct MEMO *, VALUE, long);
/* Derives this step's resulting size from the upstream size. */
typedef VALUE lazyenum_size_func(VALUE, VALUE);
typedef struct {
    lazyenum_proc_func *proc;
    lazyenum_size_func *size;  /* NULL when size cannot be computed lazily */
} lazyenum_funcs;

/* One entry in a lazy enumerator's chain of procs. */
struct proc_entry {
    VALUE proc;                /* user block for this step, if any */
    VALUE memo;                /* per-step auxiliary data */
    const lazyenum_funcs *fn;  /* C implementation of this step */
};
161
static VALUE generator_allocate(VALUE klass);
static VALUE generator_init(VALUE obj, VALUE proc);

static VALUE rb_cEnumChain;  /* Enumerator::Chain class */

/* State for Enumerator::Chain. */
struct enum_chain {
    VALUE enums;  /* array of chained enumerables */
    long pos;     /* index of the enumerable currently being iterated */
};

VALUE rb_cArithSeq;  /* Enumerator::ArithmeticSequence class */
173
174 /*
175 * Enumerator
176 */
/* GC mark function: keeps every VALUE held by the enumerator alive. */
static void
enumerator_mark(void *p)
{
    struct enumerator *ptr = p;
    rb_gc_mark(ptr->obj);
    rb_gc_mark(ptr->args);
    rb_gc_mark(ptr->fib);
    rb_gc_mark(ptr->dst);
    rb_gc_mark(ptr->lookahead);
    rb_gc_mark(ptr->feedvalue);
    rb_gc_mark(ptr->stop_exc);
    rb_gc_mark(ptr->size);
    rb_gc_mark(ptr->procs);
}
191
#define enumerator_free RUBY_TYPED_DEFAULT_FREE

/* GC memsize function: the struct itself carries no extra allocations. */
static size_t
enumerator_memsize(const void *p)
{
    return sizeof(struct enumerator);
}
199
/* TypedData bookkeeping for Enumerator instances. */
static const rb_data_type_t enumerator_data_type = {
    "enumerator",
    {
        enumerator_mark,
        enumerator_free,
        enumerator_memsize,
    },
    0, 0, RUBY_TYPED_FREE_IMMEDIATELY
};
209
/* Unwraps the enumerator struct, rejecting uninitialized objects
 * (obj == Qundef is the allocator's "not yet initialized" marker). */
static struct enumerator *
enumerator_ptr(VALUE obj)
{
    struct enumerator *ptr;

    TypedData_Get_Struct(obj, struct enumerator, &enumerator_data_type, ptr);
    if (!ptr || ptr->obj == Qundef) {
        rb_raise(rb_eArgError, "uninitialized enumerator");
    }
    return ptr;
}
221
/* GC mark function for a lazy-chain proc entry. */
static void
proc_entry_mark(void *p)
{
    struct proc_entry *ptr = p;
    rb_gc_mark(ptr->proc);
    rb_gc_mark(ptr->memo);
}
229
#define proc_entry_free RUBY_TYPED_DEFAULT_FREE

/* GC memsize function; tolerates a NULL pointer. */
static size_t
proc_entry_memsize(const void *p)
{
    return p ? sizeof(struct proc_entry) : 0;
}
237
238 static const rb_data_type_t proc_entry_data_type = {
239 "proc_entry",
240 {
241 proc_entry_mark,
242 proc_entry_free,
243 proc_entry_memsize,
244 },
245 };
246
/* Unwraps a proc-entry object; entries are created initialized, so no
 * uninitialized check is needed here. */
static struct proc_entry *
proc_entry_ptr(VALUE proc_entry)
{
    struct proc_entry *ptr;

    TypedData_Get_Struct(proc_entry, struct proc_entry, &proc_entry_data_type, ptr);

    return ptr;
}
256
257 /*
258 * call-seq:
259 * obj.to_enum(method = :each, *args) -> enum
260 * obj.enum_for(method = :each, *args) -> enum
261 * obj.to_enum(method = :each, *args) {|*args| block} -> enum
262 * obj.enum_for(method = :each, *args){|*args| block} -> enum
263 *
264 * Creates a new Enumerator which will enumerate by calling +method+ on
265 * +obj+, passing +args+ if any.
266 *
267 * If a block is given, it will be used to calculate the size of
268 * the enumerator without the need to iterate it (see Enumerator#size).
269 *
270 * === Examples
271 *
272 * str = "xyz"
273 *
274 * enum = str.enum_for(:each_byte)
275 * enum.each { |b| puts b }
276 * # => 120
277 * # => 121
278 * # => 122
279 *
280 * # protect an array from being modified by some_method
281 * a = [1, 2, 3]
282 * some_method(a.to_enum)
283 *
284 * It is typical to call to_enum when defining methods for
285 * a generic Enumerable, in case no block is passed.
286 *
287 * Here is such an example, with parameter passing and a sizing block:
288 *
289 * module Enumerable
290 * # a generic method to repeat the values of any enumerable
291 * def repeat(n)
292 * raise ArgumentError, "#{n} is negative!" if n < 0
293 * unless block_given?
294 * return to_enum(__method__, n) do # __method__ is :repeat here
295 * sz = size # Call size and multiply by n...
296 * sz * n if sz # but return nil if size itself is nil
297 * end
298 * end
299 * each do |*val|
300 * n.times { yield *val }
301 * end
302 * end
303 * end
304 *
305 * %i[hello world].repeat(2) { |w| puts w }
306 * # => Prints 'hello', 'hello', 'world', 'world'
307 * enum = (1..14).repeat(3)
308 * # => returns an Enumerator when called without a block
309 * enum.first(4) # => [1, 1, 1, 2]
310 * enum.size # => 42
311 */
312 static VALUE
obj_to_enum(int argc,VALUE * argv,VALUE obj)313 obj_to_enum(int argc, VALUE *argv, VALUE obj)
314 {
315 VALUE enumerator, meth = sym_each;
316
317 if (argc > 0) {
318 --argc;
319 meth = *argv++;
320 }
321 enumerator = rb_enumeratorize_with_size(obj, meth, argc, argv, 0);
322 if (rb_block_given_p()) {
323 enumerator_ptr(enumerator)->size = rb_block_proc();
324 }
325 return enumerator;
326 }
327
/* Allocator for Enumerator: obj == Qundef flags the instance as
 * uninitialized until enumerator_init() runs. */
static VALUE
enumerator_allocate(VALUE klass)
{
    struct enumerator *ptr;
    VALUE enum_obj;

    enum_obj = TypedData_Make_Struct(klass, struct enumerator, &enumerator_data_type, ptr);
    ptr->obj = Qundef;

    return enum_obj;
}
339
/*
 * Fills in an allocated enumerator so that obj.meth(*argv) drives the
 * iteration.  size may be a fixed value, a callable, or Qnil; when
 * size_fn is non-NULL it takes precedence in enumerator_size().
 */
static VALUE
enumerator_init(VALUE enum_obj, VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, VALUE size)
{
    struct enumerator *ptr;

    rb_check_frozen(enum_obj);
    TypedData_Get_Struct(enum_obj, struct enumerator, &enumerator_data_type, ptr);

    if (!ptr) {
        rb_raise(rb_eArgError, "unallocated enumerator");
    }

    ptr->obj = obj;
    ptr->meth = rb_to_id(meth);
    if (argc) ptr->args = rb_ary_new4(argc, argv);
    ptr->fib = 0;
    ptr->dst = Qnil;
    ptr->lookahead = Qundef;    /* no peeked value yet */
    ptr->feedvalue = Qundef;    /* no pending #feed value */
    ptr->stop_exc = Qfalse;     /* iteration has not ended */
    ptr->size = size;
    ptr->size_fn = size_fn;

    return enum_obj;
}
365
366 /*
367 * call-seq:
368 * Enumerator.new(size = nil) { |yielder| ... }
369 * Enumerator.new(obj, method = :each, *args)
370 *
371 * Creates a new Enumerator object, which can be used as an
372 * Enumerable.
373 *
374 * In the first form, iteration is defined by the given block, in
375 * which a "yielder" object, given as block parameter, can be used to
376 * yield a value by calling the +yield+ method (aliased as +<<+):
377 *
378 * fib = Enumerator.new do |y|
379 * a = b = 1
380 * loop do
381 * y << a
382 * a, b = b, a + b
383 * end
384 * end
385 *
386 * p fib.take(10) # => [1, 1, 2, 3, 5, 8, 13, 21, 34, 55]
387 *
388 * The optional parameter can be used to specify how to calculate the size
389 * in a lazy fashion (see Enumerator#size). It can either be a value or
390 * a callable object.
391 *
392 * In the second, deprecated, form, a generated Enumerator iterates over the
393 * given object using the given method with the given arguments passed.
394 *
395 * Use of this form is discouraged. Use Kernel#enum_for or Kernel#to_enum
396 * instead.
397 *
398 * e = Enumerator.new(ObjectSpace, :each_object)
399 * #-> ObjectSpace.enum_for(:each_object)
400 *
401 * e.select { |obj| obj.is_a?(Class) } #=> array of all classes
402 *
403 */
static VALUE
enumerator_initialize(int argc, VALUE *argv, VALUE obj)
{
    VALUE recv, meth = sym_each;
    VALUE size = Qnil;

    if (rb_block_given_p()) {
        /* Block form: wrap the block in a Generator; the only optional
         * argument is the size. */
        rb_check_arity(argc, 0, 1);
        recv = generator_init(generator_allocate(rb_cGenerator), rb_block_proc());
        if (argc) {
            /* Accept nil, a callable, or Float::INFINITY as-is;
             * anything else must coerce to an Integer. */
            if (NIL_P(argv[0]) || rb_respond_to(argv[0], id_call) ||
                (RB_TYPE_P(argv[0], T_FLOAT) && RFLOAT_VALUE(argv[0]) == HUGE_VAL)) {
                size = argv[0];
            }
            else {
                size = rb_to_int(argv[0]);
            }
            argc = 0;
        }
    }
    else {
        /* Deprecated form: Enumerator.new(obj, method = :each, *args). */
        rb_check_arity(argc, 1, UNLIMITED_ARGUMENTS);
        rb_warn("Enumerator.new without a block is deprecated; use Object#to_enum");
        recv = *argv++;
        if (--argc) {
            meth = *argv++;
            --argc;
        }
    }

    return enumerator_init(obj, recv, meth, argc, argv, 0, size);
}
436
437 /* :nodoc: */
static VALUE
enumerator_init_copy(VALUE obj, VALUE orig)
{
    struct enumerator *ptr0, *ptr1;

    if (!OBJ_INIT_COPY(obj, orig)) return obj;
    ptr0 = enumerator_ptr(orig);
    if (ptr0->fib) {
        /* Fibers cannot be copied */
        rb_raise(rb_eTypeError, "can't copy execution context");
    }

    TypedData_Get_Struct(obj, struct enumerator, &enumerator_data_type, ptr1);

    if (!ptr1) {
        rb_raise(rb_eArgError, "unallocated enumerator");
    }

    /* Share source/method/args/size, but start the copy with fresh
     * external-iteration state (no fiber, caches cleared). */
    ptr1->obj = ptr0->obj;
    ptr1->meth = ptr0->meth;
    ptr1->args = ptr0->args;
    ptr1->fib = 0;
    ptr1->lookahead = Qundef;
    ptr1->feedvalue = Qundef;
    ptr1->size = ptr0->size;
    ptr1->size_fn = ptr0->size_fn;

    return obj;
}
467
468 /*
469 * For backwards compatibility; use rb_enumeratorize_with_size
470 */
VALUE
rb_enumeratorize(VALUE obj, VALUE meth, int argc, const VALUE *argv)
{
    /* Legacy entry point: delegates with no size function attached. */
    return rb_enumeratorize_with_size(obj, meth, argc, argv, 0);
}
476
477 static VALUE
478 lazy_to_enum_i(VALUE self, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn);
479
480 VALUE
rb_enumeratorize_with_size(VALUE obj,VALUE meth,int argc,const VALUE * argv,rb_enumerator_size_func * size_fn)481 rb_enumeratorize_with_size(VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn)
482 {
483 /* Similar effect as calling obj.to_enum, i.e. dispatching to either
484 Kernel#to_enum vs Lazy#to_enum */
485 if (RTEST(rb_obj_is_kind_of(obj, rb_cLazy)))
486 return lazy_to_enum_i(obj, meth, argc, argv, size_fn);
487 else
488 return enumerator_init(enumerator_allocate(rb_cEnumerator),
489 obj, meth, argc, argv, size_fn, Qnil);
490 }
491
/* Invokes e->obj.<e->meth>(*e->args) with func as the block; when func
 * is 0 the caller's own block is forwarded (see enumerator_each). */
static VALUE
enumerator_block_call(VALUE obj, rb_block_call_func *func, VALUE arg)
{
    int argc = 0;
    const VALUE *argv = 0;
    const struct enumerator *e = enumerator_ptr(obj);
    ID meth = e->meth;

    if (e->args) {
        argc = RARRAY_LENINT(e->args);
        argv = RARRAY_CONST_PTR(e->args);
    }
    return rb_block_call(e->obj, meth, argc, argv, func, arg);
}
506
507 /*
508 * call-seq:
509 * enum.each { |elm| block } -> obj
510 * enum.each -> enum
511 * enum.each(*appending_args) { |elm| block } -> obj
512 * enum.each(*appending_args) -> an_enumerator
513 *
514 * Iterates over the block according to how this Enumerator was constructed.
515 * If no block and no arguments are given, returns self.
516 *
517 * === Examples
518 *
519 * "Hello, world!".scan(/\w+/) #=> ["Hello", "world"]
520 * "Hello, world!".to_enum(:scan, /\w+/).to_a #=> ["Hello", "world"]
521 * "Hello, world!".to_enum(:scan).each(/\w+/).to_a #=> ["Hello", "world"]
522 *
523 * obj = Object.new
524 *
525 * def obj.each_arg(a, b=:b, *rest)
526 * yield a
527 * yield b
528 * yield rest
529 * :method_returned
530 * end
531 *
532 * enum = obj.to_enum :each_arg, :a, :x
533 *
534 * enum.each.to_a #=> [:a, :x, []]
535 * enum.each.equal?(enum) #=> true
536 * enum.each { |elm| elm } #=> :method_returned
537 *
538 * enum.each(:y, :z).to_a #=> [:a, :x, [:y, :z]]
539 * enum.each(:y, :z).equal?(enum) #=> false
540 * enum.each(:y, :z) { |elm| elm } #=> :method_returned
541 *
542 */
static VALUE
enumerator_each(int argc, VALUE *argv, VALUE obj)
{
    /* Extra arguments operate on a duplicate with the args appended,
     * so the receiver itself is never mutated. */
    if (argc > 0) {
        struct enumerator *e = enumerator_ptr(obj = rb_obj_dup(obj));
        VALUE args = e->args;
        if (args) {
#if SIZEOF_INT < SIZEOF_LONG
            /* check int range overflow */
            rb_long2int(RARRAY_LEN(args) + argc);
#endif
            args = rb_ary_dup(args);
            rb_ary_cat(args, argv, argc);
        }
        else {
            args = rb_ary_new4(argc, argv);
        }
        e->args = args;
        /* The recorded size no longer applies with different args. */
        e->size = Qnil;
        e->size_fn = 0;
    }
    if (!rb_block_given_p()) return obj;
    /* func == 0 forwards the caller's block to the iteration method. */
    return enumerator_block_call(obj, 0, obj);
}
567
568 static VALUE
enumerator_with_index_i(RB_BLOCK_CALL_FUNC_ARGLIST (val,m))569 enumerator_with_index_i(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
570 {
571 struct MEMO *memo = (struct MEMO *)m;
572 VALUE idx = memo->v1;
573 MEMO_V1_SET(memo, rb_int_succ(idx));
574
575 if (argc <= 1)
576 return rb_yield_values(2, val, idx);
577
578 return rb_yield_values(2, rb_ary_new4(argc, argv), idx);
579 }
580
581 static VALUE
582 enumerator_size(VALUE obj);
583
/* Size function for RETURN_SIZED_ENUMERATOR wrappers below: delegates
 * to the receiver enumerator's own #size. */
static VALUE
enumerator_enum_size(VALUE obj, VALUE args, VALUE eobj)
{
    return enumerator_size(obj);
}
589
590 /*
591 * call-seq:
592 * e.with_index(offset = 0) {|(*args), idx| ... }
593 * e.with_index(offset = 0)
594 *
595 * Iterates the given block for each element with an index, which
596 * starts from +offset+. If no block is given, returns a new Enumerator
597 * that includes the index, starting from +offset+
598 *
599 * +offset+:: the starting index to use
600 *
601 */
static VALUE
enumerator_with_index(int argc, VALUE *argv, VALUE obj)
{
    VALUE memo;

    rb_check_arity(argc, 0, 1);
    RETURN_SIZED_ENUMERATOR(obj, argc, argv, enumerator_enum_size);
    /* Default offset is 0; an explicit nil also means 0.  Note the
     * assignment inside the condition captures argv[0]. */
    memo = (!argc || NIL_P(memo = argv[0])) ? INT2FIX(0) : rb_to_int(memo);
    return enumerator_block_call(obj, enumerator_with_index_i, (VALUE)MEMO_NEW(memo, 0, 0));
}
612
613 /*
614 * call-seq:
615 * e.each_with_index {|(*args), idx| ... }
616 * e.each_with_index
617 *
618 * Same as Enumerator#with_index(0), i.e. there is no starting offset.
619 *
620 * If no block is given, a new Enumerator is returned that includes the index.
621 *
622 */
static VALUE
enumerator_each_with_index(VALUE obj)
{
    /* each_with_index is with_index with the default offset of 0. */
    return enumerator_with_index(0, NULL, obj);
}
628
629 static VALUE
enumerator_with_object_i(RB_BLOCK_CALL_FUNC_ARGLIST (val,memo))630 enumerator_with_object_i(RB_BLOCK_CALL_FUNC_ARGLIST(val, memo))
631 {
632 if (argc <= 1)
633 return rb_yield_values(2, val, memo);
634
635 return rb_yield_values(2, rb_ary_new4(argc, argv), memo);
636 }
637
638 /*
639 * call-seq:
640 * e.each_with_object(obj) {|(*args), obj| ... }
641 * e.each_with_object(obj)
642 * e.with_object(obj) {|(*args), obj| ... }
643 * e.with_object(obj)
644 *
645 * Iterates the given block for each element with an arbitrary object, +obj+,
646 * and returns +obj+
647 *
648 * If no block is given, returns a new Enumerator.
649 *
650 * === Example
651 *
652 * to_three = Enumerator.new do |y|
653 * 3.times do |x|
654 * y << x
655 * end
656 * end
657 *
658 * to_three_with_string = to_three.with_object("foo")
659 * to_three_with_string.each do |x,string|
660 * puts "#{string}: #{x}"
661 * end
662 *
663 * # => foo:0
664 * # => foo:1
665 * # => foo:2
666 */
static VALUE
enumerator_with_object(VALUE obj, VALUE memo)
{
    /* Without a block, return a sized enumerator over [elem, memo]. */
    RETURN_SIZED_ENUMERATOR(obj, 1, &memo, enumerator_enum_size);
    enumerator_block_call(obj, enumerator_with_object_i, memo);

    /* Returns the memo object itself, not the iteration's result. */
    return memo;
}
675
/*
 * Block passed to the iteration method inside the fiber: packs the
 * yielded values into an array, suspends the fiber to hand them to the
 * consumer, and on resume returns the pending #feed value (or nil) as
 * the result of the iterator's yield.
 */
static VALUE
next_ii(RB_BLOCK_CALL_FUNC_ARGLIST(i, obj))
{
    struct enumerator *e = enumerator_ptr(obj);
    VALUE feedvalue = Qnil;
    VALUE args = rb_ary_new4(argc, argv);
    rb_fiber_yield(1, &args);
    /* Consume a pending #feed value exactly once. */
    if (e->feedvalue != Qundef) {
        feedvalue = e->feedvalue;
        e->feedvalue = Qundef;
    }
    return feedvalue;
}
689
/*
 * Fiber body for external iteration: runs obj.each with next_ii as the
 * block.  When iteration finishes, records a StopIteration carrying the
 * iterator's return value (StopIteration#result) and yields nil.
 */
static VALUE
next_i(VALUE curr, VALUE obj)
{
    struct enumerator *e = enumerator_ptr(obj);
    VALUE nil = Qnil;
    VALUE result;

    result = rb_block_call(obj, id_each, 0, 0, next_ii, obj);
    e->stop_exc = rb_exc_new2(rb_eStopIteration, "iteration reached an end");
    rb_ivar_set(e->stop_exc, id_result, result);
    return rb_fiber_yield(1, &nil);
}
702
703 static void
next_init(VALUE obj,struct enumerator * e)704 next_init(VALUE obj, struct enumerator *e)
705 {
706 VALUE curr = rb_fiber_current();
707 e->dst = curr;
708 e->fib = rb_fiber_new(next_i, obj);
709 e->lookahead = Qundef;
710 }
711
/*
 * Resumes (creating if necessary) the iteration fiber and returns the
 * next packed value array.  Raises the recorded StopIteration once the
 * underlying iteration has finished.
 */
static VALUE
get_next_values(VALUE obj, struct enumerator *e)
{
    VALUE curr, vs;

    if (e->stop_exc)
        rb_exc_raise(e->stop_exc);

    curr = rb_fiber_current();

    /* Lazily start (or restart after rewind) the iteration fiber. */
    if (!e->fib || !rb_fiber_alive_p(e->fib)) {
        next_init(obj, e);
    }

    vs = rb_fiber_resume(e->fib, 1, &curr);
    if (e->stop_exc) {
        /* Iteration ended during this resume: drop fiber state and
         * raise the StopIteration prepared by next_i(). */
        e->fib = 0;
        e->dst = Qnil;
        e->lookahead = Qundef;
        e->feedvalue = Qundef;
        rb_exc_raise(e->stop_exc);
    }
    return vs;
}
736
737 /*
738 * call-seq:
739 * e.next_values -> array
740 *
 * Returns the next object as an array in the enumerator, and moves the
 * internal position forward.  When the position reaches the end,
 * StopIteration is raised.
744 *
745 * This method can be used to distinguish <code>yield</code> and <code>yield
746 * nil</code>.
747 *
748 * === Example
749 *
750 * o = Object.new
751 * def o.each
752 * yield
753 * yield 1
754 * yield 1, 2
755 * yield nil
756 * yield [1, 2]
757 * end
758 * e = o.to_enum
759 * p e.next_values
760 * p e.next_values
761 * p e.next_values
762 * p e.next_values
763 * p e.next_values
764 * e = o.to_enum
765 * p e.next
766 * p e.next
767 * p e.next
768 * p e.next
769 * p e.next
770 *
771 * ## yield args next_values next
772 * # yield [] nil
773 * # yield 1 [1] 1
774 * # yield 1, 2 [1, 2] [1, 2]
775 * # yield nil [nil] nil
776 * # yield [1, 2] [[1, 2]] [1, 2]
777 *
 * Note that +next_values+ does not affect other non-external enumeration
 * methods unless the underlying iteration method itself has side effects,
 * e.g. IO#each_line.
781 *
782 */
783
784 static VALUE
enumerator_next_values(VALUE obj)785 enumerator_next_values(VALUE obj)
786 {
787 struct enumerator *e = enumerator_ptr(obj);
788 VALUE vs;
789
790 if (e->lookahead != Qundef) {
791 vs = e->lookahead;
792 e->lookahead = Qundef;
793 return vs;
794 }
795
796 return get_next_values(obj, e);
797 }
798
799 static VALUE
ary2sv(VALUE args,int dup)800 ary2sv(VALUE args, int dup)
801 {
802 if (!RB_TYPE_P(args, T_ARRAY))
803 return args;
804
805 switch (RARRAY_LEN(args)) {
806 case 0:
807 return Qnil;
808
809 case 1:
810 return RARRAY_AREF(args, 0);
811
812 default:
813 if (dup)
814 return rb_ary_dup(args);
815 return args;
816 }
817 }
818
819 /*
820 * call-seq:
821 * e.next -> object
822 *
 * Returns the next object in the enumerator, and moves the internal position
 * forward.  When the position reaches the end, StopIteration is raised.
825 *
826 * === Example
827 *
828 * a = [1,2,3]
829 * e = a.to_enum
830 * p e.next #=> 1
831 * p e.next #=> 2
832 * p e.next #=> 3
833 * p e.next #raises StopIteration
834 *
 * Note that the enumeration sequence by +next+ does not affect other
 * non-external enumeration methods, unless the underlying iteration method
 * itself has side effects, e.g. IO#each_line.
838 *
839 */
840
841 static VALUE
enumerator_next(VALUE obj)842 enumerator_next(VALUE obj)
843 {
844 VALUE vs = enumerator_next_values(obj);
845 return ary2sv(vs, 0);
846 }
847
848 static VALUE
enumerator_peek_values(VALUE obj)849 enumerator_peek_values(VALUE obj)
850 {
851 struct enumerator *e = enumerator_ptr(obj);
852
853 if (e->lookahead == Qundef) {
854 e->lookahead = get_next_values(obj, e);
855 }
856 return e->lookahead;
857 }
858
859 /*
860 * call-seq:
861 * e.peek_values -> array
862 *
863 * Returns the next object as an array, similar to Enumerator#next_values, but
864 * doesn't move the internal position forward. If the position is already at
865 * the end, StopIteration is raised.
866 *
867 * === Example
868 *
869 * o = Object.new
870 * def o.each
871 * yield
872 * yield 1
873 * yield 1, 2
874 * end
875 * e = o.to_enum
876 * p e.peek_values #=> []
877 * e.next
878 * p e.peek_values #=> [1]
879 * p e.peek_values #=> [1]
880 * e.next
881 * p e.peek_values #=> [1, 2]
882 * e.next
883 * p e.peek_values # raises StopIteration
884 *
885 */
886
887 static VALUE
enumerator_peek_values_m(VALUE obj)888 enumerator_peek_values_m(VALUE obj)
889 {
890 return rb_ary_dup(enumerator_peek_values(obj));
891 }
892
893 /*
894 * call-seq:
895 * e.peek -> object
896 *
897 * Returns the next object in the enumerator, but doesn't move the internal
898 * position forward. If the position is already at the end, StopIteration
899 * is raised.
900 *
901 * === Example
902 *
903 * a = [1,2,3]
904 * e = a.to_enum
905 * p e.next #=> 1
906 * p e.peek #=> 2
907 * p e.peek #=> 2
908 * p e.peek #=> 2
909 * p e.next #=> 2
910 * p e.next #=> 3
911 * p e.peek #raises StopIteration
912 *
913 */
914
915 static VALUE
enumerator_peek(VALUE obj)916 enumerator_peek(VALUE obj)
917 {
918 VALUE vs = enumerator_peek_values(obj);
919 return ary2sv(vs, 1);
920 }
921
922 /*
923 * call-seq:
924 * e.feed obj -> nil
925 *
926 * Sets the value to be returned by the next yield inside +e+.
927 *
928 * If the value is not set, the yield returns nil.
929 *
930 * This value is cleared after being yielded.
931 *
932 * # Array#map passes the array's elements to "yield" and collects the
933 * # results of "yield" as an array.
934 * # Following example shows that "next" returns the passed elements and
935 * # values passed to "feed" are collected as an array which can be
936 * # obtained by StopIteration#result.
937 * e = [1,2,3].map
938 * p e.next #=> 1
939 * e.feed "a"
940 * p e.next #=> 2
941 * e.feed "b"
942 * p e.next #=> 3
943 * e.feed "c"
944 * begin
945 * e.next
946 * rescue StopIteration
947 * p $!.result #=> ["a", "b", "c"]
948 * end
949 *
950 * o = Object.new
951 * def o.each
952 * x = yield # (2) blocks
953 * p x # (5) => "foo"
954 * x = yield # (6) blocks
955 * p x # (8) => nil
956 * x = yield # (9) blocks
957 * p x # not reached w/o another e.next
958 * end
959 *
960 * e = o.to_enum
961 * e.next # (1)
962 * e.feed "foo" # (3)
963 * e.next # (4)
964 * e.next # (7)
965 * # (10)
966 */
967
968 static VALUE
enumerator_feed(VALUE obj,VALUE v)969 enumerator_feed(VALUE obj, VALUE v)
970 {
971 struct enumerator *e = enumerator_ptr(obj);
972
973 if (e->feedvalue != Qundef) {
974 rb_raise(rb_eTypeError, "feed value already set");
975 }
976 e->feedvalue = v;
977
978 return Qnil;
979 }
980
981 /*
982 * call-seq:
983 * e.rewind -> e
984 *
985 * Rewinds the enumeration sequence to the beginning.
986 *
987 * If the enclosed object responds to a "rewind" method, it is called.
988 */
989
990 static VALUE
enumerator_rewind(VALUE obj)991 enumerator_rewind(VALUE obj)
992 {
993 struct enumerator *e = enumerator_ptr(obj);
994
995 rb_check_funcall(e->obj, id_rewind, 0, 0);
996
997 e->fib = 0;
998 e->dst = Qnil;
999 e->lookahead = Qundef;
1000 e->feedvalue = Qundef;
1001 e->stop_exc = Qfalse;
1002 return obj;
1003 }
1004
1005 static struct generator *generator_ptr(VALUE obj);
1006 static VALUE append_method(VALUE obj, VALUE str, ID default_method, VALUE default_args);
1007
/* Recursion-safe worker for Enumerator#inspect (via rb_exec_recursive). */
static VALUE
inspect_enumerator(VALUE obj, VALUE dummy, int recur)
{
    struct enumerator *e;
    VALUE eobj, str, cname;

    TypedData_Get_Struct(obj, struct enumerator, &enumerator_data_type, e);

    cname = rb_obj_class(obj);

    if (!e || e->obj == Qundef) {
        return rb_sprintf("#<%"PRIsVALUE": uninitialized>", rb_class_path(cname));
    }

    /* Self-referential enumerator: elide the nested description. */
    if (recur) {
        str = rb_sprintf("#<%"PRIsVALUE": ...>", rb_class_path(cname));
        OBJ_TAINT(str);
        return str;
    }

    if (e->procs) {
        long i;

        eobj = generator_ptr(e->obj)->obj;
        /* In case procs chained enumerator traversing all proc entries manually */
        if (rb_obj_class(eobj) == cname) {
            str = rb_inspect(eobj);
        }
        else {
            str = rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE">", rb_class_path(cname), eobj);
        }
        /* Wrap the description once per chained proc, innermost first. */
        for (i = 0; i < RARRAY_LEN(e->procs); i++) {
            str = rb_sprintf("#<%"PRIsVALUE": %"PRIsVALUE, cname, str);
            append_method(RARRAY_AREF(e->procs, i), str, e->meth, e->args);
            rb_str_buf_cat2(str, ">");
        }
        return str;
    }

    /* Prefer the receiver recorded as an ivar, if any. */
    eobj = rb_attr_get(obj, id_receiver);
    if (NIL_P(eobj)) {
        eobj = e->obj;
    }

    /* (1..100).each_cons(2) => "#<Enumerator: 1..100:each_cons(2)>" */
    str = rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE, rb_class_path(cname), eobj);
    append_method(obj, str, e->meth, e->args);

    rb_str_buf_cat2(str, ">");

    return str;
}
1060
1061 static int
key_symbol_p(VALUE key,VALUE val,VALUE arg)1062 key_symbol_p(VALUE key, VALUE val, VALUE arg)
1063 {
1064 if (SYMBOL_P(key)) return ST_CONTINUE;
1065 *(int *)arg = FALSE;
1066 return ST_STOP;
1067 }
1068
/* rb_hash_foreach callback appending "key: value, " for each keyword
 * argument to the inspect string. */
static int
kwd_append(VALUE key, VALUE val, VALUE str)
{
    if (!SYMBOL_P(key)) rb_raise(rb_eRuntimeError, "non-symbol key inserted");
    /* NOTE(review): the "% " format relies on rb_str_catf's handling of
     * the space flag with PRIsVALUE -- confirm against sprintf.c. */
    rb_str_catf(str, "% "PRIsVALUE": %"PRIsVALUE", ", key, val);
    return ST_CONTINUE;
}
1076
/*
 * Appends ":method(arg, ..., key: val, ...)" to str, preferring the
 * values stored under id_method/id_arguments ivars on obj and falling
 * back to default_method/default_args.  A Qfalse ivar suppresses the
 * corresponding part entirely.
 */
static VALUE
append_method(VALUE obj, VALUE str, ID default_method, VALUE default_args)
{
    VALUE method, eargs;

    method = rb_attr_get(obj, id_method);
    if (method != Qfalse) {
        if (!NIL_P(method)) {
            Check_Type(method, T_SYMBOL);
            method = rb_sym2str(method);
        }
        else {
            method = rb_id2str(default_method);
        }
        rb_str_buf_cat2(str, ":");
        rb_str_buf_append(str, method);
    }

    eargs = rb_attr_get(obj, id_arguments);
    if (NIL_P(eargs)) {
        eargs = default_args;
    }
    if (eargs != Qfalse) {
        long argc = RARRAY_LEN(eargs);
        const VALUE *argv = RARRAY_CONST_PTR(eargs); /* WB: no new reference */

        if (argc > 0) {
            VALUE kwds = Qnil;

            rb_str_buf_cat2(str, "(");

            /* A trailing hash whose keys are all symbols is rendered
             * in keyword-argument style. */
            if (RB_TYPE_P(argv[argc-1], T_HASH) && !RHASH_EMPTY_P(argv[argc-1])) {
                int all_key = TRUE;
                rb_hash_foreach(argv[argc-1], key_symbol_p, (VALUE)&all_key);
                if (all_key) kwds = argv[--argc];
            }

            while (argc--) {
                VALUE arg = *argv++;

                rb_str_append(str, rb_inspect(arg));
                rb_str_buf_cat2(str, ", ");
                OBJ_INFECT(str, arg);
            }
            if (!NIL_P(kwds)) {
                rb_hash_foreach(kwds, kwd_append, str);
            }
            /* Drop the final ", " separator before closing. */
            rb_str_set_len(str, RSTRING_LEN(str)-2);
            rb_str_buf_cat2(str, ")");
        }
    }

    return str;
}
1131
1132 /*
1133 * call-seq:
1134 * e.inspect -> string
1135 *
1136 * Creates a printable version of <i>e</i>.
1137 */
1138
static VALUE
enumerator_inspect(VALUE obj)
{
    /* rb_exec_recursive guards against self-referential enumerators. */
    return rb_exec_recursive(inspect_enumerator, obj, 0);
}
1144
1145 /*
1146 * call-seq:
1147 * e.size -> int, Float::INFINITY or nil
1148 *
1149 * Returns the size of the enumerator, or +nil+ if it can't be calculated lazily.
1150 *
1151 * (1..100).to_a.permutation(4).size # => 94109400
1152 * loop.size # => Float::INFINITY
1153 * (1..100).drop_while.size # => nil
1154 */
1155
static VALUE
enumerator_size(VALUE obj)
{
    struct enumerator *e = enumerator_ptr(obj);
    int argc = 0;
    const VALUE *argv = NULL;
    VALUE size;

    /* Lazy chain: start from the source's #size (when it responds) and
     * let each step transform it; a step without a size function means
     * the size cannot be computed lazily. */
    if (e->procs) {
        struct generator *g = generator_ptr(e->obj);
        VALUE receiver = rb_check_funcall(g->obj, id_size, 0, 0);
        long i = 0;

        for (i = 0; i < RARRAY_LEN(e->procs); i++) {
            VALUE proc = RARRAY_AREF(e->procs, i);
            struct proc_entry *entry = proc_entry_ptr(proc);
            lazyenum_size_func *size_fn = entry->fn->size;
            if (!size_fn) {
                return Qnil;
            }
            receiver = (*size_fn)(proc, receiver);
        }
        return receiver;
    }

    /* A C size function takes precedence over the stored size value. */
    if (e->size_fn) {
        return (*e->size_fn)(e->obj, e->args, obj);
    }
    if (e->args) {
        argc = (int)RARRAY_LEN(e->args);
        argv = RARRAY_CONST_PTR(e->args);
    }
    /* The stored size may be a callable; otherwise return it as-is. */
    size = rb_check_funcall(e->size, id_call, argc, argv);
    if (size != Qundef) return size;
    return e->size;
}
1192
1193 /*
1194 * Yielder
1195 */
/* GC mark function for Enumerator::Yielder. */
static void
yielder_mark(void *p)
{
    struct yielder *ptr = p;
    rb_gc_mark(ptr->proc);
}
1202
1203 #define yielder_free RUBY_TYPED_DEFAULT_FREE
1204
1205 static size_t
yielder_memsize(const void * p)1206 yielder_memsize(const void *p)
1207 {
1208 return sizeof(struct yielder);
1209 }
1210
1211 static const rb_data_type_t yielder_data_type = {
1212 "yielder",
1213 {
1214 yielder_mark,
1215 yielder_free,
1216 yielder_memsize,
1217 },
1218 0, 0, RUBY_TYPED_FREE_IMMEDIATELY
1219 };
1220
1221 static struct yielder *
yielder_ptr(VALUE obj)1222 yielder_ptr(VALUE obj)
1223 {
1224 struct yielder *ptr;
1225
1226 TypedData_Get_Struct(obj, struct yielder, &yielder_data_type, ptr);
1227 if (!ptr || ptr->proc == Qundef) {
1228 rb_raise(rb_eArgError, "uninitialized yielder");
1229 }
1230 return ptr;
1231 }
1232
/* :nodoc: */
static VALUE
yielder_allocate(VALUE klass)
{
    struct yielder *ptr;
    VALUE obj;

    obj = TypedData_Make_Struct(klass, struct yielder, &yielder_data_type, ptr);
    /* Qundef marks the yielder as allocated but not yet initialized. */
    ptr->proc = Qundef;

    return obj;
}

/* Installs the proc that receives every value pushed into the yielder. */
static VALUE
yielder_init(VALUE obj, VALUE proc)
{
    struct yielder *ptr;

    TypedData_Get_Struct(obj, struct yielder, &yielder_data_type, ptr);

    if (!ptr) {
        rb_raise(rb_eArgError, "unallocated yielder");
    }

    ptr->proc = proc;

    return obj;
}

/* :nodoc: */
static VALUE
yielder_initialize(VALUE obj)
{
    /* Yielder.new requires a block; raises LocalJumpError otherwise. */
    rb_need_block();

    return yielder_init(obj, rb_block_proc());
}

/* :nodoc: */
/* Yielder#yield: forwards args (an Array) to the wrapped proc. */
static VALUE
yielder_yield(VALUE obj, VALUE args)
{
    struct yielder *ptr = yielder_ptr(obj);

    return rb_proc_call(ptr->proc, args);
}

/* :nodoc: */
/* Yielder#<<: like #yield with a single value, but returns the yielder
 * itself so pushes can be chained. */
static VALUE
yielder_yield_push(VALUE obj, VALUE arg)
{
    struct yielder *ptr = yielder_ptr(obj);

    rb_proc_call_with_block(ptr->proc, 1, &arg, Qnil);

    return obj;
}

/* Block body used by yielder_new: re-yields everything pushed into the
 * yielder to whatever block is currently iterating. */
static VALUE
yielder_yield_i(RB_BLOCK_CALL_FUNC_ARGLIST(obj, memo))
{
    return rb_yield_values2(argc, argv);
}

/* Builds the internal yielder handed to a generator's block. */
static VALUE
yielder_new(void)
{
    return yielder_init(yielder_allocate(rb_cYielder), rb_proc_new(yielder_yield_i, 0));
}
1302
1303 /*
1304 * Generator
1305 */
static void
generator_mark(void *p)
{
    /* GC mark hook: keep both the generator block and its source alive. */
    struct generator *ptr = p;
    rb_gc_mark(ptr->proc);
    rb_gc_mark(ptr->obj);
}

#define generator_free RUBY_TYPED_DEFAULT_FREE

static size_t
generator_memsize(const void *p)
{
    /* Fixed-size struct; no out-of-line storage to account for. */
    return sizeof(struct generator);
}

/* TypedData descriptor tying the Generator wrapper to its GC hooks. */
static const rb_data_type_t generator_data_type = {
    "generator",
    {
        generator_mark,
        generator_free,
        generator_memsize,
    },
    0, 0, RUBY_TYPED_FREE_IMMEDIATELY
};

/* Fetches the generator struct, raising ArgumentError for an object
 * that was allocated but never initialized (proc still Qundef). */
static struct generator *
generator_ptr(VALUE obj)
{
    struct generator *ptr;

    TypedData_Get_Struct(obj, struct generator, &generator_data_type, ptr);
    if (!ptr || ptr->proc == Qundef) {
        rb_raise(rb_eArgError, "uninitialized generator");
    }
    return ptr;
}
1343
1344 /* :nodoc: */
1345 static VALUE
generator_allocate(VALUE klass)1346 generator_allocate(VALUE klass)
1347 {
1348 struct generator *ptr;
1349 VALUE obj;
1350
1351 obj = TypedData_Make_Struct(klass, struct generator, &generator_data_type, ptr);
1352 ptr->proc = Qundef;
1353
1354 return obj;
1355 }
1356
1357 static VALUE
generator_init(VALUE obj,VALUE proc)1358 generator_init(VALUE obj, VALUE proc)
1359 {
1360 struct generator *ptr;
1361
1362 rb_check_frozen(obj);
1363 TypedData_Get_Struct(obj, struct generator, &generator_data_type, ptr);
1364
1365 if (!ptr) {
1366 rb_raise(rb_eArgError, "unallocated generator");
1367 }
1368
1369 ptr->proc = proc;
1370
1371 return obj;
1372 }
1373
1374 /* :nodoc: */
1375 static VALUE
generator_initialize(int argc,VALUE * argv,VALUE obj)1376 generator_initialize(int argc, VALUE *argv, VALUE obj)
1377 {
1378 VALUE proc;
1379
1380 if (argc == 0) {
1381 rb_need_block();
1382
1383 proc = rb_block_proc();
1384 }
1385 else {
1386 rb_scan_args(argc, argv, "1", &proc);
1387
1388 if (!rb_obj_is_proc(proc))
1389 rb_raise(rb_eTypeError,
1390 "wrong argument type %"PRIsVALUE" (expected Proc)",
1391 rb_obj_class(proc));
1392
1393 if (rb_block_given_p()) {
1394 rb_warn("given block not used");
1395 }
1396 }
1397
1398 return generator_init(obj, proc);
1399 }
1400
/* :nodoc: */
static VALUE
generator_init_copy(VALUE obj, VALUE orig)
{
    struct generator *ptr0, *ptr1;

    /* OBJ_INIT_COPY is a no-op (returns false) when obj == orig. */
    if (!OBJ_INIT_COPY(obj, orig)) return obj;

    /* Raises ArgumentError if orig was never initialized. */
    ptr0 = generator_ptr(orig);

    TypedData_Get_Struct(obj, struct generator, &generator_data_type, ptr1);

    if (!ptr1) {
        rb_raise(rb_eArgError, "unallocated generator");
    }

    /* Both generators end up sharing the same proc object. */
    ptr1->proc = ptr0->proc;

    return obj;
}
1421
1422 /* :nodoc: */
1423 static VALUE
generator_each(int argc,VALUE * argv,VALUE obj)1424 generator_each(int argc, VALUE *argv, VALUE obj)
1425 {
1426 struct generator *ptr = generator_ptr(obj);
1427 VALUE args = rb_ary_new2(argc + 1);
1428
1429 rb_ary_push(args, yielder_new());
1430 if (argc > 0) {
1431 rb_ary_cat(args, argv, argc);
1432 }
1433
1434 return rb_proc_call(ptr->proc, args);
1435 }
1436
1437 /* Lazy Enumerator methods */
static VALUE
enum_size(VALUE self)
{
    /* #size when available, nil otherwise (rb_check_funcall yields
     * Qundef when the receiver does not respond to :size). */
    VALUE r = rb_check_funcall(self, id_size, 0, 0);
    return (r == Qundef) ? Qnil : r;
}

/* rb_enumerator_size_func adapter; args and eobj are ignored. */
static VALUE
lazyenum_size(VALUE self, VALUE args, VALUE eobj)
{
    return enum_size(self);
}

/* A Lazy's size is the size of the receiver it was built from. */
static VALUE
lazy_size(VALUE self)
{
    return enum_size(rb_ivar_get(self, id_receiver));
}

/* rb_enumerator_size_func adapter used by lazy_zip below. */
static VALUE
lazy_receiver_size(VALUE generator, VALUE args, VALUE lazy)
{
    return lazy_size(lazy);
}
1462
static VALUE
lazy_init_iterator(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
{
    /* Re-yields each value from the source with the yielder (m)
     * prepended, so the block given to Lazy.new sees (yielder, *values). */
    VALUE result;
    if (argc == 1) {
        /* Common single-value case: avoid the ALLOCV roundtrip. */
        VALUE args[2];
        args[0] = m;
        args[1] = val;
        result = rb_yield_values2(2, args);
    }
    else {
        /* Multiple yielded values: build [m, *argv] in a temporary
         * buffer (ALLOCV spills to the heap when large). */
        VALUE args;
        int len = rb_long2int((long)argc + 1);
        VALUE *nargv = ALLOCV_N(VALUE, args, len);

        nargv[0] = m;
        if (argc > 0) {
            MEMCPY(nargv + 1, argv, VALUE, argc);
        }
        result = rb_yield_values2(len, nargv);
        ALLOCV_END(args);
    }
    /* NOTE(review): a Qundef result appears to signal termination of
     * the iteration -- confirm against the yield implementation. */
    if (result == Qundef) rb_iter_break();
    return Qnil;
}

static VALUE
lazy_init_block_i(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
{
    /* val is the yielder; argv[0] duplicates it, so the remaining
     * argc-1 entries are the arguments for #each on the receiver m. */
    rb_block_call(m, id_each, argc-1, argv+1, lazy_init_iterator, val);
    return Qnil;
}
1495
/* Accessors for the lazy-pipeline state carried in a struct MEMO:
 * v2 holds the value currently flowing through the chain and
 * u3.state holds the flag bits below. */
#define memo_value v2
#define memo_flags u3.state
/* BREAK: a step (e.g. take) requested that the source stop iterating. */
#define LAZY_MEMO_BREAK 1
/* PACKED: memo_value is an Array of packed yielded values rather than
 * a single value (set when the source yielded more than one value). */
#define LAZY_MEMO_PACKED 2
#define LAZY_MEMO_BREAK_P(memo) ((memo)->memo_flags & LAZY_MEMO_BREAK)
#define LAZY_MEMO_PACKED_P(memo) ((memo)->memo_flags & LAZY_MEMO_PACKED)
#define LAZY_MEMO_SET_BREAK(memo) ((memo)->memo_flags |= LAZY_MEMO_BREAK)
#define LAZY_MEMO_SET_VALUE(memo, value) MEMO_V2_SET(memo, value)
#define LAZY_MEMO_SET_PACKED(memo) ((memo)->memo_flags |= LAZY_MEMO_PACKED)
#define LAZY_MEMO_RESET_PACKED(memo) ((memo)->memo_flags &= ~LAZY_MEMO_PACKED)
1506
static VALUE
lazy_init_yielder(VALUE val, VALUE m, int argc, VALUE *argv)
{
    /* Core of the lazy pipeline.  m is [yielder, procs]: each yielded
     * value is threaded through every proc_entry in order; an entry
     * returning 0 filters the value out, and a surviving value is
     * forwarded to the yielder via #<<. */
    VALUE yielder = RARRAY_AREF(m, 0);
    VALUE procs_array = RARRAY_AREF(m, 1);
    VALUE memos = rb_attr_get(yielder, id_memo);
    long i = 0;
    struct MEMO *result;
    int cont = 1;

    /* Pack multiple yielded values into one array; PACKED records that
     * memo_value holds such an array. */
    result = MEMO_NEW(Qnil, rb_enum_values_pack(argc, argv),
                      argc > 1 ? LAZY_MEMO_PACKED : 0);

    for (i = 0; i < RARRAY_LEN(procs_array); i++) {
        VALUE proc = RARRAY_AREF(procs_array, i);
        struct proc_entry *entry = proc_entry_ptr(proc);
        if (!(*entry->fn->proc)(proc, result, memos, i)) {
            /* Value filtered out by this step; skip the yielder. */
            cont = 0;
            break;
        }
    }

    if (cont) {
        rb_funcall2(yielder, idLTLT, 1, &(result->memo_value));
    }
    if (LAZY_MEMO_BREAK_P(result)) {
        /* A step (e.g. take, take_while) asked to stop the source. */
        rb_iter_break();
    }
    return result->memo_value;
}

static VALUE
lazy_init_block(VALUE val, VALUE m, int argc, VALUE *argv)
{
    VALUE procs = RARRAY_AREF(m, 1);

    /* One scratch slot per chained step; the lazyenum proc callbacks
     * read and write these via their memos/memo_index arguments. */
    rb_ivar_set(val, id_memo, rb_ary_new2(RARRAY_LEN(procs)));
    rb_block_call(RARRAY_AREF(m, 0), id_each, 0, 0,
                  lazy_init_yielder, rb_ary_new3(2, val, procs));
    return Qnil;
}
1548
static VALUE
lazy_generator_init(VALUE enumerator, VALUE procs)
{
    /* Builds the generator that will drive the given proc chain. */
    VALUE generator;
    VALUE obj;
    struct generator *gen_ptr;
    struct enumerator *e = enumerator_ptr(enumerator);

    if (RARRAY_LEN(procs) > 0) {
        /* Already a chained lazy enumerator: reuse its original source
         * object instead of nesting generators. */
        struct generator *old_gen_ptr = generator_ptr(e->obj);
        obj = old_gen_ptr->obj;
    }
    else {
        obj = enumerator;
    }

    generator = generator_allocate(rb_cGenerator);

    rb_block_call(generator, id_initialize, 0, 0,
                  lazy_init_block, rb_ary_new3(2, obj, procs));

    gen_ptr = generator_ptr(generator);
    gen_ptr->obj = obj;

    return generator;
}
1575
1576 /*
1577 * call-seq:
1578 * Lazy.new(obj, size=nil) { |yielder, *values| ... }
1579 *
1580 * Creates a new Lazy enumerator. When the enumerator is actually enumerated
1581 * (e.g. by calling #force), +obj+ will be enumerated and each value passed
1582 * to the given block. The block can yield values back using +yielder+.
1583 * For example, to create a method +filter_map+ in both lazy and
1584 * non-lazy fashions:
1585 *
1586 * module Enumerable
1587 * def filter_map(&block)
1588 * map(&block).compact
1589 * end
1590 * end
1591 *
1592 * class Enumerator::Lazy
1593 * def filter_map
1594 * Lazy.new(self) do |yielder, *values|
1595 * result = yield *values
1596 * yielder << result if result
1597 * end
1598 * end
1599 * end
1600 *
1601 * (1..Float::INFINITY).lazy.filter_map{|i| i*i if i.even?}.first(5)
1602 * # => [4, 16, 36, 64, 100]
1603 */
1604 static VALUE
lazy_initialize(int argc,VALUE * argv,VALUE self)1605 lazy_initialize(int argc, VALUE *argv, VALUE self)
1606 {
1607 VALUE obj, size = Qnil;
1608 VALUE generator;
1609
1610 rb_check_arity(argc, 1, 2);
1611 if (!rb_block_given_p()) {
1612 rb_raise(rb_eArgError, "tried to call lazy new without a block");
1613 }
1614 obj = argv[0];
1615 if (argc > 1) {
1616 size = argv[1];
1617 }
1618 generator = generator_allocate(rb_cGenerator);
1619 rb_block_call(generator, id_initialize, 0, 0, lazy_init_block_i, obj);
1620 enumerator_init(self, generator, sym_each, 0, 0, 0, size);
1621 rb_ivar_set(self, id_receiver, obj);
1622
1623 return self;
1624 }
1625
1626 #if 0 /* for RDoc */
1627 /*
1628 * call-seq:
1629 * lazy.to_a -> array
1630 * lazy.force -> array
1631 *
1632 * Expands +lazy+ enumerator to an array.
1633 * See Enumerable#to_a.
1634 */
1635 static VALUE lazy_to_a(VALUE self)
1636 {
1637 }
1638 #endif
1639
static void
lazy_set_args(VALUE lazy, VALUE args)
{
    /* Record which method created this lazy step, for #inspect. */
    ID id = rb_frame_this_func();
    rb_ivar_set(lazy, id_method, ID2SYM(id));
    if (NIL_P(args)) {
        /* Qfalse indicates that the arguments are empty */
        rb_ivar_set(lazy, id_arguments, Qfalse);
    }
    else {
        rb_ivar_set(lazy, id_arguments, args);
    }
}

static VALUE
lazy_set_method(VALUE lazy, VALUE args, rb_enumerator_size_func *size_fn)
{
    struct enumerator *e = enumerator_ptr(lazy);
    lazy_set_args(lazy, args);
    /* size_fn computes this enumerator's #size; may be 0 (unknown). */
    e->size_fn = size_fn;
    return lazy;
}
1662
static VALUE
lazy_add_method(VALUE obj, int argc, VALUE *argv, VALUE args, VALUE memo,
                const lazyenum_funcs *fn)
{
    /* Appends one pipeline step (fn + optional current block + per-step
     * data `args`) and returns a NEW lazy enumerator; obj itself is
     * left untouched so it can keep being chained independently.
     * `memo` is only used for #inspect via lazy_set_args. */
    struct enumerator *new_e;
    VALUE new_obj;
    VALUE new_generator;
    VALUE new_procs;
    struct enumerator *e = enumerator_ptr(obj);
    struct proc_entry *entry;
    VALUE entry_obj = TypedData_Make_Struct(rb_cObject, struct proc_entry,
                                            &proc_entry_data_type, entry);
    if (rb_block_given_p()) {
        entry->proc = rb_block_proc();
    }
    entry->fn = fn;
    entry->memo = args;

    lazy_set_args(entry_obj, memo);

    /* Copy the existing chain so obj's own procs array is not mutated. */
    new_procs = RTEST(e->procs) ? rb_ary_dup(e->procs) : rb_ary_new();
    new_generator = lazy_generator_init(obj, new_procs);
    rb_ary_push(new_procs, entry_obj);

    new_obj = enumerator_init_copy(enumerator_allocate(rb_cLazy), obj);
    new_e = DATA_PTR(new_obj);
    new_e->obj = new_generator;
    new_e->procs = new_procs;

    /* An optional leading argv entry overrides the method used to
     * enumerate the source (e.g. lazy_take passes :cycle for n == 0). */
    if (argc > 0) {
        new_e->meth = rb_to_id(*argv++);
        --argc;
    }
    else {
        new_e->meth = id_each;
    }
    new_e->args = rb_ary_new4(argc, argv);
    return new_obj;
}
1702
1703 /*
1704 * call-seq:
1705 * e.lazy -> lazy_enumerator
1706 *
1707 * Returns a lazy enumerator, whose methods map/collect,
1708 * flat_map/collect_concat, select/find_all, reject, grep, grep_v, zip, take,
1709 * take_while, drop, and drop_while enumerate values only on an
1710 * as-needed basis. However, if a block is given to zip, values
1711 * are enumerated immediately.
1712 *
1713 * === Example
1714 *
1715 * The following program finds pythagorean triples:
1716 *
1717 * def pythagorean_triples
1718 * (1..Float::INFINITY).lazy.flat_map {|z|
1719 * (1..z).flat_map {|x|
1720 * (x..z).select {|y|
1721 * x**2 + y**2 == z**2
1722 * }.map {|y|
1723 * [x, y, z]
1724 * }
1725 * }
1726 * }
1727 * end
1728 * # show first ten pythagorean triples
1729 * p pythagorean_triples.take(10).force # take is lazy, so force is needed
1730 * p pythagorean_triples.first(10) # first is eager
1731 * # show pythagorean triples less than 100
1732 * p pythagorean_triples.take_while { |*, z| z < 100 }.force
1733 */
static VALUE
enumerable_lazy(VALUE obj)
{
    VALUE result = lazy_to_enum_i(obj, sym_each, 0, 0, lazyenum_size);
    /* Qfalse indicates that the Enumerator::Lazy has no method name */
    rb_ivar_set(result, id_method, Qfalse);
    return result;
}

/* Like the plain to_enum initializer, but allocates an
 * Enumerator::Lazy instead of an Enumerator. */
static VALUE
lazy_to_enum_i(VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn)
{
    return enumerator_init(enumerator_allocate(rb_cLazy),
                           obj, meth, argc, argv, size_fn, Qnil);
}
1749
1750 /*
1751 * call-seq:
1752 * lzy.to_enum(method = :each, *args) -> lazy_enum
1753 * lzy.enum_for(method = :each, *args) -> lazy_enum
1754 * lzy.to_enum(method = :each, *args) {|*args| block} -> lazy_enum
1755 * lzy.enum_for(method = :each, *args){|*args| block} -> lazy_enum
1756 *
1757 * Similar to Kernel#to_enum, except it returns a lazy enumerator.
1758 * This makes it easy to define Enumerable methods that will
1759 * naturally remain lazy if called from a lazy enumerator.
1760 *
1761 * For example, continuing from the example in Kernel#to_enum:
1762 *
1763 * # See Kernel#to_enum for the definition of repeat
1764 * r = 1..Float::INFINITY
1765 * r.repeat(2).first(5) # => [1, 1, 2, 2, 3]
1766 * r.repeat(2).class # => Enumerator
1767 * r.repeat(2).map{|n| n ** 2}.first(5) # => endless loop!
1768 * # works naturally on lazy enumerator:
1769 * r.lazy.repeat(2).class # => Enumerator::Lazy
1770 * r.lazy.repeat(2).map{|n| n ** 2}.first(5) # => [1, 1, 4, 4, 9]
1771 */
1772
1773 static VALUE
lazy_to_enum(int argc,VALUE * argv,VALUE self)1774 lazy_to_enum(int argc, VALUE *argv, VALUE self)
1775 {
1776 VALUE lazy, meth = sym_each;
1777
1778 if (argc > 0) {
1779 --argc;
1780 meth = *argv++;
1781 }
1782 lazy = lazy_to_enum_i(self, meth, argc, argv, 0);
1783 if (rb_block_given_p()) {
1784 enumerator_ptr(lazy)->size = rb_block_proc();
1785 }
1786 return lazy;
1787 }
1788
/* Calls the step's block with the current value as ONE argument
 * (packed values are passed as a single array, not splatted). */
static VALUE
lazyenum_yield(VALUE proc_entry, struct MEMO *result)
{
    struct proc_entry *entry = proc_entry_ptr(proc_entry);
    return rb_proc_call_with_block(entry->proc, 1, &result->memo_value, Qnil);
}

/* Calls the step's block, splatting packed values back to the
 * original arity. */
static VALUE
lazyenum_yield_values(VALUE proc_entry, struct MEMO *result)
{
    struct proc_entry *entry = proc_entry_ptr(proc_entry);
    int argc = 1;
    const VALUE *argv = &result->memo_value;
    if (LAZY_MEMO_PACKED_P(result)) {
        /* memo_value is an Array of the originally-yielded values. */
        const VALUE args = *argv;
        argc = RARRAY_LENINT(args);
        argv = RARRAY_CONST_PTR(args);
    }
    return rb_proc_call_with_block(entry->proc, argc, argv, Qnil);
}
1809
static struct MEMO *
lazy_map_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    /* Replace the current value with the block's result; that result
     * is a single value, so clear the PACKED flag. */
    VALUE value = lazyenum_yield_values(proc_entry, result);
    LAZY_MEMO_SET_VALUE(result, value);
    LAZY_MEMO_RESET_PACKED(result);
    return result;
}

static VALUE
lazy_map_size(VALUE entry, VALUE receiver)
{
    /* map is 1:1, so the element count is unchanged. */
    return receiver;
}

static const lazyenum_funcs lazy_map_funcs = {
    lazy_map_proc, lazy_map_size,
};

static VALUE
lazy_map(VALUE obj)
{
    if (!rb_block_given_p()) {
        rb_raise(rb_eArgError, "tried to call lazy map without a block");
    }

    return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_map_funcs);
}
1838
/* Forwards each (packed) value of a nested enumerable to the outer
 * yielder. */
static VALUE
lazy_flat_map_i(RB_BLOCK_CALL_FUNC_ARGLIST(i, yielder))
{
    VALUE arg = rb_enum_values_pack(argc, argv);

    return rb_funcallv(yielder, idLTLT, 1, &arg);
}

/* Decomposes an enumerable block result by iterating it with #each. */
static VALUE
lazy_flat_map_each(VALUE obj, VALUE yielder)
{
    rb_block_call(obj, id_each, 0, 0, lazy_flat_map_i, yielder);
    return Qnil;
}

/* Decomposes via to_ary when possible; otherwise passes the value
 * through unchanged. */
static VALUE
lazy_flat_map_to_ary(VALUE obj, VALUE yielder)
{
    VALUE ary = rb_check_array_type(obj);
    if (NIL_P(ary)) {
        rb_funcall(yielder, idLTLT, 1, obj);
    }
    else {
        long i;
        for (i = 0; i < RARRAY_LEN(ary); i++) {
            rb_funcall(yielder, idLTLT, 1, RARRAY_AREF(ary, i));
        }
    }
    return Qnil;
}

static VALUE
lazy_flat_map_proc(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
{
    /* argv[0] is the yielder; the rest are the current values. */
    VALUE result = rb_yield_values2(argc - 1, &argv[1]);
    if (RB_TYPE_P(result, T_ARRAY)) {
        long i;
        for (i = 0; i < RARRAY_LEN(result); i++) {
            rb_funcall(argv[0], idLTLT, 1, RARRAY_AREF(result, i));
        }
    }
    else {
        /* Responding to both #force and #each identifies a lazy
         * enumerator (see the flat_map documentation in this file). */
        if (rb_respond_to(result, id_force) && rb_respond_to(result, id_each)) {
            lazy_flat_map_each(result, argv[0]);
        }
        else {
            lazy_flat_map_to_ary(result, argv[0]);
        }
    }
    return Qnil;
}
1890
1891 /*
1892 * call-seq:
1893 * lazy.collect_concat { |obj| block } -> a_lazy_enumerator
1894 * lazy.flat_map { |obj| block } -> a_lazy_enumerator
1895 *
1896 * Returns a new lazy enumerator with the concatenated results of running
1897 * <i>block</i> once for every element in <i>lazy</i>.
1898 *
1899 * ["foo", "bar"].lazy.flat_map {|i| i.each_char.lazy}.force
1900 * #=> ["f", "o", "o", "b", "a", "r"]
1901 *
1902 * A value <i>x</i> returned by <i>block</i> is decomposed if either of
1903 * the following conditions is true:
1904 *
1905 * a) <i>x</i> responds to both each and force, which means that
1906 * <i>x</i> is a lazy enumerator.
1907 * b) <i>x</i> is an array or responds to to_ary.
1908 *
1909 * Otherwise, <i>x</i> is contained as-is in the return value.
1910 *
1911 * [{a:1}, {b:2}].lazy.flat_map {|i| i}.force
1912 * #=> [{:a=>1}, {:b=>2}]
1913 */
static VALUE
lazy_flat_map(VALUE obj)
{
    if (!rb_block_given_p()) {
        rb_raise(rb_eArgError, "tried to call lazy flat_map without a block");
    }

    /* flat_map builds a fresh Lazy via Lazy.new rather than using the
     * proc_entry pipeline: one input may expand to many outputs. */
    return lazy_set_method(rb_block_call(rb_cLazy, id_new, 1, &obj,
                                         lazy_flat_map_proc, 0),
                           Qnil, 0);
}
1925
static struct MEMO *
lazy_select_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    /* Keep the value only when the block returns a truthy result. */
    VALUE chain = lazyenum_yield(proc_entry, result);
    if (!RTEST(chain)) return 0;
    return result;
}

static const lazyenum_funcs lazy_select_funcs = {
    lazy_select_proc, 0,        /* no size fn: count is unknowable */
};

static VALUE
lazy_select(VALUE obj)
{
    if (!rb_block_given_p()) {
        rb_raise(rb_eArgError, "tried to call lazy select without a block");
    }

    return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_select_funcs);
}
1947
static struct MEMO *
lazy_reject_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    /* Inverse of select: drop the value when the block is truthy. */
    VALUE chain = lazyenum_yield(proc_entry, result);
    if (RTEST(chain)) return 0;
    return result;
}

static const lazyenum_funcs lazy_reject_funcs = {
    lazy_reject_proc, 0,        /* no size fn: count is unknowable */
};

static VALUE
lazy_reject(VALUE obj)
{
    if (!rb_block_given_p()) {
        rb_raise(rb_eArgError, "tried to call lazy reject without a block");
    }

    return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_reject_funcs);
}
1969
/* grep without a block: keep values matching the pattern via #===. */
static struct MEMO *
lazy_grep_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    struct proc_entry *entry = proc_entry_ptr(proc_entry);
    VALUE chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
    if (!RTEST(chain)) return 0;
    return result;
}

/* grep with a block: keep matches, then map them through the block. */
static struct MEMO *
lazy_grep_iter_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    struct proc_entry *entry = proc_entry_ptr(proc_entry);
    VALUE value, chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);

    if (!RTEST(chain)) return 0;
    value = rb_proc_call_with_block(entry->proc, 1, &(result->memo_value), Qnil);
    LAZY_MEMO_SET_VALUE(result, value);
    LAZY_MEMO_RESET_PACKED(result);

    return result;
}

static const lazyenum_funcs lazy_grep_iter_funcs = {
    lazy_grep_iter_proc, 0,
};

static const lazyenum_funcs lazy_grep_funcs = {
    lazy_grep_proc, 0,
};

static VALUE
lazy_grep(VALUE obj, VALUE pattern)
{
    const lazyenum_funcs *const funcs = rb_block_given_p() ?
        &lazy_grep_iter_funcs : &lazy_grep_funcs;
    /* pattern doubles as the step's memo and its #inspect argument. */
    return lazy_add_method(obj, 0, 0, pattern, rb_ary_new3(1, pattern), funcs);
}
2008
/* grep_v without a block: keep values NOT matching the pattern. */
static struct MEMO *
lazy_grep_v_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    struct proc_entry *entry = proc_entry_ptr(proc_entry);
    VALUE chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
    if (RTEST(chain)) return 0;
    return result;
}

/* grep_v with a block: keep non-matches, then map them via the block. */
static struct MEMO *
lazy_grep_v_iter_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    struct proc_entry *entry = proc_entry_ptr(proc_entry);
    VALUE value, chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);

    if (RTEST(chain)) return 0;
    value = rb_proc_call_with_block(entry->proc, 1, &(result->memo_value), Qnil);
    LAZY_MEMO_SET_VALUE(result, value);
    LAZY_MEMO_RESET_PACKED(result);

    return result;
}

static const lazyenum_funcs lazy_grep_v_iter_funcs = {
    lazy_grep_v_iter_proc, 0,
};

static const lazyenum_funcs lazy_grep_v_funcs = {
    lazy_grep_v_proc, 0,
};

static VALUE
lazy_grep_v(VALUE obj, VALUE pattern)
{
    const lazyenum_funcs *const funcs = rb_block_given_p() ?
        &lazy_grep_v_iter_funcs : &lazy_grep_v_funcs;
    /* pattern doubles as the step's memo and its #inspect argument. */
    return lazy_add_method(obj, 0, 0, pattern, rb_ary_new3(1, pattern), funcs);
}
2047
/* rb_rescue2 body used by lazy_zip_func: advance an external iterator. */
static VALUE
call_next(VALUE obj)
{
    return rb_funcall(obj, id_next, 0);
}

/* rb_rescue2 handler: an exhausted iterator (StopIteration) pads the
 * zipped tuple with nil. */
static VALUE
next_stopped(VALUE obj)
{
    return Qnil;
}
2059
static VALUE
lazy_zip_arrays_func(RB_BLOCK_CALL_FUNC_ARGLIST(val, arrays))
{
    /* Fast path used when every zip argument converted to an Array:
     * index directly rather than driving external iterators.  The
     * current index lives in the yielder's memo ivar. */
    VALUE yielder, ary, memo;
    long i, count;

    yielder = argv[0];
    memo = rb_attr_get(yielder, id_memo);
    count = NIL_P(memo) ? 0 : NUM2LONG(memo);

    ary = rb_ary_new2(RARRAY_LEN(arrays) + 1);
    rb_ary_push(ary, argv[1]);
    for (i = 0; i < RARRAY_LEN(arrays); i++) {
        /* rb_ary_entry returns nil past the end, padding short arrays. */
        rb_ary_push(ary, rb_ary_entry(RARRAY_AREF(arrays, i), count));
    }
    rb_funcall(yielder, idLTLT, 1, ary);
    rb_ivar_set(yielder, id_memo, LONG2NUM(++count));
    return Qnil;
}

static VALUE
lazy_zip_func(RB_BLOCK_CALL_FUNC_ARGLIST(val, zip_args))
{
    /* General path: each zip argument is driven as an external
     * enumerator via #next, padded with nil once it raises
     * StopIteration (see call_next/next_stopped above). */
    VALUE yielder, ary, arg, v;
    long i;

    yielder = argv[0];
    arg = rb_attr_get(yielder, id_memo);
    if (NIL_P(arg)) {
        /* First call of a run: build the array of external enumerators. */
        arg = rb_ary_new2(RARRAY_LEN(zip_args));
        for (i = 0; i < RARRAY_LEN(zip_args); i++) {
            rb_ary_push(arg, rb_funcall(RARRAY_AREF(zip_args, i), id_to_enum, 0));
        }
        rb_ivar_set(yielder, id_memo, arg);
    }

    ary = rb_ary_new2(RARRAY_LEN(arg) + 1);
    v = Qnil;
    if (--argc > 0) {
        /* argv[0] was the yielder; repack the remaining yielded values. */
        ++argv;
        v = argc > 1 ? rb_ary_new_from_values(argc, argv) : *argv;
    }
    rb_ary_push(ary, v);
    for (i = 0; i < RARRAY_LEN(arg); i++) {
        v = rb_rescue2(call_next, RARRAY_AREF(arg, i), next_stopped, 0,
                       rb_eStopIteration, (VALUE)0);
        rb_ary_push(ary, v);
    }
    rb_funcall(yielder, idLTLT, 1, ary);
    return Qnil;
}
2111
static VALUE
lazy_zip(int argc, VALUE *argv, VALUE obj)
{
    VALUE ary, v;
    long i;
    rb_block_call_func *func = lazy_zip_arrays_func;

    /* With a block, zip enumerates immediately: defer to super. */
    if (rb_block_given_p()) {
        return rb_call_super(argc, argv);
    }

    ary = rb_ary_new2(argc);
    for (i = 0; i < argc; i++) {
        v = rb_check_array_type(argv[i]);
        if (NIL_P(v)) {
            /* Some argument is not array-convertible: verify the rest
             * at least respond to #each, then fall back to the
             * external-iterator implementation. */
            for (; i < argc; i++) {
                if (!rb_respond_to(argv[i], id_each)) {
                    rb_raise(rb_eTypeError, "wrong argument type %"PRIsVALUE" (must respond to :each)",
                             rb_obj_class(argv[i]));
                }
            }
            ary = rb_ary_new4(argc, argv);
            func = lazy_zip_func;
            break;
        }
        rb_ary_push(ary, v);
    }

    return lazy_set_method(rb_block_call(rb_cLazy, id_new, 1, &obj,
                                         func, ary),
                           ary, lazy_receiver_size);
}
2144
static struct MEMO *
lazy_take_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    long remain;
    struct proc_entry *entry = proc_entry_ptr(proc_entry);
    VALUE memo = rb_ary_entry(memos, memo_index);

    /* First element of a run: seed the per-run countdown with n. */
    if (NIL_P(memo)) {
        memo = entry->memo;
    }

    remain = NUM2LONG(memo);
    if (remain == 0) {
        LAZY_MEMO_SET_BREAK(result);
    }
    else {
        /* On the last wanted element: deliver it AND stop the source. */
        if (--remain == 0) LAZY_MEMO_SET_BREAK(result);
        rb_ary_store(memos, memo_index, LONG2NUM(remain));
    }
    return result;
}

static VALUE
lazy_take_size(VALUE entry, VALUE receiver)
{
    /* min(source size, n); a nil source size propagates. */
    long len = NUM2LONG(RARRAY_AREF(rb_ivar_get(entry, id_arguments), 0));
    if (NIL_P(receiver) || (FIXNUM_P(receiver) && FIX2LONG(receiver) < len))
        return receiver;
    return LONG2NUM(len);
}

static const lazyenum_funcs lazy_take_funcs = {
    lazy_take_proc, lazy_take_size,
};

static VALUE
lazy_take(VALUE obj, VALUE n)
{
    long len = NUM2LONG(n);
    int argc = 0;
    VALUE argv[2];

    if (len < 0) {
        rb_raise(rb_eArgError, "attempt to take negative size");
    }

    if (len == 0) {
        /* take(0) must not touch the source at all: enumerate via
         * cycle(0), which yields nothing. */
        argv[0] = sym_cycle;
        argv[1] = INT2NUM(0);
        argc = 2;
    }

    return lazy_add_method(obj, argc, argv, n, rb_ary_new3(1, n), &lazy_take_funcs);
}
2199
static struct MEMO *
lazy_take_while_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    VALUE take = lazyenum_yield_values(proc_entry, result);
    if (!RTEST(take)) {
        /* First falsy result: drop this value and stop the source. */
        LAZY_MEMO_SET_BREAK(result);
        return 0;
    }
    return result;
}

static const lazyenum_funcs lazy_take_while_funcs = {
    lazy_take_while_proc, 0,    /* size unknowable without running the block */
};

static VALUE
lazy_take_while(VALUE obj)
{
    if (!rb_block_given_p()) {
        rb_raise(rb_eArgError, "tried to call lazy take_while without a block");
    }

    return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_take_while_funcs);
}
2224
static VALUE
lazy_drop_size(VALUE proc_entry, VALUE receiver)
{
    /* Result size is the source size minus n, clamped at zero. */
    long len = NUM2LONG(RARRAY_AREF(rb_ivar_get(proc_entry, id_arguments), 0));
    if (NIL_P(receiver))
        return receiver;
    if (FIXNUM_P(receiver)) {
        len = FIX2LONG(receiver) - len;
        return LONG2FIX(len < 0 ? 0 : len);
    }
    /* Non-fixnum size (Bignum, Float::INFINITY): subtract in Ruby. */
    return rb_funcall(receiver, '-', 1, LONG2NUM(len));
}

static struct MEMO *
lazy_drop_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    long remain;
    struct proc_entry *entry = proc_entry_ptr(proc_entry);
    VALUE memo = rb_ary_entry(memos, memo_index);

    /* First element of a run: seed the per-run counter with n. */
    if (NIL_P(memo)) {
        memo = entry->memo;
    }
    remain = NUM2LONG(memo);
    if (remain > 0) {
        /* Still inside the dropped prefix: filter the value out. */
        --remain;
        rb_ary_store(memos, memo_index, LONG2NUM(remain));
        return 0;
    }

    return result;
}

static const lazyenum_funcs lazy_drop_funcs = {
    lazy_drop_proc, lazy_drop_size,
};

static VALUE
lazy_drop(VALUE obj, VALUE n)
{
    long len = NUM2LONG(n);
    /* meth/args recorded on the new enumerator (see lazy_add_method). */
    VALUE argv[2];
    argv[0] = sym_each;
    argv[1] = n;

    if (len < 0) {
        rb_raise(rb_eArgError, "attempt to drop negative size");
    }

    return lazy_add_method(obj, 2, argv, n, rb_ary_new3(1, n), &lazy_drop_funcs);
}
2276
static struct MEMO *
lazy_drop_while_proc(VALUE proc_entry, struct MEMO* result, VALUE memos, long memo_index)
{
    struct proc_entry *entry = proc_entry_ptr(proc_entry);
    VALUE memo = rb_ary_entry(memos, memo_index);

    /* The per-run flag starts as Qfalse (entry->memo, set by
     * lazy_drop_while below): falsy means "still dropping". */
    if (NIL_P(memo)) {
        memo = entry->memo;
    }

    if (!RTEST(memo)) {
        VALUE drop = lazyenum_yield_values(proc_entry, result);
        if (RTEST(drop)) return 0;      /* predicate still true: drop */
        /* Predicate turned falsy: pass everything through from now on. */
        rb_ary_store(memos, memo_index, Qtrue);
    }
    return result;
}

static const lazyenum_funcs lazy_drop_while_funcs = {
    lazy_drop_while_proc, 0,
};

static VALUE
lazy_drop_while(VALUE obj)
{
    if (!rb_block_given_p()) {
        rb_raise(rb_eArgError, "tried to call lazy drop_while without a block");
    }

    return lazy_add_method(obj, 0, 0, Qfalse, Qnil, &lazy_drop_while_funcs);
}
2308
2309 static int
lazy_uniq_check(VALUE chain,VALUE memos,long memo_index)2310 lazy_uniq_check(VALUE chain, VALUE memos, long memo_index)
2311 {
2312 VALUE hash = rb_ary_entry(memos, memo_index);
2313
2314 if (NIL_P(hash)) {
2315 hash = rb_obj_hide(rb_hash_new());
2316 rb_ary_store(memos, memo_index, hash);
2317 }
2318
2319 return rb_hash_add_new_element(hash, chain, Qfalse);
2320 }
2321
/* Lazy#uniq without a block: skip values whose key was seen before. */
static struct MEMO *
lazy_uniq_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    if (lazy_uniq_check(result->memo_value, memos, memo_index)) return 0;
    return result;
}

/* Lazy#uniq with a block: uniqueness is judged on the block's return value. */
static struct MEMO *
lazy_uniq_iter_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    VALUE chain = lazyenum_yield(proc_entry, result);

    if (lazy_uniq_check(chain, memos, memo_index)) return 0;
    return result;
}

/* No size functions: the number of unique elements cannot be predicted. */
static const lazyenum_funcs lazy_uniq_iter_funcs = {
    lazy_uniq_iter_proc, 0,
};

static const lazyenum_funcs lazy_uniq_funcs = {
    lazy_uniq_proc, 0,
};
2345
2346 static VALUE
lazy_uniq(VALUE obj)2347 lazy_uniq(VALUE obj)
2348 {
2349 const lazyenum_funcs *const funcs =
2350 rb_block_given_p() ? &lazy_uniq_iter_funcs : &lazy_uniq_funcs;
2351 return lazy_add_method(obj, 0, 0, Qnil, Qnil, funcs);
2352 }
2353
/*
 * Fallback for Enumerable methods without a dedicated lazy implementation
 * (chunk, slice_before, ...): run the eager version via super, then re-wrap
 * the result in a lazy enumerator.
 */
static VALUE
lazy_super(int argc, VALUE *argv, VALUE lazy)
{
    return enumerable_lazy(rb_call_super(argc, argv));
}

/* Lazy#lazy is a no-op: the receiver is already lazy. */
static VALUE
lazy_lazy(VALUE obj)
{
    return obj;
}
2365
2366 /*
2367 * Document-class: StopIteration
2368 *
2369 * Raised to stop the iteration, in particular by Enumerator#next. It is
2370 * rescued by Kernel#loop.
2371 *
2372 * loop do
2373 * puts "Hello"
2374 * raise StopIteration
2375 * puts "World"
2376 * end
2377 * puts "Done!"
2378 *
2379 * <em>produces:</em>
2380 *
2381 * Hello
2382 * Done!
2383 */
2384
2385 /*
2386 * call-seq:
2387 * result -> value
2388 *
2389 * Returns the return value of the iterator.
2390 *
2391 * o = Object.new
2392 * def o.each
2393 * yield 1
2394 * yield 2
2395 * yield 3
2396 * 100
2397 * end
2398 *
2399 * e = o.to_enum
2400 *
2401 * puts e.next #=> 1
2402 * puts e.next #=> 2
2403 * puts e.next #=> 3
2404 *
2405 * begin
2406 * e.next
2407 * rescue StopIteration => ex
2408 * puts ex.result #=> 100
2409 * end
2410 *
2411 */
2412
static VALUE
stop_result(VALUE self)
{
    /* Reads the @result ivar attached when the StopIteration was raised;
     * rb_attr_get returns nil (without warning) when it was never set. */
    return rb_attr_get(self, id_result);
}
2418
2419 /*
2420 * Document-class: Enumerator::Chain
2421 *
2422 * Enumerator::Chain is a subclass of Enumerator, which represents a
2423 * chain of enumerables that works as a single enumerator.
2424 *
2425 * This type of objects can be created by Enumerable#chain and
2426 * Enumerator#+.
2427 */
2428
/* GC mark function: the only Ruby reference held is the enumerable array. */
static void
enum_chain_mark(void *p)
{
    struct enum_chain *ptr = p;
    rb_gc_mark(ptr->enums);
}

/* The struct owns no out-of-band memory, so plain free suffices. */
#define enum_chain_free RUBY_TYPED_DEFAULT_FREE

/* Reported to ObjectSpace.memsize_of. */
static size_t
enum_chain_memsize(const void *p)
{
    return sizeof(struct enum_chain);
}
2443
/* TypedData descriptor backing Enumerator::Chain instances. */
static const rb_data_type_t enum_chain_data_type = {
    "chain",
    {
        enum_chain_mark,
        enum_chain_free,
        enum_chain_memsize,
    },
    0, 0, RUBY_TYPED_FREE_IMMEDIATELY
};
2453
/*
 * Fetches the enum_chain struct of +obj+, raising ArgumentError if the
 * object was allocated but never initialized (enums still Qundef).
 */
static struct enum_chain *
enum_chain_ptr(VALUE obj)
{
    struct enum_chain *ptr;

    TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr);
    if (!ptr || ptr->enums == Qundef) {
        rb_raise(rb_eArgError, "uninitialized chain");
    }
    return ptr;
}
2465
2466 /* :nodoc: */
/* :nodoc: */
static VALUE
enum_chain_allocate(VALUE klass)
{
    struct enum_chain *ptr;
    VALUE obj;

    /* Qundef marks "allocated but not initialized"; pos == -1 means
     * iteration has not started (see enum_chain_each / enum_chain_rewind). */
    obj = TypedData_Make_Struct(klass, struct enum_chain, &enum_chain_data_type, ptr);
    ptr->enums = Qundef;
    ptr->pos = -1;

    return obj;
}
2479
2480 /*
2481 * call-seq:
2482 * Enumerator::Chain.new(*enums) -> enum
2483 *
2484 * Generates a new enumerator object that iterates over the elements
2485 * of given enumerable objects in sequence.
2486 *
2487 * e = Enumerator::Chain.new(1..3, [4, 5])
2488 * e.to_a #=> [1, 2, 3, 4, 5]
2489 * e.size #=> 5
2490 */
static VALUE
enum_chain_initialize(VALUE obj, VALUE enums)
{
    struct enum_chain *ptr;

    rb_check_frozen(obj);
    TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr);

    if (!ptr) rb_raise(rb_eArgError, "unallocated chain");

    /* The enumerable list is frozen so the chain's contents are fixed. */
    ptr->enums = rb_obj_freeze(enums);
    ptr->pos = -1;

    return obj;
}
2506
2507 /* :nodoc: */
/* :nodoc: */
static VALUE
enum_chain_init_copy(VALUE obj, VALUE orig)
{
    struct enum_chain *ptr0, *ptr1;

    if (!OBJ_INIT_COPY(obj, orig)) return obj;
    /* enum_chain_ptr raises if orig is uninitialized. */
    ptr0 = enum_chain_ptr(orig);

    TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr1);

    if (!ptr1) rb_raise(rb_eArgError, "unallocated chain");

    /* enums is frozen, so sharing the array between copies is safe. */
    ptr1->enums = ptr0->enums;
    ptr1->pos = ptr0->pos;

    return obj;
}
2525
/*
 * Sums the sizes of all chained enumerables.  nil (unknown) or an infinite
 * Float size short-circuits and is returned as-is; any other non-Integer
 * size makes the total unknowable (nil).
 */
static VALUE
enum_chain_total_size(VALUE enums)
{
    VALUE total = INT2FIX(0);
    long i;

    for (i = 0; i < RARRAY_LEN(enums); i++) {
        VALUE size = enum_size(RARRAY_AREF(enums, i));

        if (NIL_P(size) || (RB_TYPE_P(size, T_FLOAT) && isinf(NUM2DBL(size)))) {
            return size;
        }
        if (!RB_INTEGER_TYPE_P(size)) {
            return Qnil;
        }

        /* '+' dispatch keeps the sum exact even past Fixnum range. */
        total = rb_funcall(total, '+', 1, size);
    }

    return total;
}
2547
2548 /*
2549 * call-seq:
2550 * obj.size -> int, Float::INFINITY or nil
2551 *
2552 * Returns the total size of the enumerator chain calculated by
2553 * summing up the size of each enumerable in the chain. If any of the
2554 * enumerables reports its size as nil or Float::INFINITY, that value
2555 * is returned as the total size.
2556 */
static VALUE
enum_chain_size(VALUE obj)
{
    return enum_chain_total_size(enum_chain_ptr(obj)->enums);
}

/* Size callback used by RETURN_SIZED_ENUMERATOR when #each gets no args. */
static VALUE
enum_chain_enum_size(VALUE obj, VALUE args, VALUE eobj)
{
    return enum_chain_size(obj);
}

/* Forwards each yielded value set to the user-supplied block. */
static VALUE
enum_chain_yield_block(VALUE arg, VALUE block, int argc, VALUE *argv)
{
    return rb_funcallv(block, id_call, argc, argv);
}

/* Size callback when #each got extra args: the size is unknowable. */
static VALUE
enum_chain_enum_no_size(VALUE obj, VALUE args, VALUE eobj)
{
    return Qnil;
}
2580
2581 /*
2582 * call-seq:
2583 * obj.each(*args) { |...| ... } -> obj
2584 * obj.each(*args) -> enumerator
2585 *
2586 * Iterates over the elements of the first enumerable by calling the
2587 * "each" method on it with the given arguments, then proceeds to the
2588 * following enumerables in sequence until all of the enumerables are
2589 * exhausted.
2590 *
2591 * If no block is given, returns an enumerator.
2592 */
static VALUE
enum_chain_each(int argc, VALUE *argv, VALUE obj)
{
    VALUE enums, block;
    struct enum_chain *objptr;
    long i;

    /* Without a block return a sized enumerator; extra arguments make the
     * size unknowable since they are forwarded to each enumerable's #each. */
    RETURN_SIZED_ENUMERATOR(obj, argc, argv, argc > 0 ? enum_chain_enum_no_size : enum_chain_enum_size);

    objptr = enum_chain_ptr(obj);
    enums = objptr->enums;
    block = rb_block_proc();


    for (i = 0; i < RARRAY_LEN(enums); i++) {
        /* Record the position so rewind only rewinds visited enumerables. */
        objptr->pos = i;
        rb_block_call(RARRAY_AREF(enums, i), id_each, argc, argv, enum_chain_yield_block, block);
    }

    return obj;
}
2614
2615 /*
2616 * call-seq:
2617 * obj.rewind -> obj
2618 *
2619 * Rewinds the enumerator chain by calling the "rewind" method on each
2620 * enumerable in reverse order. Each call is performed only if the
2621 * enumerable responds to the method.
2622 */
static VALUE
enum_chain_rewind(VALUE obj)
{
    struct enum_chain *objptr = enum_chain_ptr(obj);
    VALUE enums = objptr->enums;
    long i;

    /* Walk backwards from the last visited enumerable (pos; -1 when
     * iteration never started, so the loop body is skipped).  pos is updated
     * each step so an exception from a #rewind leaves the chain consistent.
     * rb_check_funcall silently skips objects without #rewind. */
    for (i = objptr->pos; 0 <= i && i < RARRAY_LEN(enums); objptr->pos = --i) {
        rb_check_funcall(RARRAY_AREF(enums, i), id_rewind, 0, 0);
    }

    return obj;
}
2636
/*
 * Recursion-safe body of Enumerator::Chain#inspect; +recur+ is set by
 * rb_exec_recursive when this chain (directly or indirectly) contains
 * itself, in which case "..." is printed instead of recursing forever.
 */
static VALUE
inspect_enum_chain(VALUE obj, VALUE dummy, int recur)
{
    VALUE klass = rb_obj_class(obj);
    struct enum_chain *ptr;

    TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr);

    if (!ptr || ptr->enums == Qundef) {
        return rb_sprintf("#<%"PRIsVALUE": uninitialized>", rb_class_path(klass));
    }

    if (recur) {
        return rb_sprintf("#<%"PRIsVALUE": ...>", rb_class_path(klass));
    }

    return rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE">", rb_class_path(klass), ptr->enums);
}
2655
2656 /*
2657 * call-seq:
2658 * obj.inspect -> string
2659 *
2660 * Returns a printable version of the enumerator chain.
2661 */
static VALUE
enum_chain_inspect(VALUE obj)
{
    /* rb_exec_recursive guards against self-referential chains. */
    return rb_exec_recursive(inspect_enum_chain, obj, 0);
}
2667
2668 /*
2669 * call-seq:
2670 * e.chain(*enums) -> enumerator
2671 *
2672 * Returns an enumerator object generated from this enumerator and
2673 * given enumerables.
2674 *
2675 * e = (1..3).chain([4, 5])
2676 * e.to_a #=> [1, 2, 3, 4, 5]
2677 */
2678 static VALUE
enum_chain(int argc,VALUE * argv,VALUE obj)2679 enum_chain(int argc, VALUE *argv, VALUE obj)
2680 {
2681 VALUE enums = rb_ary_new_from_values(1, &obj);
2682 rb_ary_cat(enums, argv, argc);
2683
2684 return enum_chain_initialize(enum_chain_allocate(rb_cEnumChain), enums);
2685 }
2686
2687 /*
2688 * call-seq:
2689 * e + enum -> enumerator
2690 *
2691 * Returns an enumerator object generated from this enumerator and a
2692 * given enumerable.
2693 *
2694 * e = (1..3).each + [4, 5]
2695 * e.to_a #=> [1, 2, 3, 4, 5]
2696 */
2697 static VALUE
enumerator_plus(VALUE obj,VALUE eobj)2698 enumerator_plus(VALUE obj, VALUE eobj)
2699 {
2700 VALUE enums = rb_ary_new_from_args(2, obj, eobj);
2701
2702 return enum_chain_initialize(enum_chain_allocate(rb_cEnumChain), enums);
2703 }
2704
2705 /*
2706 * Document-class: Enumerator::ArithmeticSequence
2707 *
2708 * Enumerator::ArithmeticSequence is a subclass of Enumerator,
2709 * that is a representation of sequences of numbers with common difference.
2710 * Instances of this class can be generated by the Range#step and Numeric#step
2711 * methods.
2712 */
2713
/*
 * Public constructor for Enumerator::ArithmeticSequence (used by Range#step
 * and Numeric#step).  Builds an enumerator over +obj+/+meth+ and stores the
 * sequence parameters (begin, end, step, exclude_end) as ivars, which the
 * arith_seq_* accessors below read back.
 */
VALUE
rb_arith_seq_new(VALUE obj, VALUE meth, int argc, VALUE const *argv,
                 rb_enumerator_size_func *size_fn,
                 VALUE beg, VALUE end, VALUE step, int excl)
{
    VALUE aseq = enumerator_init(enumerator_allocate(rb_cArithSeq),
                                 obj, meth, argc, argv, size_fn, Qnil);
    rb_ivar_set(aseq, id_begin, beg);
    rb_ivar_set(aseq, id_end, end);
    rb_ivar_set(aseq, id_step, step);
    rb_ivar_set(aseq, id_exclude_end, excl ? Qtrue : Qfalse);
    return aseq;
}
2727
2728 /*
2729 * call-seq: aseq.begin -> num
2730 *
2731 * Returns the number that defines the first element of this arithmetic
2732 * sequence.
2733 */
/* The accessors below read the ivars stored by rb_arith_seq_new(). */
static inline VALUE
arith_seq_begin(VALUE self)
{
    return rb_ivar_get(self, id_begin);
}

/*
 * call-seq: aseq.end -> num or nil
 *
 * Returns the number that defines the end of this arithmetic sequence.
 */
static inline VALUE
arith_seq_end(VALUE self)
{
    return rb_ivar_get(self, id_end);
}

/*
 * call-seq: aseq.step -> num
 *
 * Returns the number that defines the common difference between
 * two adjacent elements in this arithmetic sequence.
 */
static inline VALUE
arith_seq_step(VALUE self)
{
    return rb_ivar_get(self, id_step);
}

/*
 * call-seq: aseq.exclude_end? -> true or false
 *
 * Returns <code>true</code> if this arithmetic sequence excludes its end value.
 */
static inline VALUE
arith_seq_exclude_end(VALUE self)
{
    return rb_ivar_get(self, id_exclude_end);
}

/* C-level convenience: exclude_end? as an int truth value. */
static inline int
arith_seq_exclude_end_p(VALUE self)
{
    return RTEST(arith_seq_exclude_end(self));
}
2779
/*
 * Public API: extracts (begin, end, step, exclude_end) from either an
 * ArithmeticSequence or a Range (which implies step 1).  Returns 1 on
 * success, 0 when +obj+ is neither.
 */
int
rb_arithmetic_sequence_extract(VALUE obj, rb_arithmetic_sequence_components_t *component)
{
    if (rb_obj_is_kind_of(obj, rb_cArithSeq)) {
        component->begin = arith_seq_begin(obj);
        component->end = arith_seq_end(obj);
        component->step = arith_seq_step(obj);
        component->exclude_end = arith_seq_exclude_end_p(obj);
        return 1;
    }
    else if (rb_obj_is_kind_of(obj, rb_cRange)) {
        component->begin = RANGE_BEG(obj);
        component->end = RANGE_END(obj);
        component->step = INT2FIX(1);
        component->exclude_end = RTEST(RANGE_EXCL(obj));
        return 1;
    }

    return 0;
}
2800
2801 /*
2802 * call-seq:
2803 * aseq.first -> num or nil
2804 * aseq.first(n) -> an_array
2805 *
2806 * Returns the first number in this arithmetic sequence,
2807 * or an array of the first +n+ elements.
2808 */
2809 static VALUE
arith_seq_first(int argc,VALUE * argv,VALUE self)2810 arith_seq_first(int argc, VALUE *argv, VALUE self)
2811 {
2812 VALUE b, e, s, ary;
2813 long n;
2814 int x;
2815
2816 rb_check_arity(argc, 0, 1);
2817
2818 b = arith_seq_begin(self);
2819 e = arith_seq_end(self);
2820 s = arith_seq_step(self);
2821 if (argc == 0) {
2822 if (!NIL_P(e)) {
2823 VALUE zero = INT2FIX(0);
2824 int r = rb_cmpint(rb_num_coerce_cmp(s, zero, idCmp), s, zero);
2825 if (r > 0 && RTEST(rb_funcall(b, '>', 1, e))) {
2826 return Qnil;
2827 }
2828 if (r < 0 && RTEST(rb_funcall(b, '<', 1, e))) {
2829 return Qnil;
2830 }
2831 }
2832 return b;
2833 }
2834
2835 /* TODO: the following code should be extracted as arith_seq_take */
2836
2837 n = NUM2LONG(argv[0]);
2838 if (n < 0) {
2839 rb_raise(rb_eArgError, "attempt to take negative size");
2840 }
2841 if (n == 0) {
2842 return rb_ary_new_capa(0);
2843 }
2844
2845 x = arith_seq_exclude_end_p(self);
2846
2847 if (FIXNUM_P(b) && NIL_P(e) && FIXNUM_P(s)) {
2848 long i = FIX2LONG(b), unit = FIX2LONG(s);
2849 ary = rb_ary_new_capa(n);
2850 while (n > 0 && FIXABLE(i)) {
2851 rb_ary_push(ary, LONG2FIX(i));
2852 i += unit; /* FIXABLE + FIXABLE never overflow; */
2853 --n;
2854 }
2855 if (n > 0) {
2856 b = LONG2NUM(i);
2857 while (n > 0) {
2858 rb_ary_push(ary, b);
2859 b = rb_big_plus(b, s);
2860 --n;
2861 }
2862 }
2863 return ary;
2864 }
2865 else if (FIXNUM_P(b) && FIXNUM_P(e) && FIXNUM_P(s)) {
2866 long i = FIX2LONG(b);
2867 long end = FIX2LONG(e);
2868 long unit = FIX2LONG(s);
2869 long len;
2870
2871 if (unit >= 0) {
2872 if (!x) end += 1;
2873
2874 len = end - i;
2875 if (len < 0) len = 0;
2876 ary = rb_ary_new_capa((n < len) ? n : len);
2877 while (n > 0 && i < end) {
2878 rb_ary_push(ary, LONG2FIX(i));
2879 if (i + unit < i) break;
2880 i += unit;
2881 --n;
2882 }
2883 }
2884 else {
2885 if (!x) end -= 1;
2886
2887 len = i - end;
2888 if (len < 0) len = 0;
2889 ary = rb_ary_new_capa((n < len) ? n : len);
2890 while (n > 0 && i > end) {
2891 rb_ary_push(ary, LONG2FIX(i));
2892 if (i + unit > i) break;
2893 i += unit;
2894 --n;
2895 }
2896 }
2897 return ary;
2898 }
2899 else if (RB_FLOAT_TYPE_P(b) || RB_FLOAT_TYPE_P(e) || RB_FLOAT_TYPE_P(s)) {
2900 /* generate values like ruby_float_step */
2901
2902 double unit = NUM2DBL(s);
2903 double beg = NUM2DBL(b);
2904 double end = NIL_P(e) ? (unit < 0 ? -1 : 1)*HUGE_VAL : NUM2DBL(e);
2905 double len = ruby_float_step_size(beg, end, unit, x);
2906 long i;
2907
2908 if (n > len)
2909 n = (long)len;
2910
2911 if (isinf(unit)) {
2912 if (len > 0) {
2913 ary = rb_ary_new_capa(1);
2914 rb_ary_push(ary, DBL2NUM(beg));
2915 }
2916 else {
2917 ary = rb_ary_new_capa(0);
2918 }
2919 }
2920 else if (unit == 0) {
2921 VALUE val = DBL2NUM(beg);
2922 ary = rb_ary_new_capa(n);
2923 for (i = 0; i < len; ++i) {
2924 rb_ary_push(ary, val);
2925 }
2926 }
2927 else {
2928 ary = rb_ary_new_capa(n);
2929 for (i = 0; i < n; ++i) {
2930 double d = i*unit+beg;
2931 if (unit >= 0 ? end < d : d < end) d = end;
2932 rb_ary_push(ary, DBL2NUM(d));
2933 }
2934 }
2935
2936 return ary;
2937 }
2938
2939 return rb_call_super(argc, argv);
2940 }
2941
/*
 * Type-dispatched arithmetic helpers for ArithmeticSequence: call the
 * specialized C implementations for Integer/Float/Rational operands and fall
 * back to ordinary method dispatch otherwise (the single-character IDs
 * '+', '-', '*' are the operator method IDs).
 */
static inline VALUE
num_plus(VALUE a, VALUE b)
{
    if (RB_INTEGER_TYPE_P(a)) {
        return rb_int_plus(a, b);
    }
    else if (RB_FLOAT_TYPE_P(a)) {
        return rb_float_plus(a, b);
    }
    else if (RB_TYPE_P(a, T_RATIONAL)) {
        return rb_rational_plus(a, b);
    }
    else {
        return rb_funcallv(a, '+', 1, &b);
    }
}

static inline VALUE
num_minus(VALUE a, VALUE b)
{
    if (RB_INTEGER_TYPE_P(a)) {
        return rb_int_minus(a, b);
    }
    else if (RB_FLOAT_TYPE_P(a)) {
        return rb_float_minus(a, b);
    }
    else if (RB_TYPE_P(a, T_RATIONAL)) {
        return rb_rational_minus(a, b);
    }
    else {
        return rb_funcallv(a, '-', 1, &b);
    }
}

static inline VALUE
num_mul(VALUE a, VALUE b)
{
    if (RB_INTEGER_TYPE_P(a)) {
        return rb_int_mul(a, b);
    }
    else if (RB_FLOAT_TYPE_P(a)) {
        return rb_float_mul(a, b);
    }
    else if (RB_TYPE_P(a, T_RATIONAL)) {
        return rb_rational_mul(a, b);
    }
    else {
        return rb_funcallv(a, '*', 1, &b);
    }
}
2992
/*
 * Floor division across numeric types: divide, then floor the quotient, so
 * the result matches Integer#div semantics (round toward negative infinity).
 */
static inline VALUE
num_idiv(VALUE a, VALUE b)
{
    VALUE q;
    if (RB_INTEGER_TYPE_P(a)) {
        q = rb_int_idiv(a, b);
    }
    else if (RB_FLOAT_TYPE_P(a)) {
        q = rb_float_div(a, b);
    }
    else if (RB_TYPE_P(a, T_RATIONAL)) {
        q = rb_rational_div(a, b);
    }
    else {
        q = rb_funcallv(a, '/', 1, &b);
    }

    /* rb_int_idiv already floors; other quotient types still need it. */
    if (RB_INTEGER_TYPE_P(q)) {
        return q;
    }
    else if (RB_FLOAT_TYPE_P(q)) {
        return rb_float_floor(q, 0);
    }
    else if (RB_TYPE_P(q, T_RATIONAL)) {
        return rb_rational_floor(q, 0);
    }
    else {
        return rb_funcall(q, rb_intern("floor"), 0);
    }
}
3023
3024 /*
3025 * call-seq:
3026 * aseq.last -> num or nil
3027 * aseq.last(n) -> an_array
3028 *
3029 * Returns the last number in this arithmetic sequence,
3030 * or an array of the last +n+ elements.
3031 */
static VALUE
arith_seq_last(int argc, VALUE *argv, VALUE self)
{
    VALUE b, e, s, len_1, last, nv, ary;
    int last_is_adjusted;
    long n;

    e = arith_seq_end(self);
    if (NIL_P(e)) {
        rb_raise(rb_eRangeError,
                 "cannot get the last element of endless arithmetic sequence");
    }

    b = arith_seq_begin(self);
    s = arith_seq_step(self);

    /* len_1 = floor((end - begin) / step): index of the last grid point. */
    len_1 = num_idiv(num_minus(e, b), s);
    if (rb_num_negative_int_p(len_1)) {
        /* begin already lies past end in the step direction: empty. */
        if (argc == 0) {
            return Qnil;
        }
        return rb_ary_new_capa(0);
    }

    last = num_plus(b, num_mul(s, len_1));
    if ((last_is_adjusted = arith_seq_exclude_end_p(self) && rb_equal(last, e))) {
        /* Exclusive end landing exactly on a grid point: step back once. */
        last = num_minus(last, s);
    }

    if (argc == 0) {
        return last;
    }

    if (last_is_adjusted) {
        len = len_1;
    }
    else {
        len = rb_int_plus(len_1, INT2FIX(1));
    }

    rb_scan_args(argc, argv, "1", &nv);
    if (!RB_INTEGER_TYPE_P(nv)) {
        nv = rb_to_int(nv);
    }
    /* Clamp the requested count to the sequence length. */
    if (RTEST(rb_int_gt(nv, len))) {
        nv = len;
    }
    n = NUM2LONG(nv);
    if (n < 0) {
        rb_raise(rb_eArgError, "negative array size");
    }

    /* Generate the last n elements forward, starting from last - n*step.
     * NOTE(review): rb_int_minus/rb_int_mul/rb_int_plus are used here,
     * which presumes integer values at this point — confirm behavior for
     * rational/float sequences. */
    ary = rb_ary_new_capa(n);
    b = rb_int_minus(last, rb_int_mul(s, nv));
    while (n) {
        b = rb_int_plus(b, s);
        rb_ary_push(ary, b);
        --n;
    }

    return ary;
}
3094
3095 /*
3096 * call-seq:
3097 * aseq.inspect -> string
3098 *
3099 * Convert this arithmetic sequence to a printable form.
3100 */
static VALUE
arith_seq_inspect(VALUE self)
{
    struct enumerator *e;
    VALUE eobj, str, eargs;
    int range_p;

    TypedData_Get_Struct(self, struct enumerator, &enumerator_data_type, e);

    /* Prefer the stored receiver ivar; fall back to the enumerator field. */
    eobj = rb_attr_get(self, id_receiver);
    if (NIL_P(eobj)) {
        eobj = e->obj;
    }

    /* Ranges need their own parentheses: "((1..10).step(2))". */
    range_p = RTEST(rb_obj_is_kind_of(eobj, rb_cRange));
    str = rb_sprintf("(%s%"PRIsVALUE"%s.", range_p ? "(" : "", eobj, range_p ? ")" : "");

    rb_str_buf_append(str, rb_id2str(e->meth));

    eargs = rb_attr_get(eobj, id_arguments);
    if (NIL_P(eargs)) {
        eargs = e->args;
    }
    if (eargs != Qfalse) {
        long argc = RARRAY_LEN(eargs);
        const VALUE *argv = RARRAY_CONST_PTR(eargs); /* WB: no new reference */

        if (argc > 0) {
            VALUE kwds = Qnil;

            rb_str_buf_cat2(str, "(");

            /* A trailing all-symbol-keyed hash is printed keyword-style. */
            if (RB_TYPE_P(argv[argc-1], T_HASH)) {
                int all_key = TRUE;
                rb_hash_foreach(argv[argc-1], key_symbol_p, (VALUE)&all_key);
                if (all_key) kwds = argv[--argc];
            }

            while (argc--) {
                VALUE arg = *argv++;

                rb_str_append(str, rb_inspect(arg));
                rb_str_buf_cat2(str, ", ");
                OBJ_INFECT(str, arg); /* propagate taint (pre-3.0 semantics) */
            }
            if (!NIL_P(kwds)) {
                rb_hash_foreach(kwds, kwd_append, str);
            }
            rb_str_set_len(str, RSTRING_LEN(str)-2); /* drop the last ", " */
            rb_str_buf_cat2(str, ")");
        }
    }

    rb_str_buf_cat2(str, ")");

    return str;
}
3158
3159 /*
3160 * call-seq:
3161 * aseq == obj -> true or false
3162 *
3163 * Returns <code>true</code> only if +obj+ is an Enumerator::ArithmeticSequence,
3164 * has equivalent begin, end, step, and exclude_end? settings.
3165 */
3166 static VALUE
arith_seq_eq(VALUE self,VALUE other)3167 arith_seq_eq(VALUE self, VALUE other)
3168 {
3169 if (!RTEST(rb_obj_is_kind_of(other, rb_cArithSeq))) {
3170 return Qfalse;
3171 }
3172
3173 if (!rb_equal(arith_seq_begin(self), arith_seq_begin(other))) {
3174 return Qfalse;
3175 }
3176
3177 if (!rb_equal(arith_seq_end(self), arith_seq_end(other))) {
3178 return Qfalse;
3179 }
3180
3181 if (!rb_equal(arith_seq_step(self), arith_seq_step(other))) {
3182 return Qfalse;
3183 }
3184
3185 if (arith_seq_exclude_end_p(self) != arith_seq_exclude_end_p(other)) {
3186 return Qfalse;
3187 }
3188
3189 return Qtrue;
3190 }
3191
3192 /*
3193 * call-seq:
3194 * aseq.hash -> integer
3195 *
3196 * Compute a hash-value for this arithmetic sequence.
3197 * Two arithmetic sequences with same begin, end, step, and exclude_end?
3198 * values will generate the same hash-value.
3199 *
3200 * See also Object#hash.
3201 */
static VALUE
arith_seq_hash(VALUE self)
{
    st_index_t hash;
    VALUE v;

    /* Fold exclude_end?, begin, end and step into one value so sequences
     * that compare #== also hash equal (required by the Hash contract). */
    hash = rb_hash_start(arith_seq_exclude_end_p(self));
    v = rb_hash(arith_seq_begin(self));
    hash = rb_hash_uint(hash, NUM2LONG(v));
    v = rb_hash(arith_seq_end(self));
    hash = rb_hash_uint(hash, NUM2LONG(v));
    v = rb_hash(arith_seq_step(self));
    hash = rb_hash_uint(hash, NUM2LONG(v));
    hash = rb_hash_end(hash);

    return ST2FIX(hash);
}
3219
/* x >= y with numeric coercion (used by arith_seq_each's loop bounds). */
#define NUM_GE(x, y) RTEST(rb_num_coerce_relop((x), (y), idGE))

/* NOTE(review): this struct appears unused in this part of the file —
 * confirm against the rest of the file before removing. */
struct arith_seq_gen {
    VALUE current;
    VALUE end;
    VALUE step;
    int excl;
};
3228
3229 /*
3230 * call-seq:
3231 * aseq.each {|i| block } -> aseq
3232 * aseq.each -> aseq
3233 */
static VALUE
arith_seq_each(VALUE self)
{
    VALUE c, e, s, len_1, last;
    int x;

    /* Without a block, simply return self (the sequence is its own
     * enumerator). */
    if (!rb_block_given_p()) return self;

    c = arith_seq_begin(self);
    e = arith_seq_end(self);
    s = arith_seq_step(self);
    x = arith_seq_exclude_end_p(self);

    /* Float-valued sequences are delegated to ruby_float_step, which
     * compensates for rounding error; Complex steps fall through to the
     * generic path below. */
    if (!RB_TYPE_P(s, T_COMPLEX) && ruby_float_step(c, e, s, x, TRUE)) {
        return self;
    }

    if (NIL_P(e)) {
        /* Endless sequence: yield forever (loop exits only via the block). */
        while (1) {
            rb_yield(c);
            c = rb_int_plus(c, s);
        }

        return self;
    }

    if (rb_equal(s, INT2FIX(0))) {
        /* Zero step: yield the same value forever. */
        while (1) {
            rb_yield(c);
        }

        return self;
    }

    /* Precompute the final value so each loop iteration needs a single
     * comparison against `last`. */
    len_1 = num_idiv(num_minus(e, c), s);
    last = num_plus(c, num_mul(s, len_1));
    if (x && rb_equal(last, e)) {
        last = num_minus(last, s);
    }

    if (rb_num_negative_int_p(s)) {
        while (NUM_GE(c, last)) {
            rb_yield(c);
            c = num_plus(c, s);
        }
    }
    else {
        while (NUM_GE(last, c)) {
            rb_yield(c);
            c = num_plus(c, s);
        }
    }

    return self;
}
3289
/*
 * Number of elements in a float sequence beg..end by step, mirroring
 * ruby_float_step_size: the raw count (end-beg)/step is corrected by an
 * error bound `err` derived from the operands' magnitudes so that values
 * landing "almost exactly" on the end are counted consistently.
 */
static double
arith_seq_float_step_size(double beg, double end, double step, int excl)
{
    double const epsilon = DBL_EPSILON;
    double n, err;

    if (step == 0) {
        /* Zero step repeats forever. */
        return HUGE_VAL;
    }
    n = (end - beg) / step;
    err = (fabs(beg) + fabs(end) + fabs(end - beg)) / fabs(step) * epsilon;
    if (isinf(step)) {
        /* Infinite step: one element iff begin is on the right side of end. */
        return step > 0 ? beg <= end : beg >= end;
    }
    if (err > 0.5) err = 0.5;
    if (excl) {
        if (n <= 0) return 0;
        if (n < 1)
            n = 0;
        else
            n = floor(n - err);
    }
    else {
        if (n < 0) return 0;
        n = floor(n + err);
    }
    return n + 1;
}
3318
3319 /*
3320 * call-seq:
3321 * aseq.size -> num or nil
3322 *
3323 * Returns the number of elements in this arithmetic sequence if it is a finite
3324 * sequence. Otherwise, returns <code>nil</code>.
3325 */
static VALUE
arith_seq_size(VALUE self)
{
    VALUE b, e, s, len_1, len, last;
    int x;

    b = arith_seq_begin(self);
    e = arith_seq_end(self);
    s = arith_seq_step(self);
    x = arith_seq_exclude_end_p(self);

    if (RB_FLOAT_TYPE_P(b) || RB_FLOAT_TYPE_P(e) || RB_FLOAT_TYPE_P(s)) {
        /* Float path: compute the count in double precision, treating a
         * missing end as +/-infinity depending on the step's sign. */
        double ee, n;

        if (NIL_P(e)) {
            if (rb_num_negative_int_p(s)) {
                ee = -HUGE_VAL;
            }
            else {
                ee = HUGE_VAL;
            }
        }
        else {
            ee = NUM2DBL(e);
        }

        n = arith_seq_float_step_size(NUM2DBL(b), ee, NUM2DBL(s), x);
        if (isinf(n)) return DBL2NUM(n);
        if (POSFIXABLE(n)) return LONG2FIX(n);
        return rb_dbl2big(n);
    }

    /* No end: the sequence is infinite. */
    if (NIL_P(e)) {
        return DBL2NUM(HUGE_VAL);
    }

    if (!rb_obj_is_kind_of(s, rb_cNumeric)) {
        s = rb_to_int(s);
    }

    /* Zero step: also infinite. */
    if (rb_equal(s, INT2FIX(0))) {
        return DBL2NUM(HUGE_VAL);
    }

    /* len_1 = floor((end - begin) / step); negative means empty. */
    len_1 = rb_int_idiv(rb_int_minus(e, b), s);
    if (rb_num_negative_int_p(len_1)) {
        return INT2FIX(0);
    }

    /* One fewer element when an exclusive end falls on a grid point. */
    last = rb_int_plus(b, rb_int_mul(s, len_1));
    if (x && rb_equal(last, e)) {
        len = len_1;
    }
    else {
        len = rb_int_plus(len_1, INT2FIX(1));
    }

    return len;
}
3385
3386 void
InitVM_Enumerator(void)3387 InitVM_Enumerator(void)
3388 {
3389 rb_define_method(rb_mKernel, "to_enum", obj_to_enum, -1);
3390 rb_define_method(rb_mKernel, "enum_for", obj_to_enum, -1);
3391
3392 rb_cEnumerator = rb_define_class("Enumerator", rb_cObject);
3393 rb_include_module(rb_cEnumerator, rb_mEnumerable);
3394
3395 rb_define_alloc_func(rb_cEnumerator, enumerator_allocate);
3396 rb_define_method(rb_cEnumerator, "initialize", enumerator_initialize, -1);
3397 rb_define_method(rb_cEnumerator, "initialize_copy", enumerator_init_copy, 1);
3398 rb_define_method(rb_cEnumerator, "each", enumerator_each, -1);
3399 rb_define_method(rb_cEnumerator, "each_with_index", enumerator_each_with_index, 0);
3400 rb_define_method(rb_cEnumerator, "each_with_object", enumerator_with_object, 1);
3401 rb_define_method(rb_cEnumerator, "with_index", enumerator_with_index, -1);
3402 rb_define_method(rb_cEnumerator, "with_object", enumerator_with_object, 1);
3403 rb_define_method(rb_cEnumerator, "next_values", enumerator_next_values, 0);
3404 rb_define_method(rb_cEnumerator, "peek_values", enumerator_peek_values_m, 0);
3405 rb_define_method(rb_cEnumerator, "next", enumerator_next, 0);
3406 rb_define_method(rb_cEnumerator, "peek", enumerator_peek, 0);
3407 rb_define_method(rb_cEnumerator, "feed", enumerator_feed, 1);
3408 rb_define_method(rb_cEnumerator, "rewind", enumerator_rewind, 0);
3409 rb_define_method(rb_cEnumerator, "inspect", enumerator_inspect, 0);
3410 rb_define_method(rb_cEnumerator, "size", enumerator_size, 0);
3411 rb_define_method(rb_cEnumerator, "+", enumerator_plus, 1);
3412 rb_define_method(rb_mEnumerable, "chain", enum_chain, -1);
3413
3414 /* Lazy */
3415 rb_cLazy = rb_define_class_under(rb_cEnumerator, "Lazy", rb_cEnumerator);
3416 rb_define_method(rb_mEnumerable, "lazy", enumerable_lazy, 0);
3417 rb_define_method(rb_cLazy, "initialize", lazy_initialize, -1);
3418 rb_define_method(rb_cLazy, "to_enum", lazy_to_enum, -1);
3419 rb_define_method(rb_cLazy, "enum_for", lazy_to_enum, -1);
3420 rb_define_method(rb_cLazy, "map", lazy_map, 0);
3421 rb_define_method(rb_cLazy, "collect", lazy_map, 0);
3422 rb_define_method(rb_cLazy, "flat_map", lazy_flat_map, 0);
3423 rb_define_method(rb_cLazy, "collect_concat", lazy_flat_map, 0);
3424 rb_define_method(rb_cLazy, "select", lazy_select, 0);
3425 rb_define_method(rb_cLazy, "find_all", lazy_select, 0);
3426 rb_define_method(rb_cLazy, "filter", lazy_select, 0);
3427 rb_define_method(rb_cLazy, "reject", lazy_reject, 0);
3428 rb_define_method(rb_cLazy, "grep", lazy_grep, 1);
3429 rb_define_method(rb_cLazy, "grep_v", lazy_grep_v, 1);
3430 rb_define_method(rb_cLazy, "zip", lazy_zip, -1);
3431 rb_define_method(rb_cLazy, "take", lazy_take, 1);
3432 rb_define_method(rb_cLazy, "take_while", lazy_take_while, 0);
3433 rb_define_method(rb_cLazy, "drop", lazy_drop, 1);
3434 rb_define_method(rb_cLazy, "drop_while", lazy_drop_while, 0);
3435 rb_define_method(rb_cLazy, "lazy", lazy_lazy, 0);
3436 rb_define_method(rb_cLazy, "chunk", lazy_super, -1);
3437 rb_define_method(rb_cLazy, "slice_before", lazy_super, -1);
3438 rb_define_method(rb_cLazy, "slice_after", lazy_super, -1);
3439 rb_define_method(rb_cLazy, "slice_when", lazy_super, -1);
3440 rb_define_method(rb_cLazy, "chunk_while", lazy_super, -1);
3441 rb_define_method(rb_cLazy, "uniq", lazy_uniq, 0);
3442
3443 #if 0 /* for RDoc */
3444 rb_define_method(rb_cLazy, "to_a", lazy_to_a, 0);
3445 #endif
3446 rb_define_alias(rb_cLazy, "force", "to_a");
3447
3448 rb_eStopIteration = rb_define_class("StopIteration", rb_eIndexError);
3449 rb_define_method(rb_eStopIteration, "result", stop_result, 0);
3450
3451 /* Generator */
3452 rb_cGenerator = rb_define_class_under(rb_cEnumerator, "Generator", rb_cObject);
3453 rb_include_module(rb_cGenerator, rb_mEnumerable);
3454 rb_define_alloc_func(rb_cGenerator, generator_allocate);
3455 rb_define_method(rb_cGenerator, "initialize", generator_initialize, -1);
3456 rb_define_method(rb_cGenerator, "initialize_copy", generator_init_copy, 1);
3457 rb_define_method(rb_cGenerator, "each", generator_each, -1);
3458
3459 /* Yielder */
3460 rb_cYielder = rb_define_class_under(rb_cEnumerator, "Yielder", rb_cObject);
3461 rb_define_alloc_func(rb_cYielder, yielder_allocate);
3462 rb_define_method(rb_cYielder, "initialize", yielder_initialize, 0);
3463 rb_define_method(rb_cYielder, "yield", yielder_yield, -2);
3464 rb_define_method(rb_cYielder, "<<", yielder_yield_push, 1);
3465
3466 /* Chain */
3467 rb_cEnumChain = rb_define_class_under(rb_cEnumerator, "Chain", rb_cEnumerator);
3468 rb_define_alloc_func(rb_cEnumChain, enum_chain_allocate);
3469 rb_define_method(rb_cEnumChain, "initialize", enum_chain_initialize, -2);
3470 rb_define_method(rb_cEnumChain, "initialize_copy", enum_chain_init_copy, 1);
3471 rb_define_method(rb_cEnumChain, "each", enum_chain_each, -1);
3472 rb_define_method(rb_cEnumChain, "size", enum_chain_size, 0);
3473 rb_define_method(rb_cEnumChain, "rewind", enum_chain_rewind, 0);
3474 rb_define_method(rb_cEnumChain, "inspect", enum_chain_inspect, 0);
3475
3476 /* ArithmeticSequence */
3477 rb_cArithSeq = rb_define_class_under(rb_cEnumerator, "ArithmeticSequence", rb_cEnumerator);
3478 rb_undef_alloc_func(rb_cArithSeq);
3479 rb_undef_method(CLASS_OF(rb_cArithSeq), "new");
3480 rb_define_method(rb_cArithSeq, "begin", arith_seq_begin, 0);
3481 rb_define_method(rb_cArithSeq, "end", arith_seq_end, 0);
3482 rb_define_method(rb_cArithSeq, "exclude_end?", arith_seq_exclude_end, 0);
3483 rb_define_method(rb_cArithSeq, "step", arith_seq_step, 0);
3484 rb_define_method(rb_cArithSeq, "first", arith_seq_first, -1);
3485 rb_define_method(rb_cArithSeq, "last", arith_seq_last, -1);
3486 rb_define_method(rb_cArithSeq, "inspect", arith_seq_inspect, 0);
3487 rb_define_method(rb_cArithSeq, "==", arith_seq_eq, 1);
3488 rb_define_method(rb_cArithSeq, "===", arith_seq_eq, 1);
3489 rb_define_method(rb_cArithSeq, "eql?", arith_seq_eq, 1);
3490 rb_define_method(rb_cArithSeq, "hash", arith_seq_hash, 0);
3491 rb_define_method(rb_cArithSeq, "each", arith_seq_each, 0);
3492 rb_define_method(rb_cArithSeq, "size", arith_seq_size, 0);
3493
3494 rb_provide("enumerator.so"); /* for backward compatibility */
3495 }
3496
3497 #undef rb_intern
3498 void
Init_Enumerator(void)3499 Init_Enumerator(void)
3500 {
3501 id_rewind = rb_intern("rewind");
3502 id_new = rb_intern("new");
3503 id_next = rb_intern("next");
3504 id_result = rb_intern("result");
3505 id_receiver = rb_intern("receiver");
3506 id_arguments = rb_intern("arguments");
3507 id_memo = rb_intern("memo");
3508 id_method = rb_intern("method");
3509 id_force = rb_intern("force");
3510 id_to_enum = rb_intern("to_enum");
3511 id_begin = rb_intern("begin");
3512 id_end = rb_intern("end");
3513 id_step = rb_intern("step");
3514 id_exclude_end = rb_intern("exclude_end");
3515 sym_each = ID2SYM(id_each);
3516 sym_cycle = ID2SYM(rb_intern("cycle"));
3517
3518 InitVM(Enumerator);
3519 }
3520