1 /*
2 * %CopyrightBegin%
3 *
4 * Copyright Ericsson AB 1999-2020. All Rights Reserved.
5 *
6 * Licensed under the Apache License, Version 2.0 (the "License");
7 * you may not use this file except in compliance with the License.
8 * You may obtain a copy of the License at
9 *
10 * http://www.apache.org/licenses/LICENSE-2.0
11 *
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an "AS IS" BASIS,
14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
17 *
18 * %CopyrightEnd%
19 */
20
21 /*
22 * BIFs logically belonging to the lists module.
23 */
24
25 #ifdef HAVE_CONFIG_H
26 # include "config.h"
27 #endif
28
29 #include "sys.h"
30 #include "erl_vm.h"
31 #include "global.h"
32 #include "bif.h"
33 #include "erl_binary.h"
34
35
36 static Eterm keyfind(int Bif, Process* p, Eterm Key, Eterm Pos, Eterm List);
37
38 /* erlang:'++'/2
39 *
40 * Adds a list to another (LHS ++ RHS). For historical reasons this is
41 * implemented by copying LHS and setting its tail to RHS without checking
42 * that RHS is a proper list. [] ++ 'not_a_list' will therefore result in
43 * 'not_a_list', and [1,2] ++ 3 will result in [1,2|3], and this is a bug that
44 * we have to live with. */
45
/* Trap state for erlang:'++'/2; lives on the C stack first and is moved
 * into a magic binary if the BIF has to yield. */
typedef struct {
    Eterm lhs_original;  /* LHS as given by the caller (kept for errors). */
    Eterm rhs_original;  /* RHS as given; becomes the tail of the result. */

    Eterm iterator;      /* Current position in LHS. */

    Eterm result;        /* Head of the (possibly unterminated) result. */
    Eterm *result_cdr;   /* Where the next CDR should be written. */
} ErtsAppendContext;
55
/* Destructor for the magic binary holding an ErtsAppendContext. The context
 * owns no external resources, so there is nothing to release. */
static int append_ctx_bin_dtor(Binary *context_bin) {
    return 1;
}
59
/* Copies a stack-resident append context into a magic binary so that its
 * state survives trapping, returning a magic ref that the BIF passes to
 * itself on re-entry. */
static Eterm append_create_trap_state(Process *p,
                                      ErtsAppendContext *from_context) {
    ErtsAppendContext *to_context;
    Binary *state_bin;
    Eterm *hp;

    state_bin = erts_create_magic_binary(sizeof(ErtsAppendContext),
                                         append_ctx_bin_dtor);

    to_context = ERTS_MAGIC_BIN_DATA(state_bin);
    *to_context = *from_context;

    /* If result_cdr pointed into the old context (nothing emitted yet), it
     * must be rebased onto the copy; otherwise it points into the heap-built
     * list and remains valid as-is. */
    if (from_context->result_cdr == &from_context->result) {
        to_context->result_cdr = &to_context->result;
    }

    hp = HAlloc(p, ERTS_MAGIC_REF_THING_SIZE);
    return erts_mk_magic_ref(&hp, &MSO(p), state_bin);
}
79
/* Append work loop that sizes the copy up front: it walks ahead through LHS
 * (bounded by the remaining reductions), allocates exactly that many cons
 * cells, then copies. Returns 1 on completion, 0 when it must trap, and -1
 * when LHS turns out to be an improper list. */
static BIF_RETTYPE lists_append_alloc(Process *p, ErtsAppendContext *context) {
    static const Uint CELLS_PER_RED = 40;

    Eterm *alloc_top, *alloc_end;
    Uint cells_left, max_cells;
    Eterm lookahead;

    cells_left = max_cells = CELLS_PER_RED * ERTS_BIF_REDS_LEFT(p);
    lookahead = context->iterator;

#ifdef DEBUG
    /* Shrink the budget in debug builds to exercise the trapping path. */
    cells_left = max_cells = max_cells / 10 + 1;
#endif

    /* First pass: count how many cells we can process this slice, and
     * detect an improper tail before allocating anything. */
    while (cells_left != 0 && is_list(lookahead)) {
        lookahead = CDR(list_val(lookahead));
        cells_left--;
    }

    BUMP_REDS(p, (max_cells - cells_left) / CELLS_PER_RED);

    if (is_not_list(lookahead) && is_not_nil(lookahead)) {
        /* It's possible that we're erroring out with an incomplete list, so it
         * must be terminated or we'll leave a hole in the heap. */
        *context->result_cdr = NIL;
        return -1;
    }

    alloc_top = HAlloc(p, 2 * (max_cells - cells_left));
    alloc_end = alloc_top + 2 * (max_cells - cells_left);

    /* Second pass: copy the counted prefix of LHS into the fresh cells,
     * threading result_cdr through each new CDR slot. */
    while (alloc_top < alloc_end) {
        Eterm *cell = list_val(context->iterator);

        ASSERT(context->iterator != lookahead);

        *context->result_cdr = make_list(alloc_top);
        context->result_cdr = &CDR(alloc_top);
        CAR(alloc_top) = CAR(cell);

        context->iterator = CDR(cell);
        alloc_top += 2;
    }

    if (is_list(context->iterator)) {
        /* The result only has to be terminated when returning it to the user,
         * but we're doing it when trapping as well to prevent headaches when
         * debugging. */
        *context->result_cdr = NIL;
        ASSERT(cells_left == 0);
        return 0;
    }

    *context->result_cdr = context->rhs_original;
    ASSERT(is_nil(context->iterator));

    if (is_nil(context->rhs_original)) {
        /* The list we created was equal to the original, so we'll return that
         * in the hopes that the garbage we created can be removed soon. */
        context->result = context->lhs_original;
    }

    return 1;
}
144
lists_append_onheap(Process * p,ErtsAppendContext * context)145 static BIF_RETTYPE lists_append_onheap(Process *p, ErtsAppendContext *context) {
146 static const Uint CELLS_PER_RED = 60;
147
148 Eterm *alloc_start, *alloc_top, *alloc_end;
149 Uint cells_left, max_cells;
150
151 cells_left = max_cells = CELLS_PER_RED * ERTS_BIF_REDS_LEFT(p);
152
153 #ifdef DEBUG
154 cells_left = max_cells = max_cells / 10 + 1;
155 #endif
156
157 ASSERT(HEAP_LIMIT(p) >= HEAP_TOP(p) + 2);
158 alloc_start = HEAP_TOP(p);
159 alloc_end = HEAP_LIMIT(p) - 2;
160 alloc_top = alloc_start;
161
162 /* Don't process more cells than we have reductions for. */
163 alloc_end = MIN(alloc_top + (cells_left * 2), alloc_end);
164
165 while (alloc_top < alloc_end && is_list(context->iterator)) {
166 Eterm *cell = list_val(context->iterator);
167
168 *context->result_cdr = make_list(alloc_top);
169 context->result_cdr = &CDR(alloc_top);
170 CAR(alloc_top) = CAR(cell);
171
172 context->iterator = CDR(cell);
173 alloc_top += 2;
174 }
175
176 cells_left -= (alloc_top - alloc_start) / 2;
177 HEAP_TOP(p) = alloc_top;
178
179 ASSERT(cells_left >= 0 && cells_left <= max_cells);
180 BUMP_REDS(p, (max_cells - cells_left) / CELLS_PER_RED);
181
182 if (is_not_list(context->iterator) && is_not_nil(context->iterator)) {
183 *context->result_cdr = NIL;
184 return -1;
185 }
186
187 if (is_list(context->iterator)) {
188 if (cells_left > CELLS_PER_RED) {
189 return lists_append_alloc(p, context);
190 }
191
192 *context->result_cdr = NIL;
193 return 0;
194 }
195
196 *context->result_cdr = context->rhs_original;
197 ASSERT(is_nil(context->iterator));
198
199 if (is_nil(context->rhs_original)) {
200 context->result = context->lhs_original;
201 }
202
203 return 1;
204 }
205
/* Runs one slice of append work, picking the cheapest strategy: build on
 * the unused heap area when there is room (saves a sizing pass), otherwise
 * fall back to explicit allocation. */
static int append_continue(Process *p, ErtsAppendContext *context) {
    if (HeapWordsLeft(p) <= 8) {
        return lists_append_alloc(p, context);
    }

    return lists_append_onheap(p, context);
}
216
/* Initializes a fresh append context for LHS ++ RHS and runs the first
 * slice of work. */
static int append_start(Process *p, Eterm lhs, Eterm rhs,
                        ErtsAppendContext *context) {
    context->iterator = lhs;

    context->lhs_original = lhs;
    context->rhs_original = rhs;

    context->result = NIL;
    context->result_cdr = &context->result;

    return append_continue(p, context);
}
229
230 /* erlang:'++'/2 */
/* Common implementation of erlang:'++'/2. On first entry LHS is the user's
 * list; on re-entry after a trap LHS is a magic ref to the saved context and
 * RHS is NIL. GC is disabled while trapped since the context holds an
 * unterminated list. */
static Eterm append(Export *bif_entry, BIF_ALIST_2) {
    Eterm lhs = BIF_ARG_1, rhs = BIF_ARG_2;

    if (is_nil(lhs)) {
        /* This is buggy but expected, `[] ++ 'not_a_list'` has always resulted
         * in 'not_a_list'. */
        return rhs;
    } else if (is_list(lhs)) {
        /* We start with the context on the stack in the hopes that we won't
         * have to trap. */
        ErtsAppendContext context;
        int res;

        res = append_start(BIF_P, lhs, rhs, &context);

        if (res == 0) {
            /* Out of reductions: persist the context and yield. */
            Eterm state_mref;

            state_mref = append_create_trap_state(BIF_P, &context);
            erts_set_gc_state(BIF_P, 0);

            BIF_TRAP2(bif_entry, BIF_P, state_mref, NIL);
        }

        if (res < 0) {
            ASSERT(is_nil(*context.result_cdr));
            BIF_ERROR(BIF_P, BADARG);
        }

        ASSERT(*context.result_cdr == context.rhs_original);
        BIF_RET(context.result);
    } else if (is_internal_magic_ref(lhs)) {
        /* Re-entry after trapping: recover the saved context. */
        ErtsAppendContext *context;
        int (*dtor)(Binary*);
        Binary *magic_bin;

        int res;

        magic_bin = erts_magic_ref2bin(lhs);
        dtor = ERTS_MAGIC_BIN_DESTRUCTOR(magic_bin);

        /* A magic ref with the wrong destructor is a user-supplied term,
         * not our trap state. */
        if (dtor != append_ctx_bin_dtor) {
            BIF_ERROR(BIF_P, BADARG);
        }

        ASSERT(BIF_P->flags & F_DISABLE_GC);
        ASSERT(rhs == NIL);

        context = ERTS_MAGIC_BIN_DATA(magic_bin);
        res = append_continue(BIF_P, context);

        if (res == 0) {
            BIF_TRAP2(bif_entry, BIF_P, lhs, NIL);
        }

        /* Done (or failed): re-enable GC before leaving. */
        erts_set_gc_state(BIF_P, 1);

        if (res < 0) {
            ASSERT(is_nil(*context->result_cdr));
            /* Report the error against the original arguments, not the
             * internal trap state. */
            ERTS_BIF_ERROR_TRAPPED2(BIF_P, BADARG, bif_entry,
                                    context->lhs_original,
                                    context->rhs_original);
        }

        ASSERT(*context->result_cdr == context->rhs_original);
        BIF_RET(context->result);
    }

    ASSERT(!(BIF_P->flags & F_DISABLE_GC));

    BIF_ERROR(BIF_P, BADARG);
}
303
304 /*
305 * erlang:'++'/2
306 */
307
/* Entry point for the '++' operator; delegates to the shared append
 * implementation with its own export entry for trapping. */
Eterm
ebif_plusplus_2(BIF_ALIST_2)
{
    return append(bif_export[BIF_ebif_plusplus_2], BIF_CALL_ARGS);
}
313
/* Entry point for erlang:append/2; same implementation as '++'. */
BIF_RETTYPE append_2(BIF_ALIST_2)
{
    return append(bif_export[BIF_append_2], BIF_CALL_ARGS);
}
318
319 /* erlang:'--'/2
320 *
321 * Subtracts a list from another (LHS -- RHS), removing the first occurrence of
322 * each element in LHS from RHS. There is no type coercion so the elements must
323 * match exactly.
324 *
325 * The BIF is broken into several stages that can all trap individually, and it
326 * chooses its algorithm based on input size. If either input is small it will
327 * use a linear scan tuned to which side it's on, and if both inputs are large
328 * enough it will convert RHS into a multiset to provide good asymptotic
329 * behavior. */
330
331 #define SUBTRACT_LHS_THRESHOLD 16
332 #define SUBTRACT_RHS_THRESHOLD 16
333
/* Stages of the trappable erlang:'--'/2 state machine; subtract_continue
 * dispatches on this. */
typedef enum {
    SUBTRACT_STAGE_START,
    SUBTRACT_STAGE_LEN_LHS,

    /* Naive linear scan that's efficient when
     * LEN_LHS <= SUBTRACT_LHS_THRESHOLD. */
    SUBTRACT_STAGE_NAIVE_LHS,

    SUBTRACT_STAGE_LEN_RHS,

    /* As SUBTRACT_STAGE_NAIVE_LHS but for RHS. */
    SUBTRACT_STAGE_NAIVE_RHS,

    /* Creates a multiset from RHS for faster lookups before sweeping through
     * LHS. The set is implemented as a red-black tree and duplicate elements
     * are handled by a counter on each node. */
    SUBTRACT_STAGE_SET_BUILD,
    SUBTRACT_STAGE_SET_FINISH
} ErtsSubtractCtxStage;
353
/* Node in the RHS multiset: a red-black tree node whose `count` tracks how
 * many occurrences of `key` remain to be removed. */
typedef struct subtract_node__ {
    struct subtract_node__ *parent;
    struct subtract_node__ *left;
    struct subtract_node__ *right;
    int is_red;     /* Red/black color bit used by erl_rbtree. */

    Eterm key;      /* The RHS element. */
    Uint count;     /* Remaining occurrences of `key` in RHS. */
} subtract_tree_t;
363
/* Trap state for erlang:'--'/2. The union holds per-stage scratch data;
 * which member is live depends on `stage`. */
typedef struct {
    ErtsSubtractCtxStage stage;

    Eterm lhs_original;  /* LHS as given by the caller (kept for errors). */
    Eterm rhs_original;  /* RHS as given by the caller. */

    Uint lhs_remaining;  /* Elements of LHS not yet processed. */
    Uint rhs_remaining;  /* Elements of RHS not yet matched/removed. */

    Eterm iterator;      /* Current position in whichever list the stage walks. */

    Eterm *result_cdr;   /* Where the next CDR should be written. */
    Eterm result;        /* Head of the (possibly unterminated) result. */

    union {
        /* Live in SUBTRACT_STAGE_NAIVE_LHS. */
        Eterm lhs_elements[SUBTRACT_LHS_THRESHOLD];
        /* Live in SUBTRACT_STAGE_NAIVE_RHS. */
        Eterm rhs_elements[SUBTRACT_RHS_THRESHOLD];

        /* Live in SUBTRACT_STAGE_SET_BUILD / SUBTRACT_STAGE_SET_FINISH. */
        struct {
            subtract_tree_t *tree;

            /* A memory area for the tree's nodes, saving us the need to have
             * one allocation per node. */
            subtract_tree_t *alloc_start;
            subtract_tree_t *alloc;
        } rhs_set;
    } u;
} ErtsSubtractContext;
392
393 #define ERTS_RBT_PREFIX subtract
394 #define ERTS_RBT_T subtract_tree_t
395 #define ERTS_RBT_KEY_T Eterm
396 #define ERTS_RBT_FLAGS_T int
397 #define ERTS_RBT_INIT_EMPTY_TNODE(T) \
398 do { \
399 (T)->parent = NULL; \
400 (T)->left = NULL; \
401 (T)->right = NULL; \
402 } while(0)
403 #define ERTS_RBT_IS_RED(T) ((T)->is_red)
404 #define ERTS_RBT_SET_RED(T) ((T)->is_red = 1)
405 #define ERTS_RBT_IS_BLACK(T) (!ERTS_RBT_IS_RED(T))
406 #define ERTS_RBT_SET_BLACK(T) ((T)->is_red = 0)
407 #define ERTS_RBT_GET_FLAGS(T) ((T)->is_red)
408 #define ERTS_RBT_SET_FLAGS(T, F) ((T)->is_red = F)
409 #define ERTS_RBT_GET_PARENT(T) ((T)->parent)
410 #define ERTS_RBT_SET_PARENT(T, P) ((T)->parent = P)
411 #define ERTS_RBT_GET_RIGHT(T) ((T)->right)
412 #define ERTS_RBT_SET_RIGHT(T, R) ((T)->right = (R))
413 #define ERTS_RBT_GET_LEFT(T) ((T)->left)
414 #define ERTS_RBT_SET_LEFT(T, L) ((T)->left = (L))
415 #define ERTS_RBT_GET_KEY(T) ((T)->key)
416 #define ERTS_RBT_CMP_KEYS(KX, KY) subtract_term_cmp((KX), (KY))
417 #define ERTS_RBT_WANT_LOOKUP_INSERT
418 #define ERTS_RBT_WANT_LOOKUP
419 #define ERTS_RBT_WANT_DELETE
420 #define ERTS_RBT_UNDEF
421
422 /* erl_rbtree expects comparisons to return an int */
/* erl_rbtree expects comparisons to return an int, so collapse the Sint
 * result of CMP_TERM into {-1, 0, 1}. */
static int subtract_term_cmp(Eterm a, Eterm b) {
    const Sint order = CMP_TERM(a, b);

    return (order > 0) - (order < 0);
}
434
435 #include "erl_rbtree.h"
436
437 static int subtract_continue(Process *p, ErtsSubtractContext *context);
438
/* Releases resources owned by a subtract context. Only the tree-building
 * stages own heap memory (the node pool); all other stages keep their
 * scratch data inline in the context. */
static void subtract_ctx_dtor(ErtsSubtractContext *context) {
    if (context->stage == SUBTRACT_STAGE_SET_BUILD ||
        context->stage == SUBTRACT_STAGE_SET_FINISH) {
        erts_free(ERTS_ALC_T_LIST_TRAP, context->u.rhs_set.alloc_start);
    }
}
449
/* Destructor for the magic binary holding an ErtsSubtractContext; frees
 * whatever the contained context owns. */
static int subtract_ctx_bin_dtor(Binary *context_bin) {
    subtract_ctx_dtor((ErtsSubtractContext*)ERTS_MAGIC_BIN_DATA(context_bin));
    return 1;
}
455
/* Moves a subtract context between storage locations (stack -> magic
 * binary), copying only the union member that is live for the current stage
 * and rebasing result_cdr if it pointed into the source context. */
static void subtract_ctx_move(ErtsSubtractContext *from,
                              ErtsSubtractContext *to) {
    int uses_result_cdr = 0;

    to->stage = from->stage;

    to->lhs_original = from->lhs_original;
    to->rhs_original = from->rhs_original;

    to->lhs_remaining = from->lhs_remaining;
    to->rhs_remaining = from->rhs_remaining;

    to->iterator = from->iterator;
    to->result = from->result;

    switch (to->stage) {
    case SUBTRACT_STAGE_NAIVE_LHS:
        /* Only the still-unmatched prefix of the scratch array matters. */
        sys_memcpy(to->u.lhs_elements,
                   from->u.lhs_elements,
                   sizeof(Eterm) * to->lhs_remaining);
        break;
    case SUBTRACT_STAGE_NAIVE_RHS:
        sys_memcpy(to->u.rhs_elements,
                   from->u.rhs_elements,
                   sizeof(Eterm) * to->rhs_remaining);

        uses_result_cdr = 1;
        break;
    case SUBTRACT_STAGE_SET_FINISH:
        uses_result_cdr = 1;
        /* FALL THROUGH */
    case SUBTRACT_STAGE_SET_BUILD:
        /* The node pool is heap-allocated, so plain pointer copies keep
         * ownership with the destination context. */
        to->u.rhs_set.alloc_start = from->u.rhs_set.alloc_start;
        to->u.rhs_set.alloc = from->u.rhs_set.alloc;
        to->u.rhs_set.tree = from->u.rhs_set.tree;
        break;
    default:
        break;
    }

    if (uses_result_cdr) {
        if (from->result_cdr == &from->result) {
            /* Nothing emitted yet: rebase onto the destination context. */
            to->result_cdr = &to->result;
        } else {
            /* Points into the heap-built list; still valid as-is. */
            to->result_cdr = from->result_cdr;
        }
    }
}
504
subtract_create_trap_state(Process * p,ErtsSubtractContext * context)505 static Eterm subtract_create_trap_state(Process *p,
506 ErtsSubtractContext *context) {
507 Binary *state_bin;
508 Eterm *hp;
509
510 state_bin = erts_create_magic_binary(sizeof(ErtsSubtractContext),
511 subtract_ctx_bin_dtor);
512
513 subtract_ctx_move(context, ERTS_MAGIC_BIN_DATA(state_bin));
514
515 hp = HAlloc(p, ERTS_MAGIC_REF_THING_SIZE);
516
517 return erts_mk_magic_ref(&hp, &MSO(p), state_bin);
518 }
519
/* Enters the stage that measures (and validates) the length of LHS. */
static int subtract_enter_len_lhs(Process *p, ErtsSubtractContext *context) {
    context->lhs_remaining = 0;
    context->iterator = context->lhs_original;

    context->stage = SUBTRACT_STAGE_LEN_LHS;

    return subtract_continue(p, context);
}
528
/* Enters the stage that measures (and validates) the length of RHS. */
static int subtract_enter_len_rhs(Process *p, ErtsSubtractContext *context) {
    context->rhs_remaining = 0;
    context->iterator = context->rhs_original;

    context->stage = SUBTRACT_STAGE_LEN_RHS;

    return subtract_continue(p, context);
}
537
/* Walks *iterator_p within the reduction budget, adding the number of cells
 * seen to *count_p. Returns 1 when the list ended in NIL, 0 when the budget
 * ran out (call again to continue), and -1 for an improper list. */
static int subtract_get_length(Process *p, Eterm *iterator_p, Uint *count_p) {
    static const Sint ELEMENTS_PER_RED = 32;

    Sint budget, count;
    Eterm iterator;

    budget = ELEMENTS_PER_RED * ERTS_BIF_REDS_LEFT(p);
    iterator = *iterator_p;

#ifdef DEBUG
    /* Shrink the budget in debug builds to exercise the trapping path. */
    budget = budget / 10 + 1;
#endif

    for (count = 0; count < budget && is_list(iterator); count++) {
        iterator = CDR(list_val(iterator));
    }

    if (!is_list(iterator) && !is_nil(iterator)) {
        return -1;
    }

    BUMP_REDS(p, count / ELEMENTS_PER_RED);

    *iterator_p = iterator;
    *count_p += count;

    if (is_nil(iterator)) {
        return 1;
    }

    return 0;
}
570
/* Enters the naive stage for a small LHS: copies all of LHS (at most
 * SUBTRACT_LHS_THRESHOLD elements, guaranteed by the length stage) into the
 * scratch array, then sweeps RHS against it. */
static int subtract_enter_naive_lhs(Process *p, ErtsSubtractContext *context) {
    Eterm iterator;
    int i = 0;

    context->stage = SUBTRACT_STAGE_NAIVE_LHS;

    /* The iterator walks RHS in this stage; LHS lives in the array. */
    context->iterator = context->rhs_original;
    context->result = NIL;

    iterator = context->lhs_original;

    while (is_list(iterator)) {
        const Eterm *cell = list_val(iterator);

        ASSERT(i < SUBTRACT_LHS_THRESHOLD);

        context->u.lhs_elements[i++] = CAR(cell);
        iterator = CDR(cell);
    }

    ASSERT(i == context->lhs_remaining);

    return subtract_continue(p, context);
}
595
/* Naive RHS sweep against the small LHS array: for each RHS element, the
 * first matching LHS entry is removed (shifting to preserve order). Returns
 * 1 when done, 0 when it must trap, and -1 when RHS is improper. The
 * surviving LHS elements are then consed up into the result. */
static int subtract_naive_lhs(Process *p, ErtsSubtractContext *context) {
    const Sint CHECKS_PER_RED = 16;
    Sint checks, budget;

    budget = CHECKS_PER_RED * ERTS_BIF_REDS_LEFT(p);
    checks = 0;

    while (checks < budget && is_list(context->iterator)) {
        const Eterm *cell;
        Eterm value, next;
        int found_at;

        cell = list_val(context->iterator);

        value = CAR(cell);
        next = CDR(cell);

        for (found_at = 0; found_at < context->lhs_remaining; found_at++) {
            if (EQ(value, context->u.lhs_elements[found_at])) {
                /* We shift the array one step down as we have to preserve
                 * order.
                 *
                 * Note that we can't exit early as that would suppress errors
                 * in the right-hand side (this runs prior to determining the
                 * length of RHS). */

                context->lhs_remaining--;
                sys_memmove(&context->u.lhs_elements[found_at],
                            &context->u.lhs_elements[found_at + 1],
                            (context->lhs_remaining - found_at) * sizeof(Eterm));
                break;
            }
        }

        /* Charge at least one check even when LHS is already empty. */
        checks += MAX(1, context->lhs_remaining);
        context->iterator = next;
    }

    BUMP_REDS(p, MIN(checks, budget) / CHECKS_PER_RED);

    if (is_list(context->iterator)) {
        return 0;
    } else if (!is_nil(context->iterator)) {
        return -1;
    }

    if (context->lhs_remaining > 0) {
        Eterm *hp;
        int i;

        hp = HAlloc(p, context->lhs_remaining * 2);

        /* Cons from the back so the result keeps LHS order. */
        for (i = context->lhs_remaining - 1; i >= 0; i--) {
            Eterm value = context->u.lhs_elements[i];

            context->result = CONS(hp, value, context->result);
            hp += 2;
        }
    }

    ASSERT(context->lhs_remaining > 0 || context->result == NIL);

    return 1;
}
660
/* Enters the naive stage for a small RHS: copies all of RHS (at most
 * SUBTRACT_RHS_THRESHOLD elements, guaranteed by the length stage) into the
 * scratch array, then sweeps LHS against it. */
static int subtract_enter_naive_rhs(Process *p, ErtsSubtractContext *context) {
    Eterm iterator;
    int i = 0;

    context->stage = SUBTRACT_STAGE_NAIVE_RHS;

    /* The iterator walks LHS in this stage; RHS lives in the array. */
    context->iterator = context->lhs_original;
    context->result_cdr = &context->result;
    context->result = NIL;

    iterator = context->rhs_original;

    while (is_list(iterator)) {
        const Eterm *cell = list_val(iterator);

        ASSERT(i < SUBTRACT_RHS_THRESHOLD);

        context->u.rhs_elements[i++] = CAR(cell);
        iterator = CDR(cell);
    }

    ASSERT(i == context->rhs_remaining);

    return subtract_continue(p, context);
}
686
/* Naive LHS sweep against the small RHS array: emits LHS elements with no
 * match in the remaining RHS, and consumes one RHS occurrence per match.
 * Returns 1 when done and 0 when it must trap; both lists are known proper
 * at this point so there is no error case. */
static int subtract_naive_rhs(Process *p, ErtsSubtractContext *context) {
    const Sint CHECKS_PER_RED = 16;
    Sint checks, budget;

    budget = CHECKS_PER_RED * ERTS_BIF_REDS_LEFT(p);
    checks = 0;

#ifdef DEBUG
    /* Shrink the budget in debug builds to exercise the trapping path. */
    budget = budget / 10 + 1;
#endif

    while (checks < budget && is_list(context->iterator)) {
        const Eterm *cell;
        Eterm value, next;
        int found_at;

        cell = list_val(context->iterator);
        value = CAR(cell);
        next = CDR(cell);

        for (found_at = context->rhs_remaining - 1; found_at >= 0; found_at--) {
            if (EQ(value, context->u.rhs_elements[found_at])) {
                break;
            }
        }

        if (found_at < 0) {
            /* Destructively add the value to the result. This is safe
             * since the GC is disabled and the unfinished term is never
             * leaked to the outside world. */
            Eterm *hp = HAllocX(p, 2, context->lhs_remaining * 2);

            *context->result_cdr = make_list(hp);
            context->result_cdr = &CDR(hp);

            CAR(hp) = value;
        } else {
            /* Matched: drop one occurrence by swapping in the last live RHS
             * entry. (The earlier `else if (found_at >= 0)` re-tested the
             * negation of the branch above and was always true.) */
            Eterm swap;

            if (context->rhs_remaining-- == 1) {
                /* We've run out of items to remove, so the rest of the
                 * result will be equal to the remainder of the input. We know
                 * that LHS is well-formed as any errors would've been reported
                 * during length determination. */
                *context->result_cdr = next;

                BUMP_REDS(p, MIN(budget, checks) / CHECKS_PER_RED);

                return 1;
            }

            swap = context->u.rhs_elements[context->rhs_remaining];
            context->u.rhs_elements[found_at] = swap;
        }

        checks += context->rhs_remaining;
        context->iterator = next;
        context->lhs_remaining--;
    }

    /* The result only has to be terminated when returning it to the user, but
     * we're doing it when trapping as well to prevent headaches when
     * debugging. */
    *context->result_cdr = NIL;

    BUMP_REDS(p, MIN(budget, checks) / CHECKS_PER_RED);

    if (is_list(context->iterator)) {
        ASSERT(context->lhs_remaining > 0 && context->rhs_remaining > 0);
        return 0;
    }

    return 1;
}
761
/* Enters the tree-building stage for a large RHS: allocates one pool slot
 * per RHS element up front (rhs_remaining is exact at this point), then
 * inserts RHS into the multiset. The pool is freed by subtract_ctx_dtor. */
static int subtract_enter_set_build(Process *p, ErtsSubtractContext *context) {
    context->stage = SUBTRACT_STAGE_SET_BUILD;

    context->u.rhs_set.alloc_start =
        erts_alloc(ERTS_ALC_T_LIST_TRAP,
                   context->rhs_remaining * sizeof(subtract_tree_t));

    context->u.rhs_set.alloc = context->u.rhs_set.alloc_start;
    context->u.rhs_set.tree = NULL;

    context->iterator = context->rhs_original;

    return subtract_continue(p, context);
}
776
/* Builds the RHS multiset: inserts each RHS element into the red-black
 * tree, bumping the per-node counter for duplicates instead of consuming a
 * new pool slot. Returns 1 when RHS is exhausted, 0 when it must trap. */
static int subtract_set_build(Process *p, ErtsSubtractContext *context) {
    /* `static const` is the conventional specifier order (was `const
     * static`). */
    static const Sint INSERTIONS_PER_RED = 16;
    Sint budget, insertions;

    budget = INSERTIONS_PER_RED * ERTS_BIF_REDS_LEFT(p);
    insertions = 0;

#ifdef DEBUG
    /* Shrink the budget in debug builds to exercise the trapping path. */
    budget = budget / 10 + 1;
#endif

    while (insertions < budget && is_list(context->iterator)) {
        subtract_tree_t *existing_node, *new_node;
        const Eterm *cell;
        Eterm value, next;

        cell = list_val(context->iterator);
        value = CAR(cell);
        next = CDR(cell);

        new_node = context->u.rhs_set.alloc;
        new_node->key = value;
        new_node->count = 1;

        existing_node = subtract_rbt_lookup_insert(&context->u.rhs_set.tree,
                                                   new_node);

        if (existing_node != NULL) {
            /* Duplicate key: count it on the existing node and reuse the
             * pool slot for the next element. */
            existing_node->count++;
        } else {
            context->u.rhs_set.alloc++;
        }

        context->iterator = next;
        insertions++;
    }

    BUMP_REDS(p, insertions / INSERTIONS_PER_RED);

    ASSERT(is_list(context->iterator) || is_nil(context->iterator));
    ASSERT(context->u.rhs_set.tree != NULL);

    return is_nil(context->iterator);
}
821
/* Enters the final stage: sweep LHS against the completed RHS multiset. */
static int subtract_enter_set_finish(Process *p, ErtsSubtractContext *context) {
    context->result = NIL;
    context->result_cdr = &context->result;

    context->iterator = context->lhs_original;
    context->stage = SUBTRACT_STAGE_SET_FINISH;

    return subtract_continue(p, context);
}
832
/* Sweeps LHS against the RHS multiset: elements absent from the tree are
 * emitted; present ones consume a count (deleting the node at zero). When
 * the last RHS occurrence is consumed, the rest of LHS is linked in as-is.
 * Returns 1 when done, 0 when it must trap. */
static int subtract_set_finish(Process *p, ErtsSubtractContext *context) {
    const Sint CHECKS_PER_RED = 8;
    Sint checks, budget;

    budget = CHECKS_PER_RED * ERTS_BIF_REDS_LEFT(p);
    checks = 0;

#ifdef DEBUG
    /* Shrink the budget in debug builds to exercise the trapping path. */
    budget = budget / 10 + 1;
#endif

    while (checks < budget && is_list(context->iterator)) {
        subtract_tree_t *node;
        const Eterm *cell;
        Eterm value, next;

        cell = list_val(context->iterator);
        value = CAR(cell);
        next = CDR(cell);

        ASSERT(context->rhs_remaining > 0);

        node = subtract_rbt_lookup(context->u.rhs_set.tree, value);

        if (node == NULL) {
            /* Not in RHS: append destructively; safe since GC is disabled
             * while this stage can trap. */
            Eterm *hp = HAllocX(p, 2, context->lhs_remaining * 2);

            *context->result_cdr = make_list(hp);
            context->result_cdr = &CDR(hp);

            CAR(hp) = value;
        } else {
            if (context->rhs_remaining-- == 1) {
                /* Last removable element consumed; the remainder of LHS is
                 * the remainder of the result. */
                *context->result_cdr = next;

                BUMP_REDS(p, checks / CHECKS_PER_RED);

                return 1;
            }

            if (node->count-- == 1) {
                subtract_rbt_delete(&context->u.rhs_set.tree, node);
            }
        }

        context->iterator = next;
        context->lhs_remaining--;
        checks++;
    }

    /* Terminate the partial result; required on return, helpful for
     * debugging when trapping. */
    *context->result_cdr = NIL;

    BUMP_REDS(p, checks / CHECKS_PER_RED);

    if (is_list(context->iterator)) {
        ASSERT(context->lhs_remaining > 0 && context->rhs_remaining > 0);
        return 0;
    }

    return 1;
}
894
/* Stage dispatcher for erlang:'--'/2. Runs the current stage and chains
 * into the next one when a stage completes (returns 1). Overall return
 * convention: 1 done, 0 trap, -1 badarg. */
static int subtract_continue(Process *p, ErtsSubtractContext *context) {
    switch (context->stage) {
        case SUBTRACT_STAGE_START: {
            return subtract_enter_len_lhs(p, context);
        }

        case SUBTRACT_STAGE_LEN_LHS: {
            int res = subtract_get_length(p,
                                          &context->iterator,
                                          &context->lhs_remaining);

            if (res != 1) {
                return res;
            }

            /* Small LHS: a linear scan beats building a set. */
            if (context->lhs_remaining <= SUBTRACT_LHS_THRESHOLD) {
                return subtract_enter_naive_lhs(p, context);
            }

            return subtract_enter_len_rhs(p, context);
        }

        case SUBTRACT_STAGE_NAIVE_LHS: {
            return subtract_naive_lhs(p, context);
        }

        case SUBTRACT_STAGE_LEN_RHS: {
            int res = subtract_get_length(p,
                                          &context->iterator,
                                          &context->rhs_remaining);

            if (res != 1) {
                return res;
            }

            /* We've walked through both lists fully now so we no longer need
             * to check for errors past this point. */

            if (context->rhs_remaining <= SUBTRACT_RHS_THRESHOLD) {
                return subtract_enter_naive_rhs(p, context);
            }

            return subtract_enter_set_build(p, context);
        }

        case SUBTRACT_STAGE_NAIVE_RHS: {
            return subtract_naive_rhs(p, context);
        }

        case SUBTRACT_STAGE_SET_BUILD: {
            int res = subtract_set_build(p, context);

            if (res != 1) {
                return res;
            }

            return subtract_enter_set_finish(p, context);
        }

        case SUBTRACT_STAGE_SET_FINISH: {
            return subtract_set_finish(p, context);
        }

        default:
            ERTS_ASSERT(!"unreachable");
    }
}
962
/* Initializes a fresh subtract context for LHS -- RHS and runs the first
 * slice of work. */
static int subtract_start(Process *p, Eterm lhs, Eterm rhs,
                          ErtsSubtractContext *context) {
    context->lhs_original = lhs;
    context->rhs_original = rhs;

    context->stage = SUBTRACT_STAGE_START;

    return subtract_continue(p, context);
}
972
973 /* erlang:'--'/2 */
/* Common implementation of erlang:'--'/2. On first entry LHS/RHS are the
 * user's lists; on re-entry after a trap LHS is a magic ref to the saved
 * context and RHS is NIL. GC is disabled while trapped. */
static Eterm subtract(Export *bif_entry, BIF_ALIST_2) {
    Eterm lhs = BIF_ARG_1, rhs = BIF_ARG_2;

    if ((is_list(lhs) || is_nil(lhs)) && (is_list(rhs) || is_nil(rhs))) {
        /* We start with the context on the stack in the hopes that we won't
         * have to trap. */
        ErtsSubtractContext context;
        int res;

        res = subtract_start(BIF_P, lhs, rhs, &context);

        if (res == 0) {
            /* Out of reductions: persist the context and yield. */
            Eterm state_mref;

            state_mref = subtract_create_trap_state(BIF_P, &context);
            erts_set_gc_state(BIF_P, 0);

            BIF_TRAP2(bif_entry, BIF_P, state_mref, NIL);
        }

        /* Finished (or failed) without trapping; release stage resources. */
        subtract_ctx_dtor(&context);

        if (res < 0) {
            BIF_ERROR(BIF_P, BADARG);
        }

        BIF_RET(context.result);
    } else if (is_internal_magic_ref(lhs)) {
        /* Re-entry after trapping: recover the saved context. */
        ErtsSubtractContext *context;
        int (*dtor)(Binary*);
        Binary *magic_bin;

        int res;

        magic_bin = erts_magic_ref2bin(lhs);
        dtor = ERTS_MAGIC_BIN_DESTRUCTOR(magic_bin);

        /* A magic ref with the wrong destructor is a user-supplied term,
         * not our trap state. */
        if (dtor != subtract_ctx_bin_dtor) {
            BIF_ERROR(BIF_P, BADARG);
        }

        ASSERT(BIF_P->flags & F_DISABLE_GC);
        ASSERT(rhs == NIL);

        context = ERTS_MAGIC_BIN_DATA(magic_bin);
        res = subtract_continue(BIF_P, context);

        if (res == 0) {
            BIF_TRAP2(bif_entry, BIF_P, lhs, NIL);
        }

        /* Done (or failed): re-enable GC before leaving. The magic binary's
         * destructor frees the stage resources when the ref is collected. */
        erts_set_gc_state(BIF_P, 1);

        if (res < 0) {
            /* Report the error against the original arguments, not the
             * internal trap state. */
            ERTS_BIF_ERROR_TRAPPED2(BIF_P, BADARG, bif_entry,
                                    context->lhs_original,
                                    context->rhs_original);
        }

        BIF_RET(context->result);
    }

    ASSERT(!(BIF_P->flags & F_DISABLE_GC));

    BIF_ERROR(BIF_P, BADARG);
}
1040
/* Entry point for the '--' operator; delegates to the shared subtract
 * implementation with its own export entry for trapping. */
BIF_RETTYPE ebif_minusminus_2(BIF_ALIST_2) {
    return subtract(bif_export[BIF_ebif_minusminus_2], BIF_CALL_ARGS);
}
1044
/* Entry point for erlang:subtract/2; same implementation as '--'. */
BIF_RETTYPE subtract_2(BIF_ALIST_2) {
    return subtract(bif_export[BIF_subtract_2], BIF_CALL_ARGS);
}
1048
1049
/* lists:member/2: linear search for a term in a list, comparing with =:=
 * (`eq`). Traps every 16*reds_left iterations; immediate keys skip the deep
 * equality check via the pointer-equality fast path. */
BIF_RETTYPE lists_member_2(BIF_ALIST_2)
{
    Eterm term;
    Eterm list;
    Eterm item;
    int non_immed_key;       /* Nonzero when a deep compare may be needed. */
    int reds_left = ERTS_BIF_REDS_LEFT(BIF_P);
    int max_iter = 16 * reds_left;

    if (is_nil(BIF_ARG_2)) {
        BIF_RET(am_false);
    } else if (is_not_list(BIF_ARG_2)) {
        BIF_ERROR(BIF_P, BADARG);
    }

    term = BIF_ARG_1;
    non_immed_key = is_not_immed(term);
    list = BIF_ARG_2;
    while (is_list(list)) {
        if (--max_iter < 0) {
            /* Budget exhausted: trap with the unsearched tail. */
            BUMP_ALL_REDS(BIF_P);
            BIF_TRAP2(bif_export[BIF_lists_member_2], BIF_P, term, list);
        }
        item = CAR(list_val(list));
        /* Immediates compare by value; only non-immediates need eq(). */
        if ((item == term) || (non_immed_key && eq(item, term))) {
            BIF_RET2(am_true, reds_left - max_iter/16);
        }
        list = CDR(list_val(list));
    }
    if (is_not_nil(list)) {
        /* Improper list tail. */
        BUMP_REDS(BIF_P, reds_left - max_iter/16);
        BIF_ERROR(BIF_P, BADARG);
    }
    BIF_RET2(am_false, reds_left - max_iter/16);
}
1085
/* lists:reverse/2 work loop that sizes the allocation up front: a bounded
 * lookahead counts cells (and detects improper lists), then exactly that
 * many cells are allocated and consed onto `tail`. Traps with the remaining
 * (list, tail) pair when the budget runs out. */
static BIF_RETTYPE lists_reverse_alloc(Process *c_p,
                                       Eterm list_in,
                                       Eterm tail_in)
{
    static const Uint CELLS_PER_RED = 40;

    Eterm *alloc_top, *alloc_end;
    Uint cells_left, max_cells;
    Eterm list, tail;
    Eterm lookahead;

    list = list_in;
    tail = tail_in;

    cells_left = max_cells = CELLS_PER_RED * ERTS_BIF_REDS_LEFT(c_p);
    lookahead = list;

    /* Count how many cells fit in this slice; also validates the list. */
    while (cells_left != 0 && is_list(lookahead)) {
        lookahead = CDR(list_val(lookahead));
        cells_left--;
    }

    BUMP_REDS(c_p, (max_cells - cells_left) / CELLS_PER_RED);

    if (is_not_list(lookahead) && is_not_nil(lookahead)) {
        BIF_ERROR(c_p, BADARG);
    }

    alloc_top = HAlloc(c_p, 2 * (max_cells - cells_left));
    alloc_end = alloc_top + 2 * (max_cells - cells_left);

    while (alloc_top < alloc_end) {
        Eterm *pair = list_val(list);

        tail = CONS(alloc_top, CAR(pair), tail);
        list = CDR(pair);

        ASSERT(is_list(list) || is_nil(list));

        alloc_top += 2;
    }

    if (is_nil(list)) {
        BIF_RET(tail);
    }

    /* Budget exhausted: the partial reversal is a valid (list, tail) pair
     * to resume from. */
    ASSERT(is_list(tail) && cells_left == 0);
    BIF_TRAP2(bif_export[BIF_lists_reverse_2], c_p, list, tail);
}
1135
lists_reverse_onheap(Process * c_p,Eterm list_in,Eterm tail_in)1136 static BIF_RETTYPE lists_reverse_onheap(Process *c_p,
1137 Eterm list_in,
1138 Eterm tail_in)
1139 {
1140 static const Uint CELLS_PER_RED = 60;
1141
1142 Eterm *alloc_start, *alloc_top, *alloc_end;
1143 Uint cells_left, max_cells;
1144 Eterm list, tail;
1145
1146 list = list_in;
1147 tail = tail_in;
1148
1149 cells_left = max_cells = CELLS_PER_RED * ERTS_BIF_REDS_LEFT(c_p);
1150
1151 ASSERT(HEAP_LIMIT(c_p) >= HEAP_TOP(c_p) + 2);
1152 alloc_start = HEAP_TOP(c_p);
1153 alloc_end = HEAP_LIMIT(c_p) - 2;
1154 alloc_top = alloc_start;
1155
1156 /* Don't process more cells than we have reductions for. */
1157 alloc_end = MIN(alloc_top + (cells_left * 2), alloc_end);
1158
1159 while (alloc_top < alloc_end && is_list(list)) {
1160 Eterm *pair = list_val(list);
1161
1162 tail = CONS(alloc_top, CAR(pair), tail);
1163 list = CDR(pair);
1164
1165 alloc_top += 2;
1166 }
1167
1168 cells_left -= (alloc_top - alloc_start) / 2;
1169 HEAP_TOP(c_p) = alloc_top;
1170
1171 ASSERT(cells_left >= 0 && cells_left <= max_cells);
1172 BUMP_REDS(c_p, (max_cells - cells_left) / CELLS_PER_RED);
1173
1174 if (is_nil(list)) {
1175 BIF_RET(tail);
1176 } else if (is_list(list)) {
1177 if (cells_left > CELLS_PER_RED) {
1178 return lists_reverse_alloc(c_p, list, tail);
1179 }
1180
1181 BUMP_ALL_REDS(c_p);
1182 BIF_TRAP2(bif_export[BIF_lists_reverse_2], c_p, list, tail);
1183 }
1184
1185 BIF_ERROR(c_p, BADARG);
1186 }
1187
lists_reverse_2(BIF_ALIST_2)1188 BIF_RETTYPE lists_reverse_2(BIF_ALIST_2)
1189 {
1190 /* Handle legal and illegal non-lists quickly. */
1191 if (is_nil(BIF_ARG_1)) {
1192 BIF_RET(BIF_ARG_2);
1193 } else if (is_not_list(BIF_ARG_1)) {
1194 BIF_ERROR(BIF_P, BADARG);
1195 }
1196
1197 /* We build the reversal on the unused part of the heap if possible to save
1198 * us the trouble of having to figure out the list size. We fall back to
1199 * lists_reverse_alloc when we run out of space. */
1200 if (HeapWordsLeft(BIF_P) > 8) {
1201 return lists_reverse_onheap(BIF_P, BIF_ARG_1, BIF_ARG_2);
1202 }
1203
1204 return lists_reverse_alloc(BIF_P, BIF_ARG_1, BIF_ARG_2);
1205 }
1206
1207 BIF_RETTYPE
lists_keymember_3(BIF_ALIST_3)1208 lists_keymember_3(BIF_ALIST_3)
1209 {
1210 Eterm res;
1211
1212 res = keyfind(BIF_lists_keymember_3, BIF_P,
1213 BIF_ARG_1, BIF_ARG_2, BIF_ARG_3);
1214 if (is_value(res) && is_tuple(res)) {
1215 return am_true;
1216 } else {
1217 return res;
1218 }
1219 }
1220
1221 BIF_RETTYPE
lists_keysearch_3(BIF_ALIST_3)1222 lists_keysearch_3(BIF_ALIST_3)
1223 {
1224 Eterm res;
1225
1226 res = keyfind(BIF_lists_keysearch_3, BIF_P,
1227 BIF_ARG_1, BIF_ARG_2, BIF_ARG_3);
1228 if (is_non_value(res) || is_not_tuple(res)) {
1229 return res;
1230 } else { /* Tuple */
1231 Eterm* hp = HAlloc(BIF_P, 3);
1232 return TUPLE2(hp, am_value, res);
1233 }
1234 }
1235
1236 BIF_RETTYPE
lists_keyfind_3(BIF_ALIST_3)1237 lists_keyfind_3(BIF_ALIST_3)
1238 {
1239 return keyfind(BIF_lists_keyfind_3, BIF_P,
1240 BIF_ARG_1, BIF_ARG_2, BIF_ARG_3);
1241 }
1242
/* Common implementation of lists:keymember/3, keysearch/3 and keyfind/3.
 *
 * Scans 'List' for the first tuple whose element at 1-based position
 * 'Pos' compares equal to 'Key'. Returns that tuple, or am_false when
 * the list is exhausted. Badargs on a non-small or non-positive 'Pos'
 * or an improper list, and traps back to the calling BIF ('Bif') when
 * the iteration budget is spent. Non-tuple list elements, and tuples
 * shorter than 'Pos', are skipped silently. */
static Eterm
keyfind(int Bif, Process* p, Eterm Key, Eterm Pos, Eterm List)
{
    int max_iter = 10 * CONTEXT_REDS;
    Sint pos;
    Eterm term;

    if (!is_small(Pos) || (pos = signed_val(Pos)) < 1) {
        BIF_ERROR(p, BADARG);
    }

    if (is_small(Key)) {
        /* Small-integer key: a word-equal small matches directly, but a
         * float element that is numerically equal must match too (== 
         * semantics), hence the pre-computed double. */
        double float_key = (double) signed_val(Key);

        while (is_list(List)) {
            if (--max_iter < 0) {
                BUMP_ALL_REDS(p);
                BIF_TRAP3(bif_export[Bif], p, Key, Pos, List);
            }
            term = CAR(list_val(List));
            List = CDR(list_val(List));
            if (is_tuple(term)) {
                Eterm *tuple_ptr = tuple_val(term);
                if (pos <= arityval(*tuple_ptr)) {
                    Eterm element = tuple_ptr[pos];
                    if (Key == element) {
                        return term;
                    } else if (is_float(element)) {
                        FloatDef f;

                        GET_DOUBLE(element, f);
                        if (f.fd == float_key) {
                            return term;
                        }
                    }
                }
            }
        }
    } else if (is_immed(Key)) {
        /* Other immediate keys (e.g. atoms): equality is a plain word
         * comparison, no boxed-term cases to consider. */
        while (is_list(List)) {
            if (--max_iter < 0) {
                BUMP_ALL_REDS(p);
                BIF_TRAP3(bif_export[Bif], p, Key, Pos, List);
            }
            term = CAR(list_val(List));
            List = CDR(list_val(List));
            if (is_tuple(term)) {
                Eterm *tuple_ptr = tuple_val(term);
                if (pos <= arityval(*tuple_ptr)) {
                    Eterm element = tuple_ptr[pos];
                    if (Key == element) {
                        return term;
                    }
                }
            }
        }
    } else {
        /* Boxed keys (bignums, floats, tuples, ...): fall back to full
         * term comparison via CMP_EQ. */
        while (is_list(List)) {
            if (--max_iter < 0) {
                BUMP_ALL_REDS(p);
                BIF_TRAP3(bif_export[Bif], p, Key, Pos, List);
            }
            term = CAR(list_val(List));
            List = CDR(list_val(List));
            if (is_tuple(term)) {
                Eterm *tuple_ptr = tuple_val(term);
                if (pos <= arityval(*tuple_ptr)) {
                    Eterm element = tuple_ptr[pos];
                    if (CMP_EQ(Key, element)) {
                        return term;
                    }
                }
            }
        }
    }

    /* The scan left a non-[] tail: improper list. */
    if (is_not_nil(List)) {
        BIF_ERROR(p, BADARG);
    }
    return am_false;
}
1324