1 /*
2 * %CopyrightBegin%
3 *
4 * Copyright Ericsson AB 1998-2020. All Rights Reserved.
5 *
6 * Licensed under the Apache License, Version 2.0 (the "License");
7 * you may not use this file except in compliance with the License.
8 * You may obtain a copy of the License at
9 *
10 * http://www.apache.org/licenses/LICENSE-2.0
11 *
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an "AS IS" BASIS,
14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
17 *
18 * %CopyrightEnd%
19 */
20
21 /*
22 * Common utilities for the different types of db tables.
23 * Mostly matching etc.
24 */
25
26 #ifdef HAVE_CONFIG_H
27 # include "config.h"
28 #endif
29 #include "sys.h"
30 #include "erl_vm.h"
31 #include "global.h"
32 #include "erl_process.h"
33 #include "error.h"
34 #define ERTS_WANT_DB_INTERNAL__
35 #include "erl_db.h"
36 #include "bif.h"
37 #include "big.h"
38 #include "erl_binary.h"
39 #include "erl_map.h"
40 #include "erl_thr_progress.h"
41 #include "erl_proc_sig_queue.h"
42
43 #include "erl_db_util.h"
44
45
46 /*
47 ** Flags for the guard BIFs
48 */
49
50 /* These are offsets from the DCOMP_* value */
51 #define DBIF_GUARD 1
52 #define DBIF_BODY 0
53
54 /* These are the DBIF flag bits corresponding to the DCOMP_* value.
55 * If a bit is set, the BIF is allowed in that context. */
56 #define DBIF_TABLE_GUARD (1 << (DCOMP_TABLE + DBIF_GUARD))
57 #define DBIF_TABLE_BODY (1 << (DCOMP_TABLE + DBIF_BODY))
58 #define DBIF_TRACE_GUARD (1 << (DCOMP_TRACE + DBIF_GUARD))
59 #define DBIF_TRACE_BODY (1 << (DCOMP_TRACE + DBIF_BODY))
60 #define DBIF_ALL \
61 DBIF_TABLE_GUARD | DBIF_TABLE_BODY | DBIF_TRACE_GUARD | DBIF_TRACE_BODY
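/*
 * Illustrative sketch only (not the actual compiler check): a BIF whose
 * entry has the DBIF_TABLE_GUARD bit set may appear in the guard part of
 * an ETS (DCOMP_TABLE) match spec, so a context test boils down to:
 *
 *     int ok_in_table_guard = (bif_entry->flags & DBIF_TABLE_GUARD) != 0;
 *
 * where 'bif_entry' is a hypothetical pointer to a guard-BIF table entry.
 */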
62
63
64 #define HEAP_XTRA 100
65
66 /*
67 ** Some convenience macros for stacks (DMC == db_match_compile)
68 */
69
70 #define DMC_DEFAULT_SIZE 25
71
72 #define DMC_STACK_TYPE(Type) DMC_##Type##_stack
73
74 #define DMC_DECLARE_STACK_TYPE(Type) \
75 typedef struct DMC_STACK_TYPE(Type) { \
76 int pos; \
77 int siz; \
78 int bytes; \
79 Type *data; \
80 Type def[DMC_DEFAULT_SIZE]; \
81 } DMC_STACK_TYPE(Type)
82
83
84 typedef int Dummy;
85 DMC_DECLARE_STACK_TYPE(Dummy);
86
87 static void dmc_stack_grow(DMC_Dummy_stack* s)
88 {
89 int was_bytes = s->bytes;
90 s->siz *= 2;
91 s->bytes *= 2;
92 if (s->data == s->def) {
93 s->data = erts_alloc(ERTS_ALC_T_DB_MC_STK, s->bytes);
94 sys_memcpy(s->data, s->def, was_bytes);
95 }
96 else {
97 s->data = erts_realloc(ERTS_ALC_T_DB_MC_STK, s->data, s->bytes);
98 }
99 }
100
101 #define DMC_INIT_STACK(Name) do { \
102 (Name).pos = 0; \
103 (Name).siz = DMC_DEFAULT_SIZE; \
104 (Name).bytes = sizeof((Name).def); \
105 (Name).data = (Name).def; \
106 } while (0)
107
108 #define DMC_STACK_DATA(Name) (Name).data
109
110 #define DMC_STACK_NUM(Name) (Name).pos
111
112 #define DMC_PUSH(On, What) \
113 do { \
114 if ((On).pos >= (On).siz) \
115 dmc_stack_grow((DMC_Dummy_stack*)&(On)); \
116 (On).data[(On).pos++] = What; \
117 } while (0)
118
119 #define DMC_PUSH2(On, A, B) \
120 do { \
121 if ((On).pos+1 >= (On).siz) \
122 dmc_stack_grow((DMC_Dummy_stack*)&(On)); \
123 (On).data[(On).pos++] = A; \
124 (On).data[(On).pos++] = B; \
125 } while (0)
126
127 #define DMC_POP(From) (From).data[--(From).pos]
128
129 #define DMC_TOP(From) (From).data[(From).pos - 1]
130
131 #define DMC_EMPTY(Name) ((Name).pos == 0)
132
133 #define DMC_PEEK(On, At) (On).data[At]
134
135 #define DMC_POKE(On, At, Value) ((On).data[At] = (Value))
136
137 #define DMC_CLEAR(Name) (Name).pos = 0
138
139 #define DMC_FREE(Name) \
140 do { \
141 if ((Name).def != (Name).data) \
142 erts_free(ERTS_ALC_T_DB_MC_STK, (Name).data); \
143 } while (0)
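/*
 * Minimal usage sketch of the DMC stacks (an assumed typical pattern, not
 * copied from any call site below):
 *
 *     DMC_STACK_TYPE(UWord) text;
 *     DMC_INIT_STACK(text);          // starts out on the inline def[] array
 *     DMC_PUSH(text, matchTuple);    // grows via dmc_stack_grow() on demand
 *     DMC_PUSH(text, 2);
 *     while (!DMC_EMPTY(text)) {
 *         UWord w = DMC_POP(text);
 *         (void) w;
 *     }
 *     DMC_FREE(text);                // frees only if the stack ever grew
 */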
144
145
146 #define add_dmc_err(EINFO, STR, VAR, TERM, SEV) \
147 vadd_dmc_err(EINFO, SEV, VAR, STR, TERM)
148
149 #define ERTS_DB_STACK_MARGIN (sizeof(void *)*1024)
150
151 static int
152 stack_guard_downwards(char *limit)
153 {
154 char c;
155 ASSERT(limit);
156 return erts_check_below_limit(&c, limit + ERTS_DB_STACK_MARGIN);
157 }
158
159 static int
160 stack_guard_upwards(char *limit)
161 {
162 char c;
163 ASSERT(limit);
164 return erts_check_above_limit(&c, limit - ERTS_DB_STACK_MARGIN);
165 }
166
167 static int (*stack_guard)(char *) = NULL;
168
169 static ERTS_INLINE Process *
170 get_proc(Process *cp, Uint32 cp_locks, Eterm id, Uint32 id_locks)
171 {
172 Process *proc = erts_pid2proc(cp, cp_locks, id, id_locks);
173 if (!proc && is_atom(id))
174 proc = erts_whereis_process(cp, cp_locks, id, id_locks, 0);
175 return proc;
176 }
177
178
179 static Eterm
180 set_tracee_flags(Process *tracee_p, ErtsTracer tracer,
181 Uint d_flags, Uint e_flags) {
182 Eterm ret;
183 Uint flags;
184
185 if (ERTS_TRACER_IS_NIL(tracer)) {
186 flags = ERTS_TRACE_FLAGS(tracee_p) & ~TRACEE_FLAGS;
187 } else {
188 flags = ((ERTS_TRACE_FLAGS(tracee_p) & ~d_flags) | e_flags);
189 if (! flags) tracer = erts_tracer_nil;
190 }
191 ret = ((!ERTS_TRACER_COMPARE(ERTS_TRACER(tracee_p),tracer)
192 || ERTS_TRACE_FLAGS(tracee_p) != flags)
193 ? am_true
194 : am_false);
195 erts_tracer_replace(&tracee_p->common, tracer);
196 ERTS_TRACE_FLAGS(tracee_p) = flags;
197
198 return ret;
199 }
200 /*
201 ** Assuming all locks on tracee_p on entry
202 **
203 ** Changes ERTS_TRACE_FLAGS(tracee_p) and ERTS_TRACER_PROC(tracee_p)
204 ** according to input disable/enable flags and tracer.
205 **
206 ** Returns am_true|am_false on success, am_true if value changed,
207 ** returns fail_term on failure. Fails if tracer pid or port is invalid.
208 */
209 static Eterm
210 set_match_trace(Process *tracee_p, Eterm fail_term, ErtsTracer tracer,
211 Uint d_flags, Uint e_flags) {
212
213 ERTS_LC_ASSERT(
214 ERTS_PROC_LOCKS_ALL == erts_proc_lc_my_proc_locks(tracee_p)
215 || erts_thr_progress_is_blocking());
216
217 if (ERTS_TRACER_IS_NIL(tracer)
218 || erts_is_tracer_enabled(tracer, &tracee_p->common))
219 return set_tracee_flags(tracee_p, tracer, d_flags, e_flags);
220 return fail_term;
221 }
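/*
 * For orientation (hedged sketch, not a specification): these helpers back
 * the enable_trace/disable_trace and {trace, Disable, Enable} body
 * functions of tracing match specs (see matchEnableTrace/matchTrace2
 * below). E.g. a body such as
 *
 *     [{trace, [], [send, 'receive']}]
 *
 * ends up enabling the 'send' and 'receive' trace flags on the tracee.
 */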
222
223 /*
224 **
225 ** Types and enums (compiled matches)
226 **
227 */
228
229 /*
230 ** match VM instructions
231 */
232 typedef enum {
233 matchArray, /* Only when parameter is an array (DCOMP_TRACE) */
234 matchArrayBind, /* ------------- " ------------ */
235 matchTuple,
236 matchPushT,
237 matchPushL,
238 matchPushM,
239 matchPop,
240 matchSwap,
241 matchBind,
242 matchCmp,
243 matchEqBin,
244 matchEqFloat,
245 matchEqBig,
246 matchEqRef,
247 matchEq,
248 matchList,
249 matchMap,
250 matchKey,
251 matchSkip,
252 matchPushC,
253 matchConsA, /* Car is below Cdr */
254 matchConsB, /* Cdr is below Car (unusual) */
255 matchMkTuple,
256 matchMkFlatMap,
257 matchMkHashMap,
258 matchCall0,
259 matchCall1,
260 matchCall2,
261 matchCall3,
262 matchPushV,
263 matchPushVResult, /* First variable reference in result */
264 matchPushExpr, /* Push the whole expression we're matching ('$_') */
265 matchPushArrayAsList, /* Only when parameter is an Array and
266 not an erlang term (DCOMP_TRACE) */
267 matchPushArrayAsListU, /* As above but unknown size */
268 matchTrue,
269 matchOr,
270 matchAnd,
271 matchOrElse,
272 matchAndAlso,
273 matchJump,
274 matchSelf,
275 matchWaste,
276 matchReturn,
277 matchProcessDump,
278 matchDisplay,
279 matchIsSeqTrace,
280 matchSetSeqToken,
281 matchGetSeqToken,
282 matchSetReturnTrace,
283 matchSetExceptionTrace,
284 matchCatch,
285 matchEnableTrace,
286 matchDisableTrace,
287 matchEnableTrace2,
288 matchDisableTrace2,
289 matchTryMeElse,
290 matchCaller,
291 matchHalt,
292 matchSilent,
293 matchSetSeqTokenFake,
294 matchTrace2,
295 matchTrace3
296 } MatchOps;
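/*
 * For intuition only (the exact sequence emitted by db_match_compile may
 * differ): a match spec clause with head {'$1', foo}, empty guard and body
 * ['$1'] compiles to roughly
 *
 *     matchTuple 2, matchBind 0, matchEq foo,
 *     matchPushVResult 0, matchReturn, matchHalt
 *
 * i.e. first the pattern walk, then the (empty) guard, then the body.
 */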
297
298 /*
299 ** Guard BIFs
300 */
301
302 typedef struct dmc_guard_bif {
303 Eterm name; /* atom */
304 void *biff;
305 /* BIF_RETTYPE (*biff)(); */
306 int arity;
307 Uint32 flags;
308 } DMCGuardBif;
309
310 /*
311 ** Error information (for lint)
312 */
313
314 /*
315 ** Type declarations for stacks
316 */
317 DMC_DECLARE_STACK_TYPE(Eterm);
318
319 DMC_DECLARE_STACK_TYPE(UWord);
320
321 DMC_DECLARE_STACK_TYPE(unsigned);
322
323 /*
324 ** Data about the heap during compilation
325 */
326
327 typedef struct DMCVariable {
328 int is_bound;
329 int is_in_body;
330 } DMCVariable;
331
332 typedef struct DMCHeap {
333 int size;
334 DMCVariable vars_def[DMC_DEFAULT_SIZE];
335 DMCVariable* vars;
336 int vars_used;
337 } DMCHeap;
338
339 /*
340 ** Return values from sub compilation steps (guard compilation)
341 */
342
343 typedef enum dmc_ret {
344 retOk,
345 retFail,
346 retRestart
347 } DMCRet;
348
349 /*
350 ** Diverse context information
351 */
352
353 typedef struct dmc_context {
354 int stack_need;
355 int stack_used;
356 ErlHeapFragment *save;
357 ErlHeapFragment *copy;
358 Eterm *matchexpr;
359 Eterm *guardexpr;
360 Eterm *bodyexpr;
361 int num_match;
362 int current_match;
363 Uint cflags;
364 int is_guard; /* 1 if in guard, 0 if in body */
365 int special; /* 1 if the head in the match was a single expression */
366 DMCErrInfo *err_info;
367 char *stack_limit;
368 Uint freason;
369 } DMCContext;
370
371 /*
372 **
373 ** Global variables
374 **
375 */
376
377 /*
378 ** Internal
379 */
380
381 /*
382 ** The pseudo process used by the VM (pam).
383 */
384
385 #define ERTS_DEFAULT_MS_HEAP_SIZE 128
386
387 /* Runtime info about a $-variable
388 */
389 typedef struct MatchVariable {
390 Eterm term;
391 #ifdef DEBUG
392 Process* proc;
393 #endif
394 } MatchVariable;
395
396 typedef struct {
397 Process process;
398 union {
399 Eterm* heap;
400 MatchVariable* variables; /* first on "heap" */
401 }u;
402 Eterm default_heap[ERTS_DEFAULT_MS_HEAP_SIZE];
403 } ErtsMatchPseudoProcess;
404
405
406 static erts_tsd_key_t match_pseudo_process_key;
407
408 static ERTS_INLINE void
409 cleanup_match_pseudo_process(ErtsMatchPseudoProcess *mpsp, int keep_heap)
410 {
411 if (mpsp->process.mbuf || mpsp->process.off_heap.first) {
412 erts_cleanup_empty_process(&mpsp->process);
413 }
414 #ifdef DEBUG
415 else {
416 erts_debug_verify_clean_empty_process(&mpsp->process);
417 }
418 #endif
419 if (!keep_heap) {
420 if (mpsp->u.heap != mpsp->default_heap) {
421 /* Has to be done *after* the call to erts_cleanup_empty_process() */
422 erts_free(ERTS_ALC_T_DB_MS_RUN_HEAP, (void *) mpsp->u.heap);
423 mpsp->u.heap = mpsp->default_heap;
424 }
425 #ifdef DEBUG
426 else {
427 int i;
428 for (i = 0; i < ERTS_DEFAULT_MS_HEAP_SIZE; i++) {
429 #if defined(ARCH_64)
430 mpsp->default_heap[i] = (Eterm) 0xdeadbeefdeadbeef;
431 #else
432 mpsp->default_heap[i] = (Eterm) 0xdeadbeef;
433 #endif
434 }
435 }
436 #endif
437 }
438 }
439
440 static ErtsMatchPseudoProcess *
441 create_match_pseudo_process(void)
442 {
443 ErtsMatchPseudoProcess *mpsp;
444 mpsp = (ErtsMatchPseudoProcess *)erts_alloc(ERTS_ALC_T_DB_MS_PSDO_PROC,
445 sizeof(ErtsMatchPseudoProcess));
446 erts_init_empty_process(&mpsp->process);
447 mpsp->u.heap = mpsp->default_heap;
448 return mpsp;
449 }
450
451 static ERTS_INLINE ErtsMatchPseudoProcess *
452 get_match_pseudo_process(Process *c_p, Uint heap_size)
453 {
454 ErtsMatchPseudoProcess *mpsp;
455 ErtsSchedulerData *esdp;
456
457 esdp = c_p ? c_p->scheduler_data : erts_get_scheduler_data();
458
459 mpsp = esdp ? esdp->match_pseudo_process :
460 (ErtsMatchPseudoProcess*) erts_tsd_get(match_pseudo_process_key);
461
462 if (mpsp) {
463 ASSERT(mpsp == erts_tsd_get(match_pseudo_process_key));
464 ASSERT(mpsp->process.scheduler_data == esdp);
465 cleanup_match_pseudo_process(mpsp, 0);
466 }
467 else {
468 ASSERT(erts_tsd_get(match_pseudo_process_key) == NULL);
469 mpsp = create_match_pseudo_process();
470 if (esdp) {
471 esdp->match_pseudo_process = (void *) mpsp;
472 }
473 mpsp->process.scheduler_data = esdp;
474 erts_tsd_set(match_pseudo_process_key, (void *) mpsp);
475 }
476 if (heap_size > ERTS_DEFAULT_MS_HEAP_SIZE*sizeof(Eterm)) {
477 mpsp->u.heap = (Eterm*) erts_alloc(ERTS_ALC_T_DB_MS_RUN_HEAP, heap_size);
478 }
479 else {
480 ASSERT(mpsp->u.heap == mpsp->default_heap);
481 }
482 return mpsp;
483 }
484
485 static void
486 destroy_match_pseudo_process(void)
487 {
488 ErtsMatchPseudoProcess *mpsp;
489 mpsp = (ErtsMatchPseudoProcess *)erts_tsd_get(match_pseudo_process_key);
490 if (mpsp) {
491 cleanup_match_pseudo_process(mpsp, 0);
492 erts_free(ERTS_ALC_T_DB_MS_PSDO_PROC, (void *) mpsp);
493 erts_tsd_set(match_pseudo_process_key, (void *) NULL);
494 }
495 }
496
497 static
498 void
499 match_pseudo_process_init(void)
500 {
501 erts_tsd_key_create(&match_pseudo_process_key,
502 "erts_match_pseudo_process_key");
503 erts_thr_install_exit_handler(destroy_match_pseudo_process);
504 }
505
506 void
507 erts_match_set_release_result(Process* c_p)
508 {
509 (void) get_match_pseudo_process(c_p, 0); /* Clean it up */
510 }
511
512 /* The trace control word. */
513
514 static erts_atomic32_t trace_control_word;
515
516 /* This needs to be here, before the bif table... */
517
518 static Eterm db_set_trace_control_word_fake_1(BIF_ALIST_1);
519 static Eterm db_length_1(BIF_ALIST_1);
520
521 /*
522 ** The table of callable BIFs, i.e. guard BIFs and
523 ** some special animals that can provide us with trace
524 ** information. This array is sorted on init.
525 */
526 static DMCGuardBif guard_tab[] =
527 {
528 {
529 am_is_atom,
530 &is_atom_1,
531 1,
532 DBIF_ALL
533 },
534 {
535 am_is_float,
536 &is_float_1,
537 1,
538 DBIF_ALL
539 },
540 {
541 am_is_integer,
542 &is_integer_1,
543 1,
544 DBIF_ALL
545 },
546 {
547 am_is_list,
548 &is_list_1,
549 1,
550 DBIF_ALL
551 },
552 {
553 am_is_number,
554 &is_number_1,
555 1,
556 DBIF_ALL
557 },
558 {
559 am_is_pid,
560 &is_pid_1,
561 1,
562 DBIF_ALL
563 },
564 {
565 am_is_port,
566 &is_port_1,
567 1,
568 DBIF_ALL
569 },
570 {
571 am_is_reference,
572 &is_reference_1,
573 1,
574 DBIF_ALL
575 },
576 {
577 am_is_tuple,
578 &is_tuple_1,
579 1,
580 DBIF_ALL
581 },
582 {
583 am_is_map,
584 &is_map_1,
585 1,
586 DBIF_ALL
587 },
588 {
589 am_is_binary,
590 &is_binary_1,
591 1,
592 DBIF_ALL
593 },
594 {
595 am_is_function,
596 &is_function_1,
597 1,
598 DBIF_ALL
599 },
600 {
601 am_is_record,
602 &is_record_3,
603 3,
604 DBIF_ALL
605 },
606 {
607 am_abs,
608 &abs_1,
609 1,
610 DBIF_ALL
611 },
612 {
613 am_element,
614 &element_2,
615 2,
616 DBIF_ALL
617 },
618 {
619 am_hd,
620 &hd_1,
621 1,
622 DBIF_ALL
623 },
624 {
625 am_length,
626 &db_length_1,
627 1,
628 DBIF_ALL
629 },
630 {
631 am_node,
632 &node_1,
633 1,
634 DBIF_ALL
635 },
636 {
637 am_node,
638 &node_0,
639 0,
640 DBIF_ALL
641 },
642 {
643 am_round,
644 &round_1,
645 1,
646 DBIF_ALL
647 },
648 {
649 am_size,
650 &size_1,
651 1,
652 DBIF_ALL
653 },
654 {
655 am_map_size,
656 &map_size_1,
657 1,
658 DBIF_ALL
659 },
660 {
661 am_map_get,
662 &map_get_2,
663 2,
664 DBIF_ALL
665 },
666 {
667 am_is_map_key,
668 &is_map_key_2,
669 2,
670 DBIF_ALL
671 },
672 {
673 am_bit_size,
674 &bit_size_1,
675 1,
676 DBIF_ALL
677 },
678 {
679 am_tl,
680 &tl_1,
681 1,
682 DBIF_ALL
683 },
684 {
685 am_trunc,
686 &trunc_1,
687 1,
688 DBIF_ALL
689 },
690 {
691 am_float,
692 &float_1,
693 1,
694 DBIF_ALL
695 },
696 {
697 am_Plus,
698 &splus_1,
699 1,
700 DBIF_ALL
701 },
702 {
703 am_Minus,
704 &sminus_1,
705 1,
706 DBIF_ALL
707 },
708 {
709 am_Plus,
710 &splus_2,
711 2,
712 DBIF_ALL
713 },
714 {
715 am_Minus,
716 &sminus_2,
717 2,
718 DBIF_ALL
719 },
720 {
721 am_Times,
722 &stimes_2,
723 2,
724 DBIF_ALL
725 },
726 {
727 am_Div,
728 &div_2,
729 2,
730 DBIF_ALL
731 },
732 {
733 am_div,
734 &intdiv_2,
735 2,
736 DBIF_ALL
737 },
738 {
739 am_rem,
740 &rem_2,
741 2,
742 DBIF_ALL
743 },
744 {
745 am_band,
746 &band_2,
747 2,
748 DBIF_ALL
749 },
750 {
751 am_bor,
752 &bor_2,
753 2,
754 DBIF_ALL
755 },
756 {
757 am_bxor,
758 &bxor_2,
759 2,
760 DBIF_ALL
761 },
762 {
763 am_bnot,
764 &bnot_1,
765 1,
766 DBIF_ALL
767 },
768 {
769 am_bsl,
770 &bsl_2,
771 2,
772 DBIF_ALL
773 },
774 {
775 am_bsr,
776 &bsr_2,
777 2,
778 DBIF_ALL
779 },
780 {
781 am_Gt,
782 &sgt_2,
783 2,
784 DBIF_ALL
785 },
786 {
787 am_Ge,
788 &sge_2,
789 2,
790 DBIF_ALL
791 },
792 {
793 am_Lt,
794 &slt_2,
795 2,
796 DBIF_ALL
797 },
798 {
799 am_Le,
800 &sle_2,
801 2,
802 DBIF_ALL
803 },
804 {
805 am_Eq,
806 &seq_2,
807 2,
808 DBIF_ALL
809 },
810 {
811 am_Eqeq,
812 &seqeq_2,
813 2,
814 DBIF_ALL
815 },
816 {
817 am_Neq,
818 &sneq_2,
819 2,
820 DBIF_ALL
821 },
822 {
823 am_Neqeq,
824 &sneqeq_2,
825 2,
826 DBIF_ALL
827 },
828 {
829 am_not,
830 &not_1,
831 1,
832 DBIF_ALL
833 },
834 {
835 am_xor,
836 &xor_2,
837 2,
838 DBIF_ALL
839 },
840 {
841 am_get_tcw,
842 &db_get_trace_control_word_0,
843 0,
844 DBIF_TRACE_GUARD | DBIF_TRACE_BODY
845 },
846 {
847 am_set_tcw,
848 &db_set_trace_control_word_1,
849 1,
850 DBIF_TRACE_BODY
851 },
852 {
853 am_set_tcw_fake,
854 &db_set_trace_control_word_fake_1,
855 1,
856 DBIF_TRACE_BODY
857 }
858 };
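/*
 * The get_tcw/set_tcw entries above are only allowed in tracing match
 * specs, per their DBIF_TRACE_* flags. A hypothetical body that bumps the
 * trace control word:
 *
 *     [{set_tcw, {'+', {get_tcw}, 1}}]
 */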
859
860 /*
861 ** Exported
862 */
863 Eterm db_am_eot; /* Atom '$end_of_table' */
864
865 /*
866 ** Forward declarations
867 */
868
869
870 /*
871 ** ... forwards for compiled matches
872 */
873 /* Utility code */
874 static DMCGuardBif *dmc_lookup_bif(Eterm t, int arity);
875 #ifdef DMC_DEBUG
876 static Eterm dmc_lookup_bif_reversed(void *f);
877 #endif
878 static int cmp_uint(void *a, void *b);
879 static int cmp_guard_bif(void *a, void *b);
880 static int match_compact(ErlHeapFragment *expr, DMCErrInfo *err_info);
881 static Uint my_size_object(Eterm t);
882 static Eterm my_copy_struct(Eterm t, Eterm **hp, ErlOffHeap* off_heap);
883
884 /* Guard subroutines */
885 static void
886 dmc_rearrange_constants(DMCContext *context, DMC_STACK_TYPE(UWord) *text,
887 int textpos, Eterm *p, Uint nelems);
888 static DMCRet
889 dmc_array(DMCContext *context, DMCHeap *heap, DMC_STACK_TYPE(UWord) *text,
890 Eterm *p, Uint nelems, int *constant);
891 /* Guard compilation */
892 static void do_emit_constant(DMCContext *context, DMC_STACK_TYPE(UWord) *text,
893 Eterm t);
894 static DMCRet dmc_list(DMCContext *context,
895 DMCHeap *heap,
896 DMC_STACK_TYPE(UWord) *text,
897 Eterm t,
898 int *constant);
899 static DMCRet dmc_tuple(DMCContext *context,
900 DMCHeap *heap,
901 DMC_STACK_TYPE(UWord) *text,
902 Eterm t,
903 int *constant);
904 static DMCRet
905 dmc_map(DMCContext *context, DMCHeap *heap, DMC_STACK_TYPE(UWord) *text,
906 Eterm t, int *constant);
907 static DMCRet dmc_variable(DMCContext *context,
908 DMCHeap *heap,
909 DMC_STACK_TYPE(UWord) *text,
910 Eterm t,
911 int *constant);
912 static DMCRet dmc_fun(DMCContext *context,
913 DMCHeap *heap,
914 DMC_STACK_TYPE(UWord) *text,
915 Eterm t,
916 int *constant);
917 static DMCRet dmc_expr(DMCContext *context,
918 DMCHeap *heap,
919 DMC_STACK_TYPE(UWord) *text,
920 Eterm t,
921 int *constant);
922 static DMCRet compile_guard_expr(DMCContext *context,
923 DMCHeap *heap,
924 DMC_STACK_TYPE(UWord) *text,
925 Eterm t);
926 /* match expression subroutines */
927 static DMCRet dmc_one_term(DMCContext *context,
928 DMCHeap *heap,
929 DMC_STACK_TYPE(Eterm) *stack,
930 DMC_STACK_TYPE(UWord) *text,
931 Eterm c);
932 static Eterm
933 dmc_private_copy(DMCContext *context, Eterm c);
934
935
936 #ifdef DMC_DEBUG
937 static int test_disassemble_next = 0;
938 void db_match_dis(Binary *prog);
939 #define TRACE erts_fprintf(stderr,"Trace: %s:%d\n",__FILE__,__LINE__)
940 #define FENCE_PATTERN_SIZE (1*sizeof(Uint))
941 #define FENCE_PATTERN 0xDEADBEEFUL
942 #else
943 #define TRACE /* Nothing */
944 #define FENCE_PATTERN_SIZE 0
945 #endif
946 static void vadd_dmc_err(DMCErrInfo*, DMCErrorSeverity, int var, const char *str, ...);
947
948 static Eterm dpm_array_to_list(Process *psp, Eterm *arr, int arity);
949
950 static Eterm match_spec_test(Process *p, Eterm against, Eterm spec, int trace);
951
952 static Eterm seq_trace_fake(Process *p, Eterm arg1);
953
954
955 /*
956 ** Interface routines.
957 */
958
959 /*
960 ** Pseudo BIFs callable from the PAM VM.
961 */
962 BIF_RETTYPE db_get_trace_control_word(Process *p)
963 {
964 Uint32 tcw = (Uint32) erts_atomic32_read_acqb(&trace_control_word);
965 BIF_RET(erts_make_integer((Uint) tcw, p));
966 }
967
968 BIF_RETTYPE db_get_trace_control_word_0(BIF_ALIST_0)
969 {
970 BIF_RET(db_get_trace_control_word(BIF_P));
971 }
972
973 BIF_RETTYPE db_set_trace_control_word(Process *p, Eterm new)
974 {
975 Uint val;
976 Uint32 old_tcw;
977 if (!term_to_Uint(new, &val))
978 BIF_ERROR(p, BADARG);
979 if (val != ((Uint32)val))
980 BIF_ERROR(p, BADARG);
981
982 old_tcw = (Uint32) erts_atomic32_xchg_relb(&trace_control_word,
983 (erts_aint32_t) val);
984 BIF_RET(erts_make_integer((Uint) old_tcw, p));
985 }
986
987 BIF_RETTYPE db_set_trace_control_word_1(BIF_ALIST_1)
988 {
989 BIF_RET(db_set_trace_control_word(BIF_P, BIF_ARG_1));
990 }
991
992 /*
993 * Implementation of length/1 for match specs (non-trapping).
994 */
995 static Eterm db_length_1(BIF_ALIST_1)
996 {
997 Eterm list;
998 Uint i;
999
1000 list = BIF_ARG_1;
1001 i = 0;
1002 while (is_list(list)) {
1003 i++;
1004 list = CDR(list_val(list));
1005 }
1006 if (is_not_nil(list)) {
1007 BIF_ERROR(BIF_P, BADARG);
1008 }
1009 BIF_RET(make_small(i));
1010 }
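/*
 * Example (illustration only): in a match spec such as
 *
 *     [{'$1', [{'==', {length, '$1'}, 3}], [true]}]
 *
 * the length/1 call in the guard is served by db_length_1 above rather
 * than by the trapping length/1 BIF.
 */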
1011
1012 static Eterm db_set_trace_control_word_fake_1(BIF_ALIST_1)
1013 {
1014 Process *p = BIF_P;
1015 Eterm new = BIF_ARG_1;
1016 Uint val;
1017 if (!term_to_Uint(new, &val))
1018 BIF_ERROR(p, BADARG);
1019 if (val != ((Uint32)val))
1020 BIF_ERROR(p, BADARG);
1021 BIF_RET(db_get_trace_control_word(p));
1022 }
1023
1024 /*
1025 ** The API used by the tracer (declared in global.h):
1026 */
1027
1028 /*
1029 ** Matchexpr is a list of tuples containing match-code, i.e.:
1030 **
1031 ** Matchexpr = [{Pattern, Guards, Body}, ...]
1032 ** Pattern = [ PatternExpr , ...]
1033 ** PatternExpr = Constant | PatternTuple | PatternList | Variable
1034 ** Constant = Any erlang term
1035 ** PatternTuple = { PatternExpr ... }
1036 ** PatternList = [ PatternExpr ]
1037 ** Variable = '$' ++ <number>
1038 ** Guards = [Guard ...]
1039 ** Guard = {GuardFunc, GuardExpr, ...}
1040 ** GuardExpr = BoundVariable | Guard | GuardList | GuardTuple | ConstExpr
1041 ** BoundVariable = Variable (existing in Pattern)
1042 ** GuardList = [ GuardExpr , ... ]
1043 ** GuardTuple = {{ GuardExpr, ... }}
1044 ** ConstExpr = {const, Constant}
1045 ** GuardFunc = is_list | .... | element | ...
1046 ** Body = [ BodyExpr, ... ]
1047 ** BodyExpr = GuardExpr | { BodyFunc, GuardExpr, ... }
1048 ** BodyFunc = return_trace | seq_trace | trace | ...
1049 ** - or something like that...
1050 */
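/*
 * A concrete example in the grammar above (tracing flavour, assumed):
 *
 *     [{['$1', lists], [{is_atom, '$1'}], [{return_trace}]}]
 *
 * matches calls with two arguments where the first is an atom and the
 * second is exactly 'lists', and requests return tracing.
 */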
1051
1052
1053 Eterm erts_match_set_get_source(Binary *mpsp)
1054 {
1055 MatchProg *prog = Binary2MatchProg(mpsp);
1056 return prog->saved_program;
1057 }
1058
1059 /* This one is for the tracing */
1060 Binary *erts_match_set_compile(Process *p, Eterm matchexpr, Eterm MFA, Uint *freasonp) {
1061 Binary *bin;
1062 Uint sz;
1063 Eterm *hp;
1064 Uint flags;
1065
1066 switch (MFA) {
1067 case am_receive: flags = DCOMP_TRACE; break;
1068 case am_send: flags = DCOMP_TRACE | DCOMP_ALLOW_TRACE_OPS; break;
1069 default:
1070 flags = DCOMP_TRACE | DCOMP_CALL_TRACE | DCOMP_ALLOW_TRACE_OPS;
1071 }
1072
1073 bin = db_match_set_compile(p, matchexpr, flags, freasonp);
1074 if (bin != NULL) {
1075 MatchProg *prog = Binary2MatchProg(bin);
1076 sz = size_object(matchexpr);
1077 prog->saved_program_buf = new_message_buffer(sz);
1078 hp = prog->saved_program_buf->mem;
1079 prog->saved_program =
1080 copy_struct(matchexpr, sz, &hp,
1081 &(prog->saved_program_buf->off_heap));
1082 }
1083 return bin;
1084 }
1085
1086 Binary *db_match_set_compile(Process *p, Eterm matchexpr,
1087 Uint flags, Uint *freasonp)
1088 {
1089 Eterm l;
1090 Eterm t;
1091 Eterm l2;
1092 Eterm *tp;
1093 Eterm *hp;
1094 int n = 0;
1095 int num_heads;
1096 int i;
1097 Binary *mps = NULL;
1098 int compiled = 0;
1099 Eterm *matches,*guards, *bodies;
1100 Eterm *buff;
1101 Eterm sbuff[15];
1102
1103 *freasonp = BADARG;
1104
1105 if (!is_list(matchexpr))
1106 return NULL;
1107 num_heads = 0;
1108 for (l = matchexpr; is_list(l); l = CDR(list_val(l)))
1109 ++num_heads;
1110
1111 if (l != NIL) /* not a proper list... */
1112 return NULL;
1113
1114 if (num_heads > 5) {
1115 buff = erts_alloc(ERTS_ALC_T_DB_TMP,
1116 sizeof(Eterm) * num_heads * 3);
1117 } else {
1118 buff = sbuff;
1119 }
1120
1121 matches = buff;
1122 guards = buff + num_heads;
1123 bodies = buff + (num_heads * 2);
1124
1125 i = 0;
1126 for (l = matchexpr; is_list(l); l = CDR(list_val(l))) {
1127 t = CAR(list_val(l));
1128 if (!is_tuple(t) || (tp = tuple_val(t))[0] != make_arityval(3)) {
1129 goto error;
1130 }
1131 if (!(flags & DCOMP_TRACE) || (!is_list(tp[1]) &&
1132 !is_nil(tp[1]))) {
1133 t = tp[1];
1134 } else {
1135 /* When tracing, the parameter is a list that we
1136 convert to a tuple and match against an array
1137 (strange, but it gives the semantics of matching
1138 against a parameter list) */
1139 n = 0;
1140 for (l2 = tp[1]; is_list(l2); l2 = CDR(list_val(l2))) {
1141 ++n;
1142 }
1143 if (l2 != NIL) {
1144 goto error;
1145 }
1146 hp = HAlloc(p, n + 1);
1147 t = make_tuple(hp);
1148 *hp++ = make_arityval((Uint) n);
1149 l2 = tp[1];
1150 while (n--) {
1151 *hp++ = CAR(list_val(l2));
1152 l2 = CDR(list_val(l2));
1153 }
1154 }
1155 matches[i] = t;
1156 guards[i] = tp[2];
1157 bodies[i] = tp[3];
1158 ++i;
1159 }
1160 if ((mps = db_match_compile(matches, guards, bodies,
1161 num_heads,
1162 flags,
1163 NULL,
1164 freasonp)) == NULL) {
1165 goto error;
1166 }
1167 compiled = 1;
1168 if (buff != sbuff) {
1169 erts_free(ERTS_ALC_T_DB_TMP, buff);
1170 }
1171 return mps;
1172
1173 error:
1174 if (compiled) {
1175 erts_bin_free(mps);
1176 }
1177 if (buff != sbuff) {
1178 erts_free(ERTS_ALC_T_DB_TMP, buff);
1179 }
1180 return NULL;
1181 }
1182
1183 /*
1184 * Compare a matching term 'a' with a constructing term 'b' for equality.
1185 *
1186 * Returns true if 'b' is guaranteed to always construct
1187 * the same term as 'a' has matched.
1188 */
1189 static int db_match_eq_body(Eterm a, Eterm b, int const_mode)
1190 {
1191 DECLARE_ESTACK(s);
1192 Uint arity;
1193 Eterm *ap, *bp;
1194 const Eterm CONST_MODE_OFF = THE_NON_VALUE;
1195
1196 while (1) {
1197 switch(b & _TAG_PRIMARY_MASK) {
1198 case TAG_PRIMARY_LIST:
1199 if (!is_list(a))
1200 return 0;
1201 ESTACK_PUSH2(s, CDR(list_val(a)), CDR(list_val(b)));
1202 a = CAR(list_val(a));
1203 b = CAR(list_val(b));
1204 continue; /* loop without pop */
1205
1206 case TAG_PRIMARY_BOXED:
1207 if (is_tuple(b)) {
1208 bp = tuple_val(b);
1209 if (!const_mode) {
1210 if (bp[0] == make_arityval(1) && is_tuple(bp[1])) {
1211 b = bp[1]; /* double-tuple syntax */
1212 }
1213 else if (bp[0] == make_arityval(2) && bp[1] == am_const) {
1214 ESTACK_PUSH(s, CONST_MODE_OFF);
1215 const_mode = 1; /* {const, term()} syntax */
1216 b = bp[2];
1217 continue; /* loop without pop */
1218 }
1219 else
1220 return 0; /* function call or invalid tuple syntax */
1221 }
1222 if (!is_tuple(a))
1223 return 0;
1224
1225 ap = tuple_val(a);
1226 bp = tuple_val(b);
1227 if (ap[0] != bp[0])
1228 return 0;
1229 arity = arityval(ap[0]);
1230 if (arity > 0) {
1231 a = *(++ap);
1232 b = *(++bp);
1233 while(--arity) {
1234 ESTACK_PUSH2(s, *(++ap), *(++bp));
1235 }
1236 continue; /* loop without pop */
1237 }
1238 }
1239 else if (is_map(b)) {
1240 /* We don't know what other pairs the matched map may contain */
1241 return 0;
1242 }
1243 else if (!eq(a,b)) /* other boxed */
1244 return 0;
1245 break;
1246
1247 case TAG_PRIMARY_IMMED1:
1248 if (a != b || a == am_Underscore || a == am_DollarDollar
1249 || a == am_DollarUnderscore
1250 || (const_mode && db_is_variable(a) >= 0)) {
1251
1252 return 0;
1253 }
1254 break;
1255 default:
1256 erts_exit(ERTS_ABORT_EXIT, "db_compare: "
1257 "Bad object on ESTACK: 0x%bex\n", b);
1258 }
1259
1260 pop_next:
1261 if (ESTACK_ISEMPTY(s))
1262 break; /* done */
1263
1264 b = ESTACK_POP(s);
1265 if (b == CONST_MODE_OFF) {
1266 ASSERT(const_mode);
1267 const_mode = 0;
1268 goto pop_next;
1269 }
1270 a = ESTACK_POP(s);
1271 }
1272
1273 DESTROY_ESTACK(s);
1274 return 1;
1275 }
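/*
 * Illustration (not exhaustive): db_match_eq_body('$1', '$1', 0) is true,
 * as is db_match_eq_body({'$1', foo}, {{'$1', foo}}, 0) thanks to the
 * double-tuple construction syntax, while db_match_eq_body('$1',
 * {const, bar}, 0) is false since '$1' may have matched anything.
 */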
1276
1277 /* This is used by select_replace */
1278 int db_match_keeps_key(int keypos, Eterm match, Eterm guard, Eterm body)
1279 {
1280 Eterm match_key;
1281 Eterm* body_list;
1282 Eterm single_body_term;
1283 Eterm* single_body_term_tpl;
1284 Eterm single_body_subterm;
1285 Eterm single_body_subterm_key;
1286 Eterm* single_body_subterm_key_tpl;
1287 int const_mode;
1288
1289 if (!is_list(body)) {
1290 return 0;
1291 }
1292
1293 body_list = list_val(body);
1294 if (CDR(body_list) != NIL) {
1295 return 0;
1296 }
1297
1298 single_body_term = CAR(body_list);
1299 if (single_body_term == am_DollarUnderscore) {
1300 /* same tuple is returned */
1301 return 1;
1302 }
1303
1304 if (!is_tuple(single_body_term)) {
1305 return 0;
1306 }
1307
1308 match_key = db_getkey(keypos, match);
1309 if (!is_value(match_key)) {
1310 // can't get key out of match
1311 return 0;
1312 }
1313
1314 single_body_term_tpl = tuple_val(single_body_term);
1315 if (single_body_term_tpl[0] == make_arityval(2) &&
1316 single_body_term_tpl[1] == am_const) {
1317 /* {const, {"ets-tuple constant"}} */
1318 single_body_subterm = single_body_term_tpl[2];
1319 const_mode = 1;
1320 }
1321 else if (*single_body_term_tpl == make_arityval(1)) {
1322 /* {{"ets-tuple construction"}} */
1323 single_body_subterm = single_body_term_tpl[1];
1324 const_mode = 0;
1325 }
1326 else {
1327 /* not a tuple construction */
1328 return 0;
1329 }
1330
1331 single_body_subterm_key = db_getkey(keypos, single_body_subterm);
1332 if (!is_value(single_body_subterm_key)) {
1333 // can't get key out of single body subterm
1334 return 0;
1335 }
1336
1337 if (db_match_eq_body(match_key, single_body_subterm_key, const_mode)) {
1338 /* tuple with same key is returned */
1339 return 1;
1340 }
1341
1342 if (const_mode) {
1343 /* constant key did not match */
1344 return 0;
1345 }
1346
1347 if (!is_tuple(single_body_subterm_key)) {
1348 /* can't possibly be an element instruction */
1349 return 0;
1350 }
1351
1352 single_body_subterm_key_tpl = tuple_val(single_body_subterm_key);
1353 if (single_body_subterm_key_tpl[0] == make_arityval(3) &&
1354 single_body_subterm_key_tpl[1] == am_element &&
1355 single_body_subterm_key_tpl[3] == am_DollarUnderscore &&
1356 single_body_subterm_key_tpl[2] == make_small(keypos))
1357 {
1358 /* {element, KeyPos, '$_'} */
1359 return 1;
1360 }
1361
1362 return 0;
1363 }
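/*
 * Example (illustration only): with keypos = 1, the spec clause
 *
 *     {{'$1', '$2'}, [], [{{'$1', updated}}]}
 *
 * keeps the key (the body rebuilds element 1 from '$1'), whereas a body of
 * [{{new_key, '$2'}}] does not.
 */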
1364
1365 static Eterm db_match_set_lint(Process *p, Eterm matchexpr, Uint flags)
1366 {
1367 Eterm l;
1368 Eterm t;
1369 Eterm l2;
1370 Eterm *tp;
1371 Eterm *hp;
1372 DMCErrInfo *err_info = db_new_dmc_err_info();
1373 Eterm ret;
1374 int n = 0;
1375 int num_heads;
1376 Binary *mp;
1377 Eterm *matches,*guards, *bodies;
1378 Eterm sbuff[15];
1379 Eterm *buff = sbuff;
1380 int i;
1381 Uint freason = BADARG;
1382
1383 if (!is_list(matchexpr)) {
1384 add_dmc_err(err_info, "Match programs are not in a list.",
1385 -1, 0UL, dmcError);
1386 goto done;
1387 }
1388 num_heads = 0;
1389 for (l = matchexpr; is_list(l); l = CDR(list_val(l)))
1390 ++num_heads;
1391
1392 if (l != NIL) { /* not a proper list... */
1393 add_dmc_err(err_info, "Match programs are not in a proper list.",
1394 -1, 0UL, dmcError);
1395 goto done;
1396 }
1397
1398 if (num_heads > 5) {
1399 buff = erts_alloc(ERTS_ALC_T_DB_TMP,
1400 sizeof(Eterm) * num_heads * 3);
1401 }
1402
1403 matches = buff;
1404 guards = buff + num_heads;
1405 bodies = buff + (num_heads * 2);
1406
1407 i = 0;
1408 for (l = matchexpr; is_list(l); l = CDR(list_val(l))) {
1409 t = CAR(list_val(l));
1410 if (!is_tuple(t) || (tp = tuple_val(t))[0] != make_arityval(3)) {
1411 add_dmc_err(err_info,
1412 "Match program part is not a tuple of "
1413 "arity 3.",
1414 -1, 0UL, dmcError);
1415 goto done;
1416 }
1417 if (!(flags & DCOMP_TRACE) || (!is_list(tp[1]) &&
1418 !is_nil(tp[1]))) {
1419 t = tp[1];
1420 } else {
1421 n = 0;
1422 for (l2 = tp[1]; is_list(l2); l2 = CDR(list_val(l2))) {
1423 ++n;
1424 }
1425 if (l2 != NIL) {
1426 add_dmc_err(err_info,
1427 "Match expression part %T is not a "
1428 "proper list.",
1429 -1, tp[1], dmcError);
1430
1431 goto done;
1432 }
1433 hp = HAlloc(p, n + 1);
1434 t = make_tuple(hp);
1435 *hp++ = make_arityval((Uint) n);
1436 l2 = tp[1];
1437 while (n--) {
1438 *hp++ = CAR(list_val(l2));
1439 l2 = CDR(list_val(l2));
1440 }
1441 }
1442 matches[i] = t;
1443 guards[i] = tp[2];
1444 bodies[i] = tp[3];
1445 ++i;
1446 }
1447 mp = db_match_compile(matches, guards, bodies, num_heads,
1448 flags, err_info, &freason);
1449 if (mp != NULL) {
1450 erts_bin_free(mp);
1451 }
1452 done:
1453 ret = db_format_dmc_err_info(p, err_info);
1454 db_free_dmc_err_info(err_info);
1455 if (buff != sbuff) {
1456 erts_free(ERTS_ALC_T_DB_TMP, buff);
1457 }
1458 return ret;
1459 }
1460
1461 /* Returns
1462 * am_false if no match or
1463 * if {message,false} has been called,
1464 * am_true if {message,_} has NOT been called or
1465 * if {message,true} has been called,
1466 * Msg if {message,Msg} has been called.
1467 *
1468 * If return value is_not_immed
1469 * then erts_match_set_release_result_trace() must be called to release it.
1470 */
1471 Eterm erts_match_set_run_trace(Process *c_p,
1472 Process *self,
1473 Binary *mpsp,
1474 Eterm *args, int num_args,
1475 enum erts_pam_run_flags in_flags,
1476 Uint32 *return_flags)
1477 {
1478 Eterm ret;
1479
1480 ret = db_prog_match(c_p, self, mpsp, NIL, args, num_args,
1481 in_flags, return_flags);
1482
1483 ASSERT(!(is_non_value(ret) && *return_flags));
1484
1485 if (is_non_value(ret) || ret == am_false) {
1486 erts_match_set_release_result(c_p);
1487 return am_false;
1488 }
1489 if (is_immed(ret))
1490 erts_match_set_release_result(c_p);
1491 return ret;
1492 }
1493
1494 static Eterm erts_match_set_run_ets(Process *p, Binary *mpsp,
1495 Eterm args, int num_args,
1496 Uint32 *return_flags)
1497 {
1498 Eterm ret;
1499
1500 ret = db_prog_match(p, p,
1501 mpsp, args, NULL, num_args,
1502 ERTS_PAM_COPY_RESULT,
1503 return_flags);
1504 #if defined(HARDDEBUG)
1505 if (is_non_value(ret)) {
1506 erts_fprintf(stderr, "Failed\n");
1507 } else {
1508 erts_fprintf(stderr, "Returning : %T\n", ret);
1509 }
1510 #endif
1511 return ret;
1512 /* Returns
1513 * THE_NON_VALUE if no match
1514 * am_false if {message,false} has been called,
1515 * am_true if {message,_} has not been called or
1516 * if {message,true} has been called,
1517 * Msg if {message,Msg} has been called.
1518 */
1519 }
1520
1521 /*
1522 ** API Used by other erl_db modules.
1523 */
1524
1525 void db_initialize_util(void){
1526 char c;
1527 qsort(guard_tab,
1528 sizeof(guard_tab) / sizeof(DMCGuardBif),
1529 sizeof(DMCGuardBif),
1530 (int (*)(const void *, const void *)) &cmp_guard_bif);
1531 match_pseudo_process_init();
1532 erts_atomic32_init_nob(&trace_control_word, 0);
1533 if (erts_check_if_stack_grows_downwards(&c))
1534 stack_guard = stack_guard_downwards;
1535 else
1536 stack_guard = stack_guard_upwards;
1537 }
1538
1539
1540
1541 Eterm db_getkey(int keypos, Eterm obj)
1542 {
1543 if (is_tuple(obj)) {
1544 Eterm *tptr = tuple_val(obj);
1545 if (arityval(*tptr) >= keypos)
1546 return *(tptr + keypos);
1547 }
1548 return THE_NON_VALUE;
1549 }
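/*
 * E.g. (illustration): db_getkey(2, Term) with Term = {person, anna, 28}
 * yields the atom 'anna'; a non-tuple or a tuple smaller than keypos
 * yields THE_NON_VALUE.
 */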
1550
1551 /*
1552 ** Matching compiled (executed by "Pam" :-)
1553 */
1554
1555 /*
1556 ** The actual compiling of the match expression and the guards
1557 */
1558 Binary *db_match_compile(Eterm *matchexpr,
1559 Eterm *guards,
1560 Eterm *body,
1561 int num_progs,
1562 Uint flags,
1563 DMCErrInfo *err_info,
1564 Uint *freasonp)
1565 {
1566 DMCHeap heap;
1567 DMC_STACK_TYPE(Eterm) stack;
1568 DMC_STACK_TYPE(UWord) text;
1569 DMCContext context;
1570 MatchProg *ret = NULL;
1571 Eterm t;
1572 Uint i;
1573 Uint num_iters;
1574 int structure_checked;
1575 DMCRet res;
1576 int current_try_label;
1577 Binary *bp = NULL;
1578 unsigned clause_start;
1579
1580 context.stack_limit = (char *) erts_get_stacklimit();
1581 context.freason = BADARG;
1582
1583 DMC_INIT_STACK(stack);
1584 DMC_INIT_STACK(text);
1585
1586 context.stack_need = context.stack_used = 0;
1587 context.save = context.copy = NULL;
1588 context.num_match = num_progs;
1589 context.matchexpr = matchexpr;
1590 context.guardexpr = guards;
1591 context.bodyexpr = body;
1592 context.err_info = err_info;
1593 context.cflags = flags;
1594
1595 heap.size = DMC_DEFAULT_SIZE;
1596 heap.vars = heap.vars_def;
1597
1598 /*
1599 ** Compile the match expression
1600 */
1601 restart:
1602 heap.vars_used = 0;
1603 for (context.current_match = 0;
1604 context.current_match < num_progs;
1605 ++context.current_match) { /* This loop is long,
1606 too long */
1607 sys_memset(heap.vars, 0, heap.size * sizeof(*heap.vars));
1608 t = context.matchexpr[context.current_match];
1609 context.stack_used = 0;
1610 structure_checked = 0;
1611 if (context.current_match < num_progs - 1) {
1612 DMC_PUSH(text,matchTryMeElse);
1613 current_try_label = DMC_STACK_NUM(text);
1614 DMC_PUSH(text,0);
1615 } else {
1616 current_try_label = -1;
1617 }
1618 clause_start = DMC_STACK_NUM(text); /* the "special" test needs it */
1619 DMC_PUSH(stack,NIL);
1620 for (;;) {
1621 switch (t & _TAG_PRIMARY_MASK) {
1622 case TAG_PRIMARY_BOXED:
1623 if (is_flatmap(t)) {
1624 num_iters = flatmap_get_size(flatmap_val(t));
1625 if (!structure_checked) {
1626 DMC_PUSH2(text, matchMap, num_iters);
1627 }
1628 structure_checked = 0;
1629 for (i = 0; i < num_iters; ++i) {
1630 Eterm key = flatmap_get_keys(flatmap_val(t))[i];
1631 if (db_is_variable(key) >= 0) {
1632 if (context.err_info) {
1633 add_dmc_err(context.err_info,
1634 "Variable found in map key.",
1635 -1, 0UL, dmcError);
1636 }
1637 goto error;
1638 } else if (key == am_Underscore) {
1639 if (context.err_info) {
1640 add_dmc_err(context.err_info,
1641 "Underscore found in map key.",
1642 -1, 0UL, dmcError);
1643 }
1644 goto error;
1645 }
1646 DMC_PUSH2(text, matchKey, dmc_private_copy(&context, key));
1647 {
1648 int old_stack = ++(context.stack_used);
1649 Eterm value = flatmap_get_values(flatmap_val(t))[i];
1650 res = dmc_one_term(&context, &heap, &stack, &text,
1651 value);
1652 ASSERT(res != retFail);
1653 if (res == retRestart) {
1654 goto restart;
1655 }
1656 if (old_stack != context.stack_used) {
1657 ASSERT(old_stack + 1 == context.stack_used);
1658 DMC_PUSH(text, matchSwap);
1659 }
1660 if (context.stack_used > context.stack_need) {
1661 context.stack_need = context.stack_used;
1662 }
1663 DMC_PUSH(text, matchPop);
1664 --(context.stack_used);
1665 }
1666 }
1667 break;
1668 }
1669 if (is_hashmap(t)) {
1670 DECLARE_WSTACK(wstack);
1671 Eterm *kv;
1672 num_iters = hashmap_size(t);
1673 if (!structure_checked) {
1674 DMC_PUSH2(text, matchMap, num_iters);
1675 }
1676 structure_checked = 0;
1677
1678 hashmap_iterator_init(&wstack, t, 0);
1679
1680 while ((kv=hashmap_iterator_next(&wstack)) != NULL) {
1681 Eterm key = CAR(kv);
1682 Eterm value = CDR(kv);
1683 if (db_is_variable(key) >= 0) {
1684 if (context.err_info) {
1685 add_dmc_err(context.err_info,
1686 "Variable found in map key.",
1687 -1, 0UL, dmcError);
1688 }
1689 DESTROY_WSTACK(wstack);
1690 goto error;
1691 } else if (key == am_Underscore) {
1692 if (context.err_info) {
1693 add_dmc_err(context.err_info,
1694 "Underscore found in map key.",
1695 -1, 0UL, dmcError);
1696 }
1697 DESTROY_WSTACK(wstack);
1698 goto error;
1699 }
1700 DMC_PUSH2(text, matchKey, dmc_private_copy(&context, key));
1701 {
1702 int old_stack = ++(context.stack_used);
1703 res = dmc_one_term(&context, &heap, &stack, &text,
1704 value);
1705 ASSERT(res != retFail);
1706 if (res == retRestart) {
1707 DESTROY_WSTACK(wstack);
1708 goto restart;
1709 }
1710 if (old_stack != context.stack_used) {
1711 ASSERT(old_stack + 1 == context.stack_used);
1712 DMC_PUSH(text, matchSwap);
1713 }
1714 if (context.stack_used > context.stack_need) {
1715 context.stack_need = context.stack_used;
1716 }
1717 DMC_PUSH(text, matchPop);
1718 --(context.stack_used);
1719 }
1720 }
1721 DESTROY_WSTACK(wstack);
1722 break;
1723 }
1724 if (!is_tuple(t)) {
1725 goto simple_term;
1726 }
1727 num_iters = arityval(*tuple_val(t));
1728 if (!structure_checked) { /* i.e. we did not
1729 pop it */
1730 DMC_PUSH2(text, matchTuple, num_iters);
1731 }
1732 structure_checked = 0;
1733 for (i = 1; i <= num_iters; ++i) {
1734 if ((res = dmc_one_term(&context,
1735 &heap,
1736 &stack,
1737 &text,
1738 tuple_val(t)[i]))
1739 != retOk) {
1740 if (res == retRestart) {
1741 goto restart; /* restart the
1742 surrounding
1743 loop */
1744 } else goto error;
1745 }
1746 }
1747 break;
1748 case TAG_PRIMARY_LIST:
1749 if (!structure_checked) {
1750 DMC_PUSH(text, matchList);
1751 }
1752 structure_checked = 0; /* Whatever it is, we did
1753 not pop it */
1754 if ((res = dmc_one_term(&context, &heap, &stack,
1755 &text, CAR(list_val(t))))
1756 != retOk) {
1757 if (res == retRestart) {
1758 goto restart;
1759 } else goto error;
1760 }
1761 t = CDR(list_val(t));
1762 continue;
1763 default: /* NIL, non-proper tail ends, or
1764 single terms as match
1765 expressions */
1766 simple_term:
1767 structure_checked = 0;
1768 if ((res = dmc_one_term(&context, &heap, &stack,
1769 &text, t))
1770 != retOk) {
1771 if (res == retRestart) {
1772 goto restart;
1773 } else goto error;
1774 }
1775 break;
1776 }
1777
1778 /* The *program's* stack just *grows* while we are
1779 traversing one composite data structure, we can
1780 check the stack usage here */
1781
1782 if (context.stack_used > context.stack_need)
1783 context.stack_need = context.stack_used;
1784
1785 /* We are at the end of one composite data structure,
1786 pop sub structures and emit a matchPop instruction
1787 (or break) */
1788 if ((t = DMC_POP(stack)) == NIL) {
1789 break;
1790 } else {
1791 DMC_PUSH(text, matchPop);
1792 structure_checked = 1; /*
1793 * Checked with matchPushT
1794 * or matchPushL
1795 */
1796 --(context.stack_used);
1797 }
1798 }
1799
1800 /*
1801 ** There is one single top variable in the match expression
1802 ** iff the text is two UWords and the single instruction
1803 ** is 'matchBind', or the text is a single 'matchSkip'.
1804 */
1805 context.special =
1806 (DMC_STACK_NUM(text) == 2 + clause_start &&
1807 DMC_PEEK(text,clause_start) == matchBind) ||
1808 (DMC_STACK_NUM(text) == 1 + clause_start &&
1809 DMC_PEEK(text, clause_start) == matchSkip);
1810
1811 if (flags & DCOMP_TRACE) {
1812 if (context.special) {
1813 if (DMC_PEEK(text, clause_start) == matchBind) {
1814 DMC_POKE(text, clause_start, matchArrayBind);
1815 }
1816 } else {
1817 ASSERT(DMC_STACK_NUM(text) >= 1);
1818 if (DMC_PEEK(text, clause_start) != matchTuple) {
1819 /* If it isn't "special" and the argument is
1820 not a tuple, the expression is not valid
1821 when matching an array*/
1822 if (context.err_info) {
1823 add_dmc_err(context.err_info,
1824 "Match head is invalid in "
1825 "this context.",
1826 -1, 0UL,
1827 dmcError);
1828 }
1829 goto error;
1830 }
1831 DMC_POKE(text, clause_start, matchArray);
1832 }
1833 }
1834
1835
1836 /*
1837 ** ... and the guards
1838 */
1839 context.is_guard = 1;
1840 if (compile_guard_expr
1841 (&context,
1842 &heap,
1843 &text,
1844 context.guardexpr[context.current_match]) != retOk)
1845 goto error;
1846 context.is_guard = 0;
1847 if ((context.cflags & DCOMP_TABLE) &&
1848 !is_list(context.bodyexpr[context.current_match])) {
1849 if (context.err_info) {
1850 add_dmc_err(context.err_info,
1851 "Body clause does not return "
1852 "anything.", -1, 0UL,
1853 dmcError);
1854 }
1855 goto error;
1856 }
1857 if (compile_guard_expr
1858 (&context,
1859 &heap,
1860 &text,
1861 context.bodyexpr[context.current_match]) != retOk)
1862 goto error;
1863
1864 /*
1865 * The compilation does not bail out when error information
1866 * is requested, so we need to detect that here...
1867 */
1868 if (context.err_info != NULL &&
1869 (context.err_info)->error_added) {
1870 goto error;
1871 }
1872
1873
1874 /* If the matchprogram comes here, the match is
1875 successful */
1876 DMC_PUSH(text,matchHalt);
1877 /* Fill in try-me-else label if there is one. */
1878 if (current_try_label >= 0) {
1879 DMC_POKE(text, current_try_label, DMC_STACK_NUM(text));
1880 }
1881 } /* for (context.current_match = 0 ...) */
1882
1883
1884 /*
1885 ** Done compiling
1886 ** Allocate enough space for the program,
1887 ** heap size is in 'heap_used', stack size is in 'stack_need'
1888 ** and text size is simply DMC_STACK_NUM(text).
1889 ** The "program memory" is allocated like this:
1890 ** text ----> +-------------+
1891 ** | |
1892 ** ..........
1893 ** +-------------+
1894 **
1895 ** The heap-eheap-stack block of a MatchProg is nowadays allocated
1896 ** when the match program is run (see db_prog_match()).
1897 **
1898 ** heap ----> +-------------+
1899 ** ..........
1900 ** eheap ---> + +
1901 ** ..........
1902 ** stack ---> + +
1903 ** ..........
1904 ** +-------------+
1905 ** The stack is expected to grow towards *higher* addresses.
1906 ** A special case is when the match expression is a single binding
1907 ** (i.e '$1').
1908 */
1909 bp = erts_create_magic_binary(((sizeof(MatchProg) - sizeof(UWord)) +
1910 (DMC_STACK_NUM(text) * sizeof(UWord))),
1911 erts_db_match_prog_destructor);
1912 ret = Binary2MatchProg(bp);
1913 ret->saved_program_buf = NULL;
1914 ret->saved_program = NIL;
1915 ret->term_save = context.save;
1916 ret->num_bindings = heap.vars_used;
1917 sys_memcpy(ret->text, DMC_STACK_DATA(text),
1918 DMC_STACK_NUM(text) * sizeof(UWord));
1919 ret->stack_offset = heap.vars_used*sizeof(MatchVariable) + FENCE_PATTERN_SIZE;
1920 ret->heap_size = ret->stack_offset + context.stack_need * sizeof(Eterm*) + FENCE_PATTERN_SIZE;
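 /*
  * Worked example (assumes a 64-bit build without DMC_DEBUG): with 2
  * bindings and stack_need 3, stack_offset = 2*sizeof(MatchVariable) = 16
  * bytes and heap_size = 16 + 3*sizeof(Eterm*) = 40 bytes, i.e. the
  * run-time buffer holds the MatchVariable array first and the PAM stack
  * after it.
  */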
1921
1922 #ifdef DMC_DEBUG
1923 ret->prog_end = ret->text + DMC_STACK_NUM(text);
1924 #endif
1925
1926 /*
1927 * Fall through to cleanup code, but context.save should not be free'd
1928 */
1929 context.save = NULL;
1930 error: /* Here is where we land when compilation fails. */
1931 if (context.save != NULL) {
1932 free_message_buffer(context.save);
1933 context.save = NULL;
1934 }
1935 DMC_FREE(stack);
1936 DMC_FREE(text);
1937 if (context.copy != NULL)
1938 free_message_buffer(context.copy);
1939 if (heap.vars != heap.vars_def)
1940 erts_free(ERTS_ALC_T_DB_MS_CMPL_HEAP, (void *) heap.vars);
1941 *freasonp = context.freason;
1942 return bp;
1943 }
1944
1945 /*
1946 ** Free a match program (in a binary)
1947 */
1948 int erts_db_match_prog_destructor(Binary *bprog)
1949 {
1950 MatchProg *prog;
1951 if (bprog == NULL)
1952 return 1;
1953 prog = Binary2MatchProg(bprog);
1954 if (prog->term_save != NULL) {
1955 free_message_buffer(prog->term_save);
1956 }
1957 if (prog->saved_program_buf != NULL)
1958 free_message_buffer(prog->saved_program_buf);
1959 return 1;
1960 }
1961
1962 void
1963 erts_match_prog_foreach_offheap(Binary *bprog,
1964 void (*func)(ErlOffHeap *, void *),
1965 void *arg)
1966 {
1967 MatchProg *prog;
1968 ErlHeapFragment *tmp;
1969 if (bprog == NULL)
1970 return;
1971 prog = Binary2MatchProg(bprog);
1972 tmp = prog->term_save;
1973 while (tmp) {
1974 (*func)(&(tmp->off_heap), arg);
1975 tmp = tmp->next;
1976 }
1977 if (prog->saved_program_buf)
1978 (*func)(&(prog->saved_program_buf->off_heap), arg);
1979 }
1980
1981 /*
1982 ** This is not the most efficient way to do it, but it's a rare
1983 ** and not especially nice case when this is used.
1984 */
1985 static Eterm dpm_array_to_list(Process *psp, Eterm *arr, int arity)
1986 {
1987 Eterm *hp = HAllocX(psp, arity * 2, HEAP_XTRA);
1988 Eterm ret = NIL;
1989 while (--arity >= 0) {
1990 ret = CONS(hp, arr[arity], ret);
1991 hp += 2;
1992 }
1993 return ret;
1994 }
1995
1996 /*
1997 ** Execution of the match program, this is Pam.
1998 ** May return THE_NON_VALUE, which is a bailout.
1999 ** the parameter 'arity' is only used if 'term' is actually an array,
2000 ** i.e. 'DCOMP_TRACE' was specified
2001 */
2002 Eterm db_prog_match(Process *c_p,
2003 Process *self,
2004 Binary *bprog,
2005 Eterm term,
2006 Eterm *termp,
2007 int arity,
2008 enum erts_pam_run_flags in_flags,
2009 Uint32 *return_flags)
2010 {
2011 MatchProg *prog = Binary2MatchProg(bprog);
2012 const Eterm *ep, *tp, **sp;
2013 Eterm t;
2014 Eterm *esp;
2015 MatchVariable* variables;
2016 const ErtsCodeMFA *cp;
2017 const UWord *pc = prog->text;
2018 Eterm *ehp;
2019 Eterm ret;
2020 Uint n;
2021 int i;
2022 unsigned do_catch;
2023 ErtsMatchPseudoProcess *mpsp;
2024 Process *psp;
2025 Process* build_proc;
2026 Process *tmpp;
2027 Process *current_scheduled;
2028 ErtsSchedulerData *esdp;
2029 BIF_RETTYPE (*bif)(BIF_ALIST);
2030 Eterm bif_args[3];
2031 int fail_label;
2032 #ifdef DEBUG
2033 Eterm *orig_esp;
2034 #endif
2035 #ifdef DMC_DEBUG
2036 Uint *heap_fence;
2037 Uint *stack_fence;
2038 Uint save_op;
2039 #endif /* DMC_DEBUG */
2040
2041 ERTS_UNDEF(n,0);
2042 ERTS_UNDEF(current_scheduled,NULL);
2043
2044 ASSERT(c_p || !(in_flags & ERTS_PAM_COPY_RESULT));
2045
2046 mpsp = get_match_pseudo_process(c_p, prog->heap_size);
2047 psp = &mpsp->process;
2048
2049 /* We need to lure the scheduler into believing in the pseudo process,
2050 because of floating point exceptions. Do *after* mpsp is set!!! */
2051
2052 esdp = erts_get_scheduler_data();
2053 if (esdp)
2054 current_scheduled = esdp->current_process;
2055 /* SMP: psp->scheduler_data is set by get_match_pseudo_process */
2056
2057 #ifdef DMC_DEBUG
2058 save_op = 0;
2059 heap_fence = (Eterm*)((char*) mpsp->u.heap + prog->stack_offset) - 1;
2060 stack_fence = (Eterm*)((char*) mpsp->u.heap + prog->heap_size) - 1;
2061 *heap_fence = FENCE_PATTERN;
2062 *stack_fence = FENCE_PATTERN;
2063 #endif /* DMC_DEBUG */
2064
2065 #ifdef HARDDEBUG
2066 #define FAIL() {erts_printf("Fail line %d\n",__LINE__); goto fail;}
2067 #else
2068 #define FAIL() goto fail
2069 #endif
2070 #define FAIL_TERM am_EXIT /* The term to set as return when bif fails and
2071 do_catch != 0 */
2072
2073 *return_flags = 0U;
2074 variables = mpsp->u.variables;
2075
2076 restart:
2077 ep = &term;
2078 esp = (Eterm*)((char*)mpsp->u.heap + prog->stack_offset);
2079 sp = (const Eterm **)esp;
2080 ret = am_true;
2081 do_catch = 0;
2082 fail_label = -1;
2083 build_proc = psp;
2084 if (esdp)
2085 esdp->current_process = psp;
2086
2087 #ifdef DEBUG
2088 orig_esp = esp;
2089 ASSERT(variables == mpsp->u.variables);
2090 for (i=0; i<prog->num_bindings; i++) {
2091 variables[i].term = THE_NON_VALUE;
2092 variables[i].proc = NULL;
2093 }
2094 #endif
2095
2096 for (;;) {
2097
2098 #ifdef DMC_DEBUG
2099 if (*heap_fence != FENCE_PATTERN) {
2100 erts_exit(ERTS_ABORT_EXIT, "Heap fence overwritten in db_prog_match after op "
2101 "0x%08x, overwritten with 0x%08x.", save_op, *heap_fence);
2102 }
2103 if (*stack_fence != FENCE_PATTERN) {
2104 erts_exit(ERTS_ABORT_EXIT, "Stack fence overwritten in db_prog_match after op "
2105 "0x%08x, overwritten with 0x%08x.", save_op,
2106 *stack_fence);
2107 }
2108 save_op = *pc;
2109 #endif
2110 switch (*pc++) {
2111 case matchTryMeElse:
2112 ASSERT(fail_label == -1);
2113 fail_label = *pc++;
2114 break;
2115 case matchArray: /* only when DCOMP_TRACE, is always first
2116 instruction. */
2117 n = *pc++;
2118 if ((int) n != arity)
2119 FAIL();
2120 ep = termp;
2121 break;
2122 case matchArrayBind: /* When the array size is unknown. */
2123 ASSERT(termp || arity==0);
2124 n = *pc++;
2125 variables[n].term = dpm_array_to_list(psp, termp, arity);
2126 break;
2127 case matchTuple: /* *ep is a tuple of arity n */
2128 if (!is_tuple(*ep))
2129 FAIL();
2130 ep = tuple_val(*ep);
2131 n = *pc++;
2132 if (arityval(*ep) != n)
2133 FAIL();
2134 ++ep;
2135 break;
2136 case matchPushT: /* *ep is a tuple of arity n,
2137 push ptr to first element */
2138 if (!is_tuple(*ep))
2139 FAIL();
2140 tp = tuple_val(*ep);
2141 n = *pc++;
2142 if (arityval(*tp) != n)
2143 FAIL();
2144 *sp++ = tp + 1;
2145 ++ep;
2146 break;
2147 case matchList:
2148 if (!is_list(*ep))
2149 FAIL();
2150 ep = list_val(*ep);
2151 break;
2152 case matchPushL:
2153 if (!is_list(*ep))
2154 FAIL();
2155 *sp++ = list_val(*ep);
2156 ++ep;
2157 break;
2158 case matchMap:
2159 if (!is_map(*ep)) {
2160 FAIL();
2161 }
2162 n = *pc++;
2163 if (is_flatmap(*ep)) {
2164 if (flatmap_get_size(flatmap_val(*ep)) < n) {
2165 FAIL();
2166 }
2167 } else {
2168 ASSERT(is_hashmap(*ep));
2169 if (hashmap_size(*ep) < n) {
2170 FAIL();
2171 }
2172 }
2173 ep = flatmap_val(*ep);
2174 break;
2175 case matchPushM:
2176 if (!is_map(*ep)) {
2177 FAIL();
2178 }
2179 n = *pc++;
2180 if (is_flatmap(*ep)) {
2181 if (flatmap_get_size(flatmap_val(*ep)) < n) {
2182 FAIL();
2183 }
2184 } else {
2185 ASSERT(is_hashmap(*ep));
2186 if (hashmap_size(*ep) < n) {
2187 FAIL();
2188 }
2189 }
2190 *sp++ = flatmap_val(*ep++);
2191 break;
2192 case matchKey:
2193 t = (Eterm) *pc++;
2194 tp = erts_maps_get(t, make_boxed(ep));
2195 if (!tp) {
2196 FAIL();
2197 }
2198 *sp++ = ep;
2199 ep = tp;
2200 break;
2201 case matchPop:
2202 ep = *(--sp);
2203 break;
2204 case matchSwap:
2205 tp = sp[-1];
2206 sp[-1] = sp[-2];
2207 sp[-2] = tp;
2208 break;
2209 case matchBind:
2210 n = *pc++;
2211 variables[n].term = *ep++;
2212 break;
2213 case matchCmp:
2214 n = *pc++;
2215 if (!EQ(variables[n].term, *ep))
2216 FAIL();
2217 ++ep;
2218 break;
2219 case matchEqBin:
2220 t = (Eterm) *pc++;
2221 if (!EQ(t,*ep))
2222 FAIL();
2223 ++ep;
2224 break;
2225 case matchEqFloat:
2226 if (!is_float(*ep))
2227 FAIL();
2228 if (sys_memcmp(float_val(*ep) + 1, pc, sizeof(double)))
2229 FAIL();
2230 pc += sizeof(double) / sizeof(*pc);
2231 ++ep;
2232 break;
2233 case matchEqRef: {
2234 Eterm* epc = (Eterm*)pc;
2235 if (!is_ref(*ep))
2236 FAIL();
2237 if (!EQ(make_internal_ref(epc), *ep)) {
2238 FAIL();
2239 }
2240 i = thing_arityval(*epc);
2241 pc += i+1;
2242 ++ep;
2243 break;
2244 }
2245 case matchEqBig:
2246 if (!is_big(*ep))
2247 FAIL();
2248 tp = big_val(*ep);
2249 {
2250 Eterm *epc = (Eterm *) pc;
2251 if (*tp != *epc)
2252 FAIL();
2253 i = BIG_ARITY(epc);
2254 pc += i+1;
2255 while(i--) {
2256 if (*++tp != *++epc) {
2257 FAIL();
2258 }
2259 }
2260 }
2261 ++ep;
2262 break;
2263 case matchEq:
2264 t = (Eterm) *pc++;
2265 ASSERT(is_immed(t));
2266 if (t != *ep++)
2267 FAIL();
2268 break;
2269 case matchSkip:
2270 ++ep;
2271 break;
2272 /*
2273 * Here comes guard & body instructions
2274 */
2275 case matchPushC: /* Push constant */
2276 if ((in_flags & ERTS_PAM_COPY_RESULT)
2277 && do_catch && !is_immed(*pc)) {
2278 *esp++ = copy_object(*pc++, c_p);
2279 }
2280 else {
2281 *esp++ = *pc++;
2282 }
2283 break;
2284 case matchConsA:
2285 ehp = HAllocX(build_proc, 2, HEAP_XTRA);
2286 CDR(ehp) = *--esp;
2287 CAR(ehp) = esp[-1];
2288 esp[-1] = make_list(ehp);
2289 break;
2290 case matchConsB:
2291 ehp = HAllocX(build_proc, 2, HEAP_XTRA);
2292 CAR(ehp) = *--esp;
2293 CDR(ehp) = esp[-1];
2294 esp[-1] = make_list(ehp);
2295 break;
2296 case matchMkTuple:
2297 n = *pc++;
2298 ehp = HAllocX(build_proc, n+1, HEAP_XTRA);
2299 t = make_tuple(ehp);
2300 *ehp++ = make_arityval(n);
2301 while (n--) {
2302 *ehp++ = *--esp;
2303 }
2304 *esp++ = t;
2305 break;
2306 case matchMkFlatMap:
2307 n = *pc++;
2308 ehp = HAllocX(build_proc, MAP_HEADER_FLATMAP_SZ + n, HEAP_XTRA);
2309 t = *--esp;
2310 {
2311 flatmap_t *m = (flatmap_t *)ehp;
2312 m->thing_word = MAP_HEADER_FLATMAP;
2313 m->size = n;
2314 m->keys = t;
2315 }
2316 t = make_flatmap(ehp);
2317 ehp += MAP_HEADER_FLATMAP_SZ;
2318 while (n--) {
2319 *ehp++ = *--esp;
2320 }
2321 erts_usort_flatmap((flatmap_t*)flatmap_val(t));
2322 *esp++ = t;
2323 break;
2324 case matchMkHashMap:
2325 n = *pc++;
2326 esp -= 2*n;
2327 ehp = HAllocX(build_proc, 2*n, HEAP_XTRA);
2328 {
2329 ErtsHeapFactory factory;
2330 Uint ix;
2331 for (ix = 0; ix < 2*n; ix++){
2332 ehp[ix] = esp[ix];
2333 }
2334 erts_factory_proc_init(&factory, build_proc);
2335 t = erts_hashmap_from_array(&factory, ehp, n, 0);
2336 erts_factory_close(&factory);
2337
2338 	    /* Duplicate keys may have collapsed in the hashmap, so it can
2339 	       now be small enough that it must be recreated as a flatmap */
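                /* Illustrative example (an assumed scenario, not taken from the
                 * surrounding code): a match spec body that builds a map such as
                 * #{'$1' => a, '$2' => b, ...} with more pairs than
                 * MAP_SMALL_MAP_LIMIT is compiled to matchMkHashMap, but if '$1'
                 * and '$2' happen to be bound to the same key at runtime the
                 * pairs collapse and the result may fit in a flatmap after all. */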
2340 if (hashmap_size(t) <= MAP_SMALL_MAP_LIMIT) {
2341 DECLARE_WSTACK(wstack);
2342 Eterm *kv;
2343 Eterm *ks;
2344 Eterm *vs;
2345 flatmap_t *mp;
2346 Eterm keys, *hp;
2347 Uint n = hashmap_size(t);
2348 erts_factory_proc_init(&factory, build_proc);
2349
2350 /* build flat structure */
2351 hp = erts_produce_heap(&factory, 3 + 1 + (2 * n), 0);
2352 keys = make_tuple(hp);
2353 *hp++ = make_arityval(n);
2354 ks = hp;
2355 hp += n;
2356 mp = (flatmap_t*)hp;
2357 hp += MAP_HEADER_FLATMAP_SZ;
2358 vs = hp;
2359
2360 mp->thing_word = MAP_HEADER_FLATMAP;
2361 mp->size = n;
2362 mp->keys = keys;
2363
2364 hashmap_iterator_init(&wstack, t, 0);
2365
2366 while ((kv=hashmap_iterator_next(&wstack)) != NULL) {
2367 *ks++ = CAR(kv);
2368 *vs++ = CDR(kv);
2369 }
2370
2371                     /* the keys came from a hashmap, so there can be no duplicates */
2372 erts_validate_and_sort_flatmap(mp);
2373
2374 t = make_flatmap(mp);
2375
2376 DESTROY_WSTACK(wstack);
2377 erts_factory_close(&factory);
2378 }
2379 }
2380 *esp++ = t;
2381 break;
2382 case matchCall0:
2383 bif = (BIF_RETTYPE (*)(BIF_ALIST)) *pc++;
2384 t = (*bif)(build_proc, bif_args, NULL);
2385 if (is_non_value(t)) {
2386 if (do_catch)
2387 t = FAIL_TERM;
2388 else
2389 FAIL();
2390 }
2391 *esp++ = t;
2392 break;
2393 case matchCall1:
2394 bif = (BIF_RETTYPE (*)(BIF_ALIST)) *pc++;
2395 t = (*bif)(build_proc, esp-1, NULL);
2396 if (is_non_value(t)) {
2397 if (do_catch)
2398 t = FAIL_TERM;
2399 else
2400 FAIL();
2401 }
2402 esp[-1] = t;
2403 break;
2404 case matchCall2:
2405 bif = (BIF_RETTYPE (*)(BIF_ALIST)) *pc++;
2406 bif_args[0] = esp[-1];
2407 bif_args[1] = esp[-2];
2408 t = (*bif)(build_proc, bif_args, NULL);
2409 if (is_non_value(t)) {
2410 if (do_catch)
2411 t = FAIL_TERM;
2412 else
2413 FAIL();
2414 }
2415 --esp;
2416 esp[-1] = t;
2417 break;
2418 case matchCall3:
2419 bif = (BIF_RETTYPE (*)(BIF_ALIST)) *pc++;
2420 bif_args[0] = esp[-1];
2421 bif_args[1] = esp[-2];
2422 bif_args[2] = esp[-3];
2423 t = (*bif)(build_proc, bif_args, NULL);
2424 if (is_non_value(t)) {
2425 if (do_catch)
2426 t = FAIL_TERM;
2427 else
2428 FAIL();
2429 }
2430 esp -= 2;
2431 esp[-1] = t;
2432 break;
2433 case matchPushVResult:
2434 if (!(in_flags & ERTS_PAM_COPY_RESULT)) goto case_matchPushV;
2435 		/* Build copy on caller's heap */
2436 n = *pc++;
2437 ASSERT(is_value(variables[n].term));
2438 ASSERT(!variables[n].proc);
2439 variables[n].term = copy_object_x(variables[n].term, c_p, HEAP_XTRA);
2440 *esp++ = variables[n].term;
2441 #ifdef DEBUG
2442 variables[n].proc = c_p;
2443 #endif
2444 break;
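            /* Note on matchPushVResult above: dmc_add_pushv_variant() below emits
             * it only for a variable's first use in the body and plain matchPushV
             * for later uses, so the copy to the caller's heap happens at most
             * once per variable; the DEBUG-only 'proc' field asserts exactly that. */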
2445 case matchPushV:
2446 case_matchPushV:
2447 n = *pc++;
2448 ASSERT(is_value(variables[n].term));
2449 *esp++ = variables[n].term;
2450 break;
2451 case matchPushExpr:
2452 if (in_flags & ERTS_PAM_COPY_RESULT) {
2453 Uint sz;
2454 Eterm* top;
2455 sz = size_object(term);
2456 top = HAllocX(build_proc, sz, HEAP_XTRA);
2457 if (in_flags & ERTS_PAM_CONTIGUOUS_TUPLE) {
2458 ASSERT(is_tuple(term));
2459 *esp++ = copy_shallow(tuple_val(term), sz, &top, &MSO(build_proc));
2460 }
2461 else {
2462 *esp++ = copy_struct(term, sz, &top, &MSO(build_proc));
2463 }
2464 }
2465 else {
2466 *esp++ = term;
2467 }
2468 break;
2469 case matchPushArrayAsList:
2470 n = arity; /* Only happens when 'term' is an array */
2471 tp = termp;
2472 ehp = HAllocX(build_proc, n*2, HEAP_XTRA);
2473 *esp++ = make_list(ehp);
2474 while (n--) {
2475 *ehp++ = *tp++;
2476 *ehp = make_list(ehp + 1);
2477 ehp++; /* As pointed out by Mikael Pettersson the expression
2478 (*ehp++ = make_list(ehp + 1)) that I previously
2479 had written here has undefined behaviour. */
2480 }
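            /* The undefined single expression referred to above would have been
             *     *ehp++ = make_list(ehp + 1);
             * where the increment of ehp and its use on the right-hand side are
             * unsequenced; splitting it into two statements, as done here, gives
             * a defined evaluation order. */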
2481 ehp[-1] = NIL;
2482 break;
2483 case matchPushArrayAsListU:
2484 /* This instruction is NOT efficient. */
2485 *esp++ = dpm_array_to_list(build_proc, termp, arity);
2486 break;
2487 case matchTrue:
2488 if (*--esp != am_true)
2489 FAIL();
2490 break;
2491 case matchOr:
2492 n = *pc++;
2493 t = am_false;
2494 while (n--) {
2495 if (*--esp == am_true) {
2496 t = am_true;
2497 } else if (*esp != am_false) {
2498 esp -= n;
2499 if (do_catch) {
2500 t = FAIL_TERM;
2501 break;
2502 } else {
2503 FAIL();
2504 }
2505 }
2506 }
2507 *esp++ = t;
2508 break;
2509 case matchAnd:
2510 n = *pc++;
2511 t = am_true;
2512 while (n--) {
2513 if (*--esp == am_false) {
2514 t = am_false;
2515 } else if (*esp != am_true) {
2516 esp -= n;
2517 if (do_catch) {
2518 t = FAIL_TERM;
2519 break;
2520 } else {
2521 FAIL();
2522 }
2523 }
2524 }
2525 *esp++ = t;
2526 break;
2527 case matchOrElse:
2528 n = *pc++;
2529 if (*--esp == am_true) {
2530 ++esp;
2531 pc += n;
2532 } else if (*esp != am_false) {
2533 if (do_catch) {
2534 *esp++ = FAIL_TERM;
2535 pc += n;
2536 } else {
2537 FAIL();
2538 }
2539 }
2540 break;
2541 case matchAndAlso:
2542 n = *pc++;
2543 if (*--esp == am_false) {
2544 esp++;
2545 pc += n;
2546 } else if (*esp != am_true) {
2547 if (do_catch) {
2548 *esp++ = FAIL_TERM;
2549 pc += n;
2550 } else {
2551 FAIL();
2552 }
2553 }
2554 break;
2555 case matchJump:
2556 n = *pc++;
2557 pc += n;
2558 break;
2559 case matchSelf:
2560 *esp++ = self->common.id;
2561 break;
2562 case matchWaste:
2563 --esp;
2564 break;
2565 case matchReturn:
2566 ret = *--esp;
2567 break;
2568 case matchProcessDump: {
2569 erts_dsprintf_buf_t *dsbufp = erts_create_tmp_dsbuf(0);
2570 ASSERT(c_p == self);
2571 print_process_info(ERTS_PRINT_DSBUF, (void *) dsbufp, c_p, ERTS_PROC_LOCK_MAIN);
2572 *esp++ = new_binary(build_proc, (byte *)dsbufp->str,
2573 dsbufp->str_len);
2574 erts_destroy_tmp_dsbuf(dsbufp);
2575 break;
2576 }
2577 case matchDisplay: /* Debugging, not for production! */
2578 erts_printf("%T\n", esp[-1]);
2579 esp[-1] = am_true;
2580 break;
2581 case matchSetReturnTrace:
2582 *return_flags |= MATCH_SET_RETURN_TRACE;
2583 *esp++ = am_true;
2584 break;
2585 case matchSetExceptionTrace:
2586 *return_flags |= MATCH_SET_EXCEPTION_TRACE;
2587 *esp++ = am_true;
2588 break;
2589 case matchIsSeqTrace:
2590 ASSERT(c_p == self);
2591 if (have_seqtrace(SEQ_TRACE_TOKEN(c_p)))
2592 *esp++ = am_true;
2593 else
2594 *esp++ = am_false;
2595 break;
2596 case matchSetSeqToken:
2597 ASSERT(c_p == self);
2598 t = erts_seq_trace(c_p, esp[-1], esp[-2], 0);
2599 if (is_non_value(t)) {
2600 esp[-2] = FAIL_TERM;
2601 } else {
2602 esp[-2] = t;
2603 }
2604 --esp;
2605 break;
2606 case matchSetSeqTokenFake:
2607 ASSERT(c_p == self);
2608 t = seq_trace_fake(c_p, esp[-1]);
2609 if (is_non_value(t)) {
2610 esp[-2] = FAIL_TERM;
2611 } else {
2612 esp[-2] = t;
2613 }
2614 --esp;
2615 break;
2616 case matchGetSeqToken:
2617 ASSERT(c_p == self);
2618 if (have_no_seqtrace(SEQ_TRACE_TOKEN(c_p)))
2619 *esp++ = NIL;
2620 else {
2621 Eterm token;
2622 Uint token_sz;
2623
2624 ASSERT(SEQ_TRACE_TOKEN_ARITY(c_p) == 5);
2625 ASSERT(is_immed(SEQ_TRACE_TOKEN_FLAGS(c_p)));
2626 ASSERT(is_immed(SEQ_TRACE_TOKEN_SERIAL(c_p)));
2627 ASSERT(is_immed(SEQ_TRACE_TOKEN_LASTCNT(c_p)));
2628
2629 token = SEQ_TRACE_TOKEN(c_p);
2630 token_sz = size_object(token);
2631
2632 ehp = HAllocX(build_proc, token_sz, HEAP_XTRA);
2633 *esp++ = copy_struct(token, token_sz, &ehp, &MSO(build_proc));
2634 }
2635 break;
2636 case matchEnableTrace:
2637 ASSERT(c_p == self);
2638 if ( (n = erts_trace_flag2bit(esp[-1]))) {
2639 erts_proc_lock(c_p, ERTS_PROC_LOCKS_ALL_MINOR);
2640 set_tracee_flags(c_p, ERTS_TRACER(c_p), 0, n);
2641 erts_proc_unlock(c_p, ERTS_PROC_LOCKS_ALL_MINOR);
2642 esp[-1] = am_true;
2643 } else {
2644 esp[-1] = FAIL_TERM;
2645 }
2646 break;
2647 case matchEnableTrace2:
2648 ASSERT(c_p == self);
2649 n = erts_trace_flag2bit((--esp)[-1]);
2650 esp[-1] = FAIL_TERM;
2651 if (n) {
2652 if ( (tmpp = get_proc(c_p, ERTS_PROC_LOCK_MAIN, esp[0], ERTS_PROC_LOCKS_ALL))) {
2653 /* Always take over the tracer of the current process */
2654 set_tracee_flags(tmpp, ERTS_TRACER(c_p), 0, n);
2655 if (tmpp == c_p)
2656 erts_proc_unlock(tmpp, ERTS_PROC_LOCKS_ALL_MINOR);
2657 else
2658 erts_proc_unlock(tmpp, ERTS_PROC_LOCKS_ALL);
2659 esp[-1] = am_true;
2660 }
2661 }
2662 break;
2663 case matchDisableTrace:
2664 ASSERT(c_p == self);
2665 if ( (n = erts_trace_flag2bit(esp[-1]))) {
2666 erts_proc_lock(c_p, ERTS_PROC_LOCKS_ALL_MINOR);
2667 set_tracee_flags(c_p, ERTS_TRACER(c_p), n, 0);
2668 erts_proc_unlock(c_p, ERTS_PROC_LOCKS_ALL_MINOR);
2669 esp[-1] = am_true;
2670 } else {
2671 esp[-1] = FAIL_TERM;
2672 }
2673 break;
2674 case matchDisableTrace2:
2675 ASSERT(c_p == self);
2676 n = erts_trace_flag2bit((--esp)[-1]);
2677 esp[-1] = FAIL_TERM;
2678 if (n) {
2679 if ( (tmpp = get_proc(c_p, ERTS_PROC_LOCK_MAIN, esp[0], ERTS_PROC_LOCKS_ALL))) {
2680 /* Always take over the tracer of the current process */
2681 set_tracee_flags(tmpp, ERTS_TRACER(c_p), n, 0);
2682 if (tmpp == c_p)
2683 erts_proc_unlock(tmpp, ERTS_PROC_LOCKS_ALL_MINOR);
2684 else
2685 erts_proc_unlock(tmpp, ERTS_PROC_LOCKS_ALL);
2686 esp[-1] = am_true;
2687 }
2688 }
2689 break;
2690 case matchCaller:
2691 ASSERT(c_p == self);
2692 t = c_p->stop[0];
2693 if (is_not_CP(t)) {
2694 *esp++ = am_undefined;
2695 } else if (!(cp = erts_find_function_from_pc(cp_val(t)))) {
2696 *esp++ = am_undefined;
2697 } else {
2698 ehp = HAllocX(build_proc, 4, HEAP_XTRA);
2699 *esp++ = make_tuple(ehp);
2700 ehp[0] = make_arityval(3);
2701 ehp[1] = cp->module;
2702 ehp[2] = cp->function;
2703 ehp[3] = make_small((Uint) cp->arity);
2704 }
2705 break;
2706 case matchSilent:
2707 ASSERT(c_p == self);
2708 --esp;
2709 if (in_flags & ERTS_PAM_IGNORE_TRACE_SILENT)
2710 break;
2711 if (*esp == am_true) {
2712 erts_proc_lock(c_p, ERTS_PROC_LOCKS_ALL_MINOR);
2713 ERTS_TRACE_FLAGS(c_p) |= F_TRACE_SILENT;
2714 erts_proc_unlock(c_p, ERTS_PROC_LOCKS_ALL_MINOR);
2715 }
2716 else if (*esp == am_false) {
2717 erts_proc_lock(c_p, ERTS_PROC_LOCKS_ALL_MINOR);
2718 ERTS_TRACE_FLAGS(c_p) &= ~F_TRACE_SILENT;
2719 erts_proc_unlock(c_p, ERTS_PROC_LOCKS_ALL_MINOR);
2720 }
2721 break;
2722 case matchTrace2:
2723 ASSERT(c_p == self);
2724 {
2725 /* disable enable */
2726 Uint d_flags = 0, e_flags = 0; /* process trace flags */
2727 ErtsTracer tracer = erts_tracer_nil;
2728 /* XXX Atomicity note: Not fully atomic. Default tracer
2729 * is sampled from current process but applied to
2730 * tracee and tracer later after releasing main
2731 * locks on current process, so ERTS_TRACER_PROC(c_p)
2732 		 * may actually have changed by the time tracee and tracer
2733 		 * get updated. I do not think anybody will notice;
2734 		 * it is just the default value that is not fully atomic,
2735 		 * and the real argument settable from the match spec
2736 		 * {trace,[],[{{tracer,Tracer}}]} is much, much older.
2737 */
2738 int cputs = 0;
2739 erts_tracer_update(&tracer, ERTS_TRACER(c_p));
2740
2741 if (! erts_trace_flags(esp[-1], &d_flags, &tracer, &cputs) ||
2742 ! erts_trace_flags(esp[-2], &e_flags, &tracer, &cputs) ||
2743 cputs ) {
2744 (--esp)[-1] = FAIL_TERM;
2745 ERTS_TRACER_CLEAR(&tracer);
2746 break;
2747 }
2748 erts_proc_lock(c_p, ERTS_PROC_LOCKS_ALL_MINOR);
2749 (--esp)[-1] = set_match_trace(c_p, FAIL_TERM, tracer,
2750 d_flags, e_flags);
2751 erts_proc_unlock(c_p, ERTS_PROC_LOCKS_ALL_MINOR);
2752 ERTS_TRACER_CLEAR(&tracer);
2753 }
2754 break;
2755 case matchTrace3:
2756 ASSERT(c_p == self);
2757 {
2758 /* disable enable */
2759 Uint d_flags = 0, e_flags = 0; /* process trace flags */
2760 ErtsTracer tracer = erts_tracer_nil;
2761 		/* XXX Atomicity note: not fully atomic, see above.
2762 		 * There it could possibly be solved, but not here.
2763 */
2764 int cputs = 0;
2765 Eterm tracee = (--esp)[0];
2766
2767 erts_tracer_update(&tracer, ERTS_TRACER(c_p));
2768
2769 if (! erts_trace_flags(esp[-1], &d_flags, &tracer, &cputs) ||
2770 ! erts_trace_flags(esp[-2], &e_flags, &tracer, &cputs) ||
2771 cputs ||
2772 ! (tmpp = get_proc(c_p, ERTS_PROC_LOCK_MAIN,
2773 tracee, ERTS_PROC_LOCKS_ALL))) {
2774 (--esp)[-1] = FAIL_TERM;
2775 ERTS_TRACER_CLEAR(&tracer);
2776 break;
2777 }
2778 if (tmpp == c_p) {
2779 (--esp)[-1] = set_match_trace(c_p, FAIL_TERM, tracer,
2780 d_flags, e_flags);
2781 erts_proc_unlock(c_p, ERTS_PROC_LOCKS_ALL_MINOR);
2782 } else {
2783 erts_proc_unlock(c_p, ERTS_PROC_LOCK_MAIN);
2784 (--esp)[-1] = set_match_trace(tmpp, FAIL_TERM, tracer,
2785 d_flags, e_flags);
2786 erts_proc_unlock(tmpp, ERTS_PROC_LOCKS_ALL);
2787 erts_proc_lock(c_p, ERTS_PROC_LOCK_MAIN);
2788 }
2789 ERTS_TRACER_CLEAR(&tracer);
2790 }
2791 break;
2792 case matchCatch: /* Match success, now build result */
2793 do_catch = 1;
2794 if (in_flags & ERTS_PAM_COPY_RESULT) {
2795 build_proc = c_p;
2796 if (esdp)
2797 esdp->current_process = c_p;
2798 }
2799 break;
2800 case matchHalt:
2801 goto success;
2802 default:
2803 erts_exit(ERTS_ERROR_EXIT, "Internal error: unexpected opcode in match program.");
2804 }
2805 }
2806 fail:
2807 *return_flags = 0U;
2808 if (fail_label >= 0) { /* We failed during a "TryMeElse",
2809 				   let's restart with the next match
2810 program */
2811 pc = (prog->text) + fail_label;
2812 cleanup_match_pseudo_process(mpsp, 1);
2813 goto restart;
2814 }
2815 ret = THE_NON_VALUE;
2816 success:
2817 ASSERT(ret == THE_NON_VALUE || esp == orig_esp);
2818
2819 #ifdef DMC_DEBUG
2820 if (*heap_fence != FENCE_PATTERN) {
2821 erts_exit(ERTS_ABORT_EXIT, "Heap fence overwritten in db_prog_match after op "
2822 "0x%08x, overwritten with 0x%08x.", save_op, *heap_fence);
2823 }
2824 if (*stack_fence != FENCE_PATTERN) {
2825 erts_exit(ERTS_ABORT_EXIT, "Stack fence overwritten in db_prog_match after op "
2826 "0x%08x, overwritten with 0x%08x.", save_op,
2827 *stack_fence);
2828 }
2829 #endif
2830
2831 if (esdp)
2832 esdp->current_process = current_scheduled;
2833
2834 return ret;
2835 #undef FAIL
2836 #undef FAIL_TERM
2837 }
2838
2839
2840 DMCErrInfo *db_new_dmc_err_info(void)
2841 {
2842 DMCErrInfo *ret = erts_alloc(ERTS_ALC_T_DB_DMC_ERR_INFO,
2843 sizeof(DMCErrInfo));
2844 ret->var_trans = NULL;
2845 ret->num_trans = 0;
2846 ret->error_added = 0;
2847 ret->first = NULL;
2848 return ret;
2849 }
2850
2851 Eterm db_format_dmc_err_info(Process *p, DMCErrInfo *ei)
2852 {
2853 int sl;
2854 int vnum;
2855 DMCError *tmp;
2856 Eterm *shp;
2857 Eterm ret = NIL;
2858 Eterm tlist, tpl, sev;
2859 char buff[DMC_ERR_STR_LEN + 20 /* for the number */];
2860
2861 for (tmp = ei->first; tmp != NULL; tmp = tmp->next) {
2862 if (tmp->variable >= 0 &&
2863 tmp->variable < ei->num_trans &&
2864 ei->var_trans != NULL) {
2865 vnum = (int) ei->var_trans[tmp->variable];
2866 } else {
2867 vnum = tmp->variable;
2868 }
2869 if (vnum >= 0)
2870 	    erts_snprintf(buff, sizeof(buff), tmp->error_string, vnum);
2871 else
2872 sys_strcpy(buff,tmp->error_string);
2873 sl = sys_strlen(buff);
2874 shp = HAlloc(p, sl * 2 + 5);
2875 sev = (tmp->severity == dmcWarning) ? am_warning : am_error;
2876 tlist = buf_to_intlist(&shp, buff, sl, NIL);
2877 tpl = TUPLE2(shp, sev, tlist);
2878 shp += 3;
2879 ret = CONS(shp, tpl, ret);
2880 shp += 2;
2881 }
2882 return ret;
2883 }
2884
2885 void db_free_dmc_err_info(DMCErrInfo *ei){
2886 while (ei->first != NULL) {
2887 DMCError *ll = ei->first->next;
2888 erts_free(ERTS_ALC_T_DB_DMC_ERROR, ei->first);
2889 ei->first = ll;
2890 }
2891 if (ei->var_trans)
2892 erts_free(ERTS_ALC_T_DB_TRANS_TAB, ei->var_trans);
2893 erts_free(ERTS_ALC_T_DB_DMC_ERR_INFO, ei);
2894 }
2895
2896 /* Calculate integer addition: counter+incr.
2897 ** Store bignum in *hpp and increase *hpp accordingly.
2898 ** *hpp is assumed to be large enough to hold the result.
2899 */
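/* A minimal usage sketch (illustrative only; MAX_SMALL stands for the
 * platform's small-integer limit):
 *
 *     Eterm res = db_add_counter(&hp, make_small(MAX_SMALL), make_small(1));
 *
 * Both arguments are smalls but the sum no longer fits in a small, so the
 * result is a two-word bignum written at hp, and hp is advanced past it.
 */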
2900 Eterm db_add_counter(Eterm** hpp, Wterm counter, Eterm incr)
2901 {
2902 DeclareTmpHeapNoproc(big_tmp,2);
2903 Eterm res;
2904 Sint ires;
2905 Wterm arg1;
2906 Wterm arg2;
2907
2908 if (is_both_small(counter,incr)) {
2909 ires = signed_val(counter) + signed_val(incr);
2910 if (IS_SSMALL(ires)) {
2911 return make_small(ires);
2912 } else {
2913 res = small_to_big(ires, *hpp);
2914 ASSERT(BIG_NEED_SIZE(big_size(res))==2);
2915 *hpp += 2;
2916 return res;
2917 }
2918 }
2919 else {
2920 UseTmpHeapNoproc(2);
2921 switch(NUMBER_CODE(counter, incr)) {
2922 case SMALL_BIG:
2923 arg1 = small_to_big(signed_val(counter), big_tmp);
2924 arg2 = incr;
2925 break;
2926 case BIG_SMALL:
2927 arg1 = counter;
2928 arg2 = small_to_big(signed_val(incr), big_tmp);
2929 break;
2930 case BIG_BIG:
2931 arg1 = incr;
2932 arg2 = counter;
2933 break;
2934 default:
2935 UnUseTmpHeapNoproc(2);
2936 return THE_NON_VALUE;
2937 }
2938 res = big_plus(arg1, arg2, *hpp);
2939 if (is_big(res)) {
2940 *hpp += BIG_NEED_SIZE(big_size(res));
2941 }
2942 UnUseTmpHeapNoproc(2);
2943 return res;
2944 }
2945 }
2946
2947 /* Must be called to read elements after db_lookup_dbterm.
2948 ** Will decompress if needed.
2949 */
2950 Wterm db_do_read_element(DbUpdateHandle* handle, Sint position)
2951 {
2952 Eterm elem = handle->dbterm->tpl[position];
2953 if (!is_header(elem)) {
2954 return elem;
2955 }
2956
2957 ASSERT(((DbTableCommon*)handle->tb)->compress);
2958 ASSERT(!(handle->flags & DB_MUST_RESIZE));
2959 handle->dbterm = db_alloc_tmp_uncompressed(&handle->tb->common,
2960 handle->dbterm);
2961 handle->flags |= DB_MUST_RESIZE;
2962 return handle->dbterm->tpl[position];
2963 }
2964
2965 /*
2966 ** Update one element:
2967 ** handle: Initialized by db_lookup_dbterm()
2968 ** position: The tuple position of the element to be updated.
2969 ** newval:   The new value of the element.
2970 ** Cannot fail.
2971 */
2972 void db_do_update_element(DbUpdateHandle* handle,
2973 Sint position,
2974 Eterm newval)
2975 {
2976 Eterm oldval = handle->dbterm->tpl[position];
2977 Eterm* newp;
2978 Eterm* oldp;
2979 Uint newval_sz;
2980 Uint oldval_sz;
2981
2982 if (is_both_immed(newval,oldval)) {
2983 handle->dbterm->tpl[position] = newval;
2984 #ifdef DEBUG_CLONE
2985 if (handle->dbterm->debug_clone) {
2986 handle->dbterm->debug_clone[position] = newval;
2987 }
2988 #endif
2989 return;
2990 }
2991 if (!(handle->flags & DB_MUST_RESIZE)) {
2992 if (handle->tb->common.compress) {
2993 handle->dbterm = db_alloc_tmp_uncompressed(&handle->tb->common,
2994 handle->dbterm);
2995 handle->flags |= DB_MUST_RESIZE;
2996 oldval = handle->dbterm->tpl[position];
2997 }
2998 else {
2999 if (is_boxed(newval)) {
3000 newp = boxed_val(newval);
3001 switch (*newp & _TAG_HEADER_MASK) {
3002 case _TAG_HEADER_POS_BIG:
3003 case _TAG_HEADER_NEG_BIG:
3004 case _TAG_HEADER_FLOAT:
3005 case _TAG_HEADER_HEAP_BIN:
3006 newval_sz = header_arity(*newp) + 1;
3007 if (is_boxed(oldval)) {
3008 oldp = boxed_val(oldval);
3009 switch (*oldp & _TAG_HEADER_MASK) {
3010 case _TAG_HEADER_POS_BIG:
3011 case _TAG_HEADER_NEG_BIG:
3012 case _TAG_HEADER_FLOAT:
3013 case _TAG_HEADER_HEAP_BIN:
3014 oldval_sz = header_arity(*oldp) + 1;
3015 if (oldval_sz == newval_sz) {
3016 /* "self contained" terms of same size, do memcpy */
3017 sys_memcpy(oldp, newp, newval_sz*sizeof(Eterm));
3018 return;
3019 }
3020 goto both_size_set;
3021 }
3022 }
3023 goto new_size_set;
3024 }
3025 }
3026 }
3027 }
3028     /* A simple in-place memcpy is not possible, or the dbterm is already */
3029     /* flagged for resize; we need to realloc... */
3030
3031 newval_sz = is_immed(newval) ? 0 : size_object(newval);
3032 new_size_set:
3033
3034 oldval_sz = is_immed(oldval) ? 0 : size_object(oldval);
3035 both_size_set:
3036
3037 handle->new_size = handle->new_size - oldval_sz + newval_sz;
3038
3039 /* write new value in old dbterm, finalize will make a flat copy */
3040 handle->dbterm->tpl[position] = newval;
3041 handle->flags |= DB_MUST_RESIZE;
3042 }
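/* Example of the fast path above (illustrative): replacing a float element with
 * another float, or a bignum with a bignum of the same word size, overwrites
 * the boxed words in place with sys_memcpy, since those term types are
 * "self contained" (they reference no other heap data). Any other combination
 * only records the size delta and writes the new value into the old dbterm;
 * db_finalize_resize() later makes the real flat copy. */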
3043
3044 static ERTS_INLINE byte* db_realloc_term(DbTableCommon* tb, void* old,
3045 Uint old_sz, Uint new_sz, Uint offset)
3046 {
3047 byte* ret;
3048 if (erts_ets_realloc_always_moves) {
3049 ret = erts_db_alloc(ERTS_ALC_T_DB_TERM, (DbTable*)tb, new_sz);
3050 sys_memcpy(ret, old, offset);
3051 erts_db_free(ERTS_ALC_T_DB_TERM, (DbTable*)tb, old, old_sz);
3052 } else {
3053 ret = erts_db_realloc(ERTS_ALC_T_DB_TERM, (DbTable*)tb,
3054 old, old_sz, new_sz);
3055 }
3056 return ret;
3057 }
3058
3059 /* Allocated size of a compressed dbterm
3060 */
3061 static ERTS_INLINE Uint db_alloced_size_comp(DbTerm* obj)
3062 {
3063 return obj->tpl[arityval(*obj->tpl) + 1];
3064 }
3065
3066 void db_free_term(DbTable *tb, void* basep, Uint offset)
3067 {
3068 DbTerm* db = (DbTerm*) ((byte*)basep + offset);
3069 Uint size;
3070 if (tb->common.compress) {
3071 db_cleanup_offheap_comp(db);
3072 size = db_alloced_size_comp(db);
3073 }
3074 else {
3075 ErlOffHeap tmp_oh;
3076 tmp_oh.first = db->first_oh;
3077 erts_cleanup_offheap(&tmp_oh);
3078 size = offset + offsetof(DbTerm,tpl) + db->size*sizeof(Eterm);
3079 }
3080 erts_db_free(ERTS_ALC_T_DB_TERM, tb, basep, size);
3081 }
3082
3083 Uint db_term_size(DbTable *tb, void* basep, Uint offset)
3084 {
3085 DbTerm* db = (DbTerm*) ((byte*)basep + offset);
3086 if (tb->common.compress) {
3087 return db_alloced_size_comp(db);
3088 }
3089 else {
3090 return offset + offsetof(DbTerm,tpl) + db->size*sizeof(Eterm);
3091 }
3092 }
3093
3094 void db_free_term_no_tab(int compress, void* basep, Uint offset)
3095 {
3096 DbTerm* db = (DbTerm*) ((byte*)basep + offset);
3097 Uint size;
3098 if (compress) {
3099 db_cleanup_offheap_comp(db);
3100 size = db_alloced_size_comp(db);
3101 }
3102 else {
3103 ErlOffHeap tmp_oh;
3104 tmp_oh.first = db->first_oh;
3105 erts_cleanup_offheap(&tmp_oh);
3106 size = offset + offsetof(DbTerm,tpl) + db->size*sizeof(Eterm);
3107 }
3108 erts_db_free(ERTS_ALC_T_DB_TERM, NULL, basep, size);
3109 }
3110
3111 static ERTS_INLINE Uint align_up(Uint value, Uint pow2)
3112 {
3113 ASSERT((pow2 & (pow2-1)) == 0);
3114 return (value + (pow2-1)) & ~(pow2-1);
3115 }
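/* For example, align_up(13, sizeof(Uint)) is 16 on a 64-bit system; pow2 must
 * be a power of two, as asserted above. */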
3116
3117 /* Compressed size of an uncompressed term
3118 */
3119 static Uint db_size_dbterm_comp(int keypos, Eterm obj)
3120 {
3121 Eterm* tpl = tuple_val(obj);
3122 int i;
3123 Uint size = sizeof(DbTerm)
3124 + arityval(*tpl) * sizeof(Eterm)
3125 + sizeof(Uint); /* "alloc_size" */
3126
3127 for (i = arityval(*tpl); i>0; i--) {
3128 if (i != keypos && is_not_immed(tpl[i])) {
3129 size += erts_encode_ext_size_ets(tpl[i]);
3130 }
3131 }
3132 size += size_object(tpl[keypos]) * sizeof(Eterm);
3133 return align_up(size, sizeof(Uint));
3134 }
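/* Informal sketch of the compressed layout whose size is computed above and
 * which copy_to_comp() below produces:
 *
 *   tpl[0]         arity header, same as in the original tuple
 *   tpl[1..arity]  key position: the key term, copied flat after alloc_size;
 *                  other positions: the immediate itself, or a header-tagged
 *                  byte offset into the encoded area (see ext2elem() below)
 *   tpl[arity+1]   total allocated size ("alloc_size")
 *   ...            flat copy of the key, then the ext-encoded non-key,
 *                  non-immediate elements
 */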
3135
3136 /* Conversion between top tuple element and pointer to compressed data
3137 */
3138 static ERTS_INLINE Eterm ext2elem(Eterm* tpl, byte* ext)
3139 {
3140 return (((Uint)(ext - (byte*)tpl)) << _TAG_PRIMARY_SIZE) | TAG_PRIMARY_HEADER;
3141 }
3142 static ERTS_INLINE byte* elem2ext(Eterm* tpl, Uint ix)
3143 {
3144 ASSERT(is_header(tpl[ix]));
3145 return (byte*)tpl + (tpl[ix] >> _TAG_PRIMARY_SIZE);
3146 }
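/* That is, a compressed non-key element is stored in its tuple slot as
 *
 *     ((byte offset from tpl) << _TAG_PRIMARY_SIZE) | TAG_PRIMARY_HEADER
 *
 * so it looks like a header word; is_header() is what db_do_read_element()
 * uses to tell such a slot from an ordinary immediate value. */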
3147
3148 static void* copy_to_comp(int keypos, Eterm obj, DbTerm* dest,
3149 Uint alloc_size)
3150 {
3151 ErlOffHeap tmp_offheap;
3152 Eterm* src = tuple_val(obj);
3153 Eterm* tpl = dest->tpl;
3154 Eterm key = src[keypos];
3155 int arity = arityval(src[0]);
3156 union {
3157 Eterm* ep;
3158 byte* cp;
3159 UWord ui;
3160 }top;
3161 int i;
3162
3163 top.ep = tpl+ 1 + arity + 1;
3164 tpl[0] = src[0];
3165 tpl[arity + 1] = alloc_size;
3166
3167 tmp_offheap.first = NULL;
3168 tpl[keypos] = copy_struct(key, size_object(key), &top.ep, &tmp_offheap);
3169 dest->first_oh = tmp_offheap.first;
3170 for (i=1; i<=arity; i++) {
3171 if (i != keypos) {
3172 if (is_immed(src[i])) {
3173 tpl[i] = src[i];
3174 }
3175 else {
3176 tpl[i] = ext2elem(tpl, top.cp);
3177 top.cp = erts_encode_ext_ets(src[i], top.cp, &dest->first_oh);
3178 }
3179 }
3180 }
3181
3182 #ifdef DEBUG_CLONE
3183 {
3184 Eterm* dbg_top = erts_alloc(ERTS_ALC_T_DB_TERM, dest->size * sizeof(Eterm));
3185 dest->debug_clone = dbg_top;
3186 tmp_offheap.first = dest->first_oh;
3187 copy_struct(obj, dest->size, &dbg_top, &tmp_offheap);
3188 dest->first_oh = tmp_offheap.first;
3189 ASSERT(dbg_top == dest->debug_clone + dest->size);
3190 }
3191 #endif
3192 return top.cp;
3193 }
3194
3195 /*
3196 ** Copy the object into a possibly new DbTerm.
3197 ** 'offset' is the offset of the DbTerm from the start
3198 ** of the allocated structure. The possibly realloced and copied
3199 ** structure is returned. Make sure (((char *) old) - offset) is a
3200 ** pointer to an ERTS_ALC_T_DB_TERM allocated data area.
3201 */
3202 void* db_store_term(DbTableCommon *tb, DbTerm* old, Uint offset, Eterm obj)
3203 {
3204 byte* basep;
3205 DbTerm* newp;
3206 Eterm* top;
3207 int size = size_object(obj);
3208 ErlOffHeap tmp_offheap;
3209
3210 if (old != 0) {
3211 basep = ((byte*) old) - offset;
3212 tmp_offheap.first = old->first_oh;
3213 erts_cleanup_offheap(&tmp_offheap);
3214 old->first_oh = tmp_offheap.first;
3215 if (size == old->size) {
3216 newp = old;
3217 }
3218 else {
3219 Uint new_sz = offset + sizeof(DbTerm) + sizeof(Eterm)*(size-1);
3220 Uint old_sz = offset + sizeof(DbTerm) + sizeof(Eterm)*(old->size-1);
3221
3222 basep = db_realloc_term(tb, basep, old_sz, new_sz, offset);
3223 newp = (DbTerm*) (basep + offset);
3224 }
3225 }
3226 else {
3227 basep = erts_db_alloc(ERTS_ALC_T_DB_TERM, (DbTable *)tb,
3228 (offset + sizeof(DbTerm) + sizeof(Eterm)*(size-1)));
3229 newp = (DbTerm*) (basep + offset);
3230 }
3231 newp->size = size;
3232 top = newp->tpl;
3233 tmp_offheap.first = NULL;
3234 copy_struct(obj, size, &top, &tmp_offheap);
3235 newp->first_oh = tmp_offheap.first;
3236 #ifdef DEBUG_CLONE
3237 newp->debug_clone = NULL;
3238 #endif
3239 return basep;
3240 }
3241
3242
3243 void* db_store_term_comp(DbTableCommon *tb, /* May be NULL */
3244 int keypos,
3245 DbTerm* old,
3246 Uint offset,Eterm obj)
3247 {
3248 Uint new_sz = offset + db_size_dbterm_comp(keypos, obj);
3249 byte* basep;
3250 DbTerm* newp;
3251 byte* top;
3252
3253 ASSERT(tb == NULL || tb->compress);
3254 if (old != 0) {
3255 Uint old_sz = db_alloced_size_comp(old);
3256 db_cleanup_offheap_comp(old);
3257
3258 basep = ((byte*) old) - offset;
3259 if (new_sz == old_sz) {
3260 newp = old;
3261 }
3262 else {
3263 basep = db_realloc_term(tb, basep, old_sz, new_sz, offset);
3264 newp = (DbTerm*) (basep + offset);
3265 }
3266 }
3267 else {
3268 basep = erts_db_alloc(ERTS_ALC_T_DB_TERM, (DbTable*)tb, new_sz);
3269 newp = (DbTerm*) (basep + offset);
3270 }
3271
3272 newp->size = size_object(obj);
3273 top = copy_to_comp(keypos, obj, newp, new_sz);
3274 ASSERT(top <= basep + new_sz); (void)top;
3275
3276 /* ToDo: Maybe realloc if ((basep+new_sz) - top) > WASTED_SPACE_LIMIT */
3277
3278 return basep;
3279 }
3280
3281
3282 void db_finalize_resize(DbUpdateHandle* handle, Uint offset)
3283 {
3284 DbTable* tbl = handle->tb;
3285 DbTerm* newDbTerm;
3286 Uint alloc_sz = offset +
3287 (tbl->common.compress ?
3288 db_size_dbterm_comp(tbl->common.keypos, make_tuple(handle->dbterm->tpl)) :
3289 sizeof(DbTerm)+sizeof(Eterm)*(handle->new_size-1));
3290 byte* newp = erts_db_alloc(ERTS_ALC_T_DB_TERM, tbl, alloc_sz);
3291 byte* oldp = *(handle->bp);
3292
3293 sys_memcpy(newp, oldp, offset); /* copy only hash/tree header */
3294 *(handle->bp) = newp;
3295 newDbTerm = (DbTerm*) (newp + offset);
3296 newDbTerm->size = handle->new_size;
3297 #ifdef DEBUG_CLONE
3298 newDbTerm->debug_clone = NULL;
3299 #endif
3300
3301 /* make a flat copy */
3302
3303 if (tbl->common.compress) {
3304 copy_to_comp(tbl->common.keypos, make_tuple(handle->dbterm->tpl),
3305 newDbTerm, alloc_sz);
3306 db_free_tmp_uncompressed(handle->dbterm);
3307 }
3308 else {
3309 ErlOffHeap tmp_offheap;
3310 Eterm* tpl = handle->dbterm->tpl;
3311 Eterm* top = newDbTerm->tpl;
3312
3313 tmp_offheap.first = NULL;
3314
3315 {
3316 copy_struct(make_tuple(tpl), handle->new_size, &top, &tmp_offheap);
3317 newDbTerm->first_oh = tmp_offheap.first;
3318 ASSERT((byte*)top == (newp + alloc_sz));
3319 }
3320 }
3321 }
3322
3323 Eterm db_copy_from_comp(DbTableCommon* tb, DbTerm* bp, Eterm** hpp,
3324 ErlOffHeap* off_heap)
3325 {
3326 Eterm* hp = *hpp;
3327 int i, arity = arityval(bp->tpl[0]);
3328 ErtsHeapFactory factory;
3329
3330 hp[0] = bp->tpl[0];
3331 *hpp += arity + 1;
3332
3333 hp[tb->keypos] = copy_struct(bp->tpl[tb->keypos],
3334 size_object(bp->tpl[tb->keypos]),
3335 hpp, off_heap);
3336
3337 erts_factory_static_init(&factory, *hpp, bp->size - (arity+1), off_heap);
3338
3339 for (i=arity; i>0; i--) {
3340 if (i != tb->keypos) {
3341 if (is_immed(bp->tpl[i])) {
3342 hp[i] = bp->tpl[i];
3343 }
3344 else {
3345 hp[i] = erts_decode_ext_ets(&factory,
3346 elem2ext(bp->tpl, i));
3347 }
3348 }
3349 }
3350 *hpp = factory.hp;
3351 erts_factory_close(&factory);
3352
3353 ASSERT((*hpp - hp) <= bp->size);
3354 #ifdef DEBUG_CLONE
3355 ASSERT(EQ(make_tuple(hp),make_tuple(bp->debug_clone)));
3356 #endif
3357 return make_tuple(hp);
3358 }
3359
3360 Eterm db_copy_element_from_ets(DbTableCommon* tb, Process* p,
3361 DbTerm* obj, Uint pos,
3362 Eterm** hpp, Uint extra)
3363 {
3364 if (is_immed(obj->tpl[pos])) {
3365 *hpp = HAlloc(p, extra);
3366 return obj->tpl[pos];
3367 }
3368 if (tb->compress && pos != tb->keypos) {
3369 byte* ext = elem2ext(obj->tpl, pos);
3370 Sint sz = erts_decode_ext_size_ets(ext, db_alloced_size_comp(obj)) + extra;
3371 Eterm copy;
3372 ErtsHeapFactory factory;
3373
3374 erts_factory_proc_prealloc_init(&factory, p, sz);
3375 copy = erts_decode_ext_ets(&factory, ext);
3376 *hpp = erts_produce_heap(&factory, extra, 0);
3377 erts_factory_close(&factory);
3378 #ifdef DEBUG_CLONE
3379 ASSERT(EQ(copy, obj->debug_clone[pos]));
3380 #endif
3381 return copy;
3382 }
3383 else {
3384 Uint sz = size_object(obj->tpl[pos]);
3385 *hpp = HAlloc(p, sz + extra);
3386 return copy_struct(obj->tpl[pos], sz, hpp, &MSO(p));
3387 }
3388 }
3389
3390
3391 /* Our own "cleanup_offheap"
3392 * as refc-binaries may be unaligned in compressed terms
3393 */
3394 void db_cleanup_offheap_comp(DbTerm* obj)
3395 {
3396 union erl_off_heap_ptr u;
3397 struct erts_tmp_aligned_offheap tmp;
3398
3399 for (u.hdr = obj->first_oh; u.hdr; u.hdr = u.hdr->next) {
3400 erts_align_offheap(&u, &tmp);
3401 switch (thing_subtag(u.hdr->thing_word)) {
3402 case REFC_BINARY_SUBTAG:
3403 erts_bin_release(u.pb->val);
3404 break;
3405 case FUN_SUBTAG:
3406 if (erts_refc_dectest(&u.fun->fe->refc, 0) == 0) {
3407 erts_erase_fun_entry(u.fun->fe);
3408 }
3409 break;
3410 case REF_SUBTAG:
3411 ASSERT(is_magic_ref_thing(u.hdr));
3412 erts_bin_release((Binary *)u.mref->mb);
3413 break;
3414 default:
3415 ASSERT(is_external_header(u.hdr->thing_word));
3416 erts_deref_node_entry(u.ext->node, make_boxed(u.ep));
3417 break;
3418 }
3419 }
3420 #ifdef DEBUG_CLONE
3421 if (obj->debug_clone != NULL) {
3422 erts_free(ERTS_ALC_T_DB_TERM, obj->debug_clone);
3423 obj->debug_clone = NULL;
3424 }
3425 #endif
3426 }
3427
3428 int db_eq_comp(DbTableCommon* tb, Eterm a, DbTerm* b)
3429 {
3430 ErlOffHeap tmp_offheap;
3431 Eterm* allocp;
3432 Eterm* hp;
3433 Eterm tmp_b;
3434 int is_eq;
3435
3436 ASSERT(tb->compress);
3437 hp = allocp = erts_alloc(ERTS_ALC_T_TMP, b->size*sizeof(Eterm));
3438 tmp_offheap.first = NULL;
3439 tmp_b = db_copy_from_comp(tb, b, &hp, &tmp_offheap);
3440 is_eq = eq(a,tmp_b);
3441 erts_cleanup_offheap(&tmp_offheap);
3442 erts_free(ERTS_ALC_T_TMP, allocp);
3443 return is_eq;
3444 }
3445
3446 /*
3447 ** Check if the object represents a "match" variable,
3448 ** i.e. an atom '$N' where N is a non-negative integer.
3449 **
3450 */
3451
3452 int db_is_variable(Eterm obj)
3453 {
3454 byte *b;
3455 int n;
3456 int N;
3457
3458 if (is_not_atom(obj))
3459 return -1;
3460 b = atom_tab(atom_val(obj))->name;
3461 if ((n = atom_tab(atom_val(obj))->len) < 2)
3462 return -1;
3463 if (*b++ != '$')
3464 return -1;
3465 n--;
3466 /* Handle first digit */
3467 if (*b == '0')
3468 return (n == 1) ? 0 : -1;
3469 if (*b >= '1' && *b <= '9')
3470 N = *b++ - '0';
3471 else
3472 return -1;
3473 n--;
3474 while(n--) {
3475 if (*b >= '0' && *b <= '9') {
3476 N = N*10 + (*b - '0');
3477 b++;
3478 }
3479 else
3480 return -1;
3481 }
3482 return N;
3483 }
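/* Examples, following directly from the checks above:
 *   '$1'  -> 1      '$0'  -> 0      '$10' -> 10
 *   '$01' -> -1     '$'   -> -1     'foo' -> -1
 * ('_' is not a match variable here; callers treat am_Underscore separately,
 * as in db_has_variable() below.)
 */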
3484
3485 /* check if node is (or contains) a map
3486 * return 1 if node contains a map
3487 * return 0 otherwise
3488 */
3489
3490 int db_has_map(Eterm node) {
3491 DECLARE_ESTACK(s);
3492
3493 ESTACK_PUSH(s,node);
3494 while (!ESTACK_ISEMPTY(s)) {
3495 node = ESTACK_POP(s);
3496 if (is_list(node)) {
3497 while (is_list(node)) {
3498 ESTACK_PUSH(s,CAR(list_val(node)));
3499 node = CDR(list_val(node));
3500 }
3501 ESTACK_PUSH(s,node); /* Non wellformed list or [] */
3502 } else if (is_tuple(node)) {
3503 Eterm *tuple = tuple_val(node);
3504 int arity = arityval(*tuple);
3505 while(arity--) {
3506 ESTACK_PUSH(s,*(++tuple));
3507 }
3508 	} else if (is_map(node)) {
3509 DESTROY_ESTACK(s);
3510 return 1;
3511 }
3512 }
3513 DESTROY_ESTACK(s);
3514 return 0;
3515 }
3516
3517 /* check if obj is (or contains) a variable */
3518 /* return 1 if obj contains a variable or underscore */
3519 /* return 0 if obj is fully ground */
3520
3521 int db_has_variable(Eterm node) {
3522 DECLARE_ESTACK(s);
3523
3524 ESTACK_PUSH(s,node);
3525 while (!ESTACK_ISEMPTY(s)) {
3526 node = ESTACK_POP(s);
3527 switch(node & _TAG_PRIMARY_MASK) {
3528 case TAG_PRIMARY_LIST:
3529 while (is_list(node)) {
3530 ESTACK_PUSH(s,CAR(list_val(node)));
3531 node = CDR(list_val(node));
3532 }
3533 ESTACK_PUSH(s,node); /* Non wellformed list or [] */
3534 break;
3535 case TAG_PRIMARY_BOXED:
3536 if (is_tuple(node)) {
3537 Eterm *tuple = tuple_val(node);
3538 int arity = arityval(*tuple);
3539 while(arity--) {
3540 ESTACK_PUSH(s,*(++tuple));
3541 }
3542 } else if (is_flatmap(node)) {
3543 Eterm *values = flatmap_get_values(flatmap_val(node));
3544 Uint size = flatmap_get_size(flatmap_val(node));
3545 ESTACK_PUSH(s, ((flatmap_t *) flatmap_val(node))->keys);
3546 while (size--) {
3547 ESTACK_PUSH(s, *(values++));
3548 }
3549 } else if (is_map(node)) { /* other map-nodes or map-heads */
3550 Eterm *ptr = hashmap_val(node);
3551 int i = hashmap_bitcount(MAP_HEADER_VAL(*ptr));
3552 ptr += MAP_HEADER_ARITY(*ptr);
3553 while(i--) { ESTACK_PUSH(s, *++ptr); }
3554 }
3555 break;
3556 case TAG_PRIMARY_IMMED1:
3557 if (node == am_Underscore || db_is_variable(node) >= 0) {
3558 DESTROY_ESTACK(s);
3559 return 1;
3560 }
3561 break;
3562 }
3563 }
3564 DESTROY_ESTACK(s);
3565 return 0;
3566 }
3567
3568 /*
3569 ** Local (static) utilities.
3570 */
3571
3572 /*
3573 ***************************************************************************
3574 ** Compiled matches
3575 ***************************************************************************
3576 */
3577 /*
3578 ** Utility to add an error
3579 */
3580
3581 static void vadd_dmc_err(DMCErrInfo *err_info,
3582 DMCErrorSeverity severity,
3583 int variable,
3584 const char *str,
3585 ...)
3586 {
3587 DMCError *e;
3588 va_list args;
3589 va_start(args, str);
3590
3591
3592     /* Linked in reverse order, to ease the formatting */
3593 e = erts_alloc(ERTS_ALC_T_DB_DMC_ERROR, sizeof(DMCError));
3594 erts_vsnprintf(e->error_string, DMC_ERR_STR_LEN, str, args);
3595 e->variable = variable;
3596 e->severity = severity;
3597 e->next = err_info->first;
3598 #ifdef HARDDEBUG
3599 erts_fprintf(stderr,"add_dmc_err: %s\n",e->error_string);
3600 #endif
3601 err_info->first = e;
3602 if (severity >= dmcError)
3603 err_info->error_added = 1;
3604
3605 va_end(args);
3606 }
3607
3608
3609 /*
3610 ** Handle one term in the match expression (not the guard)
3611 */
3612 static DMCRet dmc_one_term(DMCContext *context,
3613 DMCHeap *heap,
3614 DMC_STACK_TYPE(Eterm) *stack,
3615 DMC_STACK_TYPE(UWord) *text,
3616 Eterm c)
3617 {
3618 Sint n;
3619 Eterm *hp;
3620 Uint sz, sz2, sz3;
3621 Uint i, j;
3622
3623 switch (c & _TAG_PRIMARY_MASK) {
3624 case TAG_PRIMARY_IMMED1:
3625 if ((n = db_is_variable(c)) >= 0) { /* variable */
3626 if (n >= heap->size) {
3627 /*
3628 	    ** Ouch, the variable number is too large for the
3629 	    ** pre-allocated variable array; compact and restart. */
3630 Eterm *save_hp;
3631 ASSERT(heap->vars == heap->vars_def);
3632 sz = sz2 = sz3 = 0;
3633 for (j = 0; j < context->num_match; ++j) {
3634 sz += size_object(context->matchexpr[j]);
3635 sz2 += size_object(context->guardexpr[j]);
3636 sz3 += size_object(context->bodyexpr[j]);
3637 }
3638 context->copy =
3639 new_message_buffer(sz + sz2 + sz3 +
3640 context->num_match);
3641 save_hp = hp = context->copy->mem;
3642 hp += context->num_match;
3643 for (j = 0; j < context->num_match; ++j) {
3644 context->matchexpr[j] =
3645 copy_struct(context->matchexpr[j],
3646 size_object(context->matchexpr[j]), &hp,
3647 &(context->copy->off_heap));
3648 context->guardexpr[j] =
3649 copy_struct(context->guardexpr[j],
3650 size_object(context->guardexpr[j]), &hp,
3651 &(context->copy->off_heap));
3652 context->bodyexpr[j] =
3653 copy_struct(context->bodyexpr[j],
3654 size_object(context->bodyexpr[j]), &hp,
3655 &(context->copy->off_heap));
3656 }
3657 for (j = 0; j < context->num_match; ++j) {
3658 /* the actual expressions can be
3659 		   atoms in themselves, place them first */
3660 *save_hp++ = context->matchexpr[j];
3661 }
3662 heap->size = match_compact(context->copy,
3663 context->err_info);
3664 for (j = 0; j < context->num_match; ++j) {
3665 		/* restore the match terms, as they may themselves
3666 		   be variable atoms that were renumbered */
3667 context->matchexpr[j] = context->copy->mem[j];
3668 }
3669 heap->vars = erts_alloc(ERTS_ALC_T_DB_MS_CMPL_HEAP,
3670 heap->size*sizeof(DMCVariable));
3671 sys_memset(heap->vars, 0, heap->size * sizeof(DMCVariable));
3672 DMC_CLEAR(*stack);
3673 /*DMC_PUSH(*stack,NIL);*/
3674 DMC_CLEAR(*text);
3675 return retRestart;
3676 }
3677 if (heap->vars[n].is_bound) {
3678 DMC_PUSH2(*text, matchCmp, n);
3679 } else { /* Not bound, bind! */
3680 if (n >= heap->vars_used)
3681 heap->vars_used = n + 1;
3682 DMC_PUSH2(*text, matchBind, n);
3683 heap->vars[n].is_bound = 1;
3684 }
3685 } else if (c == am_Underscore) {
3686 DMC_PUSH(*text, matchSkip);
3687 } else { /* Any immediate value */
3688 DMC_PUSH2(*text, matchEq, (Uint) c);
3689 }
3690 break;
3691 case TAG_PRIMARY_LIST:
3692 DMC_PUSH(*text, matchPushL);
3693 ++(context->stack_used);
3694 DMC_PUSH(*stack, c);
3695 break;
3696 case TAG_PRIMARY_BOXED: {
3697 Eterm hdr = *boxed_val(c);
3698 switch ((hdr & _TAG_HEADER_MASK) >> _TAG_PRIMARY_SIZE) {
3699 case (_TAG_HEADER_ARITYVAL >> _TAG_PRIMARY_SIZE):
3700 n = arityval(*tuple_val(c));
3701 DMC_PUSH2(*text, matchPushT, n);
3702 ++(context->stack_used);
3703 DMC_PUSH(*stack, c);
3704 break;
3705 case (_TAG_HEADER_MAP >> _TAG_PRIMARY_SIZE):
3706 if (is_flatmap(c))
3707 n = flatmap_get_size(flatmap_val(c));
3708 else
3709 n = hashmap_size(c);
3710 DMC_PUSH2(*text, matchPushM, n);
3711 ++(context->stack_used);
3712 DMC_PUSH(*stack, c);
3713 break;
3714 case (_TAG_HEADER_REF >> _TAG_PRIMARY_SIZE):
3715 {
3716 Eterm* ref_val = internal_ref_val(c);
3717 DMC_PUSH(*text, matchEqRef);
3718 n = thing_arityval(ref_val[0]);
3719 for (i = 0; i <= n; ++i) {
3720 DMC_PUSH(*text, ref_val[i]);
3721 }
3722 break;
3723 }
3724 case (_TAG_HEADER_POS_BIG >> _TAG_PRIMARY_SIZE):
3725 case (_TAG_HEADER_NEG_BIG >> _TAG_PRIMARY_SIZE):
3726 {
3727 Eterm* bval = big_val(c);
3728 n = thing_arityval(bval[0]);
3729 DMC_PUSH(*text, matchEqBig);
3730 for (i = 0; i <= n; ++i) {
3731 DMC_PUSH(*text, (Uint) bval[i]);
3732 }
3733 break;
3734 }
3735 case (_TAG_HEADER_FLOAT >> _TAG_PRIMARY_SIZE):
3736 DMC_PUSH2(*text, matchEqFloat, (Uint) float_val(c)[1]);
3737 #ifdef ARCH_32
3738 DMC_PUSH(*text, (Uint) float_val(c)[2]);
3739 #endif
3740 break;
3741 default: /* BINARY, FUN, VECTOR, or EXTERNAL */
3742 DMC_PUSH2(*text, matchEqBin, dmc_private_copy(context, c));
3743 break;
3744 }
3745 break;
3746 }
3747 default:
3748 erts_exit(ERTS_ERROR_EXIT, "db_match_compile: "
3749 "Bad object on heap: 0x%bex\n", c);
3750 }
3751 return retOk;
3752 }
3753
3754 /*
3755 ** Make a private copy of a term in a context.
3756 */
3757
3758 static Eterm
3759 dmc_private_copy(DMCContext *context, Eterm c)
3760 {
3761 if (is_immed(c)) {
3762 return c;
3763 } else {
3764 Uint n = size_object(c);
3765 ErlHeapFragment *tmp_mb = new_message_buffer(n);
3766 Eterm *hp = tmp_mb->mem;
3767 Eterm copy = copy_struct(c, n, &hp, &(tmp_mb->off_heap));
3768 tmp_mb->next = context->save;
3769 context->save = tmp_mb;
3770 return copy;
3771 }
3772 }
3773
3774 /*
3775 ** Match guard compilation
3776 */
3777
3778 static void do_emit_constant(DMCContext *context, DMC_STACK_TYPE(UWord) *text,
3779 Eterm t)
3780 {
3781 int sz;
3782 ErlHeapFragment *emb;
3783 Eterm *hp;
3784 Eterm tmp;
3785
3786 if (is_immed(t)) {
3787 tmp = t;
3788 } else {
3789 sz = my_size_object(t);
3790 if (sz) {
3791 emb = new_message_buffer(sz);
3792 hp = emb->mem;
3793 tmp = my_copy_struct(t,&hp,&(emb->off_heap));
3794 emb->next = context->save;
3795 context->save = emb;
3796 }
3797 else {
3798 /* must be {const, Immed} */
3799 ASSERT(is_tuple_arity(t,2) && tuple_val(t)[1] == am_const);
3800 ASSERT(is_immed(tuple_val(t)[2]));
3801 tmp = tuple_val(t)[2];
3802 }
3803 }
3804 DMC_PUSH2(*text, matchPushC, (Uint)tmp);
3805 if (++context->stack_used > context->stack_need)
3806 context->stack_need = context->stack_used;
3807 }
3808
3809 #define RETURN_ERROR_X(VAR, ContextP, ConstantF, String, ARG) \
3810 (((ContextP)->err_info != NULL) \
3811 ? ((ConstantF) = 0, \
3812 vadd_dmc_err((ContextP)->err_info, dmcError, VAR, String, ARG), \
3813 retOk) \
3814 : retFail)
3815
3816 #define RETURN_ERROR(String, ContextP, ConstantF) \
3817 return RETURN_ERROR_X(-1, ContextP, ConstantF, String, 0)
3818
3819 #define RETURN_VAR_ERROR(String, N, ContextP, ConstantF) \
3820 return RETURN_ERROR_X(N, ContextP, ConstantF, String, 0)
3821
3822 #define RETURN_TERM_ERROR(String, T, ContextP, ConstantF) \
3823 return RETURN_ERROR_X(-1, ContextP, ConstantF, String, T)
3824
3825 #define WARNING(String, ContextP) \
3826 add_dmc_err((ContextP)->err_info, String, -1, 0UL, dmcWarning)
3827
3828 #define VAR_WARNING(String, N, ContextP) \
3829 add_dmc_err((ContextP)->err_info, String, N, 0UL, dmcWarning)
3830
3831 #define TERM_WARNING(String, T, ContextP) \
3832 add_dmc_err((ContextP)->err_info, String, -1, T, dmcWarning)
3833
3834 static DMCRet dmc_list(DMCContext *context,
3835 DMCHeap *heap,
3836 DMC_STACK_TYPE(UWord) *text,
3837 Eterm t,
3838 int *constant)
3839 {
3840 int c1;
3841 int c2;
3842 int ret;
3843
3844 if ((ret = dmc_expr(context, heap, text, CAR(list_val(t)), &c1)) != retOk)
3845 return ret;
3846
3847 if ((ret = dmc_expr(context, heap, text, CDR(list_val(t)), &c2)) != retOk)
3848 return ret;
3849
3850 if (c1 && c2) {
3851 *constant = 1;
3852 return retOk;
3853 }
3854 *constant = 0;
3855 if (!c1) {
3856 /* The CAR is not a constant, so if the CDR is, we just push it,
3857 otherwise it is already pushed. */
3858 if (c2)
3859 do_emit_constant(context, text, CDR(list_val(t)));
3860 DMC_PUSH(*text, matchConsA);
3861 } else { /* !c2 && c1 */
3862 do_emit_constant(context, text, CAR(list_val(t)));
3863 DMC_PUSH(*text, matchConsB);
3864 }
3865 --context->stack_used; /* Two objects on stack becomes one */
3866 return retOk;
3867 }
3868
3869 static void
3870 dmc_rearrange_constants(DMCContext *context, DMC_STACK_TYPE(UWord) *text,
3871 int textpos, Eterm *p, Uint nelems)
3872 {
3873 DMC_STACK_TYPE(UWord) instr_save;
3874 Uint i;
3875
3876 DMC_INIT_STACK(instr_save);
3877 while (DMC_STACK_NUM(*text) > textpos) {
3878 DMC_PUSH(instr_save, DMC_POP(*text));
3879 }
3880 for (i = nelems; i--;) {
3881 do_emit_constant(context, text, p[i]);
3882 }
3883 while(!DMC_EMPTY(instr_save)) {
3884 DMC_PUSH(*text, DMC_POP(instr_save));
3885 }
3886 DMC_FREE(instr_save);
3887 }
3888
3889 static DMCRet
3890 dmc_array(DMCContext *context, DMCHeap *heap, DMC_STACK_TYPE(UWord) *text,
3891 Eterm *p, Uint nelems, int *constant)
3892 {
3893 int all_constant = 1;
3894 int textpos = DMC_STACK_NUM(*text);
3895 int preventive_bumps = 0;
3896 Uint i;
3897
3898 /*
3899     ** We remember where we started to lay out code,
3900     ** assume everything is constant, and back up and restart if not.
3901 ** The array should be laid out with the last element first,
3902 ** so we can memcpy it to the eheap.
3903 */
3904 for (i = nelems; i--;) {
3905 DMCRet ret;
3906 int c;
3907
3908 ret = dmc_expr(context, heap, text, p[i], &c);
3909 if (ret != retOk) {
3910 return ret;
3911 }
3912 if (!c && all_constant) {
3913 all_constant = 0;
3914 if (i < nelems - 1) {
3915 /* Revert preventive stack bumps as they will now be done again
3916 * for real by do_emit_constant() */
3917 context->stack_used -= preventive_bumps;
3918
3919 dmc_rearrange_constants(context, text, textpos,
3920 p + i + 1, nelems - i - 1);
3921 }
3922 } else if (c) {
3923 if (all_constant) {
3924 /*
3925 * OTP-17379:
3926 * All constants so far, but do preventive stack bumps
3927 * as the constants may later be converted to matchPushC
3928 * by dmc_rearrange_constants above.
3929 * Otherwise dmc_expr() may do incorrect stack depth estimation
3930 * when it emits instructions for the first non-constant.
3931 */
3932 ++context->stack_used;
3933 ++preventive_bumps;
3934 }
3935 else {
3936 do_emit_constant(context, text, p[i]);
3937 }
3938 }
3939 }
3940 if (all_constant) {
3941 /* Preventive stack bumps not needed */
3942 context->stack_used -= preventive_bumps;
3943 }
3944 *constant = all_constant;
3945 return retOk;
3946 }
3947
3948 static DMCRet
3949 dmc_tuple(DMCContext *context, DMCHeap *heap, DMC_STACK_TYPE(UWord) *text,
3950 Eterm t, int *constant)
3951 {
3952 int all_constant;
3953 Eterm *p = tuple_val(t);
3954 Uint nelems = arityval(*p);
3955 DMCRet ret;
3956
3957 ret = dmc_array(context, heap, text, p + 1, nelems, &all_constant);
3958 if (ret != retOk) {
3959 return ret;
3960 }
3961 if (all_constant) {
3962 *constant = 1;
3963 return retOk;
3964 }
3965 DMC_PUSH2(*text, matchMkTuple, nelems);
3966 context->stack_used -= (nelems - 1);
3967 *constant = 0;
3968 return retOk;
3969 }
3970
3971 /*
3972 * For maps we only expand the values of the map. The keys remain as they are.
3973 * So the map #{ {const,a} => {const,b} } will be transformed to #{ {const,a} => b }.
3974 */
3975 static DMCRet
3976 dmc_map(DMCContext *context, DMCHeap *heap, DMC_STACK_TYPE(UWord) *text,
3977 Eterm t, int *constant)
3978 {
3979 int nelems;
3980 int constant_values, constant_keys;
3981 DMCRet ret;
3982 if (is_flatmap(t)) {
3983 flatmap_t *m = (flatmap_t *)flatmap_val(t);
3984 Eterm *values = flatmap_get_values(m);
3985 int textpos = DMC_STACK_NUM(*text);
3986 int stackpos = context->stack_used;
3987
3988 nelems = flatmap_get_size(m);
3989
3990 if ((ret = dmc_array(context, heap, text, values, nelems, &constant_values)) != retOk) {
3991 return ret;
3992 }
3993
3994 if ((ret = dmc_tuple(context, heap, text, m->keys, &constant_keys)) != retOk) {
3995 return ret;
3996 }
3997
3998 if (constant_values && constant_keys) {
3999 *constant = 1;
4000 return retOk;
4001 }
4002
4003 /* If all values were constants, then nothing was emitted by the
4004 first dmc_array, so we reset the pc and emit all values as
4005 constants and then re-emit the keys. */
4006 if (constant_values) {
4007 DMC_STACK_NUM(*text) = textpos;
4008 context->stack_used = stackpos;
4009 ASSERT(!constant_keys);
4010 for (int i = nelems; i--;) {
4011 do_emit_constant(context, text, values[i]);
4012 }
4013 dmc_tuple(context, heap, text, m->keys, &constant_keys);
4014 } else if (constant_keys) {
4015 Eterm *p = tuple_val(m->keys);
4016 Uint nelems = arityval(*p);
4017 ASSERT(!constant_values);
4018 p++;
4019 for (int i = nelems; i--;)
4020 do_emit_constant(context, text, p[i]);
4021 DMC_PUSH2(*text, matchMkTuple, nelems);
4022 context->stack_used -= nelems - 1;
4023 }
4024
4025 DMC_PUSH2(*text, matchMkFlatMap, nelems);
4026 context->stack_used -= nelems; /* n values + 1 key-tuple => 1 map */
4027 *constant = 0;
4028 return retOk;
4029 } else {
4030 DECLARE_WSTACK(wstack);
4031 Eterm *kv;
4032 int c;
4033 int textpos = DMC_STACK_NUM(*text);
4034 int stackpos = context->stack_used;
4035
4036 ASSERT(is_hashmap(t));
4037
4038 hashmap_iterator_init(&wstack, t, 1);
4039 constant_values = 1;
4040 nelems = hashmap_size(t);
4041
4042 /* Check if all keys and values are constants */
4043 while ((kv=hashmap_iterator_prev(&wstack)) != NULL && constant_values) {
4044 if ((ret = dmc_expr(context, heap, text, CAR(kv), &c)) != retOk) {
4045 DESTROY_WSTACK(wstack);
4046 return ret;
4047 }
4048 if (!c)
4049 constant_values = 0;
4050 if ((ret = dmc_expr(context, heap, text, CDR(kv), &c)) != retOk) {
4051 DESTROY_WSTACK(wstack);
4052 return ret;
4053 }
4054 if (!c)
4055 constant_values = 0;
4056 }
4057
4058 if (constant_values) {
4059 ASSERT(DMC_STACK_NUM(*text) == textpos);
4060 *constant = 1;
4061 DESTROY_WSTACK(wstack);
4062 return retOk;
4063 }
4064
4065 /* reset the program to the original position and re-emit everything */
4066 DMC_STACK_NUM(*text) = textpos;
4067 context->stack_used = stackpos;
4068
4069 *constant = 0;
4070
4071 hashmap_iterator_init(&wstack, t, 1);
4072
4073 while ((kv=hashmap_iterator_prev(&wstack)) != NULL) {
4074 /* push key */
4075 if ((ret = dmc_expr(context, heap, text, CAR(kv), &c)) != retOk) {
4076 DESTROY_WSTACK(wstack);
4077 return ret;
4078 }
4079 if (c) {
4080 do_emit_constant(context, text, CAR(kv));
4081 }
4082
4083 /* push value */
4084 if ((ret = dmc_expr(context, heap, text, CDR(kv), &c)) != retOk) {
4085 DESTROY_WSTACK(wstack);
4086 return ret;
4087 }
4088 if (c) {
4089 do_emit_constant(context, text, CDR(kv));
4090 }
4091 }
4092 DMC_PUSH2(*text, matchMkHashMap, nelems);
4093 context->stack_used -= 2*nelems - 1; /* n keys & values => 1 map */
4094 DESTROY_WSTACK(wstack);
4095 return retOk;
4096 }
4097 }
4098
4099 static DMCRet dmc_whole_expression(DMCContext *context,
4100 DMCHeap *heap,
4101 DMC_STACK_TYPE(UWord) *text,
4102 Eterm t,
4103 int *constant)
4104 {
4105 if (context->cflags & DCOMP_TRACE) {
4106 /* Hmmm, convert array to list... */
4107 if (context->special) {
4108 DMC_PUSH(*text, matchPushArrayAsListU);
4109 } else {
4110 ASSERT(is_tuple(context->matchexpr
4111 [context->current_match]));
4112 DMC_PUSH(*text, matchPushArrayAsList);
4113 }
4114 } else {
4115 DMC_PUSH(*text, matchPushExpr);
4116 }
4117 ++context->stack_used;
4118 if (context->stack_used > context->stack_need)
4119 context->stack_need = context->stack_used;
4120 *constant = 0;
4121 return retOk;
4122 }
4123
4124 /* Figure out which PushV instruction to use.
4125 */
4126 static void dmc_add_pushv_variant(DMCContext *context, DMCHeap *heap,
4127 DMC_STACK_TYPE(UWord) *text, Uint n)
4128 {
4129 DMCVariable* v = &heap->vars[n];
4130 MatchOps instr = matchPushV;
4131
4132 ASSERT(n < heap->vars_used && v->is_bound);
4133 if (!context->is_guard) {
4134 if(!v->is_in_body) {
4135 instr = matchPushVResult;
4136 v->is_in_body = 1;
4137 }
4138 }
4139 DMC_PUSH(*text, instr);
4140 DMC_PUSH(*text, n);
4141 }
4142
4143 static DMCRet dmc_variable(DMCContext *context,
4144 DMCHeap *heap,
4145 DMC_STACK_TYPE(UWord) *text,
4146 Eterm t,
4147 int *constant)
4148 {
4149 Uint n = db_is_variable(t);
4150
4151 if (n >= heap->vars_used || !heap->vars[n].is_bound) {
4152 RETURN_VAR_ERROR("Variable $%%d is unbound.", n, context, *constant);
4153 }
4154
4155 dmc_add_pushv_variant(context, heap, text, n);
4156
4157 ++context->stack_used;
4158 if (context->stack_used > context->stack_need)
4159 context->stack_need = context->stack_used;
4160 *constant = 0;
4161 return retOk;
4162 }
4163
4164 static DMCRet dmc_all_bindings(DMCContext *context,
4165 DMCHeap *heap,
4166 DMC_STACK_TYPE(UWord) *text,
4167 Eterm t,
4168 int *constant)
4169 {
4170 int i;
4171 int heap_used = 0;
4172
4173 DMC_PUSH(*text, matchPushC);
4174 DMC_PUSH(*text, NIL);
4175 for (i = heap->vars_used - 1; i >= 0; --i) {
4176 if (heap->vars[i].is_bound) {
4177 dmc_add_pushv_variant(context, heap, text, i);
4178 DMC_PUSH(*text, matchConsB);
4179 heap_used += 2;
4180 }
4181 }
4182 ++context->stack_used;
4183 if ((context->stack_used + 1) > context->stack_need)
4184 context->stack_need = (context->stack_used + 1);
4185 *constant = 0;
4186 return retOk;
4187 }
4188
4189 static DMCRet dmc_const(DMCContext *context,
4190 DMCHeap *heap,
4191 DMC_STACK_TYPE(UWord) *text,
4192 Eterm t,
4193 int *constant)
4194 {
4195 if (tuple_val(t)[0] != make_arityval(2)) {
4196 	RETURN_TERM_ERROR("Special form 'const' called with wrong number "
4197 			  "of arguments in %T.", t, context, *constant);
4198 }
4199 *constant = 1;
4200 return retOk;
4201 }
4202
4203 static DMCRet dmc_and(DMCContext *context,
4204 DMCHeap *heap,
4205 DMC_STACK_TYPE(UWord) *text,
4206 Eterm t,
4207 int *constant)
4208 {
4209 Eterm *p = tuple_val(t);
4210 Uint a = arityval(*p);
4211 DMCRet ret;
4212 int i;
4213 int c;
4214
4215 if (a < 2) {
4216 RETURN_TERM_ERROR("Special form 'and' called without arguments "
4217 "in %T.", t, context, *constant);
4218 }
4219 *constant = 0;
4220 for (i = a; i > 1; --i) {
4221 if ((ret = dmc_expr(context, heap, text, p[i], &c)) != retOk)
4222 return ret;
4223 if (c)
4224 do_emit_constant(context, text, p[i]);
4225 }
4226 DMC_PUSH(*text, matchAnd);
4227 DMC_PUSH(*text, (Uint) a - 1);
4228 context->stack_used -= (a - 2);
4229 return retOk;
4230 }
4231
4232 static DMCRet dmc_or(DMCContext *context,
4233 DMCHeap *heap,
4234 DMC_STACK_TYPE(UWord) *text,
4235 Eterm t,
4236 int *constant)
4237 {
4238 Eterm *p = tuple_val(t);
4239 Uint a = arityval(*p);
4240 DMCRet ret;
4241 int i;
4242 int c;
4243
4244 if (a < 2) {
4245 RETURN_TERM_ERROR("Special form 'or' called without arguments "
4246 "in %T.", t, context, *constant);
4247 }
4248 *constant = 0;
4249 for (i = a; i > 1; --i) {
4250 if ((ret = dmc_expr(context, heap, text, p[i], &c)) != retOk)
4251 return ret;
4252 if (c)
4253 do_emit_constant(context, text, p[i]);
4254 }
4255 DMC_PUSH(*text, matchOr);
4256 DMC_PUSH(*text, (Uint) a - 1);
4257 context->stack_used -= (a - 2);
4258 return retOk;
4259 }
4260
4261
4262 static DMCRet dmc_andalso(DMCContext *context,
4263 DMCHeap *heap,
4264 DMC_STACK_TYPE(UWord) *text,
4265 Eterm t,
4266 int *constant)
4267 {
4268 Eterm *p = tuple_val(t);
4269 Uint a = arityval(*p);
4270 DMCRet ret;
4271 int i;
4272 int c;
4273 Uint lbl;
4274 Uint lbl_next;
4275 Uint lbl_val;
4276
4277 if (a < 2) {
4278 RETURN_TERM_ERROR("Special form 'andalso' called without"
4279 " arguments "
4280 "in %T.", t, context, *constant);
4281 }
4282 *constant = 0;
4283 lbl = 0;
4284 for (i = 2; i <= a; ++i) {
4285 if ((ret = dmc_expr(context, heap, text, p[i], &c)) != retOk)
4286 return ret;
4287 if (c)
4288 do_emit_constant(context, text, p[i]);
4289 if (i == a) {
4290 DMC_PUSH(*text, matchJump);
4291 } else {
4292 DMC_PUSH(*text, matchAndAlso);
4293 }
4294 DMC_PUSH(*text, lbl);
4295 lbl = DMC_STACK_NUM(*text)-1;
4296 --(context->stack_used);
4297 }
4298 DMC_PUSH(*text, matchPushC);
4299 DMC_PUSH(*text, am_true);
4300 lbl_val = DMC_STACK_NUM(*text);
4301 while (lbl) {
4302 lbl_next = DMC_PEEK(*text, lbl);
4303 DMC_POKE(*text, lbl, lbl_val-lbl-1);
4304 lbl = lbl_next;
4305 }
4306 if (++context->stack_used > context->stack_need)
4307 context->stack_need = context->stack_used;
4308 return retOk;
4309 }
4310
4311 static DMCRet dmc_orelse(DMCContext *context,
4312 DMCHeap *heap,
4313 DMC_STACK_TYPE(UWord) *text,
4314 Eterm t,
4315 int *constant)
4316 {
4317 Eterm *p = tuple_val(t);
4318 Uint a = arityval(*p);
4319 DMCRet ret;
4320 int i;
4321 int c;
4322 Uint lbl;
4323 Uint lbl_next;
4324 Uint lbl_val;
4325
4326 if (a < 2) {
4327 RETURN_TERM_ERROR("Special form 'orelse' called without arguments "
4328 "in %T.", t, context, *constant);
4329 }
4330 *constant = 0;
4331 lbl = 0;
4332 for (i = 2; i <= a; ++i) {
4333 if ((ret = dmc_expr(context, heap, text, p[i], &c)) != retOk)
4334 return ret;
4335 if (c)
4336 do_emit_constant(context, text, p[i]);
4337 if (i == a) {
4338 DMC_PUSH(*text, matchJump);
4339 } else {
4340 DMC_PUSH(*text, matchOrElse);
4341 }
4342 DMC_PUSH(*text, lbl);
4343 lbl = DMC_STACK_NUM(*text)-1;
4344 --(context->stack_used);
4345 }
4346 DMC_PUSH(*text, matchPushC);
4347 DMC_PUSH(*text, am_false);
4348 lbl_val = DMC_STACK_NUM(*text);
4349 while (lbl) {
4350 lbl_next = DMC_PEEK(*text, lbl);
4351 DMC_POKE(*text, lbl, lbl_val-lbl-1);
4352 lbl = lbl_next;
4353 }
4354 if (++context->stack_used > context->stack_need)
4355 context->stack_need = context->stack_used;
4356 return retOk;
4357 }
4358
4359 static DMCRet dmc_message(DMCContext *context,
4360 DMCHeap *heap,
4361 DMC_STACK_TYPE(UWord) *text,
4362 Eterm t,
4363 int *constant)
4364 {
4365 Eterm *p = tuple_val(t);
4366 DMCRet ret;
4367 int c;
4368
4369
4370 if (!(context->cflags & DCOMP_TRACE)) {
4371 RETURN_ERROR("Special form 'message' used in wrong dialect.",
4372 context,
4373 *constant);
4374 }
4375 if (context->is_guard) {
4376 RETURN_ERROR("Special form 'message' called in guard context.",
4377 context,
4378 *constant);
4379 }
4380
4381 if (p[0] != make_arityval(2)) {
4382 RETURN_TERM_ERROR("Special form 'message' called with wrong "
4383 "number of arguments in %T.", t, context,
4384 *constant);
4385 }
4386 *constant = 0;
4387 if ((ret = dmc_expr(context, heap, text, p[2], &c)) != retOk) {
4388 return ret;
4389 }
4390 if (c) {
4391 do_emit_constant(context, text, p[2]);
4392 }
4393 DMC_PUSH(*text, matchReturn);
4394 DMC_PUSH(*text, matchPushC);
4395 DMC_PUSH(*text, am_true);
4396 /* Push as much as we remove, stack_need is untouched */
4397 return retOk;
4398 }
4399
4400 static DMCRet dmc_self(DMCContext *context,
4401 DMCHeap *heap,
4402 DMC_STACK_TYPE(UWord) *text,
4403 Eterm t,
4404 int *constant)
4405 {
4406 Eterm *p = tuple_val(t);
4407
4408 if (p[0] != make_arityval(1)) {
4409 RETURN_TERM_ERROR("Special form 'self' called with arguments "
4410 "in %T.", t, context, *constant);
4411 }
4412 *constant = 0;
4413 DMC_PUSH(*text, matchSelf);
4414 if (++context->stack_used > context->stack_need)
4415 context->stack_need = context->stack_used;
4416 return retOk;
4417 }
4418
4419 static DMCRet dmc_return_trace(DMCContext *context,
4420 DMCHeap *heap,
4421 DMC_STACK_TYPE(UWord) *text,
4422 Eterm t,
4423 int *constant)
4424 {
4425 Eterm *p = tuple_val(t);
4426
4427 if (!(context->cflags & DCOMP_TRACE)) {
4428 RETURN_ERROR("Special form 'return_trace' used in wrong dialect.",
4429 context,
4430 *constant);
4431 }
4432 if (context->is_guard) {
4433 RETURN_ERROR("Special form 'return_trace' called in "
4434 "guard context.", context, *constant);
4435 }
4436
4437 if (p[0] != make_arityval(1)) {
4438 RETURN_TERM_ERROR("Special form 'return_trace' called with "
4439 "arguments in %T.", t, context, *constant);
4440 }
4441 *constant = 0;
4442 DMC_PUSH(*text, matchSetReturnTrace); /* Pushes 'true' on the stack */
4443 if (++context->stack_used > context->stack_need)
4444 context->stack_need = context->stack_used;
4445 return retOk;
4446 }
4447
4448 static DMCRet dmc_exception_trace(DMCContext *context,
4449 DMCHeap *heap,
4450 DMC_STACK_TYPE(UWord) *text,
4451 Eterm t,
4452 int *constant)
4453 {
4454 Eterm *p = tuple_val(t);
4455
4456 if (!(context->cflags & DCOMP_TRACE)) {
4457 RETURN_ERROR("Special form 'exception_trace' used in wrong dialect.",
4458 context,
4459 *constant);
4460 }
4461 if (context->is_guard) {
4462 RETURN_ERROR("Special form 'exception_trace' called in "
4463 "guard context.", context, *constant);
4464 }
4465
4466 if (p[0] != make_arityval(1)) {
4467 RETURN_TERM_ERROR("Special form 'exception_trace' called with "
4468 "arguments in %T.", t, context, *constant);
4469 }
4470 *constant = 0;
4471 DMC_PUSH(*text, matchSetExceptionTrace); /* Pushes 'true' on the stack */
4472 if (++context->stack_used > context->stack_need)
4473 context->stack_need = context->stack_used;
4474 return retOk;
4475 }
4476
4477 static int check_trace(const char* op,
4478 DMCContext *context,
4479 int *constant,
4480 int need_cflags,
4481 int allow_in_guard,
4482 DMCRet* retp)
4483 {
4484 if (!(context->cflags & DCOMP_TRACE)) {
4485 *retp = RETURN_ERROR_X(-1, context, *constant, "Special form '%s' "
4486 "used in wrong dialect.", op);
4487 return 0;
4488 }
4489 if ((context->cflags & need_cflags) != need_cflags) {
4490 *retp = RETURN_ERROR_X(-1, context, *constant, "Special form '%s' "
4491 "not allow for this trace event.", op);
4492 return 0;
4493 }
4494 if (context->is_guard && !allow_in_guard) {
4495 *retp = RETURN_ERROR_X(-1, context, *constant, "Special form '%s' "
4496 "called in guard context.", op);
4497 return 0;
4498 }
4499 return 1;
4500 }
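/*
 * Annotation (editorial note): check_trace() centralizes the checks that
 * the older special forms above (message, return_trace, exception_trace,
 * display) still open-code: the match spec must be compiled in the trace
 * dialect (DCOMP_TRACE), every compile flag in need_cflags must be
 * present, and -- unless allow_in_guard is set -- the form must not
 * appear in a guard. On failure it records the error through
 * RETURN_ERROR_X into *retp and returns 0, so callers can simply do
 * "return ret".
 */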
4501
4502 static DMCRet dmc_is_seq_trace(DMCContext *context,
4503 DMCHeap *heap,
4504 DMC_STACK_TYPE(UWord) *text,
4505 Eterm t,
4506 int *constant)
4507 {
4508 Eterm *p = tuple_val(t);
4509 DMCRet ret;
4510
4511 if (!check_trace("is_seq_trace", context, constant, DCOMP_ALLOW_TRACE_OPS, 1, &ret))
4512 return ret;
4513
4514 if (p[0] != make_arityval(1)) {
4515 RETURN_TERM_ERROR("Special form 'is_seq_trace' called with "
4516 "arguments in %T.", t, context, *constant);
4517 }
4518 *constant = 0;
4519 DMC_PUSH(*text, matchIsSeqTrace);
4520 /* Pushes 'true' or 'false' on the stack */
4521 if (++context->stack_used > context->stack_need)
4522 context->stack_need = context->stack_used;
4523 return retOk;
4524 }
4525
4526 static DMCRet dmc_set_seq_token(DMCContext *context,
4527 DMCHeap *heap,
4528 DMC_STACK_TYPE(UWord) *text,
4529 Eterm t,
4530 int *constant)
4531 {
4532 Eterm *p = tuple_val(t);
4533 DMCRet ret;
4534 int c;
4535
4536 if (!check_trace("set_seq_trace", context, constant, DCOMP_ALLOW_TRACE_OPS, 0, &ret))
4537 return ret;
4538
4539 if (p[0] != make_arityval(3)) {
4540 RETURN_TERM_ERROR("Special form 'set_seq_token' called with wrong "
4541 "number of arguments in %T.", t, context,
4542 *constant);
4543 }
4544 *constant = 0;
4545 if ((ret = dmc_expr(context, heap, text, p[3], &c)) != retOk) {
4546 return ret;
4547 }
4548 if (c) {
4549 do_emit_constant(context, text, p[3]);
4550 }
4551 if ((ret = dmc_expr(context, heap, text, p[2], &c)) != retOk) {
4552 return ret;
4553 }
4554 if (c) {
4555 do_emit_constant(context, text, p[2]);
4556 }
4557 if (context->cflags & DCOMP_FAKE_DESTRUCTIVE) {
4558 DMC_PUSH(*text, matchSetSeqTokenFake);
4559 } else {
4560 DMC_PUSH(*text, matchSetSeqToken);
4561 }
4562 --context->stack_used; /* Remove two and add one */
4563 return retOk;
4564 }
4565
4566 static DMCRet dmc_get_seq_token(DMCContext *context,
4567 DMCHeap *heap,
4568 DMC_STACK_TYPE(UWord) *text,
4569 Eterm t,
4570 int *constant)
4571 {
4572 Eterm *p = tuple_val(t);
4573 DMCRet ret;
4574
4575 if (!check_trace("get_seq_token", context, constant, DCOMP_ALLOW_TRACE_OPS, 0, &ret))
4576 return ret;
4577
4578 if (p[0] != make_arityval(1)) {
4579 RETURN_TERM_ERROR("Special form 'get_seq_token' called with "
4580 "arguments in %T.", t, context,
4581 *constant);
4582 }
4583
4584 *constant = 0;
4585 DMC_PUSH(*text, matchGetSeqToken);
4586 if (++context->stack_used > context->stack_need)
4587 context->stack_need = context->stack_used;
4588 return retOk;
4589 }
4590
4591
4592
4593 static DMCRet dmc_display(DMCContext *context,
4594 DMCHeap *heap,
4595 DMC_STACK_TYPE(UWord) *text,
4596 Eterm t,
4597 int *constant)
4598 {
4599 Eterm *p = tuple_val(t);
4600 DMCRet ret;
4601 int c;
4602
4603
4604 if (!(context->cflags & DCOMP_TRACE)) {
4605 RETURN_ERROR("Special form 'display' used in wrong dialect.",
4606 context,
4607 *constant);
4608 }
4609 if (context->is_guard) {
4610 RETURN_ERROR("Special form 'display' called in guard context.",
4611 context,
4612 *constant);
4613 }
4614
4615 if (p[0] != make_arityval(2)) {
4616 RETURN_TERM_ERROR("Special form 'display' called with wrong "
4617 "number of arguments in %T.", t, context,
4618 *constant);
4619 }
4620 *constant = 0;
4621 if ((ret = dmc_expr(context, heap, text, p[2], &c)) != retOk) {
4622 return ret;
4623 }
4624 if (c) {
4625 do_emit_constant(context, text, p[2]);
4626 }
4627 DMC_PUSH(*text, matchDisplay);
4628 /* Push as much as we remove, stack_need is untouched */
4629 return retOk;
4630 }
4631
4632 static DMCRet dmc_process_dump(DMCContext *context,
4633 DMCHeap *heap,
4634 DMC_STACK_TYPE(UWord) *text,
4635 Eterm t,
4636 int *constant)
4637 {
4638 Eterm *p = tuple_val(t);
4639 DMCRet ret;
4640
4641 if (!check_trace("process_dump", context, constant, DCOMP_ALLOW_TRACE_OPS, 0, &ret))
4642 return ret;
4643
4644 if (p[0] != make_arityval(1)) {
4645 RETURN_TERM_ERROR("Special form 'process_dump' called with "
4646 "arguments in %T.", t, context, *constant);
4647 }
4648 *constant = 0;
4649 DMC_PUSH(*text, matchProcessDump); /* Creates binary */
4650 if (++context->stack_used > context->stack_need)
4651 context->stack_need = context->stack_used;
4652 return retOk;
4653 }
4654
4655 static DMCRet dmc_enable_trace(DMCContext *context,
4656 DMCHeap *heap,
4657 DMC_STACK_TYPE(UWord) *text,
4658 Eterm t,
4659 int *constant)
4660 {
4661 Eterm *p = tuple_val(t);
4662 Uint a = arityval(*p);
4663 DMCRet ret;
4664 int c;
4665
4666 if (!check_trace("enable_trace", context, constant, DCOMP_ALLOW_TRACE_OPS, 0, &ret))
4667 return ret;
4668
4669 switch (a) {
4670 case 2:
4671 *constant = 0;
4672 if ((ret = dmc_expr(context, heap, text, p[2], &c)) != retOk) {
4673 return ret;
4674 }
4675 if (c) {
4676 do_emit_constant(context, text, p[2]);
4677 }
4678 DMC_PUSH(*text, matchEnableTrace);
4679 /* Push as much as we remove, stack_need is untouched */
4680 break;
4681 case 3:
4682 *constant = 0;
4683 if ((ret = dmc_expr(context, heap, text, p[3], &c)) != retOk) {
4684 return ret;
4685 }
4686 if (c) {
4687 do_emit_constant(context, text, p[3]);
4688 }
4689 if ((ret = dmc_expr(context, heap, text, p[2], &c)) != retOk) {
4690 return ret;
4691 }
4692 if (c) {
4693 do_emit_constant(context, text, p[2]);
4694 }
4695 DMC_PUSH(*text, matchEnableTrace2);
4696 --context->stack_used; /* Remove two and add one */
4697 break;
4698 default:
4699 RETURN_TERM_ERROR("Special form 'enable_trace' called with wrong "
4700 "number of arguments in %T.", t, context,
4701 *constant);
4702 }
4703 return retOk;
4704 }
4705
4706 static DMCRet dmc_disable_trace(DMCContext *context,
4707 DMCHeap *heap,
4708 DMC_STACK_TYPE(UWord) *text,
4709 Eterm t,
4710 int *constant)
4711 {
4712 Eterm *p = tuple_val(t);
4713 Uint a = arityval(*p);
4714 DMCRet ret;
4715 int c;
4716
4717 if (!check_trace("disable_trace", context, constant, DCOMP_ALLOW_TRACE_OPS, 0, &ret))
4718 return ret;
4719
4720 switch (a) {
4721 case 2:
4722 *constant = 0;
4723 if ((ret = dmc_expr(context, heap, text, p[2], &c)) != retOk) {
4724 return ret;
4725 }
4726 if (c) {
4727 do_emit_constant(context, text, p[2]);
4728 }
4729 DMC_PUSH(*text, matchDisableTrace);
4730 /* Push as much as we remove, stack_need is untouched */
4731 break;
4732 case 3:
4733 *constant = 0;
4734 if ((ret = dmc_expr(context, heap, text, p[3], &c)) != retOk) {
4735 return ret;
4736 }
4737 if (c) {
4738 do_emit_constant(context, text, p[3]);
4739 }
4740 if ((ret = dmc_expr(context, heap, text, p[2], &c)) != retOk) {
4741 return ret;
4742 }
4743 if (c) {
4744 do_emit_constant(context, text, p[2]);
4745 }
4746 DMC_PUSH(*text, matchDisableTrace2);
4747 --context->stack_used; /* Remove two and add one */
4748 break;
4749 default:
4750 RETURN_TERM_ERROR("Special form 'disable_trace' called with wrong "
4751 "number of arguments in %T.", t, context,
4752 *constant);
4753 }
4754 return retOk;
4755 }
4756
4757 static DMCRet dmc_trace(DMCContext *context,
4758 DMCHeap *heap,
4759 DMC_STACK_TYPE(UWord) *text,
4760 Eterm t,
4761 int *constant)
4762 {
4763 Eterm *p = tuple_val(t);
4764 Uint a = arityval(*p);
4765 DMCRet ret;
4766 int c;
4767
4768 if (!check_trace("trace", context, constant, DCOMP_ALLOW_TRACE_OPS, 0, &ret))
4769 return ret;
4770
4771 switch (a) {
4772 case 3:
4773 *constant = 0;
4774 if ((ret = dmc_expr(context, heap, text, p[3], &c)) != retOk) {
4775 return ret;
4776 }
4777 if (c) {
4778 do_emit_constant(context, text, p[3]);
4779 }
4780 if ((ret = dmc_expr(context, heap, text, p[2], &c)) != retOk) {
4781 return ret;
4782 }
4783 if (c) {
4784 do_emit_constant(context, text, p[2]);
4785 }
4786 DMC_PUSH(*text, matchTrace2);
4787 --context->stack_used; /* Remove two and add one */
4788 break;
4789 case 4:
4790 *constant = 0;
4791 if ((ret = dmc_expr(context, heap, text, p[4], &c)) != retOk) {
4792 return ret;
4793 }
4794 if (c) {
4795 do_emit_constant(context, text, p[4]);
4796 }
4797 if ((ret = dmc_expr(context, heap, text, p[3], &c)) != retOk) {
4798 return ret;
4799 }
4800 if (c) {
4801 do_emit_constant(context, text, p[3]);
4802 }
4803 if ((ret = dmc_expr(context, heap, text, p[2], &c)) != retOk) {
4804 return ret;
4805 }
4806 if (c) {
4807 do_emit_constant(context, text, p[2]);
4808 }
4809 DMC_PUSH(*text, matchTrace3);
4810 context->stack_used -= 2; /* Remove three and add one */
4811 break;
4812 default:
4813 RETURN_TERM_ERROR("Special form 'trace' called with wrong "
4814 "number of arguments in %T.", t, context,
4815 *constant);
4816 }
4817 return retOk;
4818 }
4819
4820
4821
4822 static DMCRet dmc_caller(DMCContext *context,
4823 DMCHeap *heap,
4824 DMC_STACK_TYPE(UWord) *text,
4825 Eterm t,
4826 int *constant)
4827 {
4828 Eterm *p = tuple_val(t);
4829 DMCRet ret;
4830
4831 if (!check_trace("caller", context, constant,
4832 (DCOMP_CALL_TRACE|DCOMP_ALLOW_TRACE_OPS), 0, &ret))
4833 return ret;
4834
4835 if (p[0] != make_arityval(1)) {
4836 RETURN_TERM_ERROR("Special form 'caller' called with "
4837 "arguments in %T.", t, context, *constant);
4838 }
4839 *constant = 0;
4840 DMC_PUSH(*text, matchCaller); /* Creates binary */
4841 if (++context->stack_used > context->stack_need)
4842 context->stack_need = context->stack_used;
4843 return retOk;
4844 }
4845
4846
4847
4848 static DMCRet dmc_silent(DMCContext *context,
4849 DMCHeap *heap,
4850 DMC_STACK_TYPE(UWord) *text,
4851 Eterm t,
4852 int *constant)
4853 {
4854 Eterm *p = tuple_val(t);
4855 DMCRet ret;
4856 int c;
4857
4858 if (!check_trace("silent", context, constant, DCOMP_ALLOW_TRACE_OPS, 0, &ret))
4859 return ret;
4860
4861 if (p[0] != make_arityval(2)) {
4862 RETURN_TERM_ERROR("Special form 'silent' called with wrong "
4863 "number of arguments in %T.", t, context,
4864 *constant);
4865 }
4866 *constant = 0;
4867 if ((ret = dmc_expr(context, heap, text, p[2], &c)) != retOk) {
4868 return ret;
4869 }
4870 if (c) {
4871 do_emit_constant(context, text, p[2]);
4872 }
4873 DMC_PUSH(*text, matchSilent);
4874 DMC_PUSH(*text, matchPushC);
4875 DMC_PUSH(*text, am_true);
4876 /* Push as much as we remove, stack_need is untouched */
4877 return retOk;
4878 }
4879
4880
4881
4882 static DMCRet dmc_fun(DMCContext *context,
4883 DMCHeap *heap,
4884 DMC_STACK_TYPE(UWord) *text,
4885 Eterm t,
4886 int *constant)
4887 {
4888 Eterm *p = tuple_val(t);
4889 Uint a = arityval(*p);
4890 int c;
4891 int i;
4892 DMCRet ret;
4893 DMCGuardBif *b;
4894
4895 /* Special forms. */
4896 switch (p[1]) {
4897 case am_const:
4898 return dmc_const(context, heap, text, t, constant);
4899 case am_and:
4900 return dmc_and(context, heap, text, t, constant);
4901 case am_or:
4902 return dmc_or(context, heap, text, t, constant);
4903 case am_andalso:
4904 case am_andthen:
4905 return dmc_andalso(context, heap, text, t, constant);
4906 case am_orelse:
4907 return dmc_orelse(context, heap, text, t, constant);
4908 case am_self:
4909 return dmc_self(context, heap, text, t, constant);
4910 case am_message:
4911 return dmc_message(context, heap, text, t, constant);
4912 case am_is_seq_trace:
4913 return dmc_is_seq_trace(context, heap, text, t, constant);
4914 case am_set_seq_token:
4915 return dmc_set_seq_token(context, heap, text, t, constant);
4916 case am_get_seq_token:
4917 return dmc_get_seq_token(context, heap, text, t, constant);
4918 case am_return_trace:
4919 return dmc_return_trace(context, heap, text, t, constant);
4920 case am_exception_trace:
4921 return dmc_exception_trace(context, heap, text, t, constant);
4922 case am_display:
4923 return dmc_display(context, heap, text, t, constant);
4924 case am_process_dump:
4925 return dmc_process_dump(context, heap, text, t, constant);
4926 case am_enable_trace:
4927 return dmc_enable_trace(context, heap, text, t, constant);
4928 case am_disable_trace:
4929 return dmc_disable_trace(context, heap, text, t, constant);
4930 case am_trace:
4931 return dmc_trace(context, heap, text, t, constant);
4932 case am_caller:
4933 return dmc_caller(context, heap, text, t, constant);
4934 case am_silent:
4935 return dmc_silent(context, heap, text, t, constant);
4936 case am_set_tcw:
4937 if (context->cflags & DCOMP_FAKE_DESTRUCTIVE) {
4938 b = dmc_lookup_bif(am_set_tcw_fake, ((int) a) - 1);
4939 } else {
4940 b = dmc_lookup_bif(p[1], ((int) a) - 1);
4941 }
4942 break;
4943 default:
4944 b = dmc_lookup_bif(p[1], ((int) a) - 1);
4945 }
4946
4947
4948 if (b == NULL) {
4949 if (context->err_info != NULL) {
4950 /* Ugly, should define a better RETURN_TERM_ERROR interface... */
4951 char buff[100];
4952 erts_snprintf(buff, sizeof(buff),
4953 "Function %%T/%d does_not_exist.",
4954 (int)a - 1);
4955 RETURN_TERM_ERROR(buff, p[1], context, *constant);
4956 } else {
4957 return retFail;
4958 }
4959 }
4960 ASSERT(b->arity == ((int) a) - 1);
4961 if (! (b->flags &
4962 (1 <<
4963 ((context->cflags & DCOMP_DIALECT_MASK) +
4964 (context->is_guard ? DBIF_GUARD : DBIF_BODY))))) {
4965 /* Body clause used in wrong context. */
4966 if (context->err_info != NULL) {
4967 /* Ugly, should define a better RETURN_TERM_ERROR interface... */
4968 char buff[100];
4969 erts_snprintf(buff, sizeof(buff),
4970 "Function %%T/%d cannot be called in this context.",
4971 (int)a - 1);
4972 RETURN_TERM_ERROR(buff, p[1], context, *constant);
4973 } else {
4974 return retFail;
4975 }
4976 }
4977
4978 *constant = 0;
4979
4980 for (i = a; i > 1; --i) {
4981 if ((ret = dmc_expr(context, heap, text, p[i], &c)) != retOk)
4982 return ret;
4983 if (c)
4984 do_emit_constant(context, text, p[i]);
4985 }
4986 switch (b->arity) {
4987 case 0:
4988 DMC_PUSH(*text, matchCall0);
4989 break;
4990 case 1:
4991 DMC_PUSH(*text, matchCall1);
4992 break;
4993 case 2:
4994 DMC_PUSH(*text, matchCall2);
4995 break;
4996 case 3:
4997 DMC_PUSH(*text, matchCall3);
4998 break;
4999 default:
5000 erts_exit(ERTS_ERROR_EXIT,"ets:match() internal error, "
5001 "guard with more than 3 arguments.");
5002 }
5003 DMC_PUSH(*text, (UWord) b->biff);
5004 context->stack_used -= (((int) a) - 2);
5005 if (context->stack_used > context->stack_need)
5006 context->stack_need = context->stack_used;
5007 return retOk;
5008 }
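/*
 * Annotation (editorial note): guard BIF calls are limited to arity 3 by
 * the matchCall0..matchCall3 instruction set; a larger arity would hit
 * the erts_exit above, but dmc_lookup_bif should never return such an
 * entry. Arguments are compiled right-to-left (p[a] down to p[2]), and
 * the net stack effect of a call is that arity-1 argument values are
 * consumed and one result is pushed, which is what the stack_used
 * adjustment at the end accounts for.
 */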
5009
5010 static DMCRet dmc_expr(DMCContext *context,
5011 DMCHeap *heap,
5012 DMC_STACK_TYPE(UWord) *text,
5013 Eterm t,
5014 int *constant)
5015 {
5016 DMCRet ret;
5017 Eterm tmp;
5018 Eterm *p;
5019
5020 if (stack_guard(context->stack_limit)) {
5021 context->freason = SYSTEM_LIMIT;
5022 RETURN_TERM_ERROR("Excessive nesting; system limit reached near: %T",
5023 t, context, *constant);
5024 }
5025
5026 switch (t & _TAG_PRIMARY_MASK) {
5027 case TAG_PRIMARY_LIST:
5028 if ((ret = dmc_list(context, heap, text, t, constant)) != retOk)
5029 return ret;
5030 break;
5031 case TAG_PRIMARY_BOXED:
5032 if (is_map(t)) {
5033 return dmc_map(context, heap, text, t, constant);
5034 }
5035 if (!is_tuple(t)) {
5036 goto simple_term;
5037 }
5038 p = tuple_val(t);
5039 #ifdef HARDDEBUG
5040 erts_fprintf(stderr,"%d %d %d %d\n",arityval(*p),is_tuple(tmp = p[1]),
5041 is_atom(p[1]),db_is_variable(p[1]));
5042 #endif
5043 if (p[0] == make_arityval(1) && is_tuple(tmp = p[1])) {
5044 if ((ret = dmc_tuple(context, heap, text, tmp, constant)) != retOk)
5045 return ret;
5046 } else if (arityval(*p) >= 1 && is_atom(p[1]) &&
5047 !(db_is_variable(p[1]) >= 0)) {
5048 if ((ret = dmc_fun(context, heap, text, t, constant)) != retOk)
5049 return ret;
5050 } else
5051 RETURN_TERM_ERROR("%T is neither a function call, nor a tuple "
5052 "(tuples are written {{ ... }}).", t,
5053 context, *constant);
5054 break;
5055 case TAG_PRIMARY_IMMED1:
5056 if (db_is_variable(t) >= 0) {
5057 if ((ret = dmc_variable(context, heap, text, t, constant))
5058 != retOk)
5059 return ret;
5060 break;
5061 } else if (t == am_DollarUnderscore) {
5062 if ((ret = dmc_whole_expression(context, heap, text, t, constant))
5063 != retOk)
5064 return ret;
5065 break;
5066 } else if (t == am_DollarDollar) {
5067 if ((ret = dmc_all_bindings(context, heap, text, t, constant))
5068 != retOk)
5069 return ret;
5070 break;
5071 }
5072 /* Fall through */
5073 default:
5074 simple_term:
5075 *constant = 1;
5076 }
5077 return retOk;
5078 }
5079
5080
5081 static DMCRet compile_guard_expr(DMCContext *context,
5082 DMCHeap *heap,
5083 DMC_STACK_TYPE(UWord) *text,
5084 Eterm l)
5085 {
5086 DMCRet ret;
5087 int constant;
5088 Eterm t;
5089
5090 if (l != NIL) {
5091 if (!is_list(l))
5092 RETURN_ERROR("Match expression is not a list.",
5093 context, constant);
5094 if (!(context->is_guard)) {
5095 DMC_PUSH(*text, matchCatch);
5096 }
5097 while (is_list(l)) {
5098 constant = 0;
5099 t = CAR(list_val(l));
5100 if ((ret = dmc_expr(context, heap, text, t, &constant)) !=
5101 retOk)
5102 return ret;
5103 if (constant) {
5104 do_emit_constant(context, text, t);
5105 }
5106 l = CDR(list_val(l));
5107 if (context->is_guard) {
5108 DMC_PUSH(*text,matchTrue);
5109 } else {
5110 DMC_PUSH(*text,matchWaste);
5111 }
5112 --context->stack_used;
5113 }
5114 if (l != NIL)
5115 RETURN_ERROR("Match expression is not a proper list.",
5116 context, constant);
5117 if (!(context->is_guard) && (context->cflags & DCOMP_TABLE)) {
5118 ASSERT(matchWaste == DMC_TOP(*text));
5119 (void) DMC_POP(*text);
5120 DMC_PUSH(*text, matchReturn); /* Same impact on stack as
5121 matchWaste */
5122 }
5123 }
5124 return retOk;
5125 }
5126
5127
5128
5129
5130 /*
5131 ** Match compilation utility code
5132 */
5133
5134 /*
5135 ** Handling of bif's in match guard expressions
5136 */
5137
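/*
 * Annotation (editorial note): bsearch() below requires guard_tab to be
 * kept sorted with the same ordering as cmp_guard_bif(), i.e. primarily
 * by the atom value of the BIF name and secondarily by arity. A minimal
 * sketch of the "inlining" alternative hinted at in the comment inside
 * dmc_lookup_bif -- an assumption for illustration only, not code used
 * here:
 *
 *     int i;
 *     for (i = 0; i < sizeof(guard_tab)/sizeof(DMCGuardBif); i++)
 *         if (guard_tab[i].name == t && guard_tab[i].arity == arity)
 *             return &guard_tab[i];
 *     return NULL;
 */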
5138 static DMCGuardBif *dmc_lookup_bif(Eterm t, int arity)
5139 {
5140 /*
5141 ** Place for optimization, bsearch is slower than inlining it...
5142 */
5143 DMCGuardBif node = {0,NULL,0};
5144 node.name = t;
5145 node.arity = arity;
5146 return bsearch(&node,
5147 guard_tab,
5148 sizeof(guard_tab) / sizeof(DMCGuardBif),
5149 sizeof(DMCGuardBif),
5150 (int (*)(const void *, const void *)) &cmp_guard_bif);
5151 }
5152
5153 #ifdef DMC_DEBUG
5154 static Eterm dmc_lookup_bif_reversed(void *f)
5155 {
5156 int i;
5157 for (i = 0; i < (sizeof(guard_tab) / sizeof(DMCGuardBif)); ++i)
5158 if (f == guard_tab[i].biff)
5159 return guard_tab[i].name;
5160 return am_undefined;
5161 }
5162 #endif
5163
5164 /* For sorting. */
5165 static int cmp_uint(void *a, void *b)
5166 {
5167 if (*((unsigned *)a) < *((unsigned *)b))
5168 return -1;
5169 else
5170 return (*((unsigned *)a) > *((unsigned *)b));
5171 }
5172
5173 static int cmp_guard_bif(void *a, void *b)
5174 {
5175 int ret;
5176 if (( ret = ((int) atom_val(((DMCGuardBif *) a)->name)) -
5177 ((int) atom_val(((DMCGuardBif *) b)->name)) ) == 0) {
5178 ret = ((DMCGuardBif *) a)->arity - ((DMCGuardBif *) b)->arity;
5179 }
5180 return ret;
5181 }
5182
5183 /*
5184 ** Compact the variables in a match expression, i.e. make {$1, $100, $1000}
5185 ** become {$0,$1,$2}.
5186 */
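/*
 * Annotation (editorial note): the compaction runs in two passes over
 * the heap fragment. The first pass collects every distinct variable
 * number into a DMC stack (skipping over off-heap "thing" payloads such
 * as bignums and binaries), the collected numbers are sorted, and the
 * second pass rewrites each '$N' atom in place to '$<rank>'. When
 * err_info is given, the original numbers are saved in
 * err_info->var_trans so lint messages can still refer to the user's
 * variable names. The return value is the number of distinct variables.
 */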
5187 static int match_compact(ErlHeapFragment *expr, DMCErrInfo *err_info)
5188 {
5189 int i, j, a, n, x;
5190 DMC_STACK_TYPE(unsigned) heap;
5191 Eterm *p;
5192 char buff[25] = "$"; /* large enough for 64 bit numbers too */
5193 int ret;
5194
5195 DMC_INIT_STACK(heap);
5196
5197 p = expr->mem;
5198 i = expr->used_size;
5199 while (i--) {
5200 if (is_thing(*p)) {
5201 a = thing_arityval(*p);
5202 ASSERT(a <= i);
5203 i -= a;
5204 p += a;
5205 } else if (is_atom(*p) && (n = db_is_variable(*p)) >= 0) {
5206 x = DMC_STACK_NUM(heap);
5207 for (j = 0; j < x && DMC_PEEK(heap,j) != n; ++j)
5208 ;
5209
5210 if (j == x)
5211 DMC_PUSH(heap,n);
5212 }
5213 ++p;
5214 }
5215 qsort(DMC_STACK_DATA(heap), DMC_STACK_NUM(heap), sizeof(unsigned),
5216 (int (*)(const void *, const void *)) &cmp_uint);
5217
5218 if (err_info != NULL) { /* lint needs a translation table */
5219 err_info->var_trans = erts_alloc(ERTS_ALC_T_DB_TRANS_TAB,
5220 sizeof(unsigned)*DMC_STACK_NUM(heap));
5221 sys_memcpy(err_info->var_trans, DMC_STACK_DATA(heap),
5222 DMC_STACK_NUM(heap) * sizeof(unsigned));
5223 err_info->num_trans = DMC_STACK_NUM(heap);
5224 }
5225
5226 p = expr->mem;
5227 i = expr->used_size;
5228 while (i--) {
5229 if (is_thing(*p)) {
5230 a = thing_arityval(*p);
5231 i -= a;
5232 p += a;
5233 } else if (is_atom(*p) && (n = db_is_variable(*p)) >= 0) {
5234 x = DMC_STACK_NUM(heap);
5235 #ifdef HARDDEBUG
5236 erts_fprintf(stderr, "%T", *p);
5237 #endif
5238 for (j = 0; j < x && DMC_PEEK(heap,j) != n; ++j)
5239 ;
5240 ASSERT(j < x);
5241 erts_snprintf(buff+1, sizeof(buff) - 1, "%u", (unsigned) j);
5242 /* Yes, writing directly into terms, they ARE off heap */
5243 *p = erts_atom_put((byte *) buff, sys_strlen(buff),
5244 ERTS_ATOM_ENC_LATIN1, 1);
5245 }
5246 ++p;
5247 }
5248 ret = DMC_STACK_NUM(heap);
5249 DMC_FREE(heap);
5250 return ret;
5251 }
5252
5253 /*
5254 ** Simple size object that takes care of function calls and constant tuples
5255 */
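/*
 * Annotation (editorial note): my_size_object() and my_copy_struct()
 * below must stay in sync -- the number of heap words counted here for a
 * term (including the {{...}} tuple and {const, X} conventions and both
 * map representations) is exactly the number of words my_copy_struct()
 * later writes through *hp when building the constant parts of a match
 * program.
 */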
5256 static Uint my_size_object(Eterm t)
5257 {
5258 Uint sum = 0;
5259 Eterm tmp;
5260 Eterm *p;
5261 switch (t & _TAG_PRIMARY_MASK) {
5262 case TAG_PRIMARY_LIST:
5263 sum += 2 + my_size_object(CAR(list_val(t))) +
5264 my_size_object(CDR(list_val(t)));
5265 break;
5266 case TAG_PRIMARY_BOXED:
5267 if (is_tuple(t)) {
5268 if (tuple_val(t)[0] == make_arityval(1) && is_tuple(tmp = tuple_val(t)[1])) {
5269 Uint i,n;
5270 p = tuple_val(tmp);
5271 n = arityval(p[0]);
5272 sum += 1 + n;
5273 for (i = 1; i <= n; ++i)
5274 sum += my_size_object(p[i]);
5275 } else if (tuple_val(t)[0] == make_arityval(2) &&
5276 is_atom(tmp = tuple_val(t)[1]) &&
5277 tmp == am_const) {
5278 sum += size_object(tuple_val(t)[2]);
5279 } else {
5280 erts_exit(ERTS_ERROR_EXIT,"Internal error, sizing unrecognized object in "
5281 "(d)ets:match compilation.");
5282 }
5283 break;
5284 } else if (is_map(t)) {
5285 if (is_flatmap(t)) {
5286 Uint n;
5287 flatmap_t *mp;
5288 mp = (flatmap_t*)flatmap_val(t);
5289
5290 /* Calculate size of keys */
5291 p = tuple_val(mp->keys);
5292 n = arityval(p[0]);
5293 sum += 1 + n;
5294 for (int i = 1; i <= n; ++i)
5295 sum += my_size_object(p[i]);
5296
5297 /* Calculate size of values */
5298 p = (Eterm *)mp;
5299 n = flatmap_get_size(mp);
5300 sum += n + 3;
5301 p += 3; /* hdr + size + keys words */
5302 while (n--) {
5303 sum += my_size_object(*p++);
5304 }
5305 } else {
5306 Eterm *head = (Eterm *)hashmap_val(t);
5307 Eterm hdr = *head;
5308 Uint sz;
5309 sz = hashmap_bitcount(MAP_HEADER_VAL(hdr));
5310 sum += 1 + sz + header_arity(hdr);
5311 head += 1 + header_arity(hdr);
5312
5313 while(sz-- > 0) {
5314 sum += my_size_object(head[sz]);
5315 }
5316 }
5317 break;
5318 }
5319 /* fall through */
5320 default:
5321 sum += size_object(t);
5322 break;
5323 }
5324 return sum;
5325 }
5326
5327 static Eterm my_copy_struct(Eterm t, Eterm **hp, ErlOffHeap* off_heap)
5328 {
5329 Eterm ret = NIL, a, b;
5330 Eterm *p;
5331 Uint sz;
5332 switch (t & _TAG_PRIMARY_MASK) {
5333 case TAG_PRIMARY_LIST:
5334 a = my_copy_struct(CAR(list_val(t)), hp, off_heap);
5335 b = my_copy_struct(CDR(list_val(t)), hp, off_heap);
5336 ret = CONS(*hp, a, b);
5337 *hp += 2;
5338 break;
5339 case TAG_PRIMARY_BOXED:
5340 if (is_tuple(t)) {
5341 if (tuple_val(t)[0] == make_arityval(1) &&
5342 is_tuple(a = tuple_val(t)[1])) {
5343 Uint i,n;
5344 Eterm *savep = *hp;
5345 ret = make_tuple(savep);
5346 p = tuple_val(a);
5347 n = arityval(p[0]);
5348 *hp += n + 1;
5349 *savep++ = make_arityval(n);
5350 for(i = 1; i <= n; ++i)
5351 *savep++ = my_copy_struct(p[i], hp, off_heap);
5352 }
5353 else if (tuple_val(t)[0] == make_arityval(2) &&
5354 tuple_val(t)[1] == am_const) {
5355 /* A {const, XXX} expression */
5356 b = tuple_val(t)[2];
5357 sz = size_object(b);
5358 ret = copy_struct(b,sz,hp,off_heap);
5359 } else {
5360 erts_exit(ERTS_ERROR_EXIT, "Trying to constant-copy non constant expression "
5361 "0x%bex in (d)ets:match compilation.", t);
5362 }
5363 } else if (is_map(t)) {
5364 if (is_flatmap(t)) {
5365 Uint i,n;
5366 flatmap_t *mp;
5367 Eterm *savep;
5368 Eterm keys;
5369
5370 mp = (flatmap_t*)flatmap_val(t);
5371
5372 /* Copy keys */
5373 savep = *hp;
5374 keys = make_tuple(savep);
5375 p = tuple_val(mp->keys);
5376 n = arityval(p[0]);
5377 *hp += n + 1;
5378 *savep++ = make_arityval(n);
5379 for(i = 1; i <= n; ++i)
5380 *savep++ = my_copy_struct(p[i], hp, off_heap);
5381
5382 savep = *hp;
5383 ret = make_flatmap(savep);
5384 n = flatmap_get_size(mp);
5385 p = (Eterm *)mp;
5386 *hp += n + 3;
5387 *savep++ = mp->thing_word;
5388 *savep++ = mp->size;
5389 *savep++ = keys;
5390 p += 3; /* hdr + size + keys words */
5391 for (i = 0; i < n; i++)
5392 *savep++ = my_copy_struct(p[i], hp, off_heap);
5393 erts_usort_flatmap((flatmap_t*)flatmap_val(ret));
5394 } else {
5395 Eterm *head = hashmap_val(t);
5396 Eterm hdr = *head;
5397 Uint sz;
5398 Eterm *savep = *hp;
5399 sz = hashmap_bitcount(MAP_HEADER_VAL(hdr));
5400 *hp += 1 + sz + header_arity(hdr);
5401
5402 ret = make_hashmap(savep);
5403
5404 *savep++ = *head++; /* map header */
5405 if (header_arity(hdr) == 1)
5406 *savep++ = *head++; /* map size */
5407
5408 for (int i = 0; i < sz; i++) {
5409 *savep++ = my_copy_struct(head[i],hp,off_heap);
5410 }
5411 }
5412 } else {
5413 sz = size_object(t);
5414 ret = copy_struct(t,sz,hp,off_heap);
5415 }
5416 break;
5417 default:
5418 ret = t;
5419 }
5420 return ret;
5421 }
5422
5423 /*
5424 ** Compiled match bif interface
5425 */
5426 /*
5427 ** erlang:match_spec_test(MatchAgainst, MatchSpec, Type) ->
5428 ** {ok, Return, Flags, Errors} | {error, Errors}
5429 ** MatchAgainst -> if Type == trace: list() else tuple()
5430 ** MatchSpec -> MatchSpec with body corresponding to Type
5431 ** Type -> trace | table
5432 ** Return -> if Type == trace TraceReturn else {BodyReturn, VariableBindings}
5433 ** TraceReturn -> {true | false | term()}
5434 ** BodyReturn -> term()
5435 ** VariableBindings -> [term(), ...]
5436 ** Errors -> [OneError, ...]
5437 ** OneError -> {error, string()} | {warning, string()}
5438 ** Flags -> [Flag, ...]
5439 ** Flag -> return_trace (currently only flag)
5440 */
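/*
 * Hedged usage sketch (editorial annotation; the result shape follows
 * the contract described above): from Erlang,
 *
 *     erlang:match_spec_test({x,1},
 *                            [{{'$1','$2'}, [], [{{'$2','$1'}}]}],
 *                            table)
 *
 * exercises the 'table' branch below, while passing a list as the first
 * argument together with 'trace' exercises the trace branch. A match
 * spec that fails to compile yields {error, Errors} carrying the lint
 * output.
 */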
5441 BIF_RETTYPE match_spec_test_3(BIF_ALIST_3)
5442 {
5443 Eterm res;
5444 #ifdef DMC_DEBUG
5445 if (BIF_ARG_3 == ERTS_MAKE_AM("dis")) {
5446 test_disassemble_next = 1;
5447 BIF_RET(am_true);
5448 } else
5449 #endif
5450 if (BIF_ARG_3 == am_trace) {
5451 res = match_spec_test(BIF_P, BIF_ARG_1, BIF_ARG_2, 1);
5452 if (is_value(res)) {
5453 BIF_RET(res);
5454 }
5455 } else if (BIF_ARG_3 == am_table) {
5456 res = match_spec_test(BIF_P, BIF_ARG_1, BIF_ARG_2, 0);
5457 if (is_value(res)) {
5458 BIF_RET(res);
5459 }
5460 }
5461 BIF_ERROR(BIF_P, BADARG);
5462 }
5463
5464 static Eterm match_spec_test(Process *p, Eterm against, Eterm spec, int trace)
5465 {
5466 Eterm lint_res;
5467 Binary *mps;
5468 Eterm res;
5469 Eterm ret;
5470 Eterm flg;
5471 Eterm *hp;
5472 Uint32 ret_flags;
5473 Uint sz;
5474 Eterm save_cp;
5475 Uint freason;
5476
5477 if (trace && !(is_list(against) || against == NIL)) {
5478 return THE_NON_VALUE;
5479 }
5480 if (trace) {
5481 const Uint cflags = (DCOMP_TRACE | DCOMP_FAKE_DESTRUCTIVE |
5482 DCOMP_CALL_TRACE | DCOMP_ALLOW_TRACE_OPS);
5483 lint_res = db_match_set_lint(p, spec, cflags);
5484 mps = db_match_set_compile(p, spec, cflags, &freason);
5485 } else {
5486 const Uint cflags = (DCOMP_TABLE | DCOMP_FAKE_DESTRUCTIVE);
5487 lint_res = db_match_set_lint(p, spec, cflags);
5488 mps = db_match_set_compile(p, spec, cflags, &freason);
5489 }
5490
5491 if (mps == NULL) {
5492 hp = HAlloc(p,3);
5493 ret = TUPLE2(hp, am_error, lint_res);
5494 } else {
5495 #ifdef DMC_DEBUG
5496 if (test_disassemble_next) {
5497 test_disassemble_next = 0;
5498 db_match_dis(mps);
5499 }
5500 #endif /* DMC_DEBUG */
5501 if (trace) {
5502 Eterm *arr = NULL;
5503 int n = 0;
5504
5505 if (is_list(against)) {
5506 Eterm l = against;
5507 do {
5508 ++n;
5509 l = CDR(list_val(l));
5510 } while (is_list(l));
5511
5512 arr = erts_alloc(ERTS_ALC_T_DB_TMP, sizeof(Eterm) * n);
5513
5514 l = against;
5515 n = 0;
5516 do {
5517 arr[n] = CAR(list_val(l));
5518 ++n;
5519 l = CDR(list_val(l));
5520 } while (is_list(l));
5521 }
5522 save_cp = p->stop[0];
5523 p->stop[0] = NIL;
5524 res = erts_match_set_run_trace(p, p,
5525 mps, arr, n,
5526 ERTS_PAM_COPY_RESULT|ERTS_PAM_IGNORE_TRACE_SILENT,
5527 &ret_flags);
5528 p->stop[0] = save_cp;
5529 if (arr)
5530 erts_free(ERTS_ALC_T_DB_TMP, arr);
5531 } else {
5532 res = erts_match_set_run_ets(p, mps, against, 0, &ret_flags);
5533 }
5534
5535 /* We are in the context of a BIF,
5536 {caller} should return 'undefined' */
5537 if (is_non_value(res)) {
5538 res = am_false;
5539 }
5540 sz = 0;
5541 if (ret_flags & MATCH_SET_EXCEPTION_TRACE) sz += 2;
5542 if (ret_flags & MATCH_SET_RETURN_TRACE) sz += 2;
5543 hp = HAlloc(p, 5 + sz);
5544 flg = NIL;
5545 if (ret_flags & MATCH_SET_EXCEPTION_TRACE) {
5546 flg = CONS(hp, am_exception_trace, flg);
5547 hp += 2;
5548 }
5549 if (ret_flags & MATCH_SET_RETURN_TRACE) {
5550 flg = CONS(hp, am_return_trace, flg);
5551 hp += 2;
5552 }
5553 erts_bin_free(mps);
5554 ret = TUPLE4(hp, am_ok, res, flg, lint_res);
5555 }
5556 return ret;
5557 }
5558
5559 static Eterm seq_trace_fake(Process *p, Eterm arg1)
5560 {
5561 Eterm result = erl_seq_trace_info(p, arg1);
5562 if (!is_non_value(result) && is_tuple(result) && *tuple_val(result) == 2) {
5563 return (tuple_val(result))[2];
5564 }
5565 return result;
5566 }
5567
5568 DbTerm* db_alloc_tmp_uncompressed(DbTableCommon* tb, DbTerm* org)
5569 {
5570 ErlOffHeap tmp_offheap;
5571 DbTerm* res = erts_alloc(ERTS_ALC_T_TMP,
5572 sizeof(DbTerm) + org->size*sizeof(Eterm));
5573 Eterm* hp = res->tpl;
5574 tmp_offheap.first = NULL;
5575 db_copy_from_comp(tb, org, &hp, &tmp_offheap);
5576 res->first_oh = tmp_offheap.first;
5577 res->size = org->size;
5578 #ifdef DEBUG_CLONE
5579 res->debug_clone = NULL;
5580 #endif
5581 return res;
5582 }
5583
5584 void db_free_tmp_uncompressed(DbTerm* obj)
5585 {
5586 ErlOffHeap off_heap;
5587 off_heap.first = obj->first_oh;
5588 erts_cleanup_offheap(&off_heap);
5589 #ifdef DEBUG_CLONE
5590 ASSERT(obj->debug_clone == NULL);
5591 #endif
5592 erts_free(ERTS_ALC_T_TMP, obj);
5593 }
5594
5595 Eterm db_match_dbterm_uncompressed(DbTableCommon* tb, Process* c_p, Binary* bprog,
5596 DbTerm* obj, enum erts_pam_run_flags flags)
5597 {
5598
5599 Uint32 dummy;
5600 Eterm res;
5601
5602 res = db_prog_match(c_p, c_p,
5603 bprog, make_tuple(obj->tpl), NULL, 0,
5604 flags|ERTS_PAM_CONTIGUOUS_TUPLE, &dummy);
5605
5606 return res;
5607 }
5608
5609 Eterm db_match_dbterm(DbTableCommon* tb, Process* c_p, Binary* bprog,
5610 DbTerm* obj, enum erts_pam_run_flags flags)
5611 {
5612 Eterm res;
5613 if (tb->compress) {
5614 obj = db_alloc_tmp_uncompressed(tb, obj);
5615 }
5616 res = db_match_dbterm_uncompressed(tb, c_p, bprog, obj, flags);
5617 if (tb->compress) {
5618 db_free_tmp_uncompressed(obj);
5619 }
5620 return res;
5621 }
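/*
 * Annotation (editorial note): db_match_dbterm() wraps
 * db_match_dbterm_uncompressed(). For a compressed table the stored
 * DbTerm is first inflated into a temporary uncompressed copy
 * (db_alloc_tmp_uncompressed), the match program is run with
 * ERTS_PAM_CONTIGUOUS_TUPLE set since the tuple lives in one contiguous
 * allocation, and the temporary copy is freed again afterwards.
 */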
5622
5623
5624 #ifdef DMC_DEBUG
5625
5626 /*
5627 ** Disassemble match program
5628 */
5629 void db_match_dis(Binary *bp)
5630 {
5631 MatchProg *prog = Binary2MatchProg(bp);
5632 UWord *t = prog->text;
5633 Uint n;
5634 Eterm p;
5635 int first;
5636 ErlHeapFragment *tmp;
5637
5638 while (t < prog->prog_end) {
5639 switch (*t) {
5640 case matchTryMeElse:
5641 ++t;
5642 n = *t;
5643 ++t;
5644 erts_printf("TryMeElse\t%beu\n", n);
5645 break;
5646 case matchArray:
5647 ++t;
5648 n = *t;
5649 ++t;
5650 erts_printf("Array\t%beu\n", n);
5651 break;
5652 case matchArrayBind:
5653 ++t;
5654 n = *t;
5655 ++t;
5656 erts_printf("ArrayBind\t%beu\n", n);
5657 break;
5658 case matchTuple:
5659 ++t;
5660 n = *t;
5661 ++t;
5662 erts_printf("Tuple\t%beu\n", n);
5663 break;
5664 case matchMap:
5665 ++t;
5666 n = *t;
5667 ++t;
5668 erts_printf("Map\t%beu\n", n);
5669 break;
5670 case matchKey:
5671 ++t;
5672 p = (Eterm) *t;
5673 ++t;
5674 erts_printf("Key\t%p (%T)\n", t, p);
5675 break;
5676 case matchPushT:
5677 ++t;
5678 n = *t;
5679 ++t;
5680 erts_printf("PushT\t%beu\n", n);
5681 break;
5682 case matchPushL:
5683 ++t;
5684 erts_printf("PushL\n");
5685 break;
5686 case matchPushM:
5687 ++t;
5688 n = *t;
5689 ++t;
5690 erts_printf("PushM\t%beu\n", n);
5691 break;
5692 case matchPop:
5693 ++t;
5694 erts_printf("Pop\n");
5695 break;
5696 case matchSwap:
5697 ++t;
5698 erts_printf("Swap\n");
5699 break;
5700 case matchBind:
5701 ++t;
5702 n = *t;
5703 ++t;
5704 erts_printf("Bind\t%beu\n", n);
5705 break;
5706 case matchCmp:
5707 ++t;
5708 n = *t;
5709 ++t;
5710 erts_printf("Cmp\t%beu\n", n);
5711 break;
5712 case matchEqBin:
5713 ++t;
5714 p = (Eterm) *t;
5715 ++t;
5716 erts_printf("EqBin\t%p (%T)\n", t, p);
5717 break;
5718 case matchEqRef:
5719 ++t;
5720 {
5721 Uint32 *num;
5722 int ri;
5723
5724 if (is_ordinary_ref_thing(t)) {
5725 ErtsORefThing *rt = (ErtsORefThing *) t;
5726 num = rt->num;
5727 t += ERTS_REF_THING_SIZE;
5728 }
5729 else if (is_pid_ref_thing(t)) {
5730 ErtsPRefThing *prt = (ErtsPRefThing *) t;
5731 num = prt->num;
5732 t += ERTS_PID_REF_THING_SIZE;
5733 }
5734 else {
5735 ErtsMRefThing *mrt = (ErtsMRefThing *) t;
5736 ASSERT(is_magic_ref_thing(t));
5737 num = mrt->mb->refn;
5738 t += ERTS_MAGIC_REF_THING_SIZE;
5739 }
5740
5741 erts_printf("EqRef\t(%d) {", (int) ERTS_REF_NUMBERS);
5742 first = 1;
5743 for (ri = 0; ri < ERTS_REF_NUMBERS; ++ri) {
5744 if (first)
5745 first = 0;
5746 else
5747 erts_printf(", ");
5748 #if defined(ARCH_64)
5749 erts_printf("0x%016bex", num[ri]);
5750 #else
5751 erts_printf("0x%08bex", num[ri]);
5752 #endif
5753 }
5754 }
5755 erts_printf("}\n");
5756 break;
5757 case matchEqBig:
5758 ++t;
5759 n = thing_arityval(*t);
5760 {
5761 Eterm *et = (Eterm *) t;
5762 t += n+1;
5763 erts_printf("EqBig\t(%d) {", (int) n);
5764 first = 1;
5765 ++n;
5766 while (n--) {
5767 if (first)
5768 first = 0;
5769 else
5770 erts_printf(", ");
5771 #if defined(ARCH_64)
5772 erts_printf("0x%016bex", *et);
5773 #else
5774 erts_printf("0x%08bex", *et);
5775 #endif
5776 ++et;
5777 }
5778 }
5779 erts_printf("}\n");
5780 break;
5781 case matchEqFloat:
5782 ++t;
5783 {
5784 double num;
5785 sys_memcpy(&num,t,sizeof(double));
5786 t += sizeof(double) / sizeof(*t);
5787 erts_printf("EqFloat\t%f\n", num);
5788 }
5789 break;
5790 case matchEq:
5791 ++t;
5792 p = (Eterm) *t;
5793 ++t;
5794 erts_printf("Eq \t%T\n", p);
5795 break;
5796 case matchList:
5797 ++t;
5798 erts_printf("List\n");
5799 break;
5800 case matchHalt:
5801 ++t;
5802 erts_printf("Halt\n");
5803 break;
5804 case matchSkip:
5805 ++t;
5806 erts_printf("Skip\n");
5807 break;
5808 case matchPushC:
5809 ++t;
5810 p = (Eterm) *t;
5811 ++t;
5812 erts_printf("PushC\t%T\n", p);
5813 break;
5814 case matchConsA:
5815 ++t;
5816 erts_printf("ConsA\n");
5817 break;
5818 case matchConsB:
5819 ++t;
5820 erts_printf("ConsB\n");
5821 break;
5822 case matchMkTuple:
5823 ++t;
5824 n = *t;
5825 ++t;
5826 erts_printf("MkTuple\t%beu\n", n);
5827 break;
5828 case matchMkFlatMap:
5829 ++t;
5830 n = *t;
5831 ++t;
5832 erts_printf("MkFlatMap\t%beu\n", n);
5833 break;
5834 case matchMkHashMap:
5835 ++t;
5836 n = *t;
5837 ++t;
5838 erts_printf("MkHashMap\t%beu\n", n);
5839 break;
5840 case matchOr:
5841 ++t;
5842 n = *t;
5843 ++t;
5844 erts_printf("Or\t%beu\n", n);
5845 break;
5846 case matchAnd:
5847 ++t;
5848 n = *t;
5849 ++t;
5850 erts_printf("And\t%beu\n", n);
5851 break;
5852 case matchOrElse:
5853 ++t;
5854 n = *t;
5855 ++t;
5856 erts_printf("OrElse\t%beu\n", n);
5857 break;
5858 case matchAndAlso:
5859 ++t;
5860 n = *t;
5861 ++t;
5862 erts_printf("AndAlso\t%beu\n", n);
5863 break;
5864 case matchCall0:
5865 ++t;
5866 p = dmc_lookup_bif_reversed((void *) *t);
5867 ++t;
5868 erts_printf("Call0\t%T\n", p);
5869 break;
5870 case matchCall1:
5871 ++t;
5872 p = dmc_lookup_bif_reversed((void *) *t);
5873 ++t;
5874 erts_printf("Call1\t%T\n", p);
5875 break;
5876 case matchCall2:
5877 ++t;
5878 p = dmc_lookup_bif_reversed((void *) *t);
5879 ++t;
5880 erts_printf("Call2\t%T\n", p);
5881 break;
5882 case matchCall3:
5883 ++t;
5884 p = dmc_lookup_bif_reversed((void *) *t);
5885 ++t;
5886 erts_printf("Call3\t%T\n", p);
5887 break;
5888 case matchPushV:
5889 ++t;
5890 n = (Uint) *t;
5891 ++t;
5892 erts_printf("PushV\t%beu\n", n);
5893 break;
5894 case matchPushVResult:
5895 n = (Uint) *++t;
5896 ++t;
5897 erts_printf("PushVResult\t%beu\n", n);
5898 break;
5899 case matchTrue:
5900 ++t;
5901 erts_printf("True\n");
5902 break;
5903 case matchPushExpr:
5904 ++t;
5905 erts_printf("PushExpr\n");
5906 break;
5907 case matchPushArrayAsList:
5908 ++t;
5909 erts_printf("PushArrayAsList\n");
5910 break;
5911 case matchPushArrayAsListU:
5912 ++t;
5913 erts_printf("PushArrayAsListU\n");
5914 break;
5915 case matchSelf:
5916 ++t;
5917 erts_printf("Self\n");
5918 break;
5919 case matchWaste:
5920 ++t;
5921 erts_printf("Waste\n");
5922 break;
5923 case matchReturn:
5924 ++t;
5925 erts_printf("Return\n");
5926 break;
5927 case matchProcessDump:
5928 ++t;
5929 erts_printf("ProcessDump\n");
5930 break;
5931 case matchDisplay:
5932 ++t;
5933 erts_printf("Display\n");
5934 break;
5935 case matchIsSeqTrace:
5936 ++t;
5937 erts_printf("IsSeqTrace\n");
5938 break;
5939 case matchSetSeqToken:
5940 ++t;
5941 erts_printf("SetSeqToken\n");
5942 break;
5943 case matchSetSeqTokenFake:
5944 ++t;
5945 erts_printf("SetSeqTokenFake\n");
5946 break;
5947 case matchGetSeqToken:
5948 ++t;
5949 erts_printf("GetSeqToken\n");
5950 break;
5951 case matchSetReturnTrace:
5952 ++t;
5953 erts_printf("SetReturnTrace\n");
5954 break;
5955 case matchSetExceptionTrace:
5956 ++t;
5957 erts_printf("SetReturnTrace\n");
5958 break;
5959 case matchCatch:
5960 ++t;
5961 erts_printf("Catch\n");
5962 break;
5963 case matchEnableTrace:
5964 ++t;
5965 erts_printf("EnableTrace\n");
5966 break;
5967 case matchDisableTrace:
5968 ++t;
5969 erts_printf("DisableTrace\n");
5970 break;
5971 case matchEnableTrace2:
5972 ++t;
5973 erts_printf("EnableTrace2\n");
5974 break;
5975 case matchDisableTrace2:
5976 ++t;
5977 erts_printf("DisableTrace2\n");
5978 break;
5979 case matchTrace2:
5980 ++t;
5981 erts_printf("Trace2\n");
5982 break;
5983 case matchTrace3:
5984 ++t;
5985 erts_printf("Trace3\n");
5986 break;
5987 case matchCaller:
5988 ++t;
5989 erts_printf("Caller\n");
5990 break;
5991 default:
5992 erts_printf("??? (0x%bpx)\n", *t);
5993 ++t;
5994 break;
5995 }
5996 }
5997 erts_printf("\n\nterm_save: {");
5998 first = 1;
5999 for (tmp = prog->term_save; tmp; tmp = tmp->next) {
6000 if (first)
6001 first = 0;
6002 else
6003 erts_printf(", ");
6004 erts_printf("%p", tmp);
6005 }
6006 erts_printf("}\n");
6007 erts_printf("num_bindings: %d\n", prog->num_bindings);
6008 erts_printf("heap_size: %beu\n", prog->heap_size);
6009 erts_printf("stack_offset: %beu\n", prog->stack_offset);
6010 erts_printf("text: %p\n", prog->text);
6011 erts_printf("stack_size: %d (words)\n", prog->heap_size-prog->stack_offset);
6012
6013 }
6014
6015 #endif /* DMC_DEBUG */
6016