1 /*
2  * %CopyrightBegin%
3  *
4  * Copyright Ericsson AB 1996-2020. All Rights Reserved.
5  *
6  * Licensed under the Apache License, Version 2.0 (the "License");
7  * you may not use this file except in compliance with the License.
8  * You may obtain a copy of the License at
9  *
10  *     http://www.apache.org/licenses/LICENSE-2.0
11  *
12  * Unless required by applicable law or agreed to in writing, software
13  * distributed under the License is distributed on an "AS IS" BASIS,
14  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15  * See the License for the specific language governing permissions and
16  * limitations under the License.
17  *
18  * %CopyrightEnd%
19  */
20 
21 #ifdef HAVE_CONFIG_H
22 #  include "config.h"
23 #endif
24 
25 #define ERL_WANT_GC_INTERNALS__
26 
27 #include "sys.h"
28 #include "erl_vm.h"
29 #include "global.h"
30 #include "erl_process.h"
31 #include "erl_gc.h"
32 #include "big.h"
33 #include "erl_map.h"
34 #include "erl_binary.h"
35 #include "erl_bits.h"
36 #include "dtrace-wrapper.h"
37 
38 static void move_one_frag(Eterm** hpp, ErlHeapFragment*, ErlOffHeap*, int);
39 
40 /*
41  *  Copy object "obj" to process p.
42  */
43 Eterm copy_object_x(Eterm obj, Process* to, Uint extra)
44 {
45     if (!is_immed(obj)) {
46         Uint size = size_object(obj);
47         Eterm* hp = HAllocX(to, size, extra);
48         Eterm res;
49 
50 #ifdef USE_VM_PROBES
51         if (DTRACE_ENABLED(copy_object)) {
52             DTRACE_CHARBUF(proc_name, 64);
53 
54             erts_snprintf(proc_name, sizeof(DTRACE_CHARBUF_NAME(proc_name)),
55                     "%T", to->common.id);
56             DTRACE2(copy_object, proc_name, size);
57         }
58 #endif
59         res = copy_struct(obj, size, &hp, &to->off_heap);
60 #ifdef DEBUG
61         if (eq(obj, res) == 0) {
62             erts_exit(ERTS_ABORT_EXIT, "copy not equal to source\n");
63         }
64 #endif
65         return res;
66     }
67     return obj;
68 }
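/*
 * Illustrative usage sketch (not part of this file; assumes a valid
 * Process pointer "p" and the conventional copy_object() wrapper, which
 * calls copy_object_x() with extra == 0):
 *
 *     Eterm copy = copy_object(term, p);    flat copy onto p's heap
 *
 * The "extra" argument is only passed on to HAllocX(), so the allocation
 * reserves that many additional free words beyond the copy itself.
 */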
69 
70 /*
71  * Return the "flat" size of the object.
72  */
73 
74 #define in_literal_purge_area(PTR)                   \
75     (lit_purge_ptr && (                              \
76         (lit_purge_ptr <= (PTR) &&                   \
77         (PTR) < (lit_purge_ptr + lit_purge_sz))))
78 
79 Uint size_object_x(Eterm obj, erts_literal_area_t *litopt)
80 {
81     Uint sum = 0;
82     Eterm* ptr;
83     int arity;
84     Eterm *lit_purge_ptr = litopt ? litopt->lit_purge_ptr : NULL;
85     Uint   lit_purge_sz  = litopt ? litopt->lit_purge_sz  : 0;
86 #ifdef DEBUG
87     Eterm mypid = erts_get_current_pid();
88 #endif
89     DECLARE_ESTACK(s);
90     VERBOSE(DEBUG_SHCOPY, ("[pid=%T] size_object %p\n", mypid, obj));
91 
92     for (;;) {
93 	switch (primary_tag(obj)) {
94 	case TAG_PRIMARY_LIST:
95 	    ptr = list_val(obj);
96             if (litopt && erts_is_literal(obj,ptr) && !in_literal_purge_area(ptr)) {
97                 goto pop_next;
98             }
99 	    sum += 2;
100 	    obj = *ptr++;
101 	    if (!IS_CONST(obj)) {
102 		ESTACK_PUSH(s, obj);
103 	    }
104 	    obj = *ptr;
105 	    break;
106 	case TAG_PRIMARY_BOXED:
107 	    {
108 		Eterm hdr;
109                 ptr = boxed_val(obj);
110                 if (litopt && erts_is_literal(obj,ptr) && !in_literal_purge_area(ptr)) {
111                     goto pop_next;
112                 }
113                 hdr = *ptr;
114 		ASSERT(is_header(hdr));
115 		switch (hdr & _TAG_HEADER_MASK) {
116 		case ARITYVAL_SUBTAG:
117 		    ptr = tuple_val(obj);
118 		    arity = header_arity(hdr);
119 		    sum += arity + 1;
120 		    if (arity == 0) { /* Empty tuple -- unusual. */
121 			goto pop_next;
122 		    }
123 		    while (arity-- > 1) {
124 			obj = *++ptr;
125 			if (!IS_CONST(obj)) {
126 			    ESTACK_PUSH(s, obj);
127 			}
128 		    }
129 		    obj = *++ptr;
130 		    break;
131 		case FUN_SUBTAG:
132 		    {
133 			Eterm* bptr = fun_val(obj);
134 			ErlFunThing* funp = (ErlFunThing *) bptr;
135 			unsigned eterms = 1 /* creator */ + funp->num_free;
136 			unsigned sz = thing_arityval(hdr);
137 			sum += 1 /* header */ + sz + eterms;
138 			bptr += 1 /* header */ + sz;
139 			while (eterms-- > 1) {
140 			  obj = *bptr++;
141 			  if (!IS_CONST(obj)) {
142 			    ESTACK_PUSH(s, obj);
143 			  }
144 			}
145 			obj = *bptr;
146 			break;
147 		    }
148 		case MAP_SUBTAG:
149 		    switch (MAP_HEADER_TYPE(hdr)) {
150 			case MAP_HEADER_TAG_FLATMAP_HEAD :
151                             {
152                                 Uint n;
153                                 flatmap_t *mp;
154                                 mp  = (flatmap_t*)flatmap_val(obj);
155                                 ptr = (Eterm *)mp;
156                                 n   = flatmap_get_size(mp) + 1;
157                                 sum += n + 2;
158                                 ptr += 2; /* hdr + size words */
159                                 while (n--) {
160                                     obj = *ptr++;
161                                     if (!IS_CONST(obj)) {
162                                         ESTACK_PUSH(s, obj);
163                                     }
164                                 }
165                                 goto pop_next;
166                             }
167 			case MAP_HEADER_TAG_HAMT_HEAD_BITMAP :
168 			case MAP_HEADER_TAG_HAMT_HEAD_ARRAY :
169 			case MAP_HEADER_TAG_HAMT_NODE_BITMAP :
170 			    {
171 				Eterm *head;
172 				Uint sz;
173 				head  = hashmap_val(obj);
174 				sz    = hashmap_bitcount(MAP_HEADER_VAL(hdr));
175 				sum  += 1 + sz + header_arity(hdr);
176 				head += 1 + header_arity(hdr);
177 
178 				if (sz == 0) {
179 				    goto pop_next;
180 				}
181 				while(sz-- > 1) {
182 				    obj = head[sz];
183 				    if (!IS_CONST(obj)) {
184 					ESTACK_PUSH(s, obj);
185 				    }
186 				}
187 				obj = head[0];
188 			    }
189 			    break;
190 			default:
191 			    erts_exit(ERTS_ABORT_EXIT, "size_object: bad hashmap type %d\n", MAP_HEADER_TYPE(hdr));
192 		    }
193 		    break;
194 		case SUB_BINARY_SUBTAG:
195 		    {
196 			Eterm real_bin;
197 			ERTS_DECLARE_DUMMY(Uint offset); /* Not used. */
198 			Uint bitsize;
199 			Uint bitoffs;
200 			Uint extra_bytes;
201 			Eterm hdr;
202 			ERTS_GET_REAL_BIN(obj, real_bin, offset, bitoffs, bitsize);
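			/*
			 * A sub-binary that does not start and end on byte
			 * boundaries (bitoffs/bitsize != 0) cannot be copied
			 * as a bare heap or refc binary: the copy keeps one
			 * or two extra bytes of the underlying data and wraps
			 * them in a new ErlSubBin, which is what the size
			 * accounting below reserves room for.
			 */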
203 			if ((bitsize + bitoffs) > 8) {
204 			    sum += ERL_SUB_BIN_SIZE;
205 			    extra_bytes = 2;
206 			} else if ((bitsize + bitoffs) > 0) {
207 			    sum += ERL_SUB_BIN_SIZE;
208 			    extra_bytes = 1;
209 			} else {
210 			    extra_bytes = 0;
211 			}
212 			hdr = *binary_val(real_bin);
213 			if (thing_subtag(hdr) == REFC_BINARY_SUBTAG) {
214 			    sum += PROC_BIN_SIZE;
215 			} else {
216 			    sum += heap_bin_size(binary_size(obj)+extra_bytes);
217 			}
218 			goto pop_next;
219 		    }
220 		    break;
221                 case BIN_MATCHSTATE_SUBTAG:
222 		    erts_exit(ERTS_ABORT_EXIT,
223 			     "size_object: matchstate term not allowed");
224 		default:
225 		    sum += thing_arityval(hdr) + 1;
226 		    goto pop_next;
227 		}
228 	    }
229 	    break;
230 	case TAG_PRIMARY_IMMED1:
231 	pop_next:
232 	    if (ESTACK_ISEMPTY(s)) {
233 		DESTROY_ESTACK(s);
234 		VERBOSE(DEBUG_SHCOPY, ("[pid=%T] size was: %u\n", mypid, sum));
235 		return sum;
236 	    }
237 	    obj = ESTACK_POP(s);
238 	    break;
239 	default:
240 	    erts_exit(ERTS_ABORT_EXIT, "size_object: bad tag for %#x\n", obj);
241 	}
242     }
243 }
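/*
 * Illustrative sketch (assuming the conventional size_object() wrapper,
 * i.e. size_object_x() with litopt == NULL):
 *
 *     Uint sz = size_object(term);
 *
 * Given the same literal-area options, sz is exactly the number of heap
 * words that copy_struct() will consume for the same term.
 */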
244 
245 /*
246  *  Machinery for sharing preserving information
247  *  Using a WSTACK but not very transparently; consider refactoring
248  */
249 
250 #define DECLARE_BITSTORE(s)						\
251     DECLARE_WSTACK(s);							\
252     int WSTK_CONCAT(s,_bitoffs) = 0;					\
253     int WSTK_CONCAT(s,_offset) = 0;					\
254     UWord WSTK_CONCAT(s,_buffer) = 0
255 
256 #define DESTROY_BITSTORE(s) DESTROY_WSTACK(s)
257 #define BITSTORE_PUT(s,i)						\
258 do {									\
259     WSTK_CONCAT(s,_buffer) |= i << WSTK_CONCAT(s,_bitoffs);		\
260     WSTK_CONCAT(s,_bitoffs) += 2;					\
261     if (WSTK_CONCAT(s,_bitoffs) >= 8*sizeof(UWord)) {			\
262 	WSTACK_PUSH(s, WSTK_CONCAT(s,_buffer));				\
263 	WSTK_CONCAT(s,_bitoffs) = 0;					\
264 	WSTK_CONCAT(s,_buffer) = 0;					\
265     }									\
266 } while(0)
267 #define BITSTORE_CLOSE(s)						\
268 do {									\
269     if (WSTK_CONCAT(s,_bitoffs) > 0) {					\
270 	WSTACK_PUSH(s, WSTK_CONCAT(s,_buffer));				\
271 	WSTK_CONCAT(s,_bitoffs) = 0;					\
272     }									\
273 } while(0)
274 
275 #define BITSTORE_FETCH(s,dst)                                           \
276 do {                                                                    \
277     UWord result;                                                       \
278     if (WSTK_CONCAT(s,_bitoffs) <= 0) {                                 \
279         WSTK_CONCAT(s,_buffer) = s.wstart[WSTK_CONCAT(s,_offset)];      \
280         WSTK_CONCAT(s,_offset)++;                                       \
281         WSTK_CONCAT(s,_bitoffs) = 8*sizeof(UWord);                      \
282     }                                                                   \
283     WSTK_CONCAT(s,_bitoffs) -= 2;                                       \
284     result = WSTK_CONCAT(s,_buffer) & 3;                                \
285     WSTK_CONCAT(s,_buffer) >>= 2;                                       \
286     (dst) = result;                                                     \
287 } while(0)
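/*
 * The BITSTORE is a WSTACK used as a write-once/read-once stream of 2-bit
 * values packed into UWords.  The marking passes (size_shared() and
 * copy_shared_calculate()) PUT the primary tag of the CAR whenever a cons
 * cell with a boxed CDR is mangled, since the mangling overwrites both
 * words with header tags; the corresponding restore passes FETCH the saved
 * tags back in the same order.
 */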
288 
289 #define COUNT_OFF_HEAP (0)
290 
291 /*
292  *  Return the real size of an object and find sharing information
293  *  This currently returns the same as erts_debug:size/1.
294  *  It is debatable whether the size of subterms in literal (constant) pools
295  *  should be counted or not.
296  */
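/*
 * Note that the traversal below marks visited objects destructively (the
 * primary tags of CARs/CDRs and of box headers are temporarily rewritten)
 * and a second pass restores them, so the term is briefly in an
 * inconsistent state while size_shared() is running.
 */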
297 
298 Uint size_shared(Eterm obj)
299 {
300     Eterm saved_obj = obj;
301     Uint sum = 0;
302     Eterm* ptr;
303 
304     DECLARE_EQUEUE(s);
305     DECLARE_BITSTORE(b);
306 
307     for (;;) {
308 	switch (primary_tag(obj)) {
309 	case TAG_PRIMARY_LIST: {
310 	    Eterm head, tail;
311 	    ptr = list_val(obj);
312 	    /* we're not counting anything that's outside our heap */
313 	    if (!COUNT_OFF_HEAP && erts_is_literal(obj,ptr)) {
314 		goto pop_next;
315 	    }
316 	    head = CAR(ptr);
317 	    tail = CDR(ptr);
318 	    /* if it's visited, don't count it */
319 	    if (primary_tag(tail) == TAG_PRIMARY_HEADER ||
320 		primary_tag(head) == TAG_PRIMARY_HEADER) {
321 		goto pop_next;
322 	    }
323 	    /* else make it visited now */
324 	    switch (primary_tag(tail)) {
325 	    case TAG_PRIMARY_LIST:
326 		ptr[1] = (tail - TAG_PRIMARY_LIST) | TAG_PRIMARY_HEADER;
327 		break;
328 	    case TAG_PRIMARY_IMMED1:
329 		CAR(ptr) = (head - primary_tag(head)) | TAG_PRIMARY_HEADER;
330 		CDR(ptr) = (tail - TAG_PRIMARY_IMMED1) | primary_tag(head);
331 		break;
332 	    case TAG_PRIMARY_BOXED:
333 		BITSTORE_PUT(b, primary_tag(head));
334 		CAR(ptr) = (head - primary_tag(head)) | TAG_PRIMARY_HEADER;
335 		CDR(ptr) = (tail - TAG_PRIMARY_BOXED) | TAG_PRIMARY_HEADER;
336 		break;
337 	    }
338 	    /* and count it */
339 	    sum += 2;
340 	    if (!IS_CONST(head)) {
341 		EQUEUE_PUT(s, head);
342 	    }
343 	    obj = tail;
344 	    break;
345 	}
346 	case TAG_PRIMARY_BOXED: {
347 	    Eterm hdr;
348 	    ptr = boxed_val(obj);
349 	    /* we're not counting anything that's outside our heap */
350 	    if (!COUNT_OFF_HEAP && erts_is_literal(obj,ptr)) {
351 		goto pop_next;
352 	    }
353 	    hdr = *ptr;
354 	    /* if it's visited, don't count it */
355 	    if (primary_tag(hdr) != TAG_PRIMARY_HEADER) {
356 		goto pop_next;
357 	    }
358 	    /* else make it visited now */
359 	    *ptr = (hdr - primary_tag(hdr)) + BOXED_VISITED;
360 	    /* and count it */
361 	    ASSERT(is_header(hdr));
362 	    switch (hdr & _TAG_HEADER_MASK) {
363 	    case ARITYVAL_SUBTAG: {
364 		int arity = header_arity(hdr);
365 		sum += arity + 1;
366 		if (arity == 0) { /* Empty tuple -- unusual. */
367 		    goto pop_next;
368 		}
369 		while (arity-- > 0) {
370 		    obj = *++ptr;
371 		    if (!IS_CONST(obj)) {
372 			EQUEUE_PUT(s, obj);
373 		    }
374 		}
375 		goto pop_next;
376 	    }
377 	    case FUN_SUBTAG: {
378 		ErlFunThing* funp = (ErlFunThing *) ptr;
379 		unsigned eterms = 1 /* creator */ + funp->num_free;
380 		unsigned sz = thing_arityval(hdr);
381 		sum += 1 /* header */ + sz + eterms;
382 		ptr += 1 /* header */ + sz;
383 		while (eterms-- > 0) {
384 		    obj = *ptr++;
385 		    if (!IS_CONST(obj)) {
386 			EQUEUE_PUT(s, obj);
387 		    }
388 		}
389 		goto pop_next;
390 	    }
391 	    case SUB_BINARY_SUBTAG: {
392 		ErlSubBin* sb = (ErlSubBin *) ptr;
393 		Uint extra_bytes;
394 		Eterm hdr;
395 		ASSERT((sb->thing_word & ~BOXED_VISITED_MASK) == HEADER_SUB_BIN);
396 		if (sb->bitsize + sb->bitoffs > 8) {
397 		    sum += ERL_SUB_BIN_SIZE;
398 		    extra_bytes = 2;
399 		} else if (sb->bitsize + sb->bitoffs > 0) {
400 		    sum += ERL_SUB_BIN_SIZE;
401 		    extra_bytes = 1;
402 		} else {
403 		    extra_bytes = 0;
404 		}
405 		ptr = binary_val(sb->orig);
406 		hdr = (*ptr) & ~BOXED_VISITED_MASK;
407 		if (thing_subtag(hdr) == REFC_BINARY_SUBTAG) {
408 		    sum += PROC_BIN_SIZE;
409 		} else {
410 		    ASSERT(thing_subtag(hdr) == HEAP_BINARY_SUBTAG);
411 		    sum += heap_bin_size(binary_size(obj) + extra_bytes);
412 		}
413 		goto pop_next;
414 	    }
415             case MAP_SUBTAG:
416                 switch (MAP_HEADER_TYPE(hdr)) {
417                     case MAP_HEADER_TAG_FLATMAP_HEAD : {
418                         flatmap_t *mp  = (flatmap_t*)flatmap_val(obj);
419                         Uint n = flatmap_get_size(mp) + 1;
420                         ptr  = (Eterm *)mp;
421                         sum += n + 2;
422                         ptr += 2; /* hdr + size words */
423                         while (n--) {
424                             obj = *ptr++;
425                             if (!IS_CONST(obj)) {
426                                 EQUEUE_PUT(s, obj);
427                             }
428                         }
429                         goto pop_next;
430                     }
431                     case MAP_HEADER_TAG_HAMT_HEAD_BITMAP :
432                     case MAP_HEADER_TAG_HAMT_HEAD_ARRAY :
433                     case MAP_HEADER_TAG_HAMT_NODE_BITMAP : {
434                         Uint n = hashmap_bitcount(MAP_HEADER_VAL(hdr));
435                         sum += 1 + n + header_arity(hdr);
436                         ptr += 1 + header_arity(hdr);
437                         while (n--) {
438                             obj = *ptr++;
439                             if (!IS_CONST(obj)) {
440                                 EQUEUE_PUT(s, obj);
441                             }
442                         }
443                         goto pop_next;
444                     }
445                     default:
446                         erts_exit(ERTS_ABORT_EXIT, "size_shared: bad hashmap type %d\n", MAP_HEADER_TYPE(hdr));
447                 }
448 	    case BIN_MATCHSTATE_SUBTAG:
449 		erts_exit(ERTS_ABORT_EXIT,
450 			 "size_shared: matchstate term not allowed");
451 	    default:
452 		sum += thing_arityval(hdr) + 1;
453 		goto pop_next;
454 	    }
455 	    break;
456 	}
457 	case TAG_PRIMARY_IMMED1:
458 	pop_next:
459 	    if (EQUEUE_ISEMPTY(s)) {
460 		goto cleanup;
461 	    }
462 	    obj = EQUEUE_GET(s);
463 	    break;
464 	default:
465 	    erts_exit(ERTS_ABORT_EXIT, "size_shared: bad tag for %#x\n", obj);
466 	}
467     }
468 
469 cleanup:
470     obj = saved_obj;
471     BITSTORE_CLOSE(b);
472     for (;;) {
473 	switch (primary_tag(obj)) {
474 	case TAG_PRIMARY_LIST: {
475 	    Eterm head, tail;
476 	    ptr = list_val(obj);
477 	    if (!COUNT_OFF_HEAP && erts_is_literal(obj,ptr)) {
478 		goto cleanup_next;
479 	    }
480 	    head = CAR(ptr);
481 	    tail = CDR(ptr);
482 	    /* if not already clean, clean it up */
483 	    if (primary_tag(tail) == TAG_PRIMARY_HEADER) {
484 		if (primary_tag(head) == TAG_PRIMARY_HEADER) {
485 		    Eterm saved;
486                     BITSTORE_FETCH(b, saved);
487 		    CAR(ptr) = head = (head - TAG_PRIMARY_HEADER) | saved;
488 		    CDR(ptr) = tail = (tail - TAG_PRIMARY_HEADER) | TAG_PRIMARY_BOXED;
489 		} else {
490 		    CDR(ptr) = tail = (tail - TAG_PRIMARY_HEADER) | TAG_PRIMARY_LIST;
491 		}
492 	    } else if (primary_tag(head) == TAG_PRIMARY_HEADER) {
493 		CAR(ptr) = head = (head - TAG_PRIMARY_HEADER) | primary_tag(tail);
494 		CDR(ptr) = tail = (tail - primary_tag(tail)) | TAG_PRIMARY_IMMED1;
495 	    } else {
496 		goto cleanup_next;
497 	    }
498 	    /* and its children too */
499 	    if (!IS_CONST(head)) {
500 		EQUEUE_PUT_UNCHECKED(s, head);
501 	    }
502 	    obj = tail;
503 	    break;
504 	}
505 	case TAG_PRIMARY_BOXED: {
506 	    Eterm hdr;
507 	    ptr = boxed_val(obj);
508 	    if (!COUNT_OFF_HEAP && erts_is_literal(obj,ptr)) {
509 		goto cleanup_next;
510 	    }
511 	    hdr = *ptr;
512 	    /* if not already clean, clean it up */
513 	    if (primary_tag(hdr) == TAG_PRIMARY_HEADER) {
514 		goto cleanup_next;
515 	    }
516 	    else {
517 		ASSERT(primary_tag(hdr) == BOXED_VISITED);
518 		*ptr = hdr = (hdr - BOXED_VISITED) + TAG_PRIMARY_HEADER;
519 	    }
520 	    /* and its children too */
521 	    switch (hdr & _TAG_HEADER_MASK) {
522 	    case ARITYVAL_SUBTAG: {
523 		int arity = header_arity(hdr);
524 		if (arity == 0) { /* Empty tuple -- unusual. */
525 		    goto cleanup_next;
526 		}
527 		while (arity-- > 0) {
528 		    obj = *++ptr;
529 		    if (!IS_CONST(obj)) {
530 			EQUEUE_PUT_UNCHECKED(s, obj);
531 		    }
532 		}
533 		goto cleanup_next;
534 	    }
535 	    case FUN_SUBTAG: {
536 		ErlFunThing* funp = (ErlFunThing *) ptr;
537 		unsigned eterms = 1 /* creator */ + funp->num_free;
538 		unsigned sz = thing_arityval(hdr);
539 		ptr += 1 /* header */ + sz;
540 		while (eterms-- > 0) {
541 		    obj = *ptr++;
542 		    if (!IS_CONST(obj)) {
543 			EQUEUE_PUT_UNCHECKED(s, obj);
544 		    }
545 		}
546 		goto cleanup_next;
547 	    }
548             case MAP_SUBTAG:
549                 switch (MAP_HEADER_TYPE(hdr)) {
550                     case MAP_HEADER_TAG_FLATMAP_HEAD : {
551                         flatmap_t *mp = (flatmap_t *) ptr;
552                         Uint n = flatmap_get_size(mp) + 1;
553                         ptr += 2; /* hdr + size words */
554                         while (n--) {
555                             obj = *ptr++;
556                             if (!IS_CONST(obj)) {
557                                 EQUEUE_PUT_UNCHECKED(s, obj);
558                             }
559                         }
560                         goto cleanup_next;
561                     }
562                     case MAP_HEADER_TAG_HAMT_HEAD_BITMAP :
563                     case MAP_HEADER_TAG_HAMT_HEAD_ARRAY :
564                     case MAP_HEADER_TAG_HAMT_NODE_BITMAP : {
565                         Uint n = hashmap_bitcount(MAP_HEADER_VAL(hdr));
566                         sum += 1 + n + header_arity(hdr);
567                         ptr += 1 + header_arity(hdr);
568                         while (n--) {
569                             obj = *ptr++;
570                             if (!IS_CONST(obj)) {
571                                 EQUEUE_PUT_UNCHECKED(s, obj);
572                             }
573                         }
574                         goto cleanup_next;
575                     }
576                     default:
577                         erts_exit(ERTS_ABORT_EXIT, "size_shared: bad hashmap type %d\n", MAP_HEADER_TYPE(hdr));
578                 }
579 	    default:
580 		goto cleanup_next;
581 	    }
582 	    break;
583 	}
584 	case TAG_PRIMARY_IMMED1:
585 	cleanup_next:
586 	    if (EQUEUE_ISEMPTY(s)) {
587 		goto all_clean;
588 	    }
589 	    obj = EQUEUE_GET(s);
590 	    break;
591 	default:
592 	    erts_exit(ERTS_ABORT_EXIT, "size_shared: bad tag for %#x\n", obj);
593 	}
594     }
595 
596  all_clean:
597     /* Return the result */
598     DESTROY_EQUEUE(s);
599     DESTROY_BITSTORE(b);
600     return sum;
601 }
602 
603 
604 /*
605  *  Copy a structure to a heap.
606  */
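/*
 * The destination area [*hpp, *hpp + sz) is filled from both ends: htop
 * grows upwards and everything placed there is rescanned by the copy loop,
 * while hbot grows downwards and receives objects that need no further
 * scanning (for example binaries, bignums, and list cells whose head is a
 * constant).  The two ends must meet exactly when the copy is complete
 * (checked in DEBUG builds), unless the caller passes bsz to have the
 * downward-growing part reported separately.
 */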
607 Eterm copy_struct_x(Eterm obj, Uint sz, Eterm** hpp, ErlOffHeap* off_heap,
608                     Uint *bsz, erts_literal_area_t *litopt
609 #ifdef ERTS_COPY_REGISTER_LOCATION
610                     , char *file, int line
611 #endif
612     )
613 {
614     char* hstart;
615     Uint hsize;
616     Eterm* htop;
617     Eterm* hbot;
618     Eterm* hp;
619     Eterm* ERTS_RESTRICT objp;
620     Eterm* tp;
621     Eterm  res;
622     Eterm  elem;
623     Eterm* tailp;
624     Eterm* argp;
625     Eterm* const_tuple;
626     Eterm hdr;
627     Eterm *hend;
628     int i;
629     Eterm *lit_purge_ptr = litopt ? litopt->lit_purge_ptr : NULL;
630     Uint   lit_purge_sz  = litopt ? litopt->lit_purge_sz  : 0;
631 #ifdef DEBUG
632     Eterm org_obj = obj;
633     Uint org_sz = sz;
634     Eterm mypid = erts_get_current_pid();
635 #endif
636 
637     if (IS_CONST(obj))
638 	return obj;
639 
640     VERBOSE(DEBUG_SHCOPY, ("[pid=%T] copy_struct %p\n", mypid, obj));
641 
642     DTRACE1(copy_struct, (int32_t)sz);
643 
644     hp = htop = *hpp;
645     hbot = hend = htop + sz;
646     hstart = (char *)htop;
647     hsize = (char*) hbot - hstart;
648     const_tuple = 0;
649 
650     /* Copy the object onto the heap */
651     switch (primary_tag(obj)) {
652     case TAG_PRIMARY_LIST:
653 	argp = &res;
654 	objp = list_val(obj);
655 	goto L_copy_list;
656     case TAG_PRIMARY_BOXED: argp = &res; goto L_copy_boxed;
657     default:
658 	erts_exit(ERTS_ABORT_EXIT,
659 		 "%s, line %d: Internal error in copy_struct: 0x%08x\n",
660 		 __FILE__, __LINE__,obj);
661     }
662 
663  L_copy:
664     while (hp != htop) {
665 	obj = *hp;
666 	switch (primary_tag(obj)) {
667 	case TAG_PRIMARY_IMMED1:
668 	    hp++;
669 	    break;
670 	case TAG_PRIMARY_LIST:
671 	    objp = list_val(obj);
672 	    if (ErtsInArea(objp,hstart,hsize)) {
673 		hp++;
674 		break;
675 	    }
676 	    argp = hp++;
677 	    /* Fall through */
678 
679 	L_copy_list:
680 	    tailp = argp;
681             if (litopt && erts_is_literal(obj,objp) && !in_literal_purge_area(objp)) {
682                 *tailp = obj;
683                 goto L_copy;
684             }
685 	    for (;;) {
686 		tp = tailp;
687 		elem = CAR(objp);
688 		if (IS_CONST(elem)) {
689 		    hbot -= 2;
690 		    CAR(hbot) = elem;
691 		    tailp = &CDR(hbot);
692 		} else {
693 		    CAR(htop) = elem;
694 		    tailp = &CDR(htop);
695 		    htop += 2;
696 		}
697 		*tp = make_list(tailp - 1);
698 		obj = CDR(objp);
699 
700 		if (!is_list(obj)) {
701 		    break;
702 		}
703 		objp = list_val(obj);
704 
705                 if (litopt && erts_is_literal(obj,objp) && !in_literal_purge_area(objp)) {
706                     *tailp = obj;
707                     goto L_copy;
708                 }
709 	    }
710 	    switch (primary_tag(obj)) {
711 	    case TAG_PRIMARY_IMMED1: *tailp = obj; goto L_copy;
712 	    case TAG_PRIMARY_BOXED: argp = tailp; goto L_copy_boxed;
713 	    default:
714 		erts_exit(ERTS_ABORT_EXIT,
715 			 "%s, line %d: Internal error in copy_struct: 0x%08x\n",
716 			 __FILE__, __LINE__,obj);
717 	    }
718 
719 	case TAG_PRIMARY_BOXED:
720 	    if (ErtsInArea(boxed_val(obj),hstart,hsize)) {
721 		hp++;
722 		break;
723 	    }
724 	    argp = hp++;
725 
726 	L_copy_boxed:
727 	    objp = boxed_val(obj);
728             if (litopt && erts_is_literal(obj,objp) && !in_literal_purge_area(objp)) {
729                 *argp = obj;
730                 break;
731             }
732 	    hdr = *objp;
733 	    switch (hdr & _TAG_HEADER_MASK) {
734 	    case ARITYVAL_SUBTAG:
735 		{
736 		    int const_flag = 1; /* assume constant tuple */
737 		    i = arityval(hdr);
738 		    *argp = make_tuple(htop);
739 		    tp = htop;	/* tp is pointer to new arity value */
740 		    *htop++ = *objp++; /* copy arity value */
741 		    while (i--) {
742 			elem = *objp++;
743 			if (!IS_CONST(elem)) {
744 			    const_flag = 0;
745 			}
746 			*htop++ = elem;
747 		    }
748 		    if (const_flag) {
749 			const_tuple = tp; /* this is the latest const_tuple */
750 		    }
751 		}
752 		break;
753 	    case REFC_BINARY_SUBTAG:
754 		{
755 		    ProcBin* pb;
756 
757 		    pb = (ProcBin *) objp;
758 		    if (pb->flags) {
759 			erts_emasculate_writable_binary(pb);
760 		    }
761 		    i = thing_arityval(*objp) + 1;
762 		    hbot -= i;
763 		    tp = hbot;
764 		    while (i--)  {
765 			*tp++ = *objp++;
766 		    }
767 		    *argp = make_binary(hbot);
768 		    pb = (ProcBin*) hbot;
769 		    erts_refc_inc(&pb->val->intern.refc, 2);
770 		    pb->next = off_heap->first;
771 		    pb->flags = 0;
772 		    off_heap->first = (struct erl_off_heap_header*) pb;
773 		    OH_OVERHEAD(off_heap, pb->size / sizeof(Eterm));
774 		}
775 		break;
776 	    case SUB_BINARY_SUBTAG:
777 		{
778 		    ErlSubBin* sb = (ErlSubBin *) objp;
779 		    Eterm real_bin = sb->orig;
780 		    Uint bit_offset = sb->bitoffs;
781 		    Uint bit_size = sb -> bitsize;
782 		    Uint offset = sb->offs;
783 		    size_t size = sb->size;
784 		    Uint extra_bytes;
785 		    Uint real_size;
786 		    if ((bit_size + bit_offset) > 8) {
787 			extra_bytes = 2;
788 		    } else if ((bit_size + bit_offset) > 0) {
789 			extra_bytes = 1;
790 		    } else {
791 			extra_bytes = 0;
792 		    }
793 		    real_size = size+extra_bytes;
794 		    objp = binary_val(real_bin);
795 		    if (thing_subtag(*objp) == HEAP_BINARY_SUBTAG) {
796 			ErlHeapBin* from = (ErlHeapBin *) objp;
797 			ErlHeapBin* to;
798 			i = heap_bin_size(real_size);
799 			hbot -= i;
800 			to = (ErlHeapBin *) hbot;
801 			to->thing_word = header_heap_bin(real_size);
802 			to->size = real_size;
803 			sys_memcpy(to->data, ((byte *)from->data)+offset, real_size);
804 		    } else {
805 			ProcBin* from = (ProcBin *) objp;
806 			ProcBin* to;
807 
808 			ASSERT(thing_subtag(*objp) == REFC_BINARY_SUBTAG);
809 			if (from->flags) {
810 			    erts_emasculate_writable_binary(from);
811 			}
812 			hbot -= PROC_BIN_SIZE;
813 			to = (ProcBin *) hbot;
814 			to->thing_word = HEADER_PROC_BIN;
815 			to->size = real_size;
816 			to->val = from->val;
817 			erts_refc_inc(&to->val->intern.refc, 2);
818 			to->bytes = from->bytes + offset;
819 			to->next = off_heap->first;
820 			to->flags = 0;
821 			off_heap->first = (struct erl_off_heap_header*) to;
822 			OH_OVERHEAD(off_heap, to->size / sizeof(Eterm));
823 		    }
824 		    *argp = make_binary(hbot);
825 		    if (extra_bytes != 0) {
826 			ErlSubBin* res;
827 			hbot -= ERL_SUB_BIN_SIZE;
828 			res = (ErlSubBin *) hbot;
829 			res->thing_word = HEADER_SUB_BIN;
830 			res->size = size;
831 			res->bitsize = bit_size;
832 			res->bitoffs = bit_offset;
833 			res->offs = 0;
834 			res->is_writable = 0;
835 			res->orig = *argp;
836 			*argp = make_binary(hbot);
837 		    }
838 		    break;
839 		}
840 		break;
841 	    case FUN_SUBTAG:
842 		{
843 		    ErlFunThing* funp = (ErlFunThing *) objp;
844 
845 		    i =  thing_arityval(hdr) + 2 + funp->num_free;
846 		    tp = htop;
847 		    while (i--)  {
848 			*htop++ = *objp++;
849 		    }
850 		    funp = (ErlFunThing *) tp;
851 		    funp->next = off_heap->first;
852 		    off_heap->first = (struct erl_off_heap_header*) funp;
853 		    erts_refc_inc(&funp->fe->refc, 2);
854 		    *argp = make_fun(tp);
855 		}
856 		break;
857 	    case EXTERNAL_PID_SUBTAG:
858 	    case EXTERNAL_PORT_SUBTAG:
859 	    case EXTERNAL_REF_SUBTAG:
860 		{
861 		  ExternalThing *etp = (ExternalThing *) objp;
862 #if defined(ERTS_COPY_REGISTER_LOCATION) && defined(ERL_NODE_BOOKKEEP)
863 		  erts_ref_node_entry__(etp->node, 2, make_boxed(htop), file, line);
864 #else
865 		  erts_ref_node_entry(etp->node, 2, make_boxed(htop));
866 #endif
867 		}
868 	    L_off_heap_node_container_common:
869 		{
870 		  struct erl_off_heap_header *ohhp;
871 		  ohhp = (struct erl_off_heap_header *) htop;
872 		  i  = thing_arityval(hdr) + 1;
873 		  *argp = make_boxed(htop);
874 		  tp = htop;
875 
876 		  while (i--)  {
877 		    *htop++ = *objp++;
878 		  }
879 
880 		  ohhp->next = off_heap->first;
881 		  off_heap->first = ohhp;
882 
883 		}
884 		break;
885 	    case MAP_SUBTAG:
886 		tp = htop;
887 		switch (MAP_HEADER_TYPE(hdr)) {
888 		    case MAP_HEADER_TAG_FLATMAP_HEAD :
889                         i = flatmap_get_size(objp) + 3;
890                         *argp = make_flatmap(htop);
891                         while (i--) {
892                             *htop++ = *objp++;
893                         }
894 			break;
895 		    case MAP_HEADER_TAG_HAMT_HEAD_BITMAP :
896 		    case MAP_HEADER_TAG_HAMT_HEAD_ARRAY :
897 			*htop++ = *objp++;
898 		    case MAP_HEADER_TAG_HAMT_NODE_BITMAP :
899 			i = 1 + hashmap_bitcount(MAP_HEADER_VAL(hdr));
900 			while (i--)  { *htop++ = *objp++; }
901 			*argp = make_hashmap(tp);
902 			break;
903 		    default:
904 			erts_exit(ERTS_ABORT_EXIT, "copy_struct: bad hashmap type %d\n", MAP_HEADER_TYPE(hdr));
905 		}
906 		break;
907 	    case BIN_MATCHSTATE_SUBTAG:
908 		erts_exit(ERTS_ABORT_EXIT,
909 			 "copy_struct: matchstate term not allowed");
910 	    case REF_SUBTAG:
911 		if (is_magic_ref_thing(objp)) {
912 		    ErtsMRefThing *mreft = (ErtsMRefThing *) objp;
913 		    erts_refc_inc(&mreft->mb->intern.refc, 2);
914 		    goto L_off_heap_node_container_common;
915 		}
916 		/* Fall through... */
917 	    default:
918 		i = thing_arityval(hdr)+1;
919 		hbot -= i;
920 		tp = hbot;
921 		*argp = make_boxed(hbot);
922 		while (i--) {
923 		    *tp++ = *objp++;
924 		}
925 	    }
926 	    break;
927 	case TAG_PRIMARY_HEADER:
928 	    if (header_is_thing(obj) || hp == const_tuple) {
929 		hp += header_arity(obj) + 1;
930 	    } else {
931 		hp++;
932 	    }
933 	    break;
934 	}
935     }
936 
937     if (bsz) {
938         *hpp = htop;
939         *bsz = hend - hbot;
940     } else {
941 #ifdef DEBUG
942         if (!eq(org_obj, res)) {
943             erts_exit(ERTS_ABORT_EXIT,
944                     "Internal error in copy_struct() when copying %T:"
945                     " not equal to copy %T\n",
946                     org_obj, res);
947         }
948         if (htop != hbot)
949             erts_exit(ERTS_ABORT_EXIT,
950                     "Internal error in copy_struct() when copying %T:"
951                     " htop=%p != hbot=%p (sz=%beu)\n",
952                     org_obj, htop, hbot, org_sz);
953 #else
954         if (htop > hbot) {
955             erts_exit(ERTS_ABORT_EXIT,
956                     "Internal error in copy_struct(): htop, hbot overrun\n");
957         }
958 #endif
959         *hpp = (Eterm *) (hstart+hsize);
960     }
961     VERBOSE(DEBUG_SHCOPY, ("[pid=%T] result is at %p\n", mypid, res));
962     return res;
963 }
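/*
 * Illustrative flat-copy sketch (assuming the conventional size_object()
 * and copy_struct() wrappers and a valid Process pointer "p"):
 *
 *     Uint   sz  = size_object(term);
 *     Eterm *hp  = HAlloc(p, sz);
 *     Eterm copy = copy_struct(term, sz, &hp, &p->off_heap);
 *
 * On return hp has been advanced past the sz words used for the copy.
 */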
964 
965 
966 /*
967  *  Machinery for the table used by the sharing preserving copier
968  *  Using an ESTACK but not very transparently; consider refactoring
969  */
970 
971 #define DECLARE_SHTABLE(s)					\
972     DECLARE_ESTACK(s);						\
973     Uint ESTK_CONCAT(s,_offset) = 0
974 #define DESTROY_SHTABLE(s) DESTROY_ESTACK(s)
975 #define SHTABLE_INCR 4
976 #define SHTABLE_NEXT(s)	ESTK_CONCAT(s,_offset)
977 #define SHTABLE_PUSH(s,x,y,b)					\
978 do {								\
979     if (s.sp > s.end - SHTABLE_INCR) {				\
980         erl_grow_estack(&(s), SHTABLE_INCR);	       		\
981     }								\
982     *s.sp++ = (x);						\
983     *s.sp++ = (y);						\
984     *s.sp++ = (Eterm) NULL;					\
985     *s.sp++ = (Eterm) (b);		                	\
986     ESTK_CONCAT(s,_offset) += SHTABLE_INCR;			\
987 } while(0)
988 #define SHTABLE_X(s,e) (s.start[e])
989 #define SHTABLE_Y(s,e) (s.start[(e)+1])
990 #define SHTABLE_FWD(s,e) ((Eterm *) (s.start[(e)+2]))
991 #define SHTABLE_FWD_UPD(s,e,p) (s.start[(e)+2] = (Eterm) (p))
992 #define SHTABLE_REV(s,e) ((Eterm *) (s.start[(e)+3]))
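/*
 * Each SHTABLE entry occupies SHTABLE_INCR (4) words:
 *   X   - the original CAR (for lists) or header word (for boxes),
 *   Y   - the original CDR for lists, THE_NON_VALUE for boxes,
 *   FWD - forwarding pointer into the new heap, filled in by the copy
 *         pass when the shared object is first copied,
 *   REV - back pointer to the shared object on the original heap.
 */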
993 
994 #define LIST_SHARED_UNPROCESSED ((Eterm) 0)
995 #define LIST_SHARED_PROCESSED	((Eterm) 1)
996 
997 #define HEAP_ELEM_TO_BE_FILLED	_unchecked_make_list(NULL)
998 
999 
1000 /*
1001  *  Specialized macros for using/reusing the persistent state
1002  */
1003 
1004 #define DECLARE_EQUEUE_INIT_INFO(q, info)		\
1005     UWord* EQUE_DEF_QUEUE(q) = info->queue_default;	\
1006     ErtsEQueue q = {					\
1007         EQUE_DEF_QUEUE(q), /* start */			\
1008         EQUE_DEF_QUEUE(q), /* front */			\
1009         EQUE_DEF_QUEUE(q), /* back */			\
1010         1,                 /* possibly_empty */		\
1011         EQUE_DEF_QUEUE(q) + DEF_EQUEUE_SIZE, /* end */	\
1012         ERTS_ALC_T_ESTACK  /* alloc_type */		\
1013     }
1014 
1015 #define DECLARE_EQUEUE_FROM_INFO(q, info)		\
1016     /* no EQUE_DEF_QUEUE(q), read-only */		\
1017     ErtsEQueue q = {					\
1018         info->queue_start,      /* start */		\
1019         info->queue_start,      /* front */		\
1020         info->queue_start,      /* back */		\
1021         1,                      /* possibly_empty */	\
1022         info->queue_end,        /* end */		\
1023         info->queue_alloc_type  /* alloc_type */	\
1024     }
1025 
1026 #define DECLARE_BITSTORE_INIT_INFO(s, info)		\
1027     UWord* WSTK_DEF_STACK(s) = info->bitstore_default;	\
1028     ErtsWStack s = {					\
1029         WSTK_DEF_STACK(s),  /* wstart */		\
1030         WSTK_DEF_STACK(s),  /* wsp */			\
1031         WSTK_DEF_STACK(s) + DEF_WSTACK_SIZE, /* wend */	\
1032         WSTK_DEF_STACK(s),  /* wdflt */ 		\
1033         ERTS_ALC_T_ESTACK /* alloc_type */		\
1034     };							\
1035     int WSTK_CONCAT(s,_bitoffs) = 0;			\
1036     /* no WSTK_CONCAT(s,_offset), write-only */		\
1037     UWord WSTK_CONCAT(s,_buffer) = 0
1038 
1039 #define DECLARE_BITSTORE_FROM_INFO(s, info)		\
1040     /* no WSTK_DEF_STACK(s), read-only */		\
1041     ErtsWStack s = {					\
1042         info->bitstore_start,  /* wstart */		\
1043         NULL,                  /* wsp,  read-only */	\
1044         NULL,                  /* wend, read-only */	\
1045         NULL,                  /* wdef, read-only */	\
1046         info->bitstore_alloc_type /* alloc_type */	\
1047     };							\
1048     int WSTK_CONCAT(s,_bitoffs) = 0;			\
1049     int WSTK_CONCAT(s,_offset) = 0;			\
1050     UWord WSTK_CONCAT(s,_buffer) = 0
1051 
1052 #define DECLARE_SHTABLE_INIT_INFO(s, info)		\
1053     Eterm* ESTK_DEF_STACK(s) = info->shtable_default;	\
1054     ErtsEStack s = {					\
1055         ESTK_DEF_STACK(s),  /* start */			\
1056         ESTK_DEF_STACK(s),  /* sp */			\
1057         ESTK_DEF_STACK(s) + DEF_ESTACK_SIZE, /* end */	\
1058         ESTK_DEF_STACK(s),  /* default */		\
1059         ERTS_ALC_T_ESTACK /* alloc_type */		\
1060     };							\
1061     Uint ESTK_CONCAT(s,_offset) = 0
1062 
1063 #define DECLARE_SHTABLE_FROM_INFO(s, info)		\
1064     /* no ESTK_DEF_STACK(s), read-only */		\
1065     ErtsEStack s = {					\
1066         info->shtable_start,     /* start */		\
1067         NULL,                    /* sp,  read-only */	\
1068         NULL,                    /* end, read-only */	\
1069         NULL,                    /* def, read-only */	\
1070         info->shtable_alloc_type /* alloc_type */	\
1071     };							\
1072     /* no ESTK_CONCAT(s,_offset), read-only */
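/*
 * The *_INIT_INFO variants above set up fresh stacks/queues whose small
 * default buffers live inside the erts_shcopy_t itself; they are used by
 * copy_shared_calculate(), which stores the (possibly grown) buffers back
 * into the info struct.  The *_FROM_INFO variants then rebuild read-only
 * views of that saved state for copy_shared_perform(), so the caller can
 * allocate heap between the two halves.
 */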
1073 
1074 /*
1075  *  Copy object "obj" preserving sharing.
1076  *  First half: count size and calculate sharing.
1077  */
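/*
 * Illustrative sketch of the two-phase protocol (assuming a valid Process
 * pointer "p", the INITIALIZE_SHCOPY/DESTROY_SHCOPY helpers, and the
 * copy_shared_perform() wrapper conventionally used with these functions):
 *
 *     erts_shcopy_t info;
 *     INITIALIZE_SHCOPY(info);
 *     Uint   sz   = copy_shared_calculate(term, &info);
 *     Eterm *hp   = HAlloc(p, sz);
 *     Eterm  copy = copy_shared_perform(term, sz, &info, &hp, &p->off_heap);
 *     DESTROY_SHCOPY(info);
 */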
1078 Uint copy_shared_calculate(Eterm obj, erts_shcopy_t *info)
1079 {
1080     Uint sum;
1081     Uint e;
1082     unsigned sz;
1083     Eterm* ptr;
1084     Eterm *lit_purge_ptr = info->lit_purge_ptr;
1085     Uint lit_purge_sz = info->lit_purge_sz;
1086     int copy_literals = info->copy_literals;
1087 #ifdef DEBUG
1088     Eterm mypid = erts_get_current_pid();
1089 #endif
1090 
1091     DECLARE_EQUEUE_INIT_INFO(s, info);
1092     DECLARE_BITSTORE_INIT_INFO(b, info);
1093     DECLARE_SHTABLE_INIT_INFO(t, info);
1094 
1095     /* step #0:
1096        -------------------------------------------------------
1097        get rid of the easy cases first:
1098        - copying constants
1099        - if not a proper process, do flat copy
1100     */
1101 
1102     if (IS_CONST(obj))
1103 	return 0;
1104 
1105     VERBOSE(DEBUG_SHCOPY, ("[pid=%T] copy_shared_calculate %p\n", mypid, obj));
1106     VERBOSE(DEBUG_SHCOPY, ("[pid=%T] message is %T\n", mypid, obj));
1107 
1108     /* step #1:
1109        -------------------------------------------------------
1110        traverse the term and calculate the size;
1111        when traversing, transform as you do in size_shared
1112        but when you find shared objects:
1113 
1114        a. add entry in the table, indexed by i
1115        b. mark them:
1116 	  b1. boxed terms, set header to (i | 11)
1117 	      store (old header, NONV, NULL, backptr) in the entry
1118 	  b2. cons cells, set CDR to NONV, set CAR to i
1119 	      store (old CAR, old CDR, NULL, backptr) in the entry
1120     */
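    /* The marks reuse the non-header primary-tag values of the header
     * word: BOXED_VISITED for a box seen exactly once so far, and
     * BOXED_SHARED_UNPROCESSED/BOXED_SHARED_PROCESSED (with the table
     * index in the remaining bits) once the box is known to be shared.
     * Cons cells are marked by rewriting CAR/CDR as described above. */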
1121 
1122     sum = 0;
1123 
1124     for (;;) {
1125 	switch (primary_tag(obj)) {
1126 	case TAG_PRIMARY_LIST: {
1127 	    Eterm head, tail;
1128 	    ptr = list_val(obj);
1129 	    /* off heap list pointers are copied verbatim */
1130 	    if (erts_is_literal(obj,ptr)) {
1131 		VERBOSE(DEBUG_SHCOPY, ("[pid=%T] bypassed copying %p is %T\n", mypid, ptr, obj));
1132                 if (copy_literals || in_literal_purge_area(ptr))
1133                     info->literal_size += size_object(obj);
1134 		goto pop_next;
1135 	    }
1136 	    head = CAR(ptr);
1137 	    tail = CDR(ptr);
1138 	    /* if it's visited, don't count it;
1139 	       if not already shared, make it shared and store it in the table */
1140 	    if (primary_tag(tail) == TAG_PRIMARY_HEADER ||
1141 		primary_tag(head) == TAG_PRIMARY_HEADER) {
1142 		if (tail != THE_NON_VALUE) {
1143 		    e = SHTABLE_NEXT(t);
1144 		    VERBOSE(DEBUG_SHCOPY, ("[pid=%T] tabling L %p\n", mypid, ptr));
1145 		    SHTABLE_PUSH(t, head, tail, ptr);
1146 		    CAR(ptr) = (e << _TAG_PRIMARY_SIZE) | LIST_SHARED_UNPROCESSED;
1147 		    CDR(ptr) = THE_NON_VALUE;
1148 		}
1149 		goto pop_next;
1150 	    }
1151 	    /* else make it visited now */
1152 	    switch (primary_tag(tail)) {
1153 	    case TAG_PRIMARY_LIST:
1154 		VERBOSE(DEBUG_SHCOPY, ("[pid=%T] mangling L/L %p\n", mypid, ptr));
1155 		CDR(ptr) = (tail - TAG_PRIMARY_LIST) | TAG_PRIMARY_HEADER;
1156 		break;
1157 	    case TAG_PRIMARY_IMMED1:
1158 		VERBOSE(DEBUG_SHCOPY, ("[pid=%T] mangling L/I %p\n", mypid, ptr));
1159 		CAR(ptr) = (head - primary_tag(head)) | TAG_PRIMARY_HEADER;
1160 		CDR(ptr) = (tail - TAG_PRIMARY_IMMED1) | primary_tag(head);
1161 		break;
1162 	    case TAG_PRIMARY_BOXED:
1163 		BITSTORE_PUT(b, primary_tag(head));
1164 		VERBOSE(DEBUG_SHCOPY, ("[pid=%T] mangling L/B %p\n", mypid, ptr));
1165 		CAR(ptr) = (head - primary_tag(head)) | TAG_PRIMARY_HEADER;
1166 		CDR(ptr) = (tail - TAG_PRIMARY_BOXED) | TAG_PRIMARY_HEADER;
1167 		break;
1168 	    }
1169 	    /* and count it */
1170 	    sum += 2;
1171 	    if (!IS_CONST(head)) {
1172 		EQUEUE_PUT(s, head);
1173 	    }
1174 	    obj = tail;
1175 	    break;
1176 	}
1177 	case TAG_PRIMARY_BOXED: {
1178 	    Eterm hdr;
1179 	    ptr = boxed_val(obj);
1180 	    /* off heap pointers to boxes are copied verbatim */
1181 	    if (erts_is_literal(obj,ptr)) {
1182 		VERBOSE(DEBUG_SHCOPY, ("[pid=%T] bypassed copying %p is %T\n", mypid, ptr, obj));
1183                 if (copy_literals || in_literal_purge_area(ptr))
1184                     info->literal_size += size_object(obj);
1185 		goto pop_next;
1186 	    }
1187 	    hdr = *ptr;
1188 	    /* if it's visited, don't count it;
1189 	       if not already shared, make it shared and store it in the table */
1190 	    if (primary_tag(hdr) != TAG_PRIMARY_HEADER) {
1191 		if (primary_tag(hdr) == BOXED_VISITED) {
1192 		    e = SHTABLE_NEXT(t);
1193 		    VERBOSE(DEBUG_SHCOPY, ("[pid=%T] tabling B %p\n", mypid, ptr));
1194 		    SHTABLE_PUSH(t, hdr, THE_NON_VALUE, ptr);
1195 		    *ptr = (e << _TAG_PRIMARY_SIZE) | BOXED_SHARED_UNPROCESSED;
1196 		}
1197 		goto pop_next;
1198 	    }
1199 	    /* else make it visited now */
1200 	    VERBOSE(DEBUG_SHCOPY, ("[pid=%T] mangling B %p\n", mypid, ptr));
1201 	    *ptr = (hdr - primary_tag(hdr)) + BOXED_VISITED;
1202 	    /* and count it */
1203 	    ASSERT(is_header(hdr));
1204 	    switch (hdr & _TAG_HEADER_MASK) {
1205 	    case ARITYVAL_SUBTAG: {
1206 		int arity = header_arity(hdr);
1207 		sum += arity + 1;
1208 		if (arity == 0) { /* Empty tuple -- unusual. */
1209 		    goto pop_next;
1210 		}
1211 		while (arity-- > 0) {
1212 		    obj = *++ptr;
1213 		    if (!IS_CONST(obj)) {
1214 			EQUEUE_PUT(s, obj);
1215 		    }
1216 		}
1217 		goto pop_next;
1218 	    }
1219 	    case FUN_SUBTAG: {
1220 		ErlFunThing* funp = (ErlFunThing *) ptr;
1221 		unsigned eterms = 1 /* creator */ + funp->num_free;
1222 		sz = thing_arityval(hdr);
1223 		sum += 1 /* header */ + sz + eterms;
1224 		ptr += 1 /* header */ + sz;
1225 		while (eterms-- > 0) {
1226 		    obj = *ptr++;
1227 		    if (!IS_CONST(obj)) {
1228 			EQUEUE_PUT(s, obj);
1229 		    }
1230 		}
1231 		goto pop_next;
1232 	    }
1233 	    case SUB_BINARY_SUBTAG: {
1234 		ErlSubBin* sb = (ErlSubBin *) ptr;
1235 		Eterm real_bin = sb->orig;
1236 		Uint bit_offset = sb->bitoffs;
1237 		Uint bit_size = sb->bitsize;
1238 		size_t size = sb->size;
1239 		Uint extra_bytes;
1240 		Eterm hdr;
1241 		if (bit_size + bit_offset > 8) {
1242 		    sum += ERL_SUB_BIN_SIZE;
1243 		    extra_bytes = 2;
1244 		} else if (bit_size + bit_offset > 0) {
1245 		    sum += ERL_SUB_BIN_SIZE;
1246 		    extra_bytes = 1;
1247 		} else {
1248 		    extra_bytes = 0;
1249 		}
1250                 ASSERT(is_boxed(real_bin));
1251                 hdr = *_unchecked_binary_val(real_bin);
1252                 switch (primary_tag(hdr)) {
1253                 case TAG_PRIMARY_HEADER:
1254                     /* real_bin is untouched, only referred by sub-bins so far */
1255                     break;
1256                 case BOXED_VISITED:
1257                     /* real_bin referred directly once so far */
1258                     hdr = (hdr - BOXED_VISITED) + TAG_PRIMARY_HEADER;
1259                     break;
1260                 case BOXED_SHARED_PROCESSED:
1261                 case BOXED_SHARED_UNPROCESSED:
1262                     /* real_bin referred directly more than once */
1263                     e = hdr >> _TAG_PRIMARY_SIZE;
1264                     hdr = SHTABLE_X(t, e);
1265                     hdr = (hdr & ~BOXED_VISITED_MASK) + TAG_PRIMARY_HEADER;
1266                     break;
1267                 }
1268 
1269 		if (thing_subtag(hdr) == HEAP_BINARY_SUBTAG) {
1270 		    sum += heap_bin_size(size+extra_bytes);
1271 		} else {
1272 		    ASSERT(thing_subtag(hdr) == REFC_BINARY_SUBTAG);
1273 		    sum += PROC_BIN_SIZE;
1274 		}
1275 		goto pop_next;
1276 	    }
1277             case MAP_SUBTAG:
1278                 switch (MAP_HEADER_TYPE(hdr)) {
1279                     case MAP_HEADER_TAG_FLATMAP_HEAD : {
1280                         flatmap_t *mp = (flatmap_t *) ptr;
1281                         Uint n = flatmap_get_size(mp) + 1;
1282                         sum += n + 2;
1283                         ptr += 2; /* hdr + size words */
1284                         while (n--) {
1285                             obj = *ptr++;
1286                             if (!IS_CONST(obj)) {
1287                                 EQUEUE_PUT(s, obj);
1288                             }
1289                         }
1290                         goto pop_next;
1291                     }
1292                     case MAP_HEADER_TAG_HAMT_HEAD_BITMAP :
1293                     case MAP_HEADER_TAG_HAMT_HEAD_ARRAY :
1294                     case MAP_HEADER_TAG_HAMT_NODE_BITMAP : {
1295                         Uint n = hashmap_bitcount(MAP_HEADER_VAL(hdr));
1296                         sum += 1 + n + header_arity(hdr);
1297                         ptr += 1 + header_arity(hdr);
1298 
1299                         if (n == 0) {
1300                             goto pop_next;
1301                         }
1302                         while(n--) {
1303                             obj = *ptr++;
1304                             if (!IS_CONST(obj)) {
1305                                 EQUEUE_PUT(s, obj);
1306                             }
1307                         }
1308                         goto pop_next;
1309                     }
1310                     default:
1311                         erts_exit(ERTS_ABORT_EXIT, "copy_shared_calculate: bad hashmap type %d\n", MAP_HEADER_TYPE(hdr));
1312                 }
1313             case BIN_MATCHSTATE_SUBTAG:
1314 		erts_exit(ERTS_ABORT_EXIT,
1315 			 "copy_shared_calculate: matchstate term not allowed");
1316 	    default:
1317 		sum += thing_arityval(hdr) + 1;
1318 		goto pop_next;
1319 	    }
1320 	    break;
1321 	}
1322 	case TAG_PRIMARY_IMMED1:
1323 	pop_next:
1324 	    if (EQUEUE_ISEMPTY(s)) {
1325                 /* add sentinel to the table */
1326                 SHTABLE_PUSH(t, THE_NON_VALUE, THE_NON_VALUE, NULL);
1327                 /* store persistent info */
1328                 BITSTORE_CLOSE(b);
1329                 info->queue_start = s.start;
1330                 info->queue_end = s.end;
1331                 info->queue_alloc_type = s.alloc_type;
1332                 info->bitstore_start = b.wstart;
1333                 info->bitstore_alloc_type = b.alloc_type;
1334                 info->shtable_start = t.start;
1335                 info->shtable_alloc_type = t.alloc_type;
1336                 /* single point of return: the size of the object */
1337                 VERBOSE(DEBUG_SHCOPY, ("[pid=%T] size was: %u\n", mypid, sum));
1338                 return sum + info->literal_size;
1339 	    }
1340 	    obj = EQUEUE_GET(s);
1341 	    break;
1342 	default:
1343 	    erts_exit(ERTS_ABORT_EXIT, "copy_shared_calculate: bad tag for %#x\n", obj);
1344 	}
1345     }
1346 }
1347 
1348 /*
1349  *  Copy object "obj" preserving sharing.
1350  *  Second half: copy and restore the object.
1351  */
1352 Uint copy_shared_perform_x(Eterm obj, Uint size, erts_shcopy_t *info,
1353                            Eterm** hpp, ErlOffHeap* off_heap
1354 #ifdef ERTS_COPY_REGISTER_LOCATION
1355                            , char *file, int line
1356 #endif
1357     )
1358 {
1359     Uint e;
1360     unsigned sz;
1361     Eterm* ptr;
1362     Eterm* hp;
1363     Eterm* hscan;
1364     Eterm result;
1365     Eterm* resp;
1366     Eterm *hbot, *hend;
1367     unsigned remaining;
1368     Eterm *lit_purge_ptr = info->lit_purge_ptr;
1369     Uint lit_purge_sz = info->lit_purge_sz;
1370     int copy_literals = info->copy_literals;
1371 #ifdef DEBUG
1372     Eterm mypid = erts_get_current_pid();
1373     Eterm saved_obj = obj;
1374 #endif
1375 
1376     DECLARE_EQUEUE_FROM_INFO(s, info);
1377     DECLARE_BITSTORE_FROM_INFO(b, info);
1378     DECLARE_SHTABLE_FROM_INFO(t, info);
1379 
1380     /* step #0:
1381        -------------------------------------------------------
1382        get rid of the easy cases first:
1383        - copying constants
1384        - if not a proper process, do flat copy
1385     */
1386 
1387     if (IS_CONST(obj))
1388 	return obj;
1389 
1390     VERBOSE(DEBUG_SHCOPY, ("[pid=%T] copy_shared_perform %p\n", mypid, obj));
1391 
1392     /* step #2: was performed before this function was called
1393        -------------------------------------------------------
1394        allocate new space
1395     */
1396 
1397     hscan = hp = *hpp;
1398     hbot  = hend = hp + size;
1399 
1400     /* step #3:
1401        -------------------------------------------------------
1402        traverse the term a second time and when traversing:
1403        a. if the object is marked as shared
1404 	  a1. if the entry contains a forwarding ptr, use that
1405 	  a2. otherwise, copy it to the new space and store the
1406 	      forwarding ptr to the entry
1407       b. otherwise, reverse-transform as you do in size_shared
1408 	 and copy to the new space
1409     */
1410 
1411     resp = &result;
1412     remaining = 0;
1413     for (;;) {
1414 	switch (primary_tag(obj)) {
1415 	case TAG_PRIMARY_LIST: {
1416 	    Eterm head, tail;
1417 	    ptr = list_val(obj);
1418 	    /* off heap list pointers are copied verbatim */
1419 	    if (erts_is_literal(obj,ptr)) {
1420                 if (!(copy_literals || in_literal_purge_area(ptr))) {
1421                     *resp = obj;
1422                 } else {
1423                     Uint bsz = 0;
1424                     *resp = copy_struct_x(obj, hbot - hp, &hp, off_heap, &bsz, NULL
1425 #ifdef ERTS_COPY_REGISTER_LOCATION
1426                                           , file, line
1427 #endif
1428                         ); /* copy literal */
1429                     hbot -= bsz;
1430                 }
1431 		goto cleanup_next;
1432 	    }
1433 	    head = CAR(ptr);
1434 	    tail = CDR(ptr);
1435 	    /* if it is shared */
1436 	    if (tail == THE_NON_VALUE) {
1437 		e = head >> _TAG_PRIMARY_SIZE;
1438 		/* if it has been processed, just use the forwarding pointer */
1439 		if (primary_tag(head) == LIST_SHARED_PROCESSED) {
1440 		    *resp = make_list(SHTABLE_FWD(t, e));
1441 		    goto cleanup_next;
1442 		}
1443 		/* else, let's process it now,
1444 		   copy it and keep the forwarding pointer */
1445 		else {
1446 		    CAR(ptr) = (head - primary_tag(head)) + LIST_SHARED_PROCESSED;
1447 		    head = SHTABLE_X(t, e);
1448 		    tail = SHTABLE_Y(t, e);
1449 		    ptr = &(SHTABLE_X(t, e));
1450 		    VERBOSE(DEBUG_SHCOPY, ("[pid=%T] tabled L %p is %p\n", mypid, ptr, SHTABLE_REV(t, e)));
1451 		    SHTABLE_FWD_UPD(t, e, hp);
1452 		}
1453 	    }
1454 	    /* if not already clean, clean it up and copy it */
1455 	    if (primary_tag(tail) == TAG_PRIMARY_HEADER) {
1456 		if (primary_tag(head) == TAG_PRIMARY_HEADER) {
1457 		    Eterm saved;
1458                     BITSTORE_FETCH(b, saved);
1459 		    VERBOSE(DEBUG_SHCOPY, ("[pid=%T] unmangling L/B %p\n", mypid, ptr));
1460 		    CAR(ptr) = head = (head - TAG_PRIMARY_HEADER) + saved;
1461 		    CDR(ptr) = tail = (tail - TAG_PRIMARY_HEADER) + TAG_PRIMARY_BOXED;
1462 		} else {
1463 		    VERBOSE(DEBUG_SHCOPY, ("[pid=%T] unmangling L/L %p\n", mypid, ptr));
1464 		    CDR(ptr) = tail = (tail - TAG_PRIMARY_HEADER) + TAG_PRIMARY_LIST;
1465 		}
1466 	    } else if (primary_tag(head) == TAG_PRIMARY_HEADER) {
1467 		VERBOSE(DEBUG_SHCOPY, ("[pid=%T] unmangling L/I %p\n", mypid, ptr));
1468 		CAR(ptr) = head = (head - TAG_PRIMARY_HEADER) | primary_tag(tail);
1469 		CDR(ptr) = tail = (tail - primary_tag(tail)) | TAG_PRIMARY_IMMED1;
1470 	    } else {
1471 		ASSERT(0 && "cannot come here");
1472 		goto cleanup_next;
1473 	    }
1474 	    /* and its children too */
1475 	    if (IS_CONST(head)) {
1476 		CAR(hp) = head;
1477 	    } else {
1478 		EQUEUE_PUT_UNCHECKED(s, head);
1479 		CAR(hp) = HEAP_ELEM_TO_BE_FILLED;
1480 	    }
1481 	    *resp = make_list(hp);
1482 	    resp = &(CDR(hp));
1483 	    hp += 2;
1484 	    obj = tail;
1485 	    break;
1486 	}
1487 	case TAG_PRIMARY_BOXED: {
1488 	    Eterm hdr;
1489 	    ptr = boxed_val(obj);
1490 	    /* off heap pointers to boxes are copied verbatim */
1491 	    if (erts_is_literal(obj,ptr)) {
1492                 if (!(copy_literals || in_literal_purge_area(ptr))) {
1493                     *resp = obj;
1494                 } else {
1495                     Uint bsz = 0;
1496                     *resp = copy_struct_x(obj, hbot - hp, &hp, off_heap, &bsz, NULL
1497 #ifdef ERTS_COPY_REGISTER_LOCATION
1498                                           , file, line
1499 #endif
1500                         ); /* copy literal */
1501                     hbot -= bsz;
1502                 }
1503 		goto cleanup_next;
1504 	    }
1505 	    hdr = *ptr;
1506 	    /* clean it up, unless it's already clean or shared and processed */
1507 	    switch (primary_tag(hdr)) {
1508 	    case TAG_PRIMARY_HEADER:
1509 		ASSERT(0 && "cannot come here");
1510 	    /* if it is shared and has been processed,
1511 	       just use the forwarding pointer */
1512 	    case BOXED_SHARED_PROCESSED:
1513 		e = hdr >> _TAG_PRIMARY_SIZE;
1514 		*resp = make_boxed(SHTABLE_FWD(t, e));
1515 		goto cleanup_next;
1516 	    /* if it is shared but has not been processed yet, let's process
1517 	       it now: copy it and keep the forwarding pointer */
1518 	    case BOXED_SHARED_UNPROCESSED:
1519 		e = hdr >> _TAG_PRIMARY_SIZE;
1520 		*ptr = (hdr - primary_tag(hdr)) + BOXED_SHARED_PROCESSED;
1521 		hdr = SHTABLE_X(t, e);
1522 		ASSERT(primary_tag(hdr) == BOXED_VISITED);
1523 		VERBOSE(DEBUG_SHCOPY, ("[pid=%T] tabled B %p is %p\n", mypid, ptr, SHTABLE_REV(t, e)));
1524 		VERBOSE(DEBUG_SHCOPY, ("[pid=%T] unmangling B %p\n", mypid, ptr));
1525 		SHTABLE_X(t, e) = hdr = (hdr - BOXED_VISITED) + TAG_PRIMARY_HEADER;
1526 		SHTABLE_FWD_UPD(t, e, hp);
1527 		break;
1528 	    case BOXED_VISITED:
1529 		VERBOSE(DEBUG_SHCOPY, ("[pid=%T] unmangling B %p\n", mypid, ptr));
1530 		*ptr = hdr = (hdr - BOXED_VISITED) + TAG_PRIMARY_HEADER;
1531 		break;
1532 	    }
1533 	    /* and its children too */
1534 	    switch (hdr & _TAG_HEADER_MASK) {
1535 	    case ARITYVAL_SUBTAG: {
1536 		int arity = header_arity(hdr);
1537 		*resp = make_boxed(hp);
1538 		*hp++ = hdr;
1539 		while (arity-- > 0) {
1540 		    obj = *++ptr;
1541 		    if (IS_CONST(obj)) {
1542 			*hp++ = obj;
1543 		    } else {
1544 			EQUEUE_PUT_UNCHECKED(s, obj);
1545 			*hp++ = HEAP_ELEM_TO_BE_FILLED;
1546 		    }
1547 		}
1548 		goto cleanup_next;
1549 	    }
1550 	    case FUN_SUBTAG: {
1551 		ErlFunThing* funp = (ErlFunThing *) ptr;
1552 		unsigned eterms = 1 /* creator */ + funp->num_free;
1553 		sz = thing_arityval(hdr);
1554 		funp = (ErlFunThing *) hp;
1555 		*resp = make_fun(hp);
1556 		*hp++ = hdr;
1557 		ptr++;
1558 		while (sz-- > 0) {
1559 		    *hp++ = *ptr++;
1560 		}
1561 		while (eterms-- > 0) {
1562 		    obj = *ptr++;
1563 		    if (IS_CONST(obj)) {
1564 			*hp++ = obj;
1565 		    } else {
1566 			EQUEUE_PUT_UNCHECKED(s, obj);
1567 			*hp++ = HEAP_ELEM_TO_BE_FILLED;
1568 		    }
1569 		}
1570 		funp->next = off_heap->first;
1571 		off_heap->first = (struct erl_off_heap_header*) funp;
1572 		erts_refc_inc(&funp->fe->refc, 2);
1573 		goto cleanup_next;
1574 	    }
1575 	    case MAP_SUBTAG:
1576                 *resp  = make_flatmap(hp);
1577                 *hp++  = hdr;
1578                 switch (MAP_HEADER_TYPE(hdr)) {
1579                     case MAP_HEADER_TAG_FLATMAP_HEAD : {
1580                         flatmap_t *mp = (flatmap_t *) ptr;
1581                         Uint n = flatmap_get_size(mp) + 1;
1582                         *hp++  = *++ptr; /* keys */
1583                         while (n--) {
1584                             obj = *++ptr;
1585                             if (IS_CONST(obj)) {
1586                                 *hp++ = obj;
1587                             } else {
1588                                 EQUEUE_PUT_UNCHECKED(s, obj);
1589                                 *hp++ = HEAP_ELEM_TO_BE_FILLED;
1590                             }
1591                         }
1592                         goto cleanup_next;
1593                     }
1594                     case MAP_HEADER_TAG_HAMT_HEAD_BITMAP :
1595                     case MAP_HEADER_TAG_HAMT_HEAD_ARRAY :
1596 			*hp++ = *++ptr; /* total map size */
1597                     case MAP_HEADER_TAG_HAMT_NODE_BITMAP : {
1598                          Uint n = hashmap_bitcount(MAP_HEADER_VAL(hdr));
1599                          while (n--)  {
1600                              obj = *++ptr;
1601                              if (IS_CONST(obj)) {
1602                                  *hp++ = obj;
1603                              } else {
1604                                  EQUEUE_PUT_UNCHECKED(s, obj);
1605                                  *hp++ = HEAP_ELEM_TO_BE_FILLED;
1606                              }
1607                          }
1608                         goto cleanup_next;
1609                     }
1610                     default:
1611                         erts_exit(ERTS_ABORT_EXIT, "copy_shared_perform: bad hashmap type %d\n", MAP_HEADER_TYPE(hdr));
1612                 }
1613 	    case REFC_BINARY_SUBTAG: {
1614 		ProcBin* pb = (ProcBin *) ptr;
1615 		sz = thing_arityval(hdr);
1616 		if (pb->flags) {
1617 		    erts_emasculate_writable_binary(pb);
1618 		}
1619 		pb = (ProcBin *) hp;
1620 		*resp = make_binary(hp);
1621 		*hp++ = hdr;
1622 		ptr++;
1623 		while (sz-- > 0) {
1624 		    *hp++ = *ptr++;
1625 		}
1626 		erts_refc_inc(&pb->val->intern.refc, 2);
1627 		pb->next = off_heap->first;
1628 		pb->flags = 0;
1629 		off_heap->first = (struct erl_off_heap_header*) pb;
1630 		OH_OVERHEAD(off_heap, pb->size / sizeof(Eterm));
1631 		goto cleanup_next;
1632 	    }
1633 	    case SUB_BINARY_SUBTAG: {
1634 		ErlSubBin* sb = (ErlSubBin *) ptr;
1635 		Eterm real_bin = sb->orig;
1636 		Uint bit_offset = sb->bitoffs;
1637 		Uint bit_size = sb->bitsize;
1638 		Uint offset = sb->offs;
1639 		size_t size = sb->size;
1640 		Uint extra_bytes;
1641 		Uint real_size;
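		/* A sub binary may begin and/or end in the middle of a byte;
		 * in that case the copied data needs one or two extra bytes
		 * beyond its whole-byte size to hold the trailing bits. */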
1642 		if ((bit_size + bit_offset) > 8) {
1643 		    extra_bytes = 2;
1644 		} else if ((bit_size + bit_offset) > 0) {
1645 		    extra_bytes = 1;
1646 		} else {
1647 		    extra_bytes = 0;
1648 		}
1649 		real_size = size+extra_bytes;
1650 		*resp = make_binary(hp);
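		/* If the result still has a bit-level tail it cannot become a
		 * plain binary; build a new sub binary on top of the (heap or
		 * refc) binary copied directly after it. */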
1651 		if (extra_bytes != 0) {
1652 		    ErlSubBin* res = (ErlSubBin *) hp;
1653 		    hp += ERL_SUB_BIN_SIZE;
1654 		    res->thing_word = HEADER_SUB_BIN;
1655 		    res->size = size;
1656 		    res->bitsize = bit_size;
1657 		    res->bitoffs = bit_offset;
1658 		    res->offs = 0;
1659 		    res->is_writable = 0;
1660 		    res->orig = make_binary(hp);
1661 		}
1662                 ASSERT(is_boxed(real_bin));
1663                 ptr = _unchecked_binary_val(real_bin);
1664                 hdr = *ptr;
1665                 switch (primary_tag(hdr)) {
1666                 case TAG_PRIMARY_HEADER:
                    /* real_bin is untouched, i.e. only referred to by sub binaries */
1668                     break;
1669                 case BOXED_VISITED:
1670                     /* real_bin referred directly once */
1671                     hdr = (hdr - BOXED_VISITED) + TAG_PRIMARY_HEADER;
1672                     break;
1673                 case BOXED_SHARED_PROCESSED:
1674                 case BOXED_SHARED_UNPROCESSED:
1675                     /* real_bin referred directly more than once */
1676                     e = hdr >> _TAG_PRIMARY_SIZE;
1677                     hdr = SHTABLE_X(t, e);
1678                     hdr = (hdr & ~BOXED_VISITED_MASK) + TAG_PRIMARY_HEADER;
1679                     break;
1680                 }
1681 		if (thing_subtag(hdr) == HEAP_BINARY_SUBTAG) {
1682 		    ErlHeapBin* from = (ErlHeapBin *) ptr;
1683 		    ErlHeapBin* to = (ErlHeapBin *) hp;
1684 		    hp += heap_bin_size(real_size);
1685 		    to->thing_word = header_heap_bin(real_size);
1686 		    to->size = real_size;
1687 		    sys_memcpy(to->data, ((byte *)from->data)+offset, real_size);
1688 		} else {
1689 		    ProcBin* from = (ProcBin *) ptr;
1690 		    ProcBin* to = (ProcBin *) hp;
1691 		    ASSERT(thing_subtag(hdr) == REFC_BINARY_SUBTAG);
1692 		    if (from->flags) {
1693 			erts_emasculate_writable_binary(from);
1694 		    }
1695 		    hp += PROC_BIN_SIZE;
1696 		    to->thing_word = HEADER_PROC_BIN;
1697 		    to->size = real_size;
1698 		    to->val = from->val;
1699 		    erts_refc_inc(&to->val->intern.refc, 2);
1700 		    to->bytes = from->bytes + offset;
1701 		    to->next = off_heap->first;
1702 		    to->flags = 0;
1703 		    off_heap->first = (struct erl_off_heap_header*) to;
1704 		    OH_OVERHEAD(off_heap, to->size / sizeof(Eterm));
1705 		}
1706 		goto cleanup_next;
1707 	    }
1708 	    case EXTERNAL_PID_SUBTAG:
1709 	    case EXTERNAL_PORT_SUBTAG:
1710 	    case EXTERNAL_REF_SUBTAG:
1711 	    {
1712 		ExternalThing *etp = (ExternalThing *) ptr;
1713 
1714 #if defined(ERTS_COPY_REGISTER_LOCATION) && defined(ERL_NODE_BOOKKEEP)
1715                 erts_ref_node_entry__(etp->node, 2, make_boxed(hp), file, line);
1716 #else
1717                 erts_ref_node_entry(etp->node, 2, make_boxed(hp));
1718 #endif
1719 	    }
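	    /* Common tail for off-heap "containers": copy the thing word by
	     * word and link the copy first into the destination off_heap list. */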
1720 	  off_heap_node_container_common:
1721 	    {
1722 		struct erl_off_heap_header *ohhp;
1723 		ohhp = (struct erl_off_heap_header *) hp;
1724 		sz = thing_arityval(hdr);
1725 		*resp = make_boxed(hp);
1726 		*hp++ = hdr;
1727 		ptr++;
1728 		while (sz-- > 0) {
1729 		    *hp++ = *ptr++;
1730 		}
1731 		ohhp->next = off_heap->first;
1732 		off_heap->first = ohhp;
1733 		goto cleanup_next;
1734 	    }
1735 	    case REF_SUBTAG:
1736 		if (is_magic_ref_thing_with_hdr(ptr,hdr)) {
1737 		    ErtsMRefThing *mreft = (ErtsMRefThing *) ptr;
1738 		    erts_refc_inc(&mreft->mb->intern.refc, 2);
1739 		    goto off_heap_node_container_common;
1740 		}
1741 		/* Fall through... */
1742 	    default:
1743 		sz = thing_arityval(hdr);
1744 		*resp = make_boxed(hp);
1745 		*hp++ = hdr;
1746 		ptr++;
1747 		while (sz-- > 0) {
1748 		    *hp++ = *ptr++;
1749 		}
1750 		goto cleanup_next;
1751 	    }
1752 	    break;
1753 	}
1754 	case TAG_PRIMARY_IMMED1:
1755 	    *resp = obj;
1756 	cleanup_next:
1757 	    if (EQUEUE_ISEMPTY(s)) {
1758 		goto all_clean;
1759 	    }
1760 	    obj = EQUEUE_GET(s);
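	    /* Scan the copy built so far for the next HEAP_ELEM_TO_BE_FILLED
	     * placeholder; that is where the term just taken off the queue
	     * belongs. "remaining" counts how many child slots are left in the
	     * node currently being scanned, so headers and already filled
	     * slots are skipped correctly. */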
1761 	    for (;;) {
1762 		ASSERT(hscan < hp);
1763 		if (remaining == 0) {
1764 		    if (*hscan == HEAP_ELEM_TO_BE_FILLED) {
1765 			resp = hscan;
1766 			hscan += 2;
1767 			break; /* scanning loop */
1768 		    } else if (primary_tag(*hscan) == TAG_PRIMARY_HEADER) {
1769 			switch (*hscan & _TAG_HEADER_MASK) {
1770 			case ARITYVAL_SUBTAG:
1771 			    remaining = header_arity(*hscan);
1772 			    hscan++;
1773 			    break;
1774 			case FUN_SUBTAG: {
1775 			    ErlFunThing* funp = (ErlFunThing *) hscan;
1776 			    hscan += 1 + thing_arityval(*hscan);
1777 			    remaining = 1 + funp->num_free;
1778 			    break;
1779 			}
1780 			case MAP_SUBTAG:
1781                             switch (MAP_HEADER_TYPE(*hscan)) {
1782                                 case MAP_HEADER_TAG_FLATMAP_HEAD : {
1783                                     flatmap_t *mp = (flatmap_t *) hscan;
1784                                     remaining = flatmap_get_size(mp) + 1;
1785                                     hscan += 2;
1786                                     break;
1787                                 }
1788                                 case MAP_HEADER_TAG_HAMT_HEAD_BITMAP :
1789                                 case MAP_HEADER_TAG_HAMT_HEAD_ARRAY :
1790                                 case MAP_HEADER_TAG_HAMT_NODE_BITMAP :
1791                                     remaining = hashmap_bitcount(MAP_HEADER_VAL(*hscan));
1792                                     hscan += MAP_HEADER_ARITY(*hscan) + 1;
1793                                     break;
1794                                 default:
1795                                     erts_exit(ERTS_ABORT_EXIT,
1796                                             "copy_shared_perform: bad hashmap type %d\n",
1797                                             MAP_HEADER_TYPE(*hscan));
1798                             }
1799                             break;
1800 			case SUB_BINARY_SUBTAG:
1801 			    ASSERT(((ErlSubBin *) hscan)->bitoffs +
1802 				   ((ErlSubBin *) hscan)->bitsize > 0);
1803 			    hscan += ERL_SUB_BIN_SIZE;
1804 			    break;
1805 			default:
1806 			    hscan += 1 + thing_arityval(*hscan);
1807 			    break;
1808 			}
1809 		    } else {
1810 			hscan++;
1811 		    }
1812 		} else if (*hscan == HEAP_ELEM_TO_BE_FILLED) {
1813 		    resp = hscan++;
1814 		    remaining--;
1815 		    break; /* scanning loop */
1816 		} else {
1817 		    hscan++;
1818 		    remaining--;
1819 		}
1820 	    }
1821 	    ASSERT(resp < hp);
1822 	    break;
1823 	default:
	    erts_exit(ERTS_ABORT_EXIT, "copy_shared_perform: bad tag for %#x\n", obj);
1825 	}
1826     }
1827 
    /* step #4:
       -------------------------------------------------------
       traverse the shared-object table and restore, in the source term,
       every header word and cons cell that was overwritten with a table
       index while the term was being visited and copied
    */
1832 
1833 all_clean:
1834     for (e = 0; ; e += SHTABLE_INCR) {
1835 	ptr = SHTABLE_REV(t, e);
1836 	if (ptr == NULL)
1837 	    break;
	VERBOSE(DEBUG_SHCOPY, ("[copy] restoring shared: %p\n", ptr));
1839 	/* entry was a list */
1840 	if (SHTABLE_Y(t, e) != THE_NON_VALUE) {
1841 	    VERBOSE(DEBUG_SHCOPY, ("[pid=%T] untabling L %p\n", mypid, ptr));
1842 	    CAR(ptr) = SHTABLE_X(t, e);
1843 	    CDR(ptr) = SHTABLE_Y(t, e);
1844 	}
1845 	/* entry was boxed */
1846 	else {
1847 	    VERBOSE(DEBUG_SHCOPY, ("[pid=%T] untabling B %p\n", mypid, ptr));
1848 	    *ptr = SHTABLE_X(t, e);
1849 	    ASSERT(primary_tag(*ptr) == TAG_PRIMARY_HEADER);
1850 	}
1851     }
1852 
1853 #ifdef DEBUG
1854     if (eq(saved_obj, result) == 0) {
1855 	erts_fprintf(stderr, "original = %T\n", saved_obj);
1856 	erts_fprintf(stderr, "copy = %T\n", result);
1857 	erts_exit(ERTS_ABORT_EXIT, "copy (shared) not equal to source\n");
1858     }
1859 #endif
1860 
1861     VERBOSE(DEBUG_SHCOPY, ("[pid=%T] original was %T\n", mypid, saved_obj));
1862     VERBOSE(DEBUG_SHCOPY, ("[pid=%T] copy is %T\n", mypid, result));
1863     VERBOSE(DEBUG_SHCOPY, ("[pid=%T] result is at %p\n", mypid, result));
1864 
1865     ASSERT(hbot == hp);
1866     ASSERT(size == ((hp - *hpp) + (hend - hbot)));
1867     *hpp = hend;
1868     return result;
1869 }
1870 
1871 
1872 /*
1873  * Copy a term that is guaranteed to be contained in a single
 * heap block. The heap block is copied word by word, and any
 * pointers it contains are adjusted to point into the new location.
 *
 * Typically used to copy a term out of an ETS table.
1878  *
1879  * NOTE: Assumes that term is a tuple (ptr is an untagged tuple ptr).
1880  */
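/*
 * Minimal usage sketch (illustrative only, not part of the build): copying a
 * tuple stored in a flat off-heap block into a process p might look roughly
 * like this, assuming the usual copy_shallow() wrapper around this function
 * and that `stored` is the untagged pointer to a stored tuple of `sz` words:
 *
 *     Eterm *hp = HAlloc(p, sz);
 *     Eterm copy = copy_shallow(stored, sz, &hp, &MSO(p));
 */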
1881 Eterm
copy_shallow_x(Eterm* ERTS_RESTRICT ptr, Uint sz, Eterm** hpp, ErlOffHeap* off_heap
1883 #ifdef ERTS_COPY_REGISTER_LOCATION
1884                , char *file, int line
1885 #endif
1886     )
1887 {
1888     Eterm* tp = ptr;
1889     Eterm* hp = *hpp;
1890     const Eterm res = make_tuple(hp);
1891     const Sint offs = (hp - tp) * sizeof(Eterm);
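    /* Every pointer in the source block is assumed to point into the block
     * itself, so it can be relocated by adding the fixed byte offset between
     * the old and the new location. */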
1892 
1893     while (sz--) {
1894 	Eterm val = *tp++;
1895 
1896 	switch (primary_tag(val)) {
1897 	case TAG_PRIMARY_IMMED1:
1898 	    *hp++ = val;
1899 	    break;
1900 	case TAG_PRIMARY_LIST:
1901 	case TAG_PRIMARY_BOXED:
1902 	    *hp++ = byte_offset_ptr(val, offs);
1903 	    break;
1904 	case TAG_PRIMARY_HEADER:
1905 	    *hp++ = val;
1906 	    switch (val & _HEADER_SUBTAG_MASK) {
1907 	    case ARITYVAL_SUBTAG:
1908 		break;
1909 	    case REFC_BINARY_SUBTAG:
1910 		{
1911 		    ProcBin* pb = (ProcBin *) (tp-1);
1912 		    erts_refc_inc(&pb->val->intern.refc, 2);
1913 		    OH_OVERHEAD(off_heap, pb->size / sizeof(Eterm));
1914 		}
1915 		goto off_heap_common;
1916 
1917 	    case FUN_SUBTAG:
1918 		{
1919 		    ErlFunThing* funp = (ErlFunThing *) (tp-1);
1920 		    erts_refc_inc(&funp->fe->refc, 2);
1921 		}
1922 		goto off_heap_common;
1923 	    case EXTERNAL_PID_SUBTAG:
1924 	    case EXTERNAL_PORT_SUBTAG:
1925 	    case EXTERNAL_REF_SUBTAG:
1926 		{
1927 		    ExternalThing* etp = (ExternalThing *) (tp-1);
1928 #if defined(ERTS_COPY_REGISTER_LOCATION) && defined(ERL_NODE_BOOKKEEP)
1929                     erts_ref_node_entry__(etp->node, 2, make_boxed(hp-1), file, line);
1930 #else
1931                     erts_ref_node_entry(etp->node, 2, make_boxed(hp-1));
1932 #endif
1933 		}
1934 	    off_heap_common:
1935 		{
1936 		    struct erl_off_heap_header* ohh = (struct erl_off_heap_header*)(hp-1);
1937 		    int tari = thing_arityval(val);
1938 
1939 		    sz -= tari;
1940 		    while (tari--) {
1941 			*hp++ = *tp++;
1942 		    }
1943 		    ohh->next = off_heap->first;
1944 		    off_heap->first = ohh;
1945 		}
1946 		break;
1947 	    case REF_SUBTAG: {
1948 		ErtsRefThing *rtp = (ErtsRefThing *) (tp - 1);
1949 		if (is_magic_ref_thing(rtp)) {
1950 		    ErtsMRefThing *mreft = (ErtsMRefThing *) rtp;
1951 		    erts_refc_inc(&mreft->mb->intern.refc, 2);
1952 		    goto off_heap_common;
1953 		}
1954 		/* Fall through... */
1955 	    }
1956 	    default:
1957 		{
1958 		    int tari = header_arity(val);
1959 
1960 		    sz -= tari;
1961 		    while (tari--) {
1962 			*hp++ = *tp++;
1963 		    }
1964 		}
1965 		break;
1966 	    }
1967 	    break;
1968 	}
1969     }
1970     *hpp = hp;
1971     return res;
1972 }
1973 
/* Move all terms in the heap fragments onto the heap. The terms must be fully
 * contained within the fragments. The source terms are destroyed in place by
 * overwriting them with move markers.
 * Typically used to move a multi-fragment message (e.g. one built by a NIF)
 * onto a process heap.
1978  */
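/*
 * Usage sketch (illustrative only, not part of the build): a caller holding a
 * term `msg` built across the fragment chain `frags` could move it onto an
 * already allocated heap area `hp` like this (the final argument states that
 * the fragments contain no literals):
 *
 *     Eterm refs[1] = { msg };
 *     erts_move_multi_frags(&hp, off_heap, frags, refs, 1, 0);
 *     msg = refs[0];
 */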
void erts_move_multi_frags(Eterm** hpp, ErlOffHeap* off_heap, ErlHeapFragment* first,
1980 			   Eterm* refs, unsigned nrefs, int literals)
1981 {
1982     ErlHeapFragment* bp;
1983     Eterm* hp_start = *hpp;
1984     Eterm* hp_end;
1985     Eterm* hp;
1986     unsigned i;
1987     Eterm literal_tag;
1988 
1989 #ifdef TAG_LITERAL_PTR
1990     literal_tag = (Eterm) literals ? TAG_LITERAL_PTR : 0;
1991 #else
1992     literal_tag = (Eterm) 0;
1993 #endif
1994 
1995     for (bp=first; bp!=NULL; bp=bp->next) {
1996 	move_one_frag(hpp, bp, off_heap, literals);
1997     }
1998     hp_end = *hpp;
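    /* Second pass: all fragments have been moved, so rewrite any pointers in
     * the copied area that still refer to old fragment locations by following
     * the move markers left behind in the fragments. */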
1999     for (hp=hp_start; hp<hp_end; ++hp) {
2000 	Eterm* ptr;
2001 	Eterm val;
2002 	Eterm gval = *hp;
2003 	switch (primary_tag(gval)) {
2004 	case TAG_PRIMARY_BOXED:
2005 	    ptr = boxed_val(gval);
2006 	    val = *ptr;
2007 	    if (IS_MOVED_BOXED(val)) {
2008 		ASSERT(is_boxed(val));
2009 #ifdef TAG_LITERAL_PTR
2010 		val |= literal_tag;
2011 #endif
2012 		*hp = val;
2013 	    }
2014 	    break;
2015 	case TAG_PRIMARY_LIST:
2016 	    ptr = list_val(gval);
2017 	    val = *ptr;
2018 	    if (IS_MOVED_CONS(val)) {
2019 		val = ptr[1];
2020 #ifdef TAG_LITERAL_PTR
2021 		val |= literal_tag;
2022 #endif
2023 		*hp = val;
2024 	    }
2025 	    break;
2026 	case TAG_PRIMARY_HEADER:
2027 	    if (header_is_thing(gval)) {
2028 		hp += thing_arityval(gval);
2029 	    }
2030 	    break;
2031 	}
2032     }
2033     for (i=0; i<nrefs; ++i) {
2034 	refs[i] = follow_moved(refs[i], literal_tag);
2035     }
2036 }
2037 
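/*
 * Move the contents of a single heap fragment to the destination heap,
 * linking any off-heap things into the destination off_heap list and leaving
 * move markers behind so that the caller can update pointers into the
 * fragment afterwards (see erts_move_multi_frags() above).
 */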
2038 static void
move_one_frag(Eterm** hpp, ErlHeapFragment* frag, ErlOffHeap* off_heap, int literals)
2040 {
2041     Eterm* ptr = frag->mem;
2042     Eterm* end = ptr + frag->used_size;
2043     Eterm dummy_ref;
2044     Eterm* hp = *hpp;
2045 
2046     while (ptr != end) {
2047 	Eterm val;
2048 	ASSERT(ptr < end);
2049 	val = *ptr;
2050 	ASSERT(val != ERTS_HOLE_MARKER);
2051 	if (is_header(val)) {
2052 	    struct erl_off_heap_header* hdr = (struct erl_off_heap_header*)hp;
2053 	    ASSERT(ptr + header_arity(val) < end);
2054 	    ptr = move_boxed(ptr, val, &hp, &dummy_ref);
2055 	    switch (val & _HEADER_SUBTAG_MASK) {
2056 	    case REF_SUBTAG:
2057 		if (is_ordinary_ref_thing(hdr))
2058 		    break;
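		/* Fall through: a magic ref carries a binary that must be
		 * linked into the off_heap list. */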
2059 	    case REFC_BINARY_SUBTAG:
2060 	    case FUN_SUBTAG:
2061 	    case EXTERNAL_PID_SUBTAG:
2062 	    case EXTERNAL_PORT_SUBTAG:
2063 	    case EXTERNAL_REF_SUBTAG:
2064 		hdr->next = off_heap->first;
2065 		off_heap->first = hdr;
2066 		break;
2067 	    }
2068 	}
2069 	else { /* must be a cons cell */
2070 	    ASSERT(ptr+1 < end);
2071 	    move_cons(ptr, val, &hp, &dummy_ref);
2072 	    ptr += 2;
2073 	}
2074     }
2075     *hpp = hp;
2076     OH_OVERHEAD(off_heap, frag->off_heap.overhead);
2077     frag->off_heap.first = NULL;
2078 }
2079