1 /*
2 * %CopyrightBegin%
3 *
4 * Copyright Ericsson AB 1996-2020. All Rights Reserved.
5 *
6 * Licensed under the Apache License, Version 2.0 (the "License");
7 * you may not use this file except in compliance with the License.
8 * You may obtain a copy of the License at
9 *
10 * http://www.apache.org/licenses/LICENSE-2.0
11 *
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an "AS IS" BASIS,
14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
17 *
18 * %CopyrightEnd%
19 */
20
21 /* Implementation of the erlang external format
22 *
23 * And a nice cache mechanism which is used just to send a
24 * index indicating a specific atom to a remote node instead of the
25 * entire atom.
26 */
27
28 #ifdef HAVE_CONFIG_H
29 # include "config.h"
30 #endif
31
32 #define ERTS_WANT_EXTERNAL_TAGS
33
34 #include "sys.h"
35 #include "erl_vm.h"
36 #include "global.h"
37 #include "erl_process.h"
38 #include "error.h"
39 #include "external.h"
40 #include "bif.h"
41 #include "big.h"
42 #include "dist.h"
43 #include "erl_binary.h"
44 #include "erl_bits.h"
45 #include "erl_zlib.h"
46 #include "erl_map.h"
47 #include "erl_proc_sig_queue.h"
48 #include "erl_trace.h"
49
50 #define PASS_THROUGH 'p'
51
52 #define in_area(ptr,start,nbytes) ((UWord)((char*)(ptr) - (char*)(start)) < (nbytes))
53
54 #define MAX_STRING_LEN 0xffff
55
56 /*
57 * MAX value for the creation field in pid, port and reference
58 * for the old PID_EXT, PORT_EXT, REFERENCE_EXT and NEW_REFERENCE_EXT.
59 * Older nodes (OTP 19-22) will send us these so we must be able to decode them.
60 *
61 * From OTP 23 DFLAG_BIG_CREATION is mandatory so this node will always
62 * encode with new big 32-bit creations using NEW_PID_EXT, NEW_PORT_EXT
63 * and NEWER_REFERENCE_EXT.
64 */
65 #define ERTS_MAX_TINY_CREATION (3)
66 #define is_tiny_creation(Cre) ((unsigned)(Cre) <= ERTS_MAX_TINY_CREATION)
67
68 /*
69 * When 0 is used as creation, the real creation
70 * is unknown. Creation 0 on data will be changed to current
71 * creation of the node which it belongs to when it enters
72 * that node.
73 * This typically happens when a remote pid is created with
74 * list_to_pid/1 and then sent to the remote node. This behavior
75 * has the undesirable effect that a pid can be passed between nodes,
76 * and as a result of that not being equal to itself (the pid that
77 * comes back isn't equal to the original pid).
78 *
79 */
80
81 #undef ERTS_DEBUG_USE_DIST_SEP
82 #ifdef DEBUG
83 # if 0
84 /*
85 * Enabling ERTS_DEBUG_USE_DIST_SEP can be useful when debugging, but the
86 * result refuses to talk to nodes without it!
87 */
88 # define ERTS_DEBUG_USE_DIST_SEP
89 # endif
90 # define IF_DEBUG(X) X
91 #else
92 # define IF_DEBUG(X)
93 #endif
94
95 /* Does Sint fit in Sint32?
96 */
97 #define IS_SSMALL32(x) (((Uint) (((x) >> (32-1)) + 1)) < 2)
98
99 static Export term_to_binary_trap_export;
100
101 static byte* enc_term(ErtsAtomCacheMap *, Eterm, byte*, Uint64, struct erl_off_heap_header** off_heap);
102 struct TTBEncodeContext_;
103 static int enc_term_int(struct TTBEncodeContext_*,ErtsAtomCacheMap *acmp, Eterm obj, byte* ep, Uint64 dflags,
104 struct erl_off_heap_header** off_heap, Sint *reds, byte **res);
105 static int is_external_string(Eterm obj, Uint* lenp);
106 static byte* enc_atom(ErtsAtomCacheMap *, Eterm, byte*, Uint64);
107 static byte* enc_pid(ErtsAtomCacheMap *, Eterm, byte*, Uint64);
108 struct B2TContext_t;
109 static const byte* dec_term(ErtsDistExternal*, ErtsHeapFactory*, const byte*, Eterm*, struct B2TContext_t*, int);
110 static const byte* dec_atom(ErtsDistExternal *, const byte*, Eterm*);
111 static const byte* dec_pid(ErtsDistExternal *, ErtsHeapFactory*, const byte*, Eterm*, byte tag);
112 static Sint decoded_size(const byte *ep, const byte* endp, int internal_tags, struct B2TContext_t*);
113 static BIF_RETTYPE term_to_binary_trap_1(BIF_ALIST_1);
114
115 static Eterm erts_term_to_binary_int(Process* p, Sint bif_ix, Eterm Term, Eterm opts, int level,
116 Uint64 dflags, Binary *context_b, int iovec,
117 Uint fragment_size);
118
119 static Uint encode_size_struct2(ErtsAtomCacheMap *, Eterm, Uint64);
120 static ErtsExtSzRes encode_size_struct_int(TTBSizeContext*, ErtsAtomCacheMap *acmp,
121 Eterm obj, Uint64 dflags, Sint *reds, Uint *res);
122
123 static Export binary_to_term_trap_export;
124 static BIF_RETTYPE binary_to_term_trap_1(BIF_ALIST_1);
125 static Sint transcode_dist_obuf(ErtsDistOutputBuf*, DistEntry*, Uint64 dflags, Sint reds);
126 static byte *hopefull_bit_binary(TTBEncodeContext* ctx, byte **epp, Binary *pb_val, Eterm pb_term,
127 byte *bytes, byte bitoffs, byte bitsize, Uint sz);
128 static void hopefull_export(TTBEncodeContext* ctx, byte **epp, Export* exp, Uint32 dflags,
129 struct erl_off_heap_header** off_heap);
130 static void store_in_vec(TTBEncodeContext *ctx, byte *ep, Binary *ohbin, Eterm ohpb,
131 byte *ohp, Uint ohsz);
132
erts_init_external(void)133 void erts_init_external(void) {
134 erts_init_trap_export(&term_to_binary_trap_export,
135 am_erts_internal, am_term_to_binary_trap, 1,
136 &term_to_binary_trap_1);
137
138 erts_init_trap_export(&binary_to_term_trap_export,
139 am_erts_internal, am_binary_to_term_trap, 1,
140 &binary_to_term_trap_1);
141 return;
142 }
143
144 #define ERTS_MAX_INTERNAL_ATOM_CACHE_ENTRIES 255
145
146 #define ERTS_DIST_HDR_ATOM_CACHE_FLAG_BYTE_IX(IIX) \
147 (((((Uint32) (IIX)) >> 1) & 0x7fffffff))
148 #define ERTS_DIST_HDR_ATOM_CACHE_FLAG_BIT_IX(IIX) \
149 (((IIX) << 2) & 7)
150 #define ERTS_DIST_HDR_ATOM_CACHE_FLAG_BYTES(NO_ATOMS) \
151 (((((Uint32) (NO_ATOMS)) >> 1) & 0x7fffffff)+1)
152
153 #define ERTS_DIST_HDR_LONG_ATOMS_FLG (1 << 0)
154
155 /* #define ERTS_ATOM_CACHE_HASH */
156 #define ERTS_USE_ATOM_CACHE_SIZE 2039
157 #if ERTS_ATOM_CACHE_SIZE < ERTS_USE_ATOM_CACHE_SIZE
158 #error "ERTS_USE_ATOM_CACHE_SIZE too large"
159 #endif
160
161 static ERTS_INLINE int
atom2cix(Eterm atom)162 atom2cix(Eterm atom)
163 {
164 Uint val;
165 ASSERT(is_atom(atom));
166 val = atom_val(atom);
167 #ifdef ERTS_ATOM_CACHE_HASH
168 val = atom_tab(val)->slot.bucket.hvalue;
169 #endif
170 #if ERTS_USE_ATOM_CACHE_SIZE == 256
171 return (int) (val & ((Uint) 0xff));
172 #else
173 return (int) (val % ERTS_USE_ATOM_CACHE_SIZE);
174 #endif
175 }
176
erts_debug_max_atom_out_cache_index(void)177 int erts_debug_max_atom_out_cache_index(void)
178 {
179 return ERTS_USE_ATOM_CACHE_SIZE-1;
180 }
181
182 int
erts_debug_atom_to_out_cache_index(Eterm atom)183 erts_debug_atom_to_out_cache_index(Eterm atom)
184 {
185 return atom2cix(atom);
186 }
187
188 void
erts_init_atom_cache_map(ErtsAtomCacheMap * acmp)189 erts_init_atom_cache_map(ErtsAtomCacheMap *acmp)
190 {
191 if (acmp) {
192 int ix;
193 acmp->long_atoms = 0;
194 for (ix = 0; ix < ERTS_ATOM_CACHE_SIZE; ix++)
195 acmp->cache[ix].iix = -1;
196 acmp->sz = 0;
197 acmp->hdr_sz = -1;
198 }
199 }
200
201 void
erts_reset_atom_cache_map(ErtsAtomCacheMap * acmp)202 erts_reset_atom_cache_map(ErtsAtomCacheMap *acmp)
203 {
204 if (acmp) {
205 int i;
206 acmp->long_atoms = 0;
207 for (i = 0; i < acmp->sz; i++) {
208 ASSERT(0 <= acmp->cix[i] && acmp->cix[i] < ERTS_ATOM_CACHE_SIZE);
209 acmp->cache[acmp->cix[i]].iix = -1;
210 }
211 acmp->sz = 0;
212 acmp->hdr_sz = -1;
213 #ifdef DEBUG
214 for (i = 0; i < ERTS_ATOM_CACHE_SIZE; i++) {
215 ASSERT(acmp->cache[i].iix < 0);
216 }
217 #endif
218 }
219 }
220
void
erts_destroy_atom_cache_map(ErtsAtomCacheMap *acmp)
{
    /* Intentionally empty: an ErtsAtomCacheMap holds no dynamically
     * allocated resources. Kept for interface symmetry with
     * erts_init_atom_cache_map(). */
}
226
227 static ERTS_INLINE void
insert_acache_map(ErtsAtomCacheMap * acmp,Eterm atom,Uint64 dflags)228 insert_acache_map(ErtsAtomCacheMap *acmp, Eterm atom, Uint64 dflags)
229 {
230 if (acmp && acmp->sz < ERTS_MAX_INTERNAL_ATOM_CACHE_ENTRIES) {
231 int ix;
232 ASSERT(acmp->hdr_sz < 0);
233 ASSERT(dflags & DFLAG_UTF8_ATOMS);
234 ix = atom2cix(atom);
235 if (acmp->cache[ix].iix < 0) {
236 acmp->cache[ix].iix = acmp->sz;
237 acmp->cix[acmp->sz++] = ix;
238 acmp->cache[ix].atom = atom;
239 }
240 }
241 }
242
243 static ERTS_INLINE int
get_iix_acache_map(ErtsAtomCacheMap * acmp,Eterm atom,Uint64 dflags)244 get_iix_acache_map(ErtsAtomCacheMap *acmp, Eterm atom, Uint64 dflags)
245 {
246 if (!acmp)
247 return -1;
248 else {
249 int ix;
250 ASSERT(is_atom(atom));
251 ix = atom2cix(atom);
252 if (acmp->cache[ix].iix < 0) {
253 ASSERT(acmp->sz == ERTS_MAX_INTERNAL_ATOM_CACHE_ENTRIES);
254 return -1;
255 }
256 else {
257 ASSERT(acmp->cache[ix].iix < ERTS_ATOM_CACHE_SIZE);
258 return acmp->cache[ix].atom == atom ? acmp->cache[ix].iix : -1;
259 }
260 }
261 }
262
/*
 * Compute and record (in acmp->hdr_sz) the worst-case number of bytes
 * the atom cache section of the distribution header may occupy, and
 * note whether any cached atom text exceeds 255 bytes (long_atoms).
 * Must be called after all insert_acache_map() calls and before
 * erts_encode_ext_dist_header_size()/_setup().
 */
void
erts_finalize_atom_cache_map(ErtsAtomCacheMap *acmp, Uint64 dflags)
{
    if (acmp) {
        int long_atoms = 0; /* !0 if one or more atoms are longer than 255. */
        int i;
        int sz = 0;
        int min_sz;
        ASSERT(dflags & DFLAG_UTF8_ATOMS);
        ASSERT(acmp->hdr_sz < 0);
        /* Make sure cache update instructions fit */
        min_sz = (2+4)*acmp->sz;
        for (i = 0; i < acmp->sz; i++) {
            Atom *a;
            Eterm atom;
            int len;
            atom = acmp->cache[acmp->cix[i]].atom;
            ASSERT(is_atom(atom));
            a = atom_tab(atom_val(atom));
            len = (int) a->len;
            ASSERT(len >= 0);
            if (!long_atoms && len > 255)
                long_atoms = 1;
            /* Enough for a new atom cache value */
            sz += 1 /* cix */ + 1 /* length */ + len /* text */;
        }
        if (long_atoms) {
            acmp->long_atoms = 1;
            sz += acmp->sz; /* we need 2 bytes per atom for length */
        }
        /* Dynamically sized flag field */
        sz += ERTS_DIST_HDR_ATOM_CACHE_FLAG_BYTES(acmp->sz);
        if (sz < min_sz)
            sz = min_sz;
        acmp->hdr_sz = sz;
    }
}
300
301 Uint
/* Return the number of bytes needed in front of the control message for
 * the distribution header variant that will be used for this send. */
Uint
erts_encode_ext_dist_header_size(TTBEncodeContext *ctx,
                                 ErtsAtomCacheMap *acmp,
                                 Uint fragments)
{
    Uint need;

    if (ctx->dflags & DFLAG_PENDING_CONNECT) {
        /* HOPEFUL_DATA + hopefull flags + hopefull ix + payload ix */
        return 1 + 8 + 4 + 4;
    }

    if (!acmp && !(ctx->dflags & DFLAG_FRAGMENTS))
        return 1; /* pass through */

    need = 1   /* VERSION_MAGIC */
         + 1   /* DIST_HEADER */
         + 1   /* dist header flags */
         + 1;  /* number of internal cache entries */

    if (fragments > 1) {
        need += 8   /* sequence id */
              + 8;  /* number of fragments */
    }

    if (acmp) {
        ASSERT(acmp->hdr_sz >= 0);
        need += acmp->hdr_sz;
    }
    else {
        ASSERT(ctx->dflags & DFLAG_FRAGMENTS);
    }

    return need;
}
334
/*
 * Write the *internal* distribution header backwards, ending just before
 * 'ctl_ext', and return a pointer to its first byte. The atom cache
 * section written here is an internal format never seen on the wire; it
 * is rewritten by erts_encode_ext_dist_header_finalize() when the
 * message is actually sent.
 */
byte *erts_encode_ext_dist_header_setup(TTBEncodeContext *ctx,
                                        byte *ctl_ext, ErtsAtomCacheMap *acmp,
                                        Uint fragments, Eterm from)
{
    /* The maximum number of atoms must be less than the maximum of a 32 bits
       unsigned integer. Check is done in erl_init.c, erl_start function. */
    if (ctx->dflags & DFLAG_PENDING_CONNECT) {
        /* Connection not yet up: write a HOPEFUL_DATA prefix with
         * placeholder fields (hopefull flags, hopefull ix, payload ix)
         * whose positions are remembered in 'ctx' so they can be
         * patched later. */
        byte *ep = ctl_ext;
        ep -= 4;
        ctx->payload_ixp = ep;
        put_int32(0, ep);
        ep -= 4;
        ctx->hopefull_ixp = ep;
        put_int32(ERTS_NO_HIX, ep);
        ep -= 8;
        ctx->hopefull_flagsp = ep;
        put_int64(0, ep);
        *--ep = HOPEFUL_DATA;
        return ep;
    }
    else if (!acmp && !(ctx->dflags & DFLAG_FRAGMENTS)) {
        /* Neither atom cache nor fragmentation: old pass-through format. */
        byte *ep = ctl_ext;
        *--ep = PASS_THROUGH;
        return ep;
    }
    else {
        int i;
        byte *ep = ctl_ext;
        byte dist_hdr_flags = acmp && acmp->long_atoms ? ERTS_DIST_HDR_LONG_ATOMS_FLG : 0;
        ASSERT(!acmp || acmp->hdr_sz >= 0);

        if (acmp) {
            /*
             * Write cache update instructions. Note that this is a purely
             * internal format, never seen on the wire. This section is later
             * rewritten by erts_encode_ext_dist_header_finalize() while updating
             * the cache. We write the header backwards just before the
             * actual term(s).
             */
            for (i = acmp->sz-1; i >= 0; i--) {
                Uint32 aval;
                ASSERT(0 <= acmp->cix[i] && acmp->cix[i] < ERTS_ATOM_CACHE_SIZE);
                ASSERT(i == acmp->cache[acmp->cix[i]].iix);
                ASSERT(is_atom(acmp->cache[acmp->cix[i]].atom));

                /* One internal entry: 2 bytes cache index + 4 bytes atom value. */
                aval = (Uint32) atom_val(acmp->cache[acmp->cix[i]].atom);
                ep -= 4;
                put_int32(aval, ep);
                ep -= 2;
                put_int16(acmp->cix[i], ep);
            }
            --ep;
            put_int8(acmp->sz, ep);
        } else {
            ASSERT(ctx->dflags & DFLAG_FRAGMENTS);
            /* If we don't have an atom cache but are using a dist header we just put 0
               in the atom cache size slot */
            --ep;
            put_int8(0, ep);
        }
        --ep;
        put_int8(dist_hdr_flags, ep);
        if (fragments > 1) {
            /* Fragmented message: sequence id (sender pid) + fragment count. */
            ASSERT(is_pid(from));
            ep -= 8;
            put_int64(fragments, ep);
            ep -= 8;
            put_int64(from, ep);
            *--ep = DIST_FRAG_HEADER;
        } else {
            *--ep = DIST_HEADER;
        }
        *--ep = VERSION_MAGIC;
        return ep;
    }
}
411
/* Write a fragment-continuation header (VERSION_MAGIC, DIST_FRAG_CONT,
 * sender pid, fragment number) at *hdrpp, advance *hdrpp past it, and
 * return a pointer to where the header starts. */
byte *erts_encode_ext_dist_header_fragment(byte **hdrpp,
                                           Uint fragment,
                                           Eterm from)
{
    byte *hdr = *hdrpp;
    byte *ep = hdr;

    ASSERT(is_pid(from));

    *ep++ = VERSION_MAGIC;
    *ep++ = DIST_FRAG_CONT;
    put_int64(from, ep);
    ep += 8;
    put_int64(fragment, ep);
    ep += 8;

    *hdrpp = ep;
    return hdr;
}
427
428
erts_encode_ext_dist_header_finalize(ErtsDistOutputBuf * ob,DistEntry * dep,Uint64 dflags,Sint reds)429 Sint erts_encode_ext_dist_header_finalize(ErtsDistOutputBuf* ob,
430 DistEntry* dep,
431 Uint64 dflags,
432 Sint reds)
433 {
434 byte *ip;
435 byte instr_buf[(2+4)*ERTS_ATOM_CACHE_SIZE];
436 int ci, sz;
437 byte dist_hdr_flags;
438 int long_atoms;
439 Uint64 seq_id = 0, frag_id = 0;
440 register byte *ep = ob->eiov->iov[1].iov_base;
441 ASSERT(dflags & DFLAG_UTF8_ATOMS);
442
443 /*
444 * The buffer can have different layouts at this point depending on
445 * what was known when encoded:
446 *
447 * Pending connection: HOPEFUL_DATA, HFlgs, HIX, PIX, CtrlTerm [, MsgTerm]
448 * With atom cache : VERSION_MAGIC, DIST_HEADER, ..., CtrlTerm [, MsgTerm]
449 * No atom cache : VERSION_MAGIC, CtrlTerm [, VERSION_MAGIC, MsgTerm]
450 */
451
452 if (ep[0] == HOPEFUL_DATA)
453 return transcode_dist_obuf(ob, dep, dflags, reds);
454
455 if (ep[0] == PASS_THROUGH) {
456 ASSERT(!(dflags & (DFLAG_DIST_HDR_ATOM_CACHE|DFLAG_FRAGMENTS)));
457 ASSERT(ob->eiov->iov[1].iov_len == 1);
458 return reds;
459 }
460
461 if (ep[1] == DIST_FRAG_CONT) {
462 ASSERT(ep[0] == VERSION_MAGIC);
463 ASSERT(ob->eiov->iov[1].iov_len == 18);
464 return reds;
465 }
466
467 if (ep[1] == DIST_FRAG_HEADER) {
468 /* skip the seq id and frag id */
469 seq_id = get_int64(&ep[2]);
470 ep += 8;
471 frag_id = get_int64(&ep[2]);
472 ep += 8;
473 }
474
475 dist_hdr_flags = ep[2];
476 long_atoms = ERTS_DIST_HDR_LONG_ATOMS_FLG & ((int) dist_hdr_flags);
477
478 /*
479 * Update output atom cache and write the external version of
480 * the dist header. We write the header backwards just
481 * before the actual term(s).
482 */
483 ep += 3;
484 ci = (int) get_int8(ep);
485 ASSERT(0 <= ci && ci < ERTS_ATOM_CACHE_SIZE);
486 ep += 1;
487 sz = (2+4)*ci;
488 ip = &instr_buf[0];
489 sys_memcpy((void *) ip, (void *) ep, sz);
490 ep += sz;
491 ASSERT(ep == &((byte *)ob->eiov->iov[1].iov_base)[ob->eiov->iov[1].iov_len]);
492 if (ci > 0) {
493 Uint32 flgs_buf[((ERTS_DIST_HDR_ATOM_CACHE_FLAG_BYTES(
494 ERTS_MAX_INTERNAL_ATOM_CACHE_ENTRIES)-1)
495 / sizeof(Uint32))+1];
496 register Uint32 flgs;
497 int iix, flgs_bytes, flgs_buf_ix, used_half_bytes;
498 ErtsAtomCache* cache = dep->cache;
499 #ifdef DEBUG
500 int tot_used_half_bytes, top_buf_ix;
501 #endif
502
503 flgs_bytes = ERTS_DIST_HDR_ATOM_CACHE_FLAG_BYTES(ci);
504
505 ASSERT(flgs_bytes <= sizeof(flgs_buf));
506 flgs = (Uint32) dist_hdr_flags;
507 flgs_buf_ix = 0;
508 if ((ci & 1) == 0)
509 used_half_bytes = 2;
510 else
511 used_half_bytes = 1;
512 #ifdef DEBUG
513 tot_used_half_bytes = used_half_bytes;
514 #endif
515 iix = ci-1;
516 while (iix >= 0) {
517 int cix;
518 Eterm atom;
519
520 if (used_half_bytes != 8)
521 flgs <<= 4;
522 else {
523 flgs_buf[flgs_buf_ix++] = flgs;
524 flgs = 0;
525 used_half_bytes = 0;
526 }
527
528 ip = &instr_buf[0] + (2+4)*iix;
529 cix = (int) get_int16(&ip[0]);
530 ASSERT(0 <= cix && cix < ERTS_ATOM_CACHE_SIZE);
531 atom = make_atom((Uint) get_uint32(&ip[2]));
532 if (cache->out_arr[cix] == atom) {
533 --ep;
534 put_int8(cix, ep);
535 flgs |= ((cix >> 8) & 7);
536 }
537 else {
538 Atom *a;
539 cache->out_arr[cix] = atom;
540 a = atom_tab(atom_val(atom));
541 sz = a->len;
542 ep -= sz;
543 sys_memcpy((void *) ep, (void *) a->name, sz);
544 if (long_atoms) {
545 ep -= 2;
546 put_int16(sz, ep);
547 }
548 else {
549 ASSERT(0 <= sz && sz <= 255);
550 --ep;
551 put_int8(sz, ep);
552 }
553 --ep;
554 put_int8(cix, ep);
555 flgs |= (8 | ((cix >> 8) & 7));
556 }
557 iix--;
558 used_half_bytes++;
559 #ifdef DEBUG
560 tot_used_half_bytes++;
561 #endif
562 }
563 ASSERT(tot_used_half_bytes == 2*flgs_bytes);
564 flgs_buf[flgs_buf_ix] = flgs;
565 #ifdef DEBUG
566 top_buf_ix = flgs_buf_ix;
567 #endif
568 flgs_buf_ix = 0;
569 while (1) {
570 ASSERT(flgs_buf_ix <= top_buf_ix);
571 flgs = flgs_buf[flgs_buf_ix];
572 if (flgs_bytes > 4) {
573 *--ep = (byte) ((flgs >> 24) & 0xff);
574 *--ep = (byte) ((flgs >> 16) & 0xff);
575 *--ep = (byte) ((flgs >> 8) & 0xff);
576 *--ep = (byte) (flgs & 0xff);
577 flgs_buf_ix++;
578 flgs_bytes -= 4;
579 }
580 else {
581 ASSERT(flgs_buf_ix == top_buf_ix);
582 switch (flgs_bytes) {
583 case 4:
584 *--ep = (byte) ((flgs >> 24) & 0xff);
585 case 3:
586 *--ep = (byte) ((flgs >> 16) & 0xff);
587 case 2:
588 *--ep = (byte) ((flgs >> 8) & 0xff);
589 case 1:
590 *--ep = (byte) (flgs & 0xff);
591 }
592 break;
593 }
594 }
595 reds -= 3; /*was ERTS_PORT_REDS_DIST_CMD_FINALIZE*/
596 }
597 --ep;
598 put_int8(ci, ep);
599 if (seq_id) {
600 ep -= 8;
601 put_int64(frag_id, ep);
602 ep -= 8;
603 put_int64(seq_id, ep);
604 *--ep = DIST_FRAG_HEADER;
605 } else {
606 *--ep = DIST_HEADER;
607 }
608 *--ep = VERSION_MAGIC;
609
610 sz = ((byte *) ob->eiov->iov[1].iov_base) - ep;
611 ob->eiov->size += sz;
612 ob->eiov->iov[1].iov_len += sz;
613 ob->eiov->iov[1].iov_base = ep;
614
615 return reds < 0 ? 0 : reds;
616 }
617
/*
 * Compute the encoded size of 'term' for a distribution send, updating
 * *szp (total bytes so far), *fragmentsp (number of fragments needed)
 * and *vlenp (number of io-vector elements needed). May be called
 * repeatedly for multiple terms sharing the same 'ctx'. Returns an
 * ErtsExtSzRes; outputs are only written on ERTS_EXT_SZ_OK.
 */
ErtsExtSzRes
erts_encode_dist_ext_size(Eterm term,
                          ErtsAtomCacheMap *acmp,
                          TTBSizeContext* ctx,
                          Uint* szp, Sint *redsp,
                          Sint *vlenp, Uint *fragmentsp)
{
    Uint sz;
    ErtsExtSzRes res;

    ASSERT(ctx);
    ASSERT(szp);
    ASSERT(vlenp);
    ASSERT(fragmentsp);

    sz = *szp;

    if (!ctx->wstack.wstart) {
        /*
         * First call for this 'term'. We might however encode
         * multiple terms and this might not be the first term
         * in the sequence. 'ctx' should contain valid info
         * about previous terms regarding fragments, and vlen.
         * 'szp' should contain valid info about the total size
         * of previous terms.
         */
        if (ctx->vlen < 0) {
            /* First term as well */
            ctx->vlen = 0;
            if (ctx->dflags & DFLAG_FRAGMENTS)
                ctx->fragment_size = ERTS_DIST_FRAGMENT_SIZE;
        }

#ifndef ERTS_DEBUG_USE_DIST_SEP
        if (!(ctx->dflags & (DFLAG_DIST_HDR_ATOM_CACHE|DFLAG_FRAGMENTS)))
#endif
            sz++ /* VERSION_MAGIC */;

    }

    res = encode_size_struct_int(ctx, acmp, term, ctx->dflags, redsp, &sz);

    if (res == ERTS_EXT_SZ_OK) {
        Uint total_size, fragments;

        /*
         * Each fragment use
         * - one element for driver header
         * - one element for fragment header
         * - and (at least) one for data
         */
        total_size = sz + ctx->extra_size;
        fragments = (total_size - 1)/ctx->fragment_size + 1;

        *szp = sz;
        *fragmentsp = fragments;
        *vlenp = ctx->vlen + 3*fragments;
    }

    return res;
}
679
erts_encode_ext_size_2(Eterm term,unsigned dflags,Uint * szp)680 ErtsExtSzRes erts_encode_ext_size_2(Eterm term, unsigned dflags, Uint *szp)
681 {
682 ErtsExtSzRes res;
683 *szp = 0;
684 res = encode_size_struct_int(NULL, NULL, term, dflags, NULL, szp);
685 (*szp)++ /* VERSION_MAGIC */;
686 return res;
687 }
688
erts_encode_ext_size(Eterm term,Uint * szp)689 ErtsExtSzRes erts_encode_ext_size(Eterm term, Uint *szp)
690 {
691 return erts_encode_ext_size_2(term, TERM_TO_BINARY_DFLAGS, szp);
692 }
693
erts_encode_ext_size_ets(Eterm term)694 Uint erts_encode_ext_size_ets(Eterm term)
695 {
696 return encode_size_struct2(NULL, term,
697 TERM_TO_BINARY_DFLAGS|DFLAG_ETS_COMPRESSED);
698 }
699
700
/*
 * Encode 'term' (control message or payload) at *ext for a distribution
 * send, advancing *ext past the encoded data. Resumable: state is kept
 * in 'ctx' and 'reds'; a non-zero result appears to mean the encoder
 * yielded and must be called again -- NOTE(review): confirm against
 * enc_term_int(). On the final call *fragmentsp (if given) receives the
 * number of fragments produced.
 */
int erts_encode_dist_ext(Eterm term, byte **ext, Uint64 flags, ErtsAtomCacheMap *acmp,
                         TTBEncodeContext* ctx, Uint *fragmentsp, Sint* reds)
{
    int res;
    ASSERT(ctx);

    if (!ctx->wstack.wstart) {
        /* First call for this term: no saved encoder state yet. */
        ctx->cptr = *ext;
#ifndef ERTS_DEBUG_USE_DIST_SEP
        if (!(flags & (DFLAG_DIST_HDR_ATOM_CACHE|DFLAG_PENDING_CONNECT|DFLAG_FRAGMENTS)))
#endif
            *(*ext)++ = VERSION_MAGIC;
#ifndef ERTS_DEBUG_USE_DIST_SEP
        if (flags & DFLAG_PENDING_CONNECT) {
            Sint payload_ix = ctx->vlen;
            ASSERT(ctx->payload_ixp);
            if (payload_ix) {
                /* we potentially need a version magic on the payload... */
                (*ext)++;
                ctx->cptr = *ext;
                /* Record where the payload starts in the io vector so a
                 * later transcode can find it. */
                put_int32(payload_ix, ctx->payload_ixp);
            }
        }
#endif
    }
    res = enc_term_int(ctx, acmp, term, *ext, flags, NULL, reds, ext);
    if (fragmentsp)
        *fragmentsp = res == 0 ? ctx->frag_ix + 1 : ctx->frag_ix;
    if (flags & DFLAG_PENDING_CONNECT) {
        /* Patch the "hopefull" encoding flags reserved by
         * erts_encode_ext_dist_header_setup() so transcoding knows what
         * optimistic encodings were used. */
        ASSERT(ctx->hopefull_flagsp);
        put_int64(ctx->hopefull_flags, ctx->hopefull_flagsp);
    }
    return res;
}
735
/* Encode 'term' in the external format at **ext, preceded by
 * VERSION_MAGIC, and advance *ext past the encoded data. Aborts the
 * emulator on an internal encoding inconsistency. */
void erts_encode_ext(Eterm term, byte **ext)
{
    byte *ep = *ext;

    *ep++ = VERSION_MAGIC;
    ep = enc_term(NULL, term, ep, TERM_TO_BINARY_DFLAGS, NULL);
    if (ep == NULL) {
        erts_exit(ERTS_ABORT_EXIT,
                  "%s:%d:erts_encode_ext(): Internal data structure error\n",
                  __FILE__, __LINE__);
    }
    *ext = ep;
}
747
/* Encode 'term' for a compressed ETS table; off-heap data is tracked
 * via 'off_heap'. Returns the position after the encoded term. */
byte* erts_encode_ext_ets(Eterm term, byte *ep, struct erl_off_heap_header** off_heap)
{
    const Uint64 flags = TERM_TO_BINARY_DFLAGS | DFLAG_ETS_COMPRESSED;
    return enc_term(NULL, term, ep, flags, off_heap);
}
753
754
755 static Uint
dist_ext_size(ErtsDistExternal * edep)756 dist_ext_size(ErtsDistExternal *edep)
757 {
758 Uint sz = sizeof(ErtsDistExternal);
759
760 ASSERT(edep->data->ext_endp && edep->data->extp);
761 ASSERT(edep->data->ext_endp >= edep->data->extp);
762
763 if (edep->flags & ERTS_DIST_EXT_ATOM_TRANS_TAB) {
764 ASSERT(0 <= edep->attab.size \
765 && edep->attab.size <= ERTS_ATOM_CACHE_SIZE);
766 sz -= sizeof(Eterm)*(ERTS_ATOM_CACHE_SIZE - edep->attab.size);
767 } else {
768 sz -= sizeof(ErtsAtomTranslationTable);
769 }
770 ASSERT(sz % 4 == 0);
771 return sz;
772 }
773
774 Uint
erts_dist_ext_size(ErtsDistExternal * edep)775 erts_dist_ext_size(ErtsDistExternal *edep)
776 {
777 Uint sz = dist_ext_size(edep);
778 sz += 4; /* may need to pad to 8-byte-align ErtsDistExternalData */
779 sz += edep->data[0].frag_id * sizeof(ErtsDistExternalData);
780 return sz;
781 }
782
783 Uint
erts_dist_ext_data_size(ErtsDistExternal * edep)784 erts_dist_ext_data_size(ErtsDistExternal *edep)
785 {
786 Uint sz = 0, i;
787 for (i = 0; i < edep->data->frag_id; i++)
788 sz += edep->data[i].ext_endp - edep->data[i].extp;
789 return sz;
790 }
791
792 void
erts_dist_ext_frag(ErtsDistExternalData * ede_datap,ErtsDistExternal * edep)793 erts_dist_ext_frag(ErtsDistExternalData *ede_datap, ErtsDistExternal *edep)
794 {
795 ErtsDistExternalData *new_ede_datap = &edep->data[edep->data->frag_id - ede_datap->frag_id];
796 sys_memcpy(new_ede_datap, ede_datap, sizeof(ErtsDistExternalData));
797
798 /* If the data is not backed by a binary, we create one here to keep
799 things simple. Only custom distribution drivers should use lists. */
800 if (new_ede_datap->binp == NULL) {
801 size_t ext_sz = ede_datap->ext_endp - ede_datap->extp;
802 new_ede_datap->binp = erts_bin_nrml_alloc(ext_sz);
803 sys_memcpy(new_ede_datap->binp->orig_bytes, (void *) ede_datap->extp, ext_sz);
804 new_ede_datap->extp = (byte*)new_ede_datap->binp->orig_bytes;
805 new_ede_datap->ext_endp = (byte*)new_ede_datap->binp->orig_bytes + ext_sz;
806 } else {
807 erts_refc_inc(&new_ede_datap->binp->intern.refc, 2);
808 }
809 }
810
811 void
erts_make_dist_ext_copy(ErtsDistExternal * edep,ErtsDistExternal * new_edep)812 erts_make_dist_ext_copy(ErtsDistExternal *edep, ErtsDistExternal *new_edep)
813 {
814 size_t dist_ext_sz = dist_ext_size(edep);
815 byte *ep;
816
817 ep = (byte *) new_edep;
818 sys_memcpy((void *) ep, (void *) edep, dist_ext_sz);
819 erts_ref_dist_entry(new_edep->dep);
820
821 ep += dist_ext_sz;
822 ep += (UWord)ep & 4; /* 8-byte alignment for ErtsDistExternalData */
823 ASSERT((UWord)ep % 8 == 0);
824
825 new_edep->data = (ErtsDistExternalData*)ep;
826 sys_memzero(new_edep->data, sizeof(ErtsDistExternalData) * edep->data->frag_id);
827 new_edep->data->frag_id = edep->data->frag_id;
828 erts_dist_ext_frag(edep->data, new_edep);
829 }
830
831 void
erts_free_dist_ext_copy(ErtsDistExternal * edep)832 erts_free_dist_ext_copy(ErtsDistExternal *edep)
833 {
834 int i;
835 erts_deref_dist_entry(edep->dep);
836 for (i = 0; i < edep->data->frag_id; i++)
837 if (edep->data[i].binp)
838 erts_bin_release(edep->data[i].binp);
839 }
840
/*
 * Validate an incoming distribution message and prepare 'edep' for
 * decoding: check connection state/id, verify the leading
 * VERSION_MAGIC, parse the distribution header variant (plain,
 * DIST_FRAG_HEADER or DIST_FRAG_CONT), and build the ATOM_CACHE_REF
 * translation table from the received cache update instructions,
 * updating 'cache->in_arr' along the way. On header corruption the
 * raw bytes are logged and the connection is killed.
 */
ErtsPrepDistExtRes
erts_prepare_dist_ext(ErtsDistExternal *edep,
                      byte *ext,
                      Uint size,
                      Binary *binp,
                      DistEntry *dep,
                      Uint32 conn_id,
                      ErtsAtomCache *cache)
{
    register byte *ep;

    ASSERT(dep);
    erts_de_rlock(dep);

    ASSERT(dep->dflags & DFLAG_UTF8_ATOMS);

    /* Reject data for a connection that has been closed or re-made. */
    if ((dep->state != ERTS_DE_STATE_CONNECTED &&
         dep->state != ERTS_DE_STATE_PENDING)
        || dep->connection_id != conn_id) {
        erts_de_runlock(dep);
        return ERTS_PREP_DIST_EXT_CLOSED;
    }

    if (!(dep->dflags & (DFLAG_DIST_HDR_ATOM_CACHE|DFLAG_FRAGMENTS))) {
        /* Skip PASS_THROUGH */
        ext++;
        size--;
    }

    ep = ext;

    if (size < 2)
        goto fail;

    if (ep[0] != VERSION_MAGIC) {
        erts_dsprintf_buf_t *dsbufp = erts_create_logger_dsbuf();
        erts_dsprintf(dsbufp,
                      "** Got message from incompatible erlang on "
                      "channel %d\n",
                      dist_entry_channel_no(dep));
        erts_send_error_to_logger_nogl(dsbufp);
        goto fail;
    }

    edep->heap_size = -1;
    edep->flags = 0;
    edep->dep = dep;
    edep->mld = dep->mld;
    edep->connection_id = conn_id;
    edep->data->ext_endp = ext+size;
    edep->data->binp = binp;
    edep->data->seq_id = 0;
    edep->data->frag_id = 1;

    if (dep->dflags & (DFLAG_DIST_HDR_ATOM_CACHE|DFLAG_FRAGMENTS))
        edep->flags |= ERTS_DIST_EXT_DFLAG_HDR;

    if (ep[1] != DIST_HEADER && ep[1] != DIST_FRAG_HEADER && ep[1] != DIST_FRAG_CONT) {
        /* No distribution header present; only legal when the
         * connection does not use one. */
        if (edep->flags & ERTS_DIST_EXT_DFLAG_HDR)
            goto bad_hdr;
        edep->attab.size = 0;
        edep->data->extp = ext;
    }
    else if (ep[1] == DIST_FRAG_CONT) {
        /* Continuation fragment: just pick up seq/frag ids; the atom
         * table was set up by the first fragment. */
        if (!(dep->dflags & DFLAG_FRAGMENTS))
            goto bad_hdr;
        edep->attab.size = 0;
        edep->data->extp = ext + 1 + 1 + 8 + 8;
        edep->data->seq_id = get_int64(&ep[2]);
        edep->data->frag_id = get_int64(&ep[2+8]);
        erts_de_runlock(dep);
        return ERTS_PREP_DIST_EXT_FRAG_CONT;
    }
    else {
        int tix;
        int no_atoms;

        if (!(edep->flags & ERTS_DIST_EXT_DFLAG_HDR))
            goto bad_hdr;

        if (ep[1] == DIST_FRAG_HEADER) {
            if (!(dep->dflags & DFLAG_FRAGMENTS))
                goto bad_hdr;
            edep->data->seq_id = get_int64(&ep[2]);
            edep->data->frag_id = get_int64(&ep[2+8]);
            ep += 16;
        }

#undef CHKSIZE
#define CHKSIZE(SZ) \
        do { if ((SZ) > edep->data->ext_endp - ep) goto bad_hdr; } while(0)

        CHKSIZE(1+1+1);
        ep += 2;
        no_atoms = (int) get_int8(ep);
        if (no_atoms < 0 || ERTS_ATOM_CACHE_SIZE < no_atoms)
            goto bad_hdr;
        ep++;
        if (no_atoms) {
            int long_atoms = 0;
#ifdef DEBUG
            byte *flgs_buf = ep;
#endif
            byte *flgsp = ep;
            int flgs_size = ERTS_DIST_HDR_ATOM_CACHE_FLAG_BYTES(no_atoms);
            int byte_ix;
            int bit_ix;
            int got_flgs;
            register Uint32 flgs = 0;

            CHKSIZE(flgs_size);
            ep += flgs_size;

            /*
             * Check long atoms flag
             */
            byte_ix = ERTS_DIST_HDR_ATOM_CACHE_FLAG_BYTE_IX(no_atoms);
            bit_ix = ERTS_DIST_HDR_ATOM_CACHE_FLAG_BIT_IX(no_atoms);
            if (flgsp[byte_ix] & (((byte) ERTS_DIST_HDR_LONG_ATOMS_FLG) << bit_ix))
                long_atoms = 1;

#ifdef DEBUG
            byte_ix = 0;
            bit_ix = 0;
#endif
            got_flgs = 0;
            /*
             * Setup the atom translation table.
             */
            edep->flags |= ERTS_DIST_EXT_ATOM_TRANS_TAB;
            edep->attab.size = no_atoms;
            for (tix = 0; tix < no_atoms; tix++) {
                Eterm atom;
                int cix;
                int len;

                if (!got_flgs) {
                    /* Refill the flag accumulator: up to 8 half-byte
                     * flag entries per 32-bit load; fewer remain near
                     * the end of the flag field. */
                    int left = no_atoms - tix;
                    if (left > 6) {
                        flgs = ((((Uint32) flgsp[3]) << 24)
                                | (((Uint32) flgsp[2]) << 16)
                                | (((Uint32) flgsp[1]) << 8)
                                | ((Uint32) flgsp[0]));
                        flgsp += 4;
                    }
                    else {
                        flgs = 0;
                        switch (left) {
                        case 6:
                        case 5:
                            flgs |= (((Uint32) flgsp[2]) << 16);
                            /* fall through */
                        case 4:
                        case 3:
                            flgs |= (((Uint32) flgsp[1]) << 8);
                            /* fall through */
                        case 2:
                        case 1:
                            flgs |= ((Uint32) flgsp[0]);
                        }
                    }
                    got_flgs = 8;
                }

                ASSERT(byte_ix == ERTS_DIST_HDR_ATOM_CACHE_FLAG_BYTE_IX(tix));
                ASSERT(bit_ix == ERTS_DIST_HDR_ATOM_CACHE_FLAG_BIT_IX(tix));
                ASSERT((flgs & 3)
                       == (((flgs_buf[byte_ix]
                             & (((byte) 3) << bit_ix)) >> bit_ix) & 3));

                CHKSIZE(1);
                /* Low 3 flag bits are the high bits of the cache index;
                 * flag bit 8 distinguishes new entries from references. */
                cix = (int) ((flgs & 7) << 8);
                if ((flgs & 8) == 0) {
                    /* atom already cached */
                    cix += (int) get_int8(ep);
                    if (cix >= ERTS_ATOM_CACHE_SIZE)
                        goto bad_hdr;
                    ep++;
                    atom = cache->in_arr[cix];
                    if (!is_atom(atom))
                        goto bad_hdr;
                    edep->attab.atom[tix] = atom;
                }
                else {
                    /* new cached atom */
                    cix += (int) get_int8(ep);
                    if (cix >= ERTS_ATOM_CACHE_SIZE)
                        goto bad_hdr;
                    ep++;
                    if (long_atoms) {
                        CHKSIZE(2);
                        len = get_int16(ep);
                        ep += 2;
                    }
                    else {
                        CHKSIZE(1);
                        len = get_int8(ep);
                        ep++;
                    }
                    CHKSIZE(len);
                    atom = erts_atom_put((byte *) ep,
                                         len,
                                         ERTS_ATOM_ENC_UTF8,
                                         0);
                    if (is_non_value(atom))
                        goto bad_hdr;
                    ep += len;
                    cache->in_arr[cix] = atom;
                    edep->attab.atom[tix] = atom;
                }
                flgs >>= 4;
                got_flgs--;
#ifdef DEBUG
                bit_ix += 4;
                if (bit_ix >= 8) {
                    bit_ix = 0;
                    flgs = (int) flgs_buf[++byte_ix];
                    ASSERT(byte_ix < flgs_size);
                }
#endif
            }
        }
        edep->data->extp = ep;
#ifdef ERTS_DEBUG_USE_DIST_SEP
        if (*ep != VERSION_MAGIC)
            goto bad_hdr;
#endif
    }
#ifdef ERTS_DEBUG_USE_DIST_SEP
    if (*ep != VERSION_MAGIC)
        goto fail;
#endif

    erts_de_runlock(dep);

    return ERTS_PREP_DIST_EXT_SUCCESS;

#undef CHKSIZE

 bad_hdr: {
        /* Log the corrupted header bytes before tearing down. */
        erts_dsprintf_buf_t *dsbufp = erts_create_logger_dsbuf();
        erts_dsprintf(dsbufp,
                      "%T got a corrupted distribution header from %T "
                      "on distribution channel %d\n",
                      erts_this_node->sysname,
                      edep->dep->sysname,
                      dist_entry_channel_no(edep->dep));
        for (ep = ext; ep < edep->data->ext_endp; ep++)
            erts_dsprintf(dsbufp, ep != ext ? ",%b8u" : "<<%b8u", *ep);
        erts_dsprintf(dsbufp, ">>");
        erts_send_warning_to_logger_nogl(dsbufp);
    }
 fail: {
        erts_de_runlock(dep);
        erts_kill_dist_connection(dep, conn_id);
    }
    return ERTS_PREP_DIST_EXT_FAILED;
}
1098
/*
 * Log a warning about a corrupted external term received on a distribution
 * channel -- including a dump of the not-yet-consumed bytes and the atom
 * cache translations in effect -- and then kill the connection.
 *
 * Does nothing for terms that did not arrive via distribution
 * (edep->dep == NULL).
 */
static void
bad_dist_ext(ErtsDistExternal *edep)
{
    if (edep->dep) {
        DistEntry *dep = edep->dep;
        erts_dsprintf_buf_t *dsbufp = erts_create_logger_dsbuf();
        byte *ep;
        erts_dsprintf(dsbufp,
                      "%T got a corrupted external term from %T "
                      "on distribution channel %d\n",
                      erts_this_node->sysname,
                      dep->sysname,
                      dist_entry_channel_no(dep));
        /* Dump remaining external data as "<<...,B,B,...>>" (one %b8u per byte) */
        for (ep = edep->data->extp; ep < edep->data->ext_endp; ep++)
            erts_dsprintf(dsbufp,
                          ep != edep->data->extp ? ",%b8u" : "<<...,%b8u",
                          *ep);
        erts_dsprintf(dsbufp, ">>\n");
        erts_dsprintf(dsbufp, "ATOM_CACHE_REF translations: ");
        if (!(edep->flags & ERTS_DIST_EXT_ATOM_TRANS_TAB) || !edep->attab.size)
            erts_dsprintf(dsbufp, "none");
        else {
            int i;
            erts_dsprintf(dsbufp, "0=%T", edep->attab.atom[0]);
            for (i = 1; i < edep->attab.size; i++)
                erts_dsprintf(dsbufp, ", %d=%T", i, edep->attab.atom[i]);
        }
        erts_send_warning_to_logger_nogl(dsbufp);
        /* A corrupted term means the connection can no longer be trusted. */
        erts_kill_dist_connection(dep, edep->connection_id);
    }
}
1130
1131 Sint
erts_decode_dist_ext_size(ErtsDistExternal * edep,int kill_connection,int payload)1132 erts_decode_dist_ext_size(ErtsDistExternal *edep, int kill_connection, int payload)
1133 {
1134 Sint res;
1135 byte *ep;
1136
1137 if (edep->data->frag_id > 1 && payload) {
1138 Uint sz = 0;
1139 Binary *bin;
1140 int i;
1141 byte *ep;
1142
1143 for (i = 0; i < edep->data->frag_id; i++)
1144 sz += edep->data[i].ext_endp - edep->data[i].extp;
1145
1146 bin = erts_bin_nrml_alloc(sz);
1147 ep = (byte*)bin->orig_bytes;
1148
1149 for (i = 0; i < edep->data->frag_id; i++) {
1150 sys_memcpy(ep, edep->data[i].extp, edep->data[i].ext_endp - edep->data[i].extp);
1151 ep += edep->data[i].ext_endp - edep->data[i].extp;
1152 erts_bin_release(edep->data[i].binp);
1153 edep->data[i].binp = NULL;
1154 edep->data[i].extp = NULL;
1155 edep->data[i].ext_endp = NULL;
1156 }
1157
1158 edep->data->frag_id = 1;
1159 edep->data->extp = (byte*)bin->orig_bytes;
1160 edep->data->ext_endp = ep;
1161 edep->data->binp = bin;
1162 }
1163
1164 if (edep->data->extp >= edep->data->ext_endp)
1165 goto fail;
1166 #ifndef ERTS_DEBUG_USE_DIST_SEP
1167 if (edep->flags & ERTS_DIST_EXT_DFLAG_HDR) {
1168 if (*edep->data->extp == VERSION_MAGIC)
1169 goto fail;
1170 ep = edep->data->extp;
1171 }
1172 else
1173 #endif
1174 {
1175 if (*edep->data->extp != VERSION_MAGIC)
1176 goto fail;
1177 ep = edep->data->extp+1;
1178 }
1179 res = decoded_size(ep, edep->data->ext_endp, 0, NULL);
1180 if (res >= 0)
1181 return res;
1182 fail:
1183 if (kill_connection)
1184 bad_dist_ext(edep);
1185 return -1;
1186 }
1187
erts_decode_ext_size(const byte * ext,Uint size)1188 Sint erts_decode_ext_size(const byte *ext, Uint size)
1189 {
1190 if (size == 0 || *ext != VERSION_MAGIC)
1191 return -1;
1192 return decoded_size(ext+1, ext+size, 0, NULL);
1193 }
1194
erts_decode_ext_size_ets(const byte * ext,Uint size)1195 Sint erts_decode_ext_size_ets(const byte *ext, Uint size)
1196 {
1197 Sint sz = decoded_size(ext, ext+size, 1, NULL);
1198 ASSERT(sz >= 0);
1199 return sz;
1200 }
1201
1202
1203 /*
1204 ** hpp is set to either a &p->htop or
1205 ** a pointer to a memory pointer (form message buffers)
1206 ** on return hpp is updated to point after allocated data
1207 */
/*
 * Decode one term from the external data in 'edep', building it with
 * 'factory'.  On success the read position (edep->data->extp) is advanced
 * past the consumed bytes and the term is returned.  On failure the factory
 * is undone, the connection is optionally killed, and THE_NON_VALUE is
 * returned.
 */
Eterm
erts_decode_dist_ext(ErtsHeapFactory* factory,
                     ErtsDistExternal *edep,
                     int kill_connection)
{
    Eterm obj;
    const byte* ep;

    ep = edep->data->extp;

    if (ep >= edep->data->ext_endp)
        goto error;
#ifndef ERTS_DEBUG_USE_DIST_SEP
    if (edep->flags & ERTS_DIST_EXT_DFLAG_HDR) {
        /* With a distribution header there must be no VERSION_MAGIC byte. */
        if (*ep == VERSION_MAGIC)
            goto error;
    }
    else
#endif
    {
        /* Without a header the data must start with VERSION_MAGIC. */
        if (*ep != VERSION_MAGIC)
            goto error;
        ep++;
    }
    ep = dec_term(edep, factory, ep, &obj, NULL, 0);
    if (!ep)
        goto error;

    edep->data->extp = (byte*)ep;

    return obj;

 error:
    erts_factory_undo(factory);

    if (kill_connection)
        bad_dist_ext(edep);

    return THE_NON_VALUE;
}
1248
/*
 * Decode an external term (must start with VERSION_MAGIC) that did not
 * arrive via distribution.  'flags' is either 0 or ERTS_DIST_EXT_BTT_SAFE
 * ("safe" binary_to_term).  On success '*ext' is advanced past the consumed
 * bytes; on a bad VERSION_MAGIC the factory is undone and THE_NON_VALUE is
 * returned.
 */
Eterm erts_decode_ext(ErtsHeapFactory* factory, const byte **ext, Uint32 flags)
{
    ErtsDistExternal ede, *edep;
    Eterm obj;
    const byte *ep = *ext;
    if (*ep++ != VERSION_MAGIC) {
        erts_factory_undo(factory);
        return THE_NON_VALUE;
    }
    if (flags) {
        ASSERT(flags == ERTS_DIST_EXT_BTT_SAFE);
        ede.flags = flags; /* a dummy struct just for the flags */
        ede.data = NULL;
        edep = &ede;
    } else {
        edep = NULL;
    }
    ep = dec_term(edep, factory, ep, &obj, NULL, 0);
    if (!ep) {
        return THE_NON_VALUE;
    }
    *ext = ep;
    return obj;
}
1273
erts_decode_ext_ets(ErtsHeapFactory * factory,const byte * ext)1274 Eterm erts_decode_ext_ets(ErtsHeapFactory* factory, const byte *ext)
1275 {
1276 Eterm obj;
1277 ext = dec_term(NULL, factory, ext, &obj, NULL, 1);
1278 ASSERT(ext);
1279 return obj;
1280 }
1281
1282 /**********************************************************************/
1283
/*
 * erts:dist_ext_to_term(AtomTuple, Binary) -- debug BIF.
 *
 * Decodes Binary as distribution external data, using the atoms in
 * AtomTuple as the atom cache translation table (element 1 -> cache
 * index 0, ...).  Fails with badarg on any malformed input.
 */
BIF_RETTYPE erts_debug_dist_ext_to_term_2(BIF_ALIST_2)
{
    ErtsHeapFactory factory;
    Eterm res;
    Sint hsz;
    ErtsDistExternal ede;
    ErtsDistExternalData ede_data;
    Eterm *tp;
    Eterm real_bin;
    Uint offset;
    Uint size;
    Uint bitsize;
    Uint bitoffs;
    Uint arity;
    int i;

    /* Build a fake ErtsDistExternal with only an atom translation table. */
    ede.flags = ERTS_DIST_EXT_ATOM_TRANS_TAB;
    ede.dep = NULL;
    ede.heap_size = -1;
    ede.data = &ede_data;

    if (is_not_tuple(BIF_ARG_1))
        goto badarg;
    tp = tuple_val(BIF_ARG_1);
    arity = arityval(tp[0]);
    if (arity > ERTS_MAX_INTERNAL_ATOM_CACHE_ENTRIES)
        goto badarg;

    ede.attab.size = arity;
    for (i = 1; i <= arity; i++) {
        if (is_not_atom(tp[i]))
            goto badarg;
        ede.attab.atom[i-1] = tp[i];
    }

    if (is_not_binary(BIF_ARG_2))
        goto badarg;

    size = binary_size(BIF_ARG_2);
    if (size == 0)
        goto badarg;
    ERTS_GET_REAL_BIN(BIF_ARG_2, real_bin, offset, bitoffs, bitsize);
    /* Only whole-byte binaries are valid external data. */
    if (bitsize != 0)
        goto badarg;

    ede.data->extp = binary_bytes(real_bin)+offset;
    ede.data->ext_endp = ede.data->extp + size;
    ede.data->frag_id = 1;
    ede.data->binp = NULL;

    /* Size first, then decode into a preallocated factory. */
    hsz = erts_decode_dist_ext_size(&ede, 1, 1);
    if (hsz < 0)
        goto badarg;

    erts_factory_proc_prealloc_init(&factory, BIF_P, hsz);
    res = erts_decode_dist_ext(&factory, &ede, 1);
    erts_factory_close(&factory);

    if (is_value(res))
        BIF_RET(res);

 badarg:

    BIF_ERROR(BIF_P, BADARG);
}
1349
/*
 * Trap continuation for term_to_binary/term_to_iovec.  BIF_ARG_1 is the
 * state tuple {Term, Opts, MagicBinRef, BifIndex} built when the original
 * BIF yielded.  Resumes the encoding; either traps again, raises
 * system_limit as if raised by the original BIF, or returns the result.
 */
static BIF_RETTYPE term_to_binary_trap_1(BIF_ALIST_1)
{
    Eterm *tp = tuple_val(BIF_ARG_1);
    Eterm Term = tp[1];
    Eterm Opts = tp[2];
    Eterm bt = tp[3];
    Eterm bix = tp[4];
    Sint bif_ix = signed_val(bix);
    Binary *bin = erts_magic_ref2bin(bt);
    Eterm res = erts_term_to_binary_int(BIF_P, bif_ix, Term, Opts,
                                        0, 0, bin, 0, ~((Uint) 0));
    if (is_non_value(res)) {
        /* Encoding failed: re-enable GC and report system_limit as if it
         * came from the original (trapped) BIF with its original args. */
        if (erts_set_gc_state(BIF_P, 1)
            || MSO(BIF_P).overhead > BIN_VHEAP_SZ(BIF_P)) {
            ERTS_VBUMP_ALL_REDS(BIF_P);
        }
        if (Opts == am_undefined)
            ERTS_BIF_ERROR_TRAPPED1(BIF_P, SYSTEM_LIMIT,
                                    BIF_TRAP_EXPORT(bif_ix), Term);
        else
            ERTS_BIF_ERROR_TRAPPED2(BIF_P, SYSTEM_LIMIT,
                                    BIF_TRAP_EXPORT(bif_ix), Term, Opts);
    }
    if (is_tuple(res)) {
        /* Still not done: trap again with the new state tuple. */
        ASSERT(BIF_P->flags & F_DISABLE_GC);
        BIF_TRAP1(&term_to_binary_trap_export,BIF_P,res);
    } else {
        if (erts_set_gc_state(BIF_P, 1)
            || MSO(BIF_P).overhead > BIN_VHEAP_SZ(BIF_P))
            ERTS_BIF_YIELD_RETURN(BIF_P, res);
        else
            BIF_RET(res);
    }
}
1384
/*
 * erlang:term_to_binary/1.  May trap (returning a state tuple from
 * erts_term_to_binary_int) with GC disabled until the trap completes.
 */
BIF_RETTYPE term_to_binary_1(BIF_ALIST_1)
{
    Eterm res = erts_term_to_binary_int(BIF_P, BIF_term_to_binary_1,
                                        BIF_ARG_1, am_undefined,
                                        0, TERM_TO_BINARY_DFLAGS, NULL, 0,
                                        ~((Uint) 0));
    if (is_non_value(res)) {
        ASSERT(!(BIF_P->flags & F_DISABLE_GC));
        BIF_ERROR(BIF_P, SYSTEM_LIMIT);
    }
    if (is_tuple(res)) {
        /* A tuple result is a trap state; disable GC while trapping. */
        erts_set_gc_state(BIF_P, 0);
        BIF_TRAP1(&term_to_binary_trap_export,BIF_P,res);
    } else {
        ASSERT(!(BIF_P->flags & F_DISABLE_GC));
        BIF_RET(res);
    }
}
1403
/*
 * erlang:term_to_iovec/1.  Same trap protocol as term_to_binary/1 but
 * requests an iovec result (last-but-one argument !0).
 */
BIF_RETTYPE term_to_iovec_1(BIF_ALIST_1)
{
    Eterm res = erts_term_to_binary_int(BIF_P, BIF_term_to_iovec_1,
                                        BIF_ARG_1, am_undefined,
                                        0, TERM_TO_BINARY_DFLAGS, NULL, !0,
                                        ~((Uint) 0));
    if (is_non_value(res)) {
        ASSERT(!(BIF_P->flags & F_DISABLE_GC));
        BIF_ERROR(BIF_P, SYSTEM_LIMIT);
    }
    if (is_tuple(res)) {
        /* A tuple result is a trap state; disable GC while trapping. */
        erts_set_gc_state(BIF_P, 0);
        BIF_TRAP1(&term_to_binary_trap_export,BIF_P,res);
    } else {
        ASSERT(!(BIF_P->flags & F_DISABLE_GC));
        BIF_RET(res);
    }
}
1422
/*
 * Parse the option list accepted by term_to_binary/term_to_iovec.
 *
 * Recognized options:
 *   compressed | {compressed, 0..9}  - zlib level (compressed == default)
 *   {minor_version, 0..2}            - selects float/atom encoding flags
 *   deterministic                    - sets DFLAG_DETERMINISTIC
 *   iovec                            - only accepted when iovecp != NULL
 *   {fragment, Size}                 - only accepted when fsizep != NULL
 *
 * Outputs: *flagsp (encode dflags), *levelp (compression level),
 * *iovecp and *fsizep when the corresponding pointer is non-NULL.
 *
 * Returns !0 on success, 0 on badarg.
 */
static ERTS_INLINE int
parse_t2b_opts(Eterm opts, Uint *flagsp, int *levelp, int *iovecp, Uint *fsizep)
{
    int level = 0;
    int iovec = 0;
    Uint flags = TERM_TO_BINARY_DFLAGS;
    int deterministic = 0;
    Uint fsize = ~((Uint) 0); /* one fragment */

    while (is_list(opts)) {
        Eterm arg = CAR(list_val(opts));
        Eterm* tp;
        if (arg == am_compressed) {
            level = Z_DEFAULT_COMPRESSION;
        }
        else if (iovecp && arg == am_iovec) {
            iovec = !0;
        } else if (arg == am_deterministic) {
            deterministic = 1;
        } else if (is_tuple(arg) && *(tp = tuple_val(arg)) == make_arityval(2)) {
            if (tp[1] == am_minor_version && is_small(tp[2])) {
                switch (signed_val(tp[2])) {
                case 0:
                    flags = TERM_TO_BINARY_DFLAGS & ~DFLAG_NEW_FLOATS;
                    break;
                case 1: /* Current default... */
                    flags = TERM_TO_BINARY_DFLAGS;
                    break;
                case 2:
                    flags = TERM_TO_BINARY_DFLAGS | DFLAG_UTF8_ATOMS;
                    break;
                default:
                    return 0; /* badarg */
                }
            } else if (tp[1] == am_compressed && is_small(tp[2])) {
                level = signed_val(tp[2]);
                if (!(0 <= level && level < 10)) {
                    return 0; /* badarg */
                }
            } else if (fsizep) {
                /* {fragment, Size} is only valid for callers that ask for
                 * a fragment size (debug interface). */
                if (ERTS_IS_ATOM_STR("fragment", tp[1])) {
                    if (!term_to_Uint(tp[2], &fsize))
                        return 0; /* badarg */
                }
                else {
                    return 0; /* badarg */
                }
            }
            else {
                return 0; /* badarg */
            }
        } else {
            return 0; /* badarg */
        }
        opts = CDR(list_val(opts));
    }
    /* The option list must be proper. */
    if (is_not_nil(opts)) {
        return 0; /* badarg */
    }

    if (deterministic) {
        flags |= DFLAG_DETERMINISTIC;
    }

    *flagsp = flags;
    *levelp = level;
    if (iovecp)
        *iovecp = iovec;
    if (fsizep)
        *fsizep = fsize;

    return !0; /* ok */
}
1496
/*
 * erlang:term_to_binary/2.  Parses options, then follows the same trap
 * protocol as term_to_binary/1.
 */
BIF_RETTYPE term_to_binary_2(BIF_ALIST_2)
{
    int level;
    Uint flags;
    Eterm res;

    /* iovec and {fragment,_} options are not allowed here (NULL outs). */
    if (!parse_t2b_opts(BIF_ARG_2, &flags, &level, NULL, NULL)) {
        BIF_ERROR(BIF_P, BADARG);
    }

    res = erts_term_to_binary_int(BIF_P, BIF_term_to_binary_2,
                                  BIF_ARG_1, BIF_ARG_2,
                                  level, flags, NULL, 0,
                                  ~((Uint) 0));
    if (is_non_value(res)) {
        ASSERT(!(BIF_P->flags & F_DISABLE_GC));
        BIF_ERROR(BIF_P, SYSTEM_LIMIT);
    }
    if (is_tuple(res)) {
        /* A tuple result is a trap state; disable GC while trapping. */
        erts_set_gc_state(BIF_P, 0);
        BIF_TRAP1(&term_to_binary_trap_export,BIF_P,res);
    } else {
        ASSERT(!(BIF_P->flags & F_DISABLE_GC));
        BIF_RET(res);
    }
}
1523
/*
 * erlang:term_to_iovec/2.  Parses options, then follows the same trap
 * protocol as term_to_binary/2 but requests an iovec result.
 */
BIF_RETTYPE term_to_iovec_2(BIF_ALIST_2)
{
    int level;
    Uint flags;
    Eterm res;

    /* NOTE: iovec is implied by the BIF; the 'iovec' option itself is
     * rejected here (iovecp == NULL). */
    if (!parse_t2b_opts(BIF_ARG_2, &flags, &level, NULL, NULL)) {
        BIF_ERROR(BIF_P, BADARG);
    }

    res = erts_term_to_binary_int(BIF_P, BIF_term_to_iovec_2,
                                  BIF_ARG_1, BIF_ARG_2,
                                  level, flags, NULL, !0,
                                  ~((Uint) 0));
    if (is_non_value(res)) {
        ASSERT(!(BIF_P->flags & F_DISABLE_GC));
        BIF_ERROR(BIF_P, SYSTEM_LIMIT);
    }
    if (is_tuple(res)) {
        /* A tuple result is a trap state; disable GC while trapping. */
        erts_set_gc_state(BIF_P, 0);
        BIF_TRAP1(&term_to_binary_trap_export,BIF_P,res);
    } else {
        ASSERT(!(BIF_P->flags & F_DISABLE_GC));
        BIF_RET(res);
    }
}
1550
/*
 * Debug variant of term_to_binary: accepts the extended option set
 * (iovec, {fragment,Size}) via parse_t2b_opts and follows the same trap
 * protocol as the regular BIFs, but prepares the result via the
 * ERTS_BIF_PREP_* macros instead of returning directly.
 */
Eterm
erts_debug_term_to_binary(Process *p, Eterm term, Eterm opts)
{
    Eterm ret;
    int level, iovec;
    Uint flags;
    Uint fsize;

    if (!parse_t2b_opts(opts, &flags, &level, &iovec, &fsize)) {
        ERTS_BIF_PREP_ERROR(ret, p, BADARG);
    }
    else {
        Eterm res = erts_term_to_binary_int(p, BIF_term_to_binary_2,
                                            term, opts, level, flags,
                                            NULL, iovec, fsize);

        if (is_non_value(res)) {
            ASSERT(!(p->flags & F_DISABLE_GC));
            ERTS_BIF_PREP_ERROR(ret, p, SYSTEM_LIMIT);
        }
        else if (is_tuple(res)) {
            /* Trap state: disable GC until the continuation completes. */
            erts_set_gc_state(p, 0);
            ERTS_BIF_PREP_TRAP1(ret, &term_to_binary_trap_export,p,res);
        }
        else {
            ASSERT(!(p->flags & F_DISABLE_GC));
            ERTS_BIF_PREP_RET(ret, res);
        }
    }
    return ret;
}
1582
1583
/* States of the binary_to_term trap machine (binary_to_term_int).
 * Order is somewhat significant: the trap loop treats any state
 * >= B2TDone as "finished" (see the do/while condition). */
enum B2TState { /* order is somewhat significant */
    B2TPrepare,          /* align binary bytes, detect compression */
    B2TUncompressChunk,  /* inflate compressed data chunk by chunk */
    B2TSizeInit,
    B2TSize,             /* compute needed heap size (decoded_size) */
    B2TDecodeInit,
    B2TDecode,           /* dec_term() decode, may resume in the */
    B2TDecodeList,       /* sub-states below after trapping       */
    B2TDecodeTuple,
    B2TDecodeString,
    B2TDecodeBinary,

    B2TDone,             /* terminal states from here on */
    B2TDecodeFail,
    B2TBadArg
};
1600
/* Saved state for the size-calculation phase (used by decoded_size()
 * when it traps; fields are managed by that function). */
typedef struct {
    Sint heap_size;
    int terms;
    const byte* ep;          /* current read position in external data */
    int atom_extra_skip;
} B2TSizeContext;

/* Saved state for the decode phase (used by dec_term() when it traps). */
typedef struct {
    const byte* ep;          /* current read position in external data */
    Eterm res;               /* the (partially built) result term */
    Eterm* next;             /* where to store the next decoded subterm */
    ErtsHeapFactory factory;
    int remaining_n;
    char* remaining_bytes;
    ErtsWStack flat_maps;
    ErtsPStack hamt_array;
} B2TDecodeContext;

/* Saved zlib state for chunked decompression of a 'compressed' term. */
typedef struct {
    z_stream stream;
    byte* dbytes;            /* next output position */
    Uint dleft;              /* bytes of uncompressed output still expected */
} B2TUncompressContext;

/* Full trap context for binary_to_term; lives in a magic binary while
 * the operation is suspended. */
typedef struct B2TContext_t {
    Sint heap_size;
    byte* aligned_alloc;
    ErtsBinary2TermState b2ts;
    Uint32 flags;
    SWord reds;
    Uint used_bytes; /* In: boolean, Out: bytes */
    Eterm trap_bin;  /* THE_NON_VALUE if not exported */
    Export *bif;     /* BIF to blame in error reports after trapping */
    Eterm arg[2];    /* original BIF arguments, for error reports */
    enum B2TState state;
    union {          /* per-phase state; only one phase active at a time */
        B2TSizeContext sc;
        B2TDecodeContext dc;
        B2TUncompressContext uc;
    } u;
} B2TContext;
1642
1643 static B2TContext* b2t_export_context(Process*, B2TContext* src);
1644
binary2term_uncomp_size(byte * data,Sint size)1645 static uLongf binary2term_uncomp_size(byte* data, Sint size)
1646 {
1647 z_stream stream;
1648 int err;
1649 const uInt chunk_size = 64*1024; /* Ask tmp-alloc about a suitable size? */
1650 void* tmp_buf = erts_alloc(ERTS_ALC_T_TMP, chunk_size);
1651 uLongf uncomp_size = 0;
1652
1653 stream.next_in = (Bytef*)data;
1654 stream.avail_in = (uInt)size;
1655 stream.next_out = tmp_buf;
1656 stream.avail_out = (uInt)chunk_size;
1657
1658 erl_zlib_alloc_init(&stream);
1659
1660 err = inflateInit(&stream);
1661 if (err == Z_OK) {
1662 do {
1663 stream.next_out = tmp_buf;
1664 stream.avail_out = chunk_size;
1665 err = inflate(&stream, Z_NO_FLUSH);
1666 uncomp_size += chunk_size - stream.avail_out;
1667 }while (err == Z_OK);
1668 inflateEnd(&stream);
1669 }
1670 erts_free(ERTS_ALC_T_TMP, tmp_buf);
1671 return err == Z_STREAM_END ? uncomp_size : 0;
1672 }
1673
1674 static ERTS_INLINE int
binary2term_prepare(ErtsBinary2TermState * state,byte * data,Sint data_size,B2TContext ** ctxp,Process * p)1675 binary2term_prepare(ErtsBinary2TermState *state, byte *data, Sint data_size,
1676 B2TContext** ctxp, Process* p)
1677 {
1678 byte *bytes = data;
1679 Sint size = data_size;
1680
1681 state->exttmp = 0;
1682
1683 if (size < 1 || *bytes != VERSION_MAGIC) {
1684 return -1;
1685 }
1686 bytes++;
1687 size--;
1688 if (size < 5 || *bytes != COMPRESSED) {
1689 state->extp = bytes;
1690 if (ctxp)
1691 (*ctxp)->state = B2TSizeInit;
1692 }
1693 else {
1694 uLongf dest_len = get_uint32(bytes+1);
1695 bytes += 5;
1696 size -= 5;
1697 if (dest_len > 32*1024*1024
1698 || (state->extp = erts_alloc_fnf(ERTS_ALC_T_EXT_TERM_DATA, dest_len)) == NULL) {
1699 /*
1700 * Try avoid out-of-memory crash due to corrupted 'dest_len'
1701 * by checking the actual length of the uncompressed data.
1702 * The only way to do that is to uncompress it. Sad but true.
1703 */
1704 if (dest_len != binary2term_uncomp_size(bytes, size)) {
1705 return -1;
1706 }
1707 state->extp = erts_alloc(ERTS_ALC_T_EXT_TERM_DATA, dest_len);
1708 if (ctxp)
1709 (*ctxp)->reds -= dest_len;
1710 }
1711 state->exttmp = 1;
1712 if (ctxp) {
1713 /*
1714 * Start decompression by exporting trap context
1715 * so we don't have to deal with deep-copying z_stream.
1716 */
1717 B2TContext* ctx = b2t_export_context(p, *ctxp);
1718 ASSERT(state = &(*ctxp)->b2ts);
1719 state = &ctx->b2ts;
1720
1721 if (erl_zlib_inflate_start(&ctx->u.uc.stream, bytes, size) != Z_OK)
1722 return -1;
1723
1724 ctx->u.uc.dbytes = state->extp;
1725 ctx->u.uc.dleft = dest_len;
1726 if (ctx->used_bytes) {
1727 ASSERT(ctx->used_bytes == 1);
1728 /* to be subtracted by stream.avail_in when done */
1729 ctx->used_bytes = data_size;
1730 }
1731 ctx->state = B2TUncompressChunk;
1732 *ctxp = ctx;
1733 }
1734 else {
1735 uLongf dlen = dest_len;
1736 if (erl_zlib_uncompress(state->extp, &dlen, bytes, size) != Z_OK
1737 || dlen != dest_len) {
1738 return -1;
1739 }
1740 }
1741 size = (Sint) dest_len;
1742 }
1743 state->extsize = size;
1744 return 0;
1745 }
1746
1747 static ERTS_INLINE void
binary2term_abort(ErtsBinary2TermState * state)1748 binary2term_abort(ErtsBinary2TermState *state)
1749 {
1750 if (state->exttmp) {
1751 state->exttmp = 0;
1752 erts_free(ERTS_ALC_T_EXT_TERM_DATA, state->extp);
1753 }
1754 }
1755
1756 static ERTS_INLINE Eterm
binary2term_create(ErtsDistExternal * edep,ErtsBinary2TermState * state,ErtsHeapFactory * factory)1757 binary2term_create(ErtsDistExternal *edep, ErtsBinary2TermState *state,
1758 ErtsHeapFactory* factory)
1759 {
1760 Eterm res;
1761
1762 if (!dec_term(edep, factory, state->extp, &res, NULL, 0))
1763 res = THE_NON_VALUE;
1764 if (state->exttmp) {
1765 state->exttmp = 0;
1766 erts_free(ERTS_ALC_T_EXT_TERM_DATA, state->extp);
1767 }
1768 return res;
1769 }
1770
/*
 * Public wrapper: prepare external data for decoding (handling the
 * COMPRESSED header synchronously) and return the heap size needed, or
 * -1 on malformed data.  On failure any temporary buffer is freed and
 * the state is reset.
 */
Sint
erts_binary2term_prepare(ErtsBinary2TermState *state, byte *data, Sint data_size)
{
    Sint res;

    if (binary2term_prepare(state, data, data_size, NULL, NULL) < 0 ||
        (res=decoded_size(state->extp, state->extp + state->extsize, 0, NULL)) < 0) {

        if (state->exttmp)
            erts_free(ERTS_ALC_T_EXT_TERM_DATA, state->extp);
        state->extp = NULL;
        state->exttmp = 0;
        return -1;
    }
    return res;
}
1787
/* Public wrapper: abandon a prepared binary2term operation, releasing
 * any temporary buffer. */
void
erts_binary2term_abort(ErtsBinary2TermState *state)
{
    binary2term_abort(state);
}
1793
/* Public wrapper: decode a prepared binary2term state into a term
 * (no distribution context). */
Eterm
erts_binary2term_create(ErtsBinary2TermState *state, ErtsHeapFactory* factory)
{
    return binary2term_create(NULL,state, factory);
}
1799
/*
 * Release all resources owned by a binary_to_term trap context: the
 * byte-alignment copy, any temporary decompression buffer, and any
 * state-specific allocations (zlib stream / HAMT build stack).
 * Safe to call in any state; does not free the context struct itself.
 */
static void b2t_destroy_context(B2TContext* context)
{
    erts_free_aligned_binary_bytes_extra(context->aligned_alloc,
                                         ERTS_ALC_T_EXT_TERM_DATA);
    context->aligned_alloc = NULL;
    binary2term_abort(&context->b2ts);
    switch (context->state) {
    case B2TUncompressChunk:
        /* Abandoned mid-decompression: tear down the zlib stream. */
        erl_zlib_inflate_finish(&context->u.uc.stream);
        break;
    case B2TDecode:
    case B2TDecodeList:
    case B2TDecodeTuple:
    case B2TDecodeString:
    case B2TDecodeBinary:
        /* Abandoned mid-decode: free the HAMT build stack, if any. */
        if (context->u.dc.hamt_array.pstart) {
            erts_free(context->u.dc.hamt_array.alloc_type,
                      context->u.dc.hamt_array.pstart);
        }
        break;
    default:;
    }
}
1823
/* Magic-binary destructor for an exported B2TContext: runs when the
 * trap state is garbage collected (e.g. the trapping process dies). */
static int b2t_context_destructor(Binary *context_bin)
{
    B2TContext* ctx = (B2TContext*) ERTS_MAGIC_BIN_DATA(context_bin);
    ASSERT(ERTS_MAGIC_BIN_DESTRUCTOR(context_bin) == b2t_context_destructor);

    b2t_destroy_context(ctx);
    return 1;
}
1832
1833 static BIF_RETTYPE binary_to_term_int(Process*, Eterm bin, B2TContext*);
1834
1835
/* Trap continuation for binary_to_term: BIF_ARG_1 is a magic ref to the
 * exported B2TContext.  Passing THE_NON_VALUE as 'bin' tells
 * binary_to_term_int that this is a resumption, not a first call. */
static BIF_RETTYPE binary_to_term_trap_1(BIF_ALIST_1)
{
    Binary *context_bin = erts_magic_ref2bin(BIF_ARG_1);
    ASSERT(ERTS_MAGIC_BIN_DESTRUCTOR(context_bin) == b2t_context_destructor);

    return binary_to_term_int(BIF_P, THE_NON_VALUE, ERTS_MAGIC_BIN_DATA(context_bin));
}
1843
1844
/* Cost model for binary_to_term trapping: bytes of work charged per
 * reduction, and the assumed memcpy speed-up factor. */
#define B2T_BYTES_PER_REDUCTION 128
#define B2T_MEMCPY_FACTOR 8

/* Define for testing */
/*#define EXTREME_B2T_TRAPPING 1*/

#ifdef EXTREME_B2T_TRAPPING
/* Cheap deterministic LCG used to force very small reduction budgets,
 * exercising the trap paths as often as possible. */
static unsigned b2t_rand(void)
{
    static unsigned prev = 17;
    prev = (prev * 214013 + 2531011);
    return prev;
}
#endif
1859
1860
/*
 * Move a live (stack-allocated) B2TContext into a fresh magic binary so
 * the operation can trap and be resumed later.  The self-referential
 * 'next' pointer (which points at the context's own 'res' field before
 * any term has been decoded) is relocated into the copy.  A magic ref
 * to the binary is stored in ctx->trap_bin and the new context returned.
 */
static B2TContext* b2t_export_context(Process* p, B2TContext* src)
{
    Binary* context_b = erts_create_magic_binary(sizeof(B2TContext),
                                                 b2t_context_destructor);
    B2TContext* ctx = ERTS_MAGIC_BIN_DATA(context_b);
    Eterm* hp;

    ASSERT(is_non_value(src->trap_bin));
    sys_memcpy(ctx, src, sizeof(B2TContext));
    if (ctx->state >= B2TDecode && ctx->u.dc.next == &src->u.dc.res) {
        /* Fix up the pointer into the old context to point into the copy. */
        ctx->u.dc.next = &ctx->u.dc.res;
    }
    hp = HAlloc(p, ERTS_MAGIC_REF_THING_SIZE);
    ctx->trap_bin = erts_mk_magic_ref(&hp, &MSO(p), context_b);
    return ctx;
}
1877
/*
 * The binary_to_term state machine.  On the first call 'bin' is the input
 * binary and 'ctx' a stack-allocated context; on resumption (via
 * binary_to_term_trap_1) 'bin' is THE_NON_VALUE and 'ctx' lives inside a
 * magic binary referenced by ctx->trap_bin.
 *
 * Work is budgeted in "bytes" (B2T_BYTES_PER_REDUCTION bytes per
 * reduction); when the budget runs out the context is exported to a magic
 * binary (if not already) and the BIF traps with GC disabled.
 */
static BIF_RETTYPE binary_to_term_int(Process* p, Eterm bin, B2TContext *ctx)
{
    BIF_RETTYPE ret_val;
#ifdef EXTREME_B2T_TRAPPING
    SWord initial_reds = 1 + b2t_rand() % 4;
#else
    SWord initial_reds = (Uint)(ERTS_BIF_REDS_LEFT(p) * B2T_BYTES_PER_REDUCTION);
#endif
    int is_first_call;

    if (is_value(bin)) {
        /* Setup enough to get started */
        is_first_call = 1;
        ctx->state = B2TPrepare;
        ctx->aligned_alloc = NULL;
    } else {
        /* Resumption: the context must already have been exported. */
        ASSERT(is_value(ctx->trap_bin));
        ASSERT(ctx->state != B2TPrepare);
        is_first_call = 0;
    }
    ctx->reds = initial_reds;

    do {
        switch (ctx->state) {
        case B2TPrepare: {
            /* Get byte-aligned access to the binary, then detect and set
             * up decompression (binary2term_prepare picks the next state). */
            byte* bytes;
            Uint bin_size;
            bytes = erts_get_aligned_binary_bytes_extra(bin,
                                                        &ctx->aligned_alloc,
                                                        ERTS_ALC_T_EXT_TERM_DATA,
                                                        0);
            if (bytes == NULL) {
                ctx->b2ts.exttmp = 0;
                ctx->state = B2TBadArg;
                break;
            }
            bin_size = binary_size(bin);
            if (ctx->aligned_alloc) {
                /* Charge for the alignment copy. */
                ctx->reds -= bin_size / 8;
            }
            if (binary2term_prepare(&ctx->b2ts, bytes, bin_size, &ctx, p) < 0) {
                ctx->state = B2TBadArg;
            }
            break;
        }
        case B2TUncompressChunk: {
            /* Inflate at most ctx->reds bytes this slice. */
            uLongf chunk = ctx->reds;
            int zret;

            if (chunk > ctx->u.uc.dleft)
                chunk = ctx->u.uc.dleft;
            zret = erl_zlib_inflate_chunk(&ctx->u.uc.stream,
                                          ctx->u.uc.dbytes, &chunk);
            ctx->u.uc.dbytes += chunk;
            ctx->u.uc.dleft -= chunk;
            if (zret == Z_OK && ctx->u.uc.dleft > 0) {
                /* More to inflate: exhaust the budget so we trap. */
                ctx->reds = 0;
            }
            else if (erl_zlib_inflate_finish(&ctx->u.uc.stream) == Z_OK
                     && zret == Z_STREAM_END
                     && ctx->u.uc.dleft == 0) {
                ctx->reds -= chunk;
                if (ctx->used_bytes) {
                    ASSERT(ctx->used_bytes > 5 + ctx->u.uc.stream.avail_in);
                    ctx->used_bytes -= ctx->u.uc.stream.avail_in;
                }
                ctx->state = B2TSizeInit;
            }
            else {
                ctx->state = B2TBadArg;
            }
            break;
        }
        case B2TSizeInit:
            ctx->u.sc.ep = NULL;
            ctx->state = B2TSize;
            /*fall through*/
        case B2TSize:
            /* decoded_size() updates ctx->state itself (done/fail/trap). */
            ctx->heap_size = decoded_size(ctx->b2ts.extp,
                                          ctx->b2ts.extp + ctx->b2ts.extsize,
                                          0, ctx);
            break;

        case B2TDecodeInit:
            if (is_non_value(ctx->trap_bin) && ctx->b2ts.extsize > ctx->reds) {
                /* dec_term will maybe trap, allocate space for magic bin
                   before result term to make it easy to trim with HRelease.
                 */
                ctx = b2t_export_context(p, ctx);
            }
            ctx->u.dc.ep = ctx->b2ts.extp;
            ctx->u.dc.res = (Eterm) (UWord) NULL;
            ctx->u.dc.next = &ctx->u.dc.res;
            erts_factory_proc_prealloc_init(&ctx->u.dc.factory, p, ctx->heap_size);
            ctx->u.dc.flat_maps.wstart = NULL;
            ctx->u.dc.hamt_array.pstart = NULL;
            ctx->state = B2TDecode;
            /*fall through*/
        case B2TDecode:
        case B2TDecodeList:
        case B2TDecodeTuple:
        case B2TDecodeString:
        case B2TDecodeBinary: {
            /* dec_term() runs off the context and updates ctx->state. */
            ErtsDistExternal fakedep;
            fakedep.flags = ctx->flags;
            fakedep.data = NULL;
            dec_term(&fakedep, NULL, NULL, NULL, ctx, 0);
            break;
        }
        case B2TDecodeFail:
            /*fall through*/
        case B2TBadArg:
            BUMP_REDS(p, (initial_reds - ctx->reds) / B2T_BYTES_PER_REDUCTION);

            ASSERT(ctx->bif == BIF_TRAP_EXPORT(BIF_binary_to_term_1)
                   || ctx->bif == BIF_TRAP_EXPORT(BIF_binary_to_term_2));

            if (is_first_call)
                ERTS_BIF_PREP_ERROR(ret_val, p, BADARG);
            else {
                /* We trapped earlier with GC disabled; re-enable it and
                 * blame the original BIF with its original arguments. */
                erts_set_gc_state(p, 1);
                if (is_non_value(ctx->arg[1]))
                    ERTS_BIF_PREP_ERROR_TRAPPED1(ret_val, p, BADARG, ctx->bif,
                                                 ctx->arg[0]);
                else
                    ERTS_BIF_PREP_ERROR_TRAPPED2(ret_val, p, BADARG, ctx->bif,
                                                 ctx->arg[0], ctx->arg[1]);
            }
            b2t_destroy_context(ctx);
            return ret_val;

        case B2TDone:
            if (ctx->used_bytes) {
                /* 'used' option: return {Term, BytesUsed}. */
                Eterm *hp;
                Eterm used;
                if (!ctx->b2ts.exttmp) {
                    ASSERT(ctx->used_bytes == 1);
                    ctx->used_bytes = (ctx->u.dc.ep - ctx->b2ts.extp
                                       +1); /* VERSION_MAGIC */
                }
                if (IS_USMALL(0, ctx->used_bytes)) {
                    hp = erts_produce_heap(&ctx->u.dc.factory, 3, 0);
                    used = make_small(ctx->used_bytes);
                }
                else {
                    hp = erts_produce_heap(&ctx->u.dc.factory, 3+BIG_UINT_HEAP_SIZE, 0);
                    used = uint_to_big(ctx->used_bytes, hp);
                    hp += BIG_UINT_HEAP_SIZE;
                }
                ctx->u.dc.res = TUPLE2(hp, ctx->u.dc.res, used);
            }
            /* Frees temp buffers only; ctx and its factory remain usable. */
            b2t_destroy_context(ctx);

            if (ctx->u.dc.factory.hp > ctx->u.dc.factory.hp_end) {
                erts_exit(ERTS_ERROR_EXIT, ":%s, line %d: heap overrun by %d words(s)\n",
                          __FILE__, __LINE__, ctx->u.dc.factory.hp - ctx->u.dc.factory.hp_end);
            }
            erts_factory_close(&ctx->u.dc.factory);

            if (!is_first_call) {
                erts_set_gc_state(p, 1);
            }
            BUMP_REDS(p, (initial_reds - ctx->reds) / B2T_BYTES_PER_REDUCTION);
            ERTS_BIF_PREP_RET(ret_val, ctx->u.dc.res);
            return ret_val;

        default:
            ASSERT(!"Unknown state in binary_to_term");
        }
    }while (ctx->reds > 0 || ctx->state >= B2TDone);

    /* Budget exhausted mid-work: export the context (if still on the
     * stack), disable GC on the first trap, and yield. */
    if (is_non_value(ctx->trap_bin)) {
        ctx = b2t_export_context(p, ctx);
        ASSERT(is_value(ctx->trap_bin));
    }

    if (is_first_call) {
        erts_set_gc_state(p, 0);
    }
    BUMP_ALL_REDS(p);

    ERTS_BIF_PREP_TRAP1(ret_val, &binary_to_term_trap_export,
                        p, ctx->trap_bin);

    return ret_val;
}
2064
/* erlang:binary_to_term/1 -- no options: unsafe decode, no 'used' tuple. */
BIF_RETTYPE binary_to_term_1(BIF_ALIST_1)
{
    B2TContext ctx;

    ctx.flags = 0;
    ctx.used_bytes = 0;
    ctx.trap_bin = THE_NON_VALUE;
    /* Saved for error reporting if we fail after trapping. */
    ctx.bif = BIF_TRAP_EXPORT(BIF_binary_to_term_1);
    ctx.arg[0] = BIF_ARG_1;
    ctx.arg[1] = THE_NON_VALUE;
    return binary_to_term_int(BIF_P, BIF_ARG_1, &ctx);
}
2077
/*
 * erlang:binary_to_term/2.  Options: 'safe' (reject unknown atoms/funs)
 * and 'used' (return {Term, BytesUsed}).  Unknown options raise
 * badarg with the 'badopt' extended error info.
 */
BIF_RETTYPE binary_to_term_2(BIF_ALIST_2)
{
    B2TContext ctx;
    Eterm opts;
    Eterm opt;

    ctx.flags = 0;
    ctx.used_bytes = 0;
    opts = BIF_ARG_2;
    while (is_list(opts)) {
        opt = CAR(list_val(opts));
        if (opt == am_safe) {
            ctx.flags |= ERTS_DIST_EXT_BTT_SAFE;
        }
        else if (opt == am_used) {
            /* Boolean on input; becomes a byte count on completion. */
            ctx.used_bytes = 1;
        }
        else {
            goto error;
        }
        opts = CDR(list_val(opts));
    }

    /* The option list must be proper. */
    if (is_not_nil(opts))
        goto error;

    ctx.trap_bin = THE_NON_VALUE;
    /* Saved for error reporting if we fail after trapping. */
    ctx.bif = BIF_TRAP_EXPORT(BIF_binary_to_term_2);
    ctx.arg[0] = BIF_ARG_1;
    ctx.arg[1] = BIF_ARG_2;
    return binary_to_term_int(BIF_P, BIF_ARG_1, &ctx);

 error:
    BIF_P->fvalue = am_badopt;
    BIF_ERROR(BIF_P, BADARG | EXF_HAS_EXT_INFO);
}
2114
2115 Eterm
/*
 * erlang:external_size/1 -- size in bytes of term_to_binary(Term)
 * without building the binary.
 */
Eterm
external_size_1(BIF_ALIST_1)
{
    Process* p = BIF_P;
    Eterm Term = BIF_ARG_1;
    Uint size = 0;

    switch (erts_encode_ext_size(Term, &size)) {
    case ERTS_EXT_SZ_SYSTEM_LIMIT:
        BIF_ERROR(BIF_P, SYSTEM_LIMIT);
    case ERTS_EXT_SZ_YIELD:
        /* Sizing was called without yield support; must not happen. */
        ERTS_INTERNAL_ERROR("Unexpected yield");
    case ERTS_EXT_SZ_OK:
        break;
    }

    /* Return a small if it fits, otherwise a bignum. */
    if (IS_USMALL(0, size)) {
        BIF_RET(make_small(size));
    } else {
        Eterm* hp = HAlloc(p, BIG_UINT_HEAP_SIZE);
        BIF_RET(uint_to_big(size, hp));
    }
}
2138
2139 Eterm
external_size_2(BIF_ALIST_2)2140 external_size_2(BIF_ALIST_2)
2141 {
2142 Uint size = 0;
2143 Uint flags = TERM_TO_BINARY_DFLAGS;
2144
2145 while (is_list(BIF_ARG_2)) {
2146 Eterm arg = CAR(list_val(BIF_ARG_2));
2147 Eterm* tp;
2148
2149 if (is_tuple(arg) && *(tp = tuple_val(arg)) == make_arityval(2)) {
2150 if (tp[1] == am_minor_version && is_small(tp[2])) {
2151 switch (signed_val(tp[2])) {
2152 case 0:
2153 flags &= ~DFLAG_NEW_FLOATS;
2154 break;
2155 case 1:
2156 break;
2157 default:
2158 goto error;
2159 }
2160 } else {
2161 goto error;
2162 }
2163 } else {
2164 error:
2165 BIF_ERROR(BIF_P, BADARG);
2166 }
2167 BIF_ARG_2 = CDR(list_val(BIF_ARG_2));
2168 }
2169 if (is_not_nil(BIF_ARG_2)) {
2170 goto error;
2171 }
2172
2173 switch (erts_encode_ext_size_2(BIF_ARG_1, flags, &size)) {
2174 case ERTS_EXT_SZ_SYSTEM_LIMIT:
2175 BIF_ERROR(BIF_P, SYSTEM_LIMIT);
2176 case ERTS_EXT_SZ_YIELD:
2177 ERTS_INTERNAL_ERROR("Unexpected yield");
2178 case ERTS_EXT_SZ_OK:
2179 break;
2180 }
2181
2182 if (IS_USMALL(0, size)) {
2183 BIF_RET(make_small(size));
2184 } else {
2185 Eterm* hp = HAlloc(BIF_P, BIG_UINT_HEAP_SIZE);
2186 BIF_RET(uint_to_big(size, hp));
2187 }
2188 }
2189
/*
 * Encode 'Term' into a new binary of (pre-computed) external size 'size',
 * optionally zlib-compressing it at the given 'level'.  Non-trapping --
 * used when the whole encoding fits in one go.
 *
 * Layout when compressed:  <<VERSION_MAGIC, COMPRESSED, UncompSize:32, Z...>>
 * Layout when plain:       <<VERSION_MAGIC, Ext...>>
 */
static Eterm
erts_term_to_binary_simple(Process* p, Eterm Term, Uint size, int level, Uint64 dflags)
{
    Eterm bin;
    size_t real_size;
    byte* endp;

    if (level != 0) {
        byte buf[256];
        byte* bytes = buf;
        byte* out_bytes;
        uLongf dest_len;

        /* Encode into a scratch buffer first (stack buffer if it fits). */
        if (sizeof(buf) < size) {
            bytes = erts_alloc(ERTS_ALC_T_TMP, size);
        }

        if ((endp = enc_term(NULL, Term, bytes, dflags, NULL))
            == NULL) {
            erts_exit(ERTS_ERROR_EXIT, "%s, line %d: bad term: %x\n",
                      __FILE__, __LINE__, Term);
        }
        real_size = endp - bytes;
        if (real_size > size) {
            erts_exit(ERTS_ERROR_EXIT, "%s, line %d: buffer overflow: %d word(s)\n",
                      __FILE__, __LINE__, real_size - size);
        }

        /*
         * We don't want to compress if compression actually increases the size.
         * Therefore, don't give zlib more out buffer than the size of the
         * uncompressed external format (minus the 5 bytes needed for the
         * COMPRESSED tag). If zlib returns any error, we'll revert to using
         * the original uncompressed external term format.
         */

        if (real_size < 5) {
            dest_len = 0;
        } else {
            dest_len = real_size - 5;
        }
        bin = new_binary(p, NULL, real_size+1);
        out_bytes = binary_bytes(bin);
        out_bytes[0] = VERSION_MAGIC;
        /* Compressed payload starts at offset 6 (after magic + tag + size). */
        if (erl_zlib_compress2(out_bytes+6, &dest_len, bytes, real_size, level) != Z_OK) {
            /* Compression failed/didn't help: store uncompressed. */
            sys_memcpy(out_bytes+1, bytes, real_size);
            bin = erts_realloc_binary(bin, real_size+1);
        } else {
            out_bytes[1] = COMPRESSED;
            put_int32(real_size, out_bytes+2);
            bin = erts_realloc_binary(bin, dest_len+6);
        }
        if (bytes != buf) {
            erts_free(ERTS_ALC_T_TMP, bytes);
        }
        return bin;
    } else {
        byte* bytes;

        /* No compression: encode directly into the result binary. */
        bin = new_binary(p, (byte *)NULL, size);
        bytes = binary_bytes(bin);
        bytes[0] = VERSION_MAGIC;
        if ((endp = enc_term(NULL, Term, bytes+1, dflags, NULL))
            == NULL) {
            erts_exit(ERTS_ERROR_EXIT, "%s, line %d: bad term: %x\n",
                      __FILE__, __LINE__, Term);
        }
        real_size = endp - bytes;
        if (real_size > size) {
            erts_exit(ERTS_ERROR_EXIT, "%s, line %d: buffer overflow: %d word(s)\n",
                      __FILE__, __LINE__, endp - (bytes + size));
        }
        /* Trim to the actual encoded size. */
        return erts_realloc_binary(bin, real_size);
    }
}
2265
2266 Eterm
erts_term_to_binary(Process * p,Eterm Term,int level,Uint64 flags)2267 erts_term_to_binary(Process* p, Eterm Term, int level, Uint64 flags) {
2268 Uint size = 0;
2269 switch (encode_size_struct_int(NULL, NULL, Term, flags, NULL, &size)) {
2270 case ERTS_EXT_SZ_SYSTEM_LIMIT:
2271 return THE_NON_VALUE;
2272 case ERTS_EXT_SZ_YIELD:
2273 ERTS_INTERNAL_ERROR("Unexpected yield");
2274 case ERTS_EXT_SZ_OK:
2275 break;
2276 }
2277 size++; /* VERSION_MAGIC */;
2278 return erts_term_to_binary_simple(p, Term, size, level, flags);
2279 }
2280
2281 /* Define EXTREME_TTB_TRAPPING for testing in dist.h */
2282
2283 #ifndef EXTREME_TTB_TRAPPING
2284 #define TERM_TO_BINARY_COMPRESS_CHUNK (1 << 18)
2285 #else
2286 #define TERM_TO_BINARY_COMPRESS_CHUNK 10
2287 #endif
2288 #define TERM_TO_BINARY_MEMCPY_FACTOR 8
2289
/*
 * Magic-binary destructor for a trapped term_to_binary context.
 * Releases whatever resources the current state still owns; the context
 * is only cleaned up if it was never handed back to a live computation
 * (context->alive). Always returns 1 (destruction completed).
 */
static int ttb_context_destructor(Binary *context_bin)
{
    TTBContext *context = ERTS_MAGIC_BIN_DATA(context_bin);
    if (context->alive) {
        context->alive = 0;
        switch (context->state) {
        case TTBSize:
            /* Only the saved size-calculation stack to dispose of. */
            DESTROY_SAVED_WSTACK(&context->s.sc.wstack);
            break;
        case TTBEncode:
            DESTROY_SAVED_WSTACK(&context->s.ec.wstack);
            if (context->s.ec.result_bin != NULL) { /* Set to NULL if ever made alive! */
                ASSERT(erts_refc_read(&(context->s.ec.result_bin->intern.refc),1));
                erts_bin_free(context->s.ec.result_bin);
                context->s.ec.result_bin = NULL;
            }
            /* Scratch array used by the `deterministic` map sort, if any. */
            if (context->s.ec.map_array)
                erts_free(ERTS_ALC_T_T2B_DETERMINISTIC, context->s.ec.map_array);
            /* Suspended yieldable-qsort state, if the sort was interrupted. */
            if (context->s.ec.ycf_yield_state)
                erts_qsort_ycf_gen_destroy(context->s.ec.ycf_yield_state);
            /* I/O vector storage for the iovec result form. */
            if (context->s.ec.iov)
                erts_free(ERTS_ALC_T_T2B_VEC, context->s.ec.iov);
            break;
        case TTBCompress:
            /* Shut down the zlib deflate stream before freeing buffers. */
            erl_zlib_deflate_finish(&(context->s.cc.stream));

            if (context->s.cc.destination_bin != NULL) { /* Set to NULL if ever made alive! */
                ASSERT(erts_refc_read(&(context->s.cc.destination_bin->intern.refc),1));
                erts_bin_free(context->s.cc.destination_bin);
                context->s.cc.destination_bin = NULL;
            }

            if (context->s.cc.result_bin != NULL) { /* Set to NULL if ever made alive! */
                ASSERT(erts_refc_read(&(context->s.cc.result_bin->intern.refc),1));
                erts_bin_free(context->s.cc.result_bin);
                context->s.cc.result_bin = NULL;
            }
            break;
        }
    }
    return 1;
}
2332
2333 Uint
erts_ttb_iov_size(int use_termv,Sint vlen,Uint fragments)2334 erts_ttb_iov_size(int use_termv, Sint vlen, Uint fragments)
2335 {
2336 Uint sz;
2337 ASSERT(vlen > 0);
2338 ASSERT(fragments > 0);
2339 sz = sizeof(SysIOVec)*vlen;
2340 sz += sizeof(ErlDrvBinary *)*vlen;
2341 if (use_termv)
2342 sz += sizeof(Eterm)*vlen;
2343 sz += sizeof(ErlIOVec *)*fragments;
2344 sz += sizeof(ErlIOVec)*fragments;
2345 ASSERT(sz % sizeof(void*) == 0);
2346 return sz;
2347 }
2348
/*
 * Initialize the I/O vector bookkeeping in a TTBEncodeContext by carving
 * sub-arrays out of one pre-allocated chunk `ptr` (sized by
 * erts_ttb_iov_size() with the same use_termv/vlen/fragments arguments;
 * the carving order here must match that sizing exactly).
 */
void
erts_ttb_iov_init(TTBEncodeContext *ctx, int use_termv, char *ptr,
                  Sint vlen, Uint fragments, Uint fragment_size)
{
    ctx->vlen = 0;
    ctx->size = 0;

    /* SysIOVec array: one slot per potential vector element. */
    ctx->iov = (SysIOVec *) ptr;
    ptr += sizeof(SysIOVec)*vlen;
    ASSERT(((UWord) ptr) % sizeof(void *) == 0);

    /* Parallel array of driver-binary pointers. */
    ctx->binv = (ErlDrvBinary **) ptr;
    ptr += sizeof(ErlDrvBinary *)*vlen;

    /* Optional parallel array of Eterms (term_to_iovec keeps the source
     * binaries alive via these). */
    if (!use_termv)
        ctx->termv = NULL;
    else {
        ctx->termv = (Eterm *) ptr;
        ptr += sizeof(Eterm)*vlen;
    }

    /* Per-fragment ErlIOVec headers. */
    ctx->fragment_eiovs = (ErlIOVec *) ptr;
    ptr += sizeof(ErlIOVec)*fragments;
    ASSERT(((UWord) ptr) % sizeof(void *) == 0);

    /* -1: no fragment started yet. */
    ctx->frag_ix = -1;
    ctx->fragment_size = fragment_size;

#ifdef DEBUG
    ctx->cptr = NULL;
    ctx->debug_fragments = fragments;
    ctx->debug_vlen = vlen;
#endif
}
2383
/*
 * Yielding implementation behind term_to_binary/1,2 and term_to_iovec/1,2.
 *
 * Runs a three-state machine (TTBSize -> TTBEncode -> optional TTBCompress),
 * consuming scheduler reductions and trapping between states. On yield, the
 * state is exported into a magic binary (context_b) and a 4-tuple
 * {Term, Opts, MagicRef, BifIndex} is returned for the BIF to trap with;
 * on completion a binary (or, for iovec, a list of binaries) is returned.
 * Returns THE_NON_VALUE on system-limit overflow during sizing.
 *
 * context_b is NULL on the first call and carries the exported context on
 * re-entry after a trap.
 */
static Eterm erts_term_to_binary_int(Process* p, Sint bif_ix, Eterm Term, Eterm opts,
                                     int level, Uint64 dflags, Binary *context_b,
                                     int iovec, Uint fragment_size)
{
    Eterm *hp;
    Eterm res;
    Eterm c_term;
#ifndef EXTREME_TTB_TRAPPING
    Sint reds = (Sint) (ERTS_BIF_REDS_LEFT(p) * TERM_TO_BINARY_LOOP_FACTOR);
#else
    Sint reds = 20; /* For testing */
#endif
    Sint initial_reds = reds;
    TTBContext c_buff;
    TTBContext *context = &c_buff;  /* stack context until first export */

    ASSERT(bif_ix > 0 && IS_USMALL(!0, bif_ix));
    ASSERT(bif_ix == BIF_term_to_binary_1 || bif_ix == BIF_term_to_binary_2
           || bif_ix == BIF_term_to_iovec_1 || bif_ix == BIF_term_to_iovec_2);

/* Move the stack-allocated context into a magic binary so it survives
 * the trap. Idempotent: does nothing if already exported. */
#define EXPORT_CONTEXT()                                                \
    do {                                                                \
        if (context_b == NULL) {                                        \
            context_b = erts_create_magic_binary(sizeof(TTBContext),    \
                                                 ttb_context_destructor);\
            context = ERTS_MAGIC_BIN_DATA(context_b);                   \
            sys_memcpy(context,&c_buff,sizeof(TTBContext));             \
        }                                                               \
    } while (0)

/* Build and return the trap tuple {Term, opts, MagicRef, BifIx},
 * consuming all remaining reductions. */
#define RETURN_STATE()                                                  \
    do {                                                                \
        hp = HAlloc(p, ERTS_MAGIC_REF_THING_SIZE + 1 + 4);              \
        c_term = erts_mk_magic_ref(&hp, &MSO(p), context_b);            \
        res = TUPLE4(hp, Term, opts, c_term, make_small(bif_ix));       \
        BUMP_ALL_REDS(p);                                               \
        return res;                                                     \
    } while (0);

    if (context_b == NULL) {
        /* Setup enough to get started */
        context->state = TTBSize;
        context->alive = 1;
        ERTS_INIT_TTBSizeContext(&context->s.sc, dflags);
        context->s.sc.level = level;
        context->s.sc.fragment_size = fragment_size;
        if (!level) {
            /* vlen >= 0 enables the iovec result path (no compression). */
            context->s.sc.vlen = iovec ? 0 : -1;
            context->s.sc.iovec = iovec;
        }
        else {
            /* Compression forces a single flat binary. */
            context->s.sc.vlen = -1;
            context->s.sc.iovec = 0;
        }
    } else {
        /* Re-entry after a trap: resume from the exported context. */
        context = ERTS_MAGIC_BIN_DATA(context_b);
    }

    /* Initialization done, now we will go through the states */
    for (;;) {
        switch (context->state) {
        case TTBSize:
            {
                /* State 1: compute the external-format size (may yield). */
                Uint size, fragments = 1;
                Binary *result_bin;
                int level = context->s.sc.level; /* shadows param on purpose */
                Sint vlen;
                iovec = context->s.sc.iovec;
                fragment_size = context->s.sc.fragment_size;
                size = 1; /* VERSION_MAGIC */
                switch (encode_size_struct_int(&context->s.sc, NULL, Term,
                                               context->s.sc.dflags, &reds,
                                               &size)) {
                case ERTS_EXT_SZ_SYSTEM_LIMIT:
                    BUMP_REDS(p, (initial_reds - reds) / TERM_TO_BINARY_LOOP_FACTOR);
                    return THE_NON_VALUE;
                case ERTS_EXT_SZ_YIELD:
                    EXPORT_CONTEXT();
                    /* Same state */
                    RETURN_STATE();
                case ERTS_EXT_SZ_OK:
                    break;
                }
                /* Move these to next state */
                dflags = context->s.sc.dflags;
                vlen = context->s.sc.vlen;
                if (vlen >= 0) {
                    /* iovec result: compute fragment count and reserve
                     * 3 vector slots per fragment (headers etc). */
                    Uint total_size = size + context->s.sc.extra_size;
                    fragments = (total_size - 1)/fragment_size + 1;
                    vlen += 3*fragments;
                    ASSERT(vlen);
                }
                else if (size <= ERL_ONHEAP_BIN_LIMIT) {
                    /* Finish in one go */
                    res = erts_term_to_binary_simple(p, Term, size,
                                                     level, dflags);
                    if (iovec) {
                        /* term_to_iovec always returns a list. */
                        Eterm *hp = HAlloc(p, 2);
                        res = CONS(hp, res, NIL);
                    }
                    BUMP_REDS(p, 1);
                    return res;
                }

                result_bin = erts_bin_nrml_alloc(size);
                result_bin->orig_bytes[0] = (byte)VERSION_MAGIC;
                /* Next state immediately, no need to export context */
                context->state = TTBEncode;
                ERTS_INIT_TTBEncodeContext(&context->s.ec, dflags);
                context->s.ec.level = level;
                context->s.ec.result_bin = result_bin;
                context->s.ec.iovec = iovec;
                if (vlen >= 0) {
                    /* Allocate and carve the I/O vector bookkeeping. */
                    Uint sz = erts_ttb_iov_size(!0, vlen, fragments);
                    char *ptr = (char *) erts_alloc(ERTS_ALC_T_T2B_VEC, sz);
                    erts_ttb_iov_init(&context->s.ec, !0, ptr, vlen,
                                      fragments, fragment_size);
                    context->s.ec.cptr = (byte *) &result_bin->orig_bytes[0];
                }
                break;
            }
        case TTBEncode:
            {
                /* State 2: encode the term into result_bin (may yield). */
                byte *endp, *tmp;
                byte *bytes = (byte *) context->s.ec.result_bin->orig_bytes;
                size_t real_size;
                Binary *result_bin;
                Sint realloc_offset;
                Uint fragments;

                dflags = context->s.ec.dflags;
                if (enc_term_int(&context->s.ec, NULL,Term, bytes+1, dflags,
                                 NULL, &reds, &endp) < 0) {
                    EXPORT_CONTEXT();
                    RETURN_STATE();
                }
                real_size = endp - bytes;
                /* Shrink to actual size; iov entries pointing into the old
                 * buffer are patched by realloc_offset below. */
                tmp = (byte *) &context->s.ec.result_bin->orig_bytes[0];
                result_bin = erts_bin_realloc(context->s.ec.result_bin,real_size);
                realloc_offset = (byte *) &result_bin->orig_bytes[0] - tmp;
                level = context->s.ec.level;
                BUMP_REDS(p, (initial_reds - reds) / TERM_TO_BINARY_LOOP_FACTOR);
                if (level == 0 || real_size < 6) { /* We are done */
                    Sint cbin_refc_diff;
                    Eterm result, rb_term, *hp, *hp_end;
                    Uint hsz;
                    int ix;
                    SysIOVec *iov;
                    Eterm *termv;
                return_normal:
                    fragments = context->s.ec.frag_ix + 1;
                    context->s.ec.result_bin = NULL;
                    context->alive = 0;
                    if (context_b && erts_refc_read(&context_b->intern.refc,0) == 0) {
                        erts_bin_free(context_b);
                    }
                    if (!context->s.ec.iov) {
                        /* Plain binary result (wrapped in a list for iovec). */
                        hsz = PROC_BIN_SIZE + (iovec ? 2 : 0);
                        hp = HAlloc(p, hsz);
                        result = erts_build_proc_bin(&MSO(p), hp, result_bin);
                        if (iovec) {
                            hp += PROC_BIN_SIZE;
                            result = CONS(hp, result, NIL);
                        }
                        return result;
                    }
                    /* iovec result: turn the collected SysIOVec entries
                     * into a list of binaries. */
                    iovec = context->s.ec.iovec;
                    ASSERT(iovec);
                    iov = context->s.ec.iov;
                    termv = context->s.ec.termv;
                    ASSERT(context->s.ec.vlen <= context->s.ec.debug_vlen);
                    ASSERT(fragments <= context->s.ec.debug_fragments);
                    /* first two elements should be unused */
                    ASSERT(context->s.ec.vlen >= 3*fragments);
                    ASSERT(!iov[0].iov_base && !iov[0].iov_len);
                    ASSERT(!iov[1].iov_base && !iov[1].iov_len);

                    hsz = (2 /* cons */
                           + (PROC_BIN_SIZE > ERL_SUB_BIN_SIZE
                              ? PROC_BIN_SIZE
                              : ERL_SUB_BIN_SIZE)); /* max size per vec */
                    hsz *= context->s.ec.vlen - 2*fragments; /* number of vecs */
                    hp = HAlloc(p, hsz);
                    hp_end = hp + hsz;
                    rb_term = THE_NON_VALUE;
                    result = NIL;
                    ASSERT(erts_refc_read(&result_bin->intern.refc, 1) == 1);
                    /* Track net refc change for result_bin; starts at -1
                     * since we drop our own reference at the end. */
                    cbin_refc_diff = -1;
                    /* Build the list back-to-front so it ends up in order. */
                    for (ix = context->s.ec.vlen - 1; ix > 1; ix--) {
                        Eterm bin_term, pb_term;
                        Uint pb_size;
                        ProcBin *pb;
                        SysIOVec *iovp = &iov[ix];
                        if (!iovp->iov_base)
                            continue; /* empty slot for header */
                        pb_term = termv[ix];
                        if (is_value(pb_term)) {
                            /* Entry references an existing off-heap binary. */
                            pb_size = binary_size(pb_term);
                            pb = (ProcBin *) binary_val(pb_term);
                        }
                        else {
                            /* Entry points into result_bin; adjust for the
                             * realloc above. */
                            iovp->iov_base = (void *) (((byte *) iovp->iov_base)
                                                       + realloc_offset);
                            pb_size = result_bin->orig_size;
                            if (is_non_value(rb_term))
                                pb = NULL;
                            else {
                                pb = (ProcBin *) binary_val(rb_term);
                                pb_term = rb_term;
                            }
                        }
                        /*
                         * We intentionally avoid using sub binaries
                         * since the GC might convert those to heap
                         * binaries and by this ruin the nice preparation
                         * for usage of this data as I/O vector in
                         * nifs/drivers.
                         */
                        if (is_value(pb_term) && iovp->iov_len == pb_size)
                            bin_term = pb_term;
                        else {
                            /* Build a fresh ProcBin covering this slice. */
                            Binary *bin;
                            if (is_value(pb_term)) {
                                bin = ((ProcBin *) binary_val(pb_term))->val;
                                erts_refc_inc(&bin->intern.refc, 2);
                            }
                            else {
                                bin = result_bin;
                                cbin_refc_diff++;
                            }
                            pb = (ProcBin *) (char *) hp;
                            hp += PROC_BIN_SIZE;
                            pb->thing_word = HEADER_PROC_BIN;
                            pb->size = (Uint) iovp->iov_len;
                            pb->next = MSO(p).first;
                            MSO(p).first = (struct erl_off_heap_header*) pb;
                            pb->val = bin;
                            pb->bytes = (byte*) iovp->iov_base;
                            pb->flags = 0;
                            OH_OVERHEAD(&MSO(p), pb->size / sizeof(Eterm));
                            bin_term = make_binary(pb);
                        }
                        result = CONS(hp, bin_term, result);
                        hp += 2;
                    }
                    ASSERT(hp <= hp_end);
                    HRelease(p, hp_end, hp);
                    context->s.ec.iov = NULL;
                    erts_free(ERTS_ALC_T_T2B_VEC, iov);
                    /* Settle result_bin's reference count: free it if it
                     * ended up unused, otherwise add the net references. */
                    if (cbin_refc_diff) {
                        ASSERT(cbin_refc_diff >= -1);
                        if (cbin_refc_diff > 0)
                            erts_refc_add(&result_bin->intern.refc,
                                          cbin_refc_diff, 1);
                        else
                            erts_bin_free(result_bin);
                    }
                    return result;
                }
                /* Continue with compression... */
                /* To make absolutely sure that zlib does not barf on a reallocated context,
                   we make sure it's "exported" before doing anything compression-like */
                EXPORT_CONTEXT();
                bytes = (byte *) result_bin->orig_bytes; /* result_bin is reallocated */
                if (erl_zlib_deflate_start(&(context->s.cc.stream),bytes+1,real_size-1,level)
                    != Z_OK) {
                    goto return_normal;
                }
                context->state = TTBCompress;
                context->s.cc.real_size = real_size;
                context->s.cc.result_bin = result_bin;

                result_bin = erts_bin_nrml_alloc(real_size);
                result_bin->orig_bytes[0] = (byte) VERSION_MAGIC;

                /* Compressed layout starts at offset 6: VERSION_MAGIC,
                 * COMPRESSED tag and 4-byte uncompressed size precede it. */
                context->s.cc.destination_bin = result_bin;
                context->s.cc.dest_len = 0;
                context->s.cc.dbytes = (byte *) result_bin->orig_bytes+6;
                break;
            }
        case TTBCompress:
            {
                /* State 3: deflate in reduction-bounded chunks (may yield). */
                uLongf tot_dest_len = context->s.cc.real_size - 6;
                uLongf left = (tot_dest_len - context->s.cc.dest_len);
                uLongf this_time = (left > TERM_TO_BINARY_COMPRESS_CHUNK) ?
                    TERM_TO_BINARY_COMPRESS_CHUNK :
                    left;
                Binary *result_bin;
                ProcBin *pb;
                Uint max = (ERTS_BIF_REDS_LEFT(p) *  TERM_TO_BINARY_COMPRESS_CHUNK) / CONTEXT_REDS;

                if (max < this_time) {
                    this_time = max + 1; /* do not set this_time to 0 */
                }

                res = erl_zlib_deflate_chunk(&(context->s.cc.stream), context->s.cc.dbytes, &this_time);
                context->s.cc.dbytes += this_time;
                context->s.cc.dest_len += this_time;
                switch (res) {
                case Z_OK:
                    if (context->s.cc.dest_len >= tot_dest_len) {
                        /* Compression is not shrinking the data; give up. */
                        goto no_use_compressing;
                    }
                    RETURN_STATE();
                case Z_STREAM_END:
                    {
                        /* Compression finished and paid off. */
                        byte *dbytes = (byte *) context->s.cc.destination_bin->orig_bytes + 1;

                        dbytes[0] = COMPRESSED;
                        put_int32(context->s.cc.real_size-1,dbytes+1);
                        erl_zlib_deflate_finish(&(context->s.cc.stream));
                        result_bin = erts_bin_realloc(context->s.cc.destination_bin,
                                                      context->s.cc.dest_len+6);
                        context->s.cc.destination_bin = NULL;
                        ASSERT(erts_refc_read(&result_bin->intern.refc, 1));
                        erts_bin_free(context->s.cc.result_bin);
                        context->s.cc.result_bin = NULL;
                        context->alive = 0;
                        BUMP_REDS(p, (this_time * CONTEXT_REDS) / TERM_TO_BINARY_COMPRESS_CHUNK);
                        if (context_b && erts_refc_read(&context_b->intern.refc,0) == 0) {
                            erts_bin_free(context_b);
                        }
                        return erts_build_proc_bin(&MSO(p),
                                                   HAlloc(p, PROC_BIN_SIZE),
                                                   result_bin);
                    }
                default: /* Compression error, revert to uncompressed binary (still in
                            context) */
                no_use_compressing:
                    result_bin = context->s.cc.result_bin;
                    context->s.cc.result_bin = NULL;
                    pb = (ProcBin *) HAlloc(p, PROC_BIN_SIZE);
                    pb->thing_word = HEADER_PROC_BIN;
                    pb->size = context->s.cc.real_size;
                    pb->next = MSO(p).first;
                    MSO(p).first = (struct erl_off_heap_header*)pb;
                    pb->val = result_bin;
                    pb->bytes = (byte*) result_bin->orig_bytes;
                    pb->flags = 0;
                    OH_OVERHEAD(&(MSO(p)), pb->size / sizeof(Eterm));
                    ASSERT(erts_refc_read(&result_bin->intern.refc, 1));
                    erl_zlib_deflate_finish(&(context->s.cc.stream));
                    erts_bin_free(context->s.cc.destination_bin);
                    context->s.cc.destination_bin = NULL;
                    context->alive = 0;
                    BUMP_REDS(p, (this_time * CONTEXT_REDS) / TERM_TO_BINARY_COMPRESS_CHUNK);
                    if (context_b && erts_refc_read(&context_b->intern.refc,0) == 0) {
                        erts_bin_free(context_b);
                    }
                    return make_binary(pb);
                }
            }
        }
    }
#undef EXPORT_CONTEXT
#undef RETURN_STATE
}
2741
2742
2743
2744
2745
2746
2747
2748
2749 /*
2750 * This function fills ext with the external format of atom.
2751 * If it's an old atom we just supply an index, otherwise
2752 * we insert the index _and_ the entire atom. This way the receiving side
2753 * does not have to perform an hash on the etom to locate it, and
2754 * we save a lot of space on the wire.
2755 */
2756
/*
 * Encode atom `atom` at `ep`, returning the position after the encoding.
 * Chooses among: internal-index tags (ETS compressed format), a cached
 * index (ATOM_CACHE_REF, distribution only), or the full atom text in
 * UTF-8 or Latin-1 form depending on dflags and the atom's contents.
 */
static byte*
enc_atom(ErtsAtomCacheMap *acmp, Eterm atom, byte *ep, Uint64 dflags)
{
    int iix;
    int len;
    const int utf8_atoms = (int) (dflags & DFLAG_UTF8_ATOMS);

    ASSERT(is_atom(atom));

    if (dflags & DFLAG_ETS_COMPRESSED) {
        /* ETS compressed format never leaves this node: encode just the
         * atom-table index (2 or 3 bytes). */
        Uint aval = atom_val(atom);
        ASSERT(aval < (1<<24));
        if (aval >= (1 << 16)) {
            *ep++ = ATOM_INTERNAL_REF3;
            put_int24(aval, ep);
            ep += 3;
        }
        else {
            *ep++ = ATOM_INTERNAL_REF2;
            put_int16(aval, ep);
            ep += 2;
        }
        return ep;
    }

    /*
     * term_to_binary/1,2 and the initial distribution message
     * don't use the cache.
     */

    iix = get_iix_acache_map(acmp, atom, dflags);
    if (iix < 0) {
        /* Not cached: emit the atom text. */
        Atom *a = atom_tab(atom_val(atom));
        len = a->len;
        if (utf8_atoms || a->latin1_chars < 0) {
            /* Emit as UTF-8 (required when the atom has non-Latin-1
             * characters). Tag choice depends on the byte length. */
            if (len > 255) {
                *ep++ = ATOM_UTF8_EXT;
                put_int16(len, ep);
                ep += 2;
            }
            else {
                *ep++ = SMALL_ATOM_UTF8_EXT;
                put_int8(len, ep);
                ep += 1;
            }
            sys_memcpy((char *) ep, (char *) a->name, len);
        }
        else {
            /* Emit as Latin-1. The name is stored as UTF-8 internally, so
             * it may need transcoding; the copy is done first and the
             * (possibly updated) length written afterwards. */
            if (a->latin1_chars <= 255 && (dflags & DFLAG_SMALL_ATOM_TAGS)) {
                *ep++ = SMALL_ATOM_EXT;
                if (len == a->latin1_chars) {
                    /* Pure ASCII: UTF-8 bytes are the Latin-1 bytes. */
                    sys_memcpy(ep+1, a->name, len);
                }
                else {
                    len = erts_utf8_to_latin1(ep+1, a->name, len);
                    ASSERT(len == a->latin1_chars);
                }
                put_int8(len, ep);
                ep++;
            }
            else {
                *ep++ = ATOM_EXT;
                if (len == a->latin1_chars) {
                    sys_memcpy(ep+2, a->name, len);
                }
                else {
                    len = erts_utf8_to_latin1(ep+2, a->name, len);
                    ASSERT(len == a->latin1_chars);
                }
                put_int16(len, ep);
                ep += 2;
            }
        }
        ep += len;
        return ep;
    }

    /* The atom is referenced in the cache. */
    *ep++ = ATOM_CACHE_REF;
    put_int8(iix, ep);
    ep++;
    return ep;
}
2840
2841 /*
2842 * We use this atom as sysname in local pid/port/refs
2843 * for the ETS compressed format
2844 *
2845 */
2846 #define INTERNAL_LOCAL_SYSNAME am_ErtsSecretAtom
2847
2848 static byte*
enc_pid(ErtsAtomCacheMap * acmp,Eterm pid,byte * ep,Uint64 dflags)2849 enc_pid(ErtsAtomCacheMap *acmp, Eterm pid, byte* ep, Uint64 dflags)
2850 {
2851 Uint on, os;
2852 Eterm sysname = ((is_internal_pid(pid) && (dflags & DFLAG_ETS_COMPRESSED))
2853 ? INTERNAL_LOCAL_SYSNAME : pid_node_name(pid));
2854 Uint32 creation = pid_creation(pid);
2855
2856 *ep++ = NEW_PID_EXT;
2857
2858 ep = enc_atom(acmp, sysname, ep, dflags);
2859
2860 if (is_internal_pid(pid)) {
2861 on = internal_pid_number(pid);
2862 os = internal_pid_serial(pid);
2863 }
2864 else {
2865 on = external_pid_number(pid);
2866 os = external_pid_serial(pid);
2867 }
2868
2869 put_int32(on, ep);
2870 ep += 4;
2871 put_int32(os, ep);
2872 ep += 4;
2873 put_int32(creation, ep);
2874 ep += 4;
2875 return ep;
2876 }
2877
2878 /* Expect an atom in plain text or cached */
/* Expect an atom in plain text or cached.
 *
 * Decode one atom at `ep` into *objp and return the position after it,
 * or NULL on error (*objp is then set to NIL so no heap hole is left).
 * `edep` may be NULL; it supplies the distribution atom cache and the
 * "binary_to_term-safe" flag (don't create new atoms).
 */
static const byte*
dec_atom(ErtsDistExternal *edep, const byte* ep, Eterm* objp)
{
    Uint len;
    int n;
    ErtsAtomEncoding char_enc;

    switch (*ep++) {
    case ATOM_CACHE_REF:
        /* Index into the per-connection atom translation table. */
        if (!(edep && (edep->flags & ERTS_DIST_EXT_ATOM_TRANS_TAB)))
            goto error;
        n = get_int8(ep);
        ep++;
        if (n >= edep->attab.size)
            goto error;
        ASSERT(is_atom(edep->attab.atom[n]));
        *objp = edep->attab.atom[n];
        break;
    case ATOM_EXT:
        len = get_int16(ep),
        ep += 2;
        char_enc = ERTS_ATOM_ENC_LATIN1;
        goto dec_atom_common;
    case SMALL_ATOM_EXT:
        len = get_int8(ep);
        ep++;
        char_enc = ERTS_ATOM_ENC_LATIN1;
        goto dec_atom_common;
    case ATOM_UTF8_EXT:
        len = get_int16(ep),
        ep += 2;
        char_enc = ERTS_ATOM_ENC_UTF8;
        goto dec_atom_common;
    case SMALL_ATOM_UTF8_EXT:
        len = get_int8(ep),
        ep++;
        char_enc = ERTS_ATOM_ENC_UTF8;
    dec_atom_common:
        /* Shared tail for all plain-text forms: len and char_enc are set. */
        if (edep && (edep->flags & ERTS_DIST_EXT_BTT_SAFE)) {
            /* "Safe" decode: only accept atoms that already exist. */
            if (!erts_atom_get((char*)ep, len, objp, char_enc)) {
                goto error;
            }
        } else {
            Eterm atom = erts_atom_put(ep, len, char_enc, 0);
            if (is_non_value(atom))
                goto error;
            *objp = atom;
        }
        ep += len;
        break;
    case ATOM_INTERNAL_REF2:
        /* ETS compressed format: 16-bit atom-table index. */
        n = get_int16(ep);
        ep += 2;
        if (n >= atom_table_size()) {
            goto error;
        }
        *objp = make_atom(n);
        break;
    case ATOM_INTERNAL_REF3:
        /* ETS compressed format: 24-bit atom-table index. */
        n = get_int24(ep);
        ep += 3;
        if (n >= atom_table_size()) {
            goto error;
        }
        *objp = make_atom(n);
        break;

    default:
    error:
        *objp = NIL; /* Don't leave a hole in the heap */
        return NULL;
    }
    return ep;
}
2953
dec_is_this_node(Eterm sysname,Uint32 creation)2954 static ERTS_INLINE int dec_is_this_node(Eterm sysname, Uint32 creation)
2955 {
2956 return (sysname == INTERNAL_LOCAL_SYSNAME
2957 ||
2958 (sysname == erts_this_node->sysname
2959 && (creation == erts_this_node->creation
2960 || creation == ORIG_CREATION)));
2961 }
2962
2963
dec_get_node(Eterm sysname,Uint32 creation,Eterm book)2964 static ERTS_INLINE ErlNode* dec_get_node(Eterm sysname, Uint32 creation, Eterm book)
2965 {
2966 if (dec_is_this_node(sysname, creation))
2967 return erts_this_node;
2968 else
2969 return erts_find_or_insert_node(sysname,creation,book);
2970 }
2971
/*
 * Decode a pid (tag is PID_EXT or NEW_PID_EXT, already consumed) at `ep`
 * into *objp, building an internal pid or an off-heap external pid on
 * factory->hp. Returns the position after the encoding, or NULL on error.
 */
static const byte*
dec_pid(ErtsDistExternal *edep, ErtsHeapFactory* factory, const byte* ep,
        Eterm* objp, byte tag)
{
    Eterm sysname;
    Uint data;
    Uint num;
    Uint ser;
    Uint32 cre;

    *objp = NIL; /* In case we fail, don't leave a hole in the heap */

    /* eat first atom */
    if ((ep = dec_atom(edep, ep, &sysname)) == NULL)
        return NULL;
    num = get_uint32(ep);
    ep += 4;
    ser = get_uint32(ep);
    ep += 4;
    if (tag == PID_EXT) {
        /* Old format from OTP 19-22: 1-byte "tiny" creation (0-3). */
        cre = get_int8(ep);
        ep += 1;
        if (!is_tiny_creation(cre)) {
            return NULL;
        }
    } else {
        ASSERT(tag == NEW_PID_EXT);
        cre = get_uint32(ep);
        ep += 4;
    }

    /*
     * We are careful to create the node entry only after all
     * validity tests are done.
     */
    if (dec_is_this_node(sysname, cre)) {
        /* Local pid: number/serial must fit the internal representation. */
        if (num > ERTS_MAX_INTERNAL_PID_NUMBER ||
            ser > ERTS_MAX_INTERNAL_PID_SERIAL) {
            return NULL;
        }

        data = make_pid_data(ser, num);
        *objp = make_internal_pid(data);
    } else {
        /* Remote pid: allocate an ExternalThing on the factory heap and
         * link it into the off-heap list. */
        ExternalThing *etp = (ExternalThing *) factory->hp;
        factory->hp += EXTERNAL_PID_HEAP_SIZE;

        etp->header = make_external_pid_header();
        etp->next = factory->off_heap->first;
        etp->node = erts_find_or_insert_node(sysname, cre, make_boxed(&etp->header));
        etp->data.pid.num = num;
        etp->data.pid.ser = ser;

        factory->off_heap->first = (struct erl_off_heap_header*) etp;
        *objp = make_external_pid(etp);
    }
    return ep;
}
3030
3031
3032 #define ENC_TERM ((Eterm) 0)
3033 #define ENC_ONE_CONS ((Eterm) 1)
3034 #define ENC_PATCH_FUN_SIZE ((Eterm) 2)
3035 #define ENC_BIN_COPY ((Eterm) 3)
3036 #define ENC_MAP_PAIR ((Eterm) 4)
3037 #define ENC_HASHMAP_NODE ((Eterm) 5)
3038 #define ENC_STORE_MAP_ELEMENT ((Eterm) 6)
3039 #define ENC_START_SORTING_MAP ((Eterm) 7)
3040 #define ENC_CONTINUE_SORTING_MAP ((Eterm) 8)
3041 #define ENC_PUSH_SORTED_MAP ((Eterm) 9)
3042 #define ENC_LAST_ARRAY_ELEMENT ((Eterm) 10)
3043
alloc_map_array(Uint size)3044 static Eterm* alloc_map_array(Uint size)
3045 {
3046 return (Eterm *) erts_alloc(ERTS_ALC_T_T2B_DETERMINISTIC,
3047 size * 2 * sizeof(Eterm));
3048 }
3049
/*
 * qsort comparison callback for map key/value pairs: orders by the key
 * (first Eterm of each pair) using total term order. Returns -1/0/1.
 */
static int map_key_compare(Eterm *a, Eterm *b)
{
    Sint rel = CMP_TERM(*a, *b);
    /* Collapse the arbitrary-magnitude result to the sign only. */
    return (rel > 0) - (rel < 0);
}
3061
3062 static void*
ycf_yield_alloc(size_t size,void * context)3063 ycf_yield_alloc(size_t size, void* context)
3064 {
3065 (void) context;
3066 return (void *) erts_alloc(ERTS_ALC_T_T2B_DETERMINISTIC, size);
3067 }
3068
3069 static void
ycf_yield_free(void * block,void * context)3070 ycf_yield_free(void* block, void* context)
3071 {
3072 (void) context;
3073 erts_free(ERTS_ALC_T_T2B_DETERMINISTIC, block);
3074 }
3075
3076 static byte*
enc_term(ErtsAtomCacheMap * acmp,Eterm obj,byte * ep,Uint64 dflags,struct erl_off_heap_header ** off_heap)3077 enc_term(ErtsAtomCacheMap *acmp, Eterm obj, byte* ep, Uint64 dflags,
3078 struct erl_off_heap_header** off_heap)
3079 {
3080 byte *res;
3081 (void) enc_term_int(NULL, acmp, obj, ep, dflags, off_heap, NULL, &res);
3082 return res;
3083 }
3084
3085 static int
enc_term_int(TTBEncodeContext * ctx,ErtsAtomCacheMap * acmp,Eterm obj,byte * ep,Uint64 dflags,struct erl_off_heap_header ** off_heap,Sint * reds,byte ** res)3086 enc_term_int(TTBEncodeContext* ctx, ErtsAtomCacheMap *acmp, Eterm obj, byte* ep,
3087 Uint64 dflags,
3088 struct erl_off_heap_header** off_heap, Sint *reds, byte **res)
3089 {
3090 DECLARE_WSTACK(s);
3091 Uint n;
3092 Uint i;
3093 Uint j;
3094 Uint* ptr;
3095 Eterm val;
3096 FloatDef f;
3097 register Sint r = 0;
3098 int use_iov = 0;
3099
3100 /* The following variables are only used during encoding of
3101 * a map when the `deterministic` option is active. */
3102 Eterm* map_array = NULL;
3103 Eterm* next_map_element = NULL;
3104
3105 if (ctx) {
3106 WSTACK_CHANGE_ALLOCATOR(s, ERTS_ALC_T_SAVED_ESTACK);
3107 r = *reds;
3108 use_iov = !!ctx->iov;
3109
3110 if (ctx->wstack.wstart) { /* restore saved stacks and byte pointer */
3111 WSTACK_RESTORE(s, &ctx->wstack);
3112 ep = ctx->ep;
3113 obj = ctx->obj;
3114 map_array = ctx->map_array;
3115 next_map_element = ctx->next_map_element;
3116 if (is_non_value(obj)) {
3117 goto outer_loop;
3118 }
3119 }
3120 }
3121
3122 goto L_jump_start;
3123
3124 outer_loop:
3125 while (!WSTACK_ISEMPTY(s)) {
3126 obj = WSTACK_POP(s);
3127
3128 switch (val = WSTACK_POP(s)) {
3129 case ENC_TERM:
3130 break;
3131 case ENC_ONE_CONS:
3132 encode_one_cons:
3133 {
3134 Eterm* cons = list_val(obj);
3135 Eterm tl;
3136 Uint len_cnt = WSTACK_POP(s);
3137
3138 obj = CAR(cons);
3139 tl = CDR(cons);
3140 if (is_list(tl)) {
3141 len_cnt++;
3142 WSTACK_PUSH3(s, len_cnt, ENC_ONE_CONS, tl);
3143 }
3144 else {
3145 byte* list_lenp = (byte*) WSTACK_POP(s);
3146 ASSERT(list_lenp[-1] == LIST_EXT);
3147 put_int32(len_cnt, list_lenp);
3148
3149 WSTACK_PUSH2(s, ENC_TERM, tl);
3150 }
3151 }
3152 break;
3153 case ENC_PATCH_FUN_SIZE:
3154 {
3155 byte* size_p = (byte *) obj;
3156 Sint32 sz = ep - size_p;
3157 put_int32(sz, size_p);
3158 }
3159 goto outer_loop;
3160 case ENC_BIN_COPY: {
3161 Uint bits = (Uint)obj;
3162 Uint bitoffs = WSTACK_POP(s);
3163 byte* bytes = (byte*) WSTACK_POP(s);
3164 byte* dst = (byte*) WSTACK_POP(s);
3165 if (bits > r * (TERM_TO_BINARY_MEMCPY_FACTOR * 8)) {
3166 Uint n = r * TERM_TO_BINARY_MEMCPY_FACTOR;
3167 WSTACK_PUSH5(s, (UWord)(dst + n), (UWord)(bytes + n), bitoffs,
3168 ENC_BIN_COPY, bits - 8*n);
3169 bits = 8*n;
3170 copy_binary_to_buffer(dst, 0, bytes, bitoffs, bits);
3171 obj = THE_NON_VALUE;
3172 r = 0; /* yield */
3173 break;
3174 } else {
3175 copy_binary_to_buffer(dst, 0, bytes, bitoffs, bits);
3176 r -= bits / (TERM_TO_BINARY_MEMCPY_FACTOR * 8);
3177 goto outer_loop;
3178 }
3179 }
3180 case ENC_MAP_PAIR: {
3181 Uint pairs_left = obj;
3182 Eterm *vptr = (Eterm*) WSTACK_POP(s);
3183 Eterm *kptr = (Eterm*) WSTACK_POP(s);
3184
3185 obj = *kptr;
3186 if (--pairs_left > 0) {
3187 WSTACK_PUSH4(s, (UWord)(kptr+1), (UWord)(vptr+1),
3188 ENC_MAP_PAIR, pairs_left);
3189 }
3190 WSTACK_PUSH2(s, ENC_TERM, *vptr);
3191 break;
3192 }
3193 case ENC_HASHMAP_NODE:
3194 if (is_list(obj)) { /* leaf node [K|V] */
3195 ptr = list_val(obj);
3196 WSTACK_PUSH2(s, ENC_TERM, CDR(ptr));
3197 obj = CAR(ptr);
3198 }
3199 break;
3200 case ENC_STORE_MAP_ELEMENT: /* option `deterministic` */
3201 if (is_list(obj)) { /* leaf node [K|V] */
3202 ptr = list_val(obj);
3203 *next_map_element++ = CAR(ptr);
3204 *next_map_element++ = CDR(ptr);
3205 goto outer_loop;
3206 }
3207 break;
3208 case ENC_START_SORTING_MAP: /* option `deterministic` */
3209 {
3210 long num_reductions = r;
3211
3212 n = next_map_element - map_array;
3213 ASSERT(n > MAP_SMALL_MAP_LIMIT);
3214 if (ctx == NULL) {
3215 /* No context means that the external representation of term
3216 * being encoded will fit in a heap binary (64 bytes). This
3217 * can only happen in the DEBUG build of the runtime system
3218 * where maps with more than 3 elements are large maps. */
3219 ASSERT(n < 64); /* Conservative assertion. */
3220 qsort(map_array, n/2, 2*sizeof(Eterm),
3221 (int (*)(const void *, const void *)) map_key_compare);
3222 WSTACK_PUSH2(s, ENC_PUSH_SORTED_MAP, THE_NON_VALUE);
3223 goto outer_loop;
3224 } else {
3225 /* Use yieldable qsort since the number of elements
3226 * in the map could be huge. */
3227 num_reductions = r;
3228 ctx->ycf_yield_state = NULL;
3229 erts_qsort_ycf_gen_yielding(&num_reductions,
3230 &ctx->ycf_yield_state,
3231 NULL,
3232 ycf_yield_alloc,
3233 ycf_yield_free,
3234 NULL,
3235 0,
3236 NULL,
3237 map_array, n/2, 2*sizeof(Eterm),
3238 (int (*)(const void *, const void *)) map_key_compare);
3239 if (ctx->ycf_yield_state) {
3240 r = 0;
3241 WSTACK_PUSH2(s, ENC_CONTINUE_SORTING_MAP, THE_NON_VALUE);
3242 break;
3243 } else {
3244 WSTACK_PUSH2(s, ENC_PUSH_SORTED_MAP, THE_NON_VALUE);
3245 r = num_reductions;
3246 goto outer_loop;
3247 }
3248 }
3249 }
3250 case ENC_CONTINUE_SORTING_MAP: /* option `deterministic` */
3251 {
3252 long num_reductions = r;
3253
3254 erts_qsort_ycf_gen_continue(&num_reductions,
3255 &ctx->ycf_yield_state,
3256 NULL);
3257 if (ctx->ycf_yield_state) {
3258 r = 0;
3259 WSTACK_PUSH2(s, ENC_CONTINUE_SORTING_MAP, THE_NON_VALUE);
3260 break;
3261 } else {
3262 WSTACK_PUSH2(s, ENC_PUSH_SORTED_MAP, THE_NON_VALUE);
3263 r = num_reductions;
3264 goto outer_loop;
3265 }
3266 }
3267 case ENC_PUSH_SORTED_MAP: /* option `deterministic` */
3268 {
3269 n = next_map_element - map_array;
3270 WSTACK_RESERVE(s, 2*n);
3271 ptr = next_map_element - 1;
3272 do {
3273 WSTACK_FAST_PUSH(s, ENC_TERM);
3274 WSTACK_FAST_PUSH(s, *ptr);
3275 ptr--;
3276 } while (ptr > map_array);
3277 obj = *ptr;
3278 erts_free(ERTS_ALC_T_T2B_DETERMINISTIC, map_array);
3279 map_array = next_map_element = NULL;
3280 break;
3281 }
3282 case ENC_LAST_ARRAY_ELEMENT:
3283 /* obj is the tuple */
3284 {
3285 Eterm* ptr = (Eterm *) obj;
3286 obj = *ptr;
3287 }
3288 break;
3289 default: /* ENC_LAST_ARRAY_ELEMENT+1 and upwards */
3290 {
3291 Eterm* ptr = (Eterm *) obj;
3292 obj = *ptr++;
3293 WSTACK_PUSH2(s, val-1, (UWord)ptr);
3294 }
3295 break;
3296 }
3297
3298 if (ctx && --r <= 0) {
3299 *reds = 0;
3300 ctx->obj = obj;
3301 ctx->ep = ep;
3302 ctx->map_array = map_array;
3303 ctx->next_map_element = next_map_element;
3304 WSTACK_SAVE(s, &ctx->wstack);
3305 return -1;
3306 }
3307
3308 L_jump_start:
3309 switch(tag_val_def(obj)) {
3310 case NIL_DEF:
3311 *ep++ = NIL_EXT;
3312 break;
3313
3314 case ATOM_DEF:
3315 ep = enc_atom(acmp,obj,ep,dflags);
3316 break;
3317
3318 case SMALL_DEF:
3319 {
3320 /* From R14B we no longer restrict INTEGER_EXT to 28 bits,
3321 * as done earlier for backward compatibility reasons. */
3322 Sint val = signed_val(obj);
3323
3324 if ((Uint)val < 256) {
3325 *ep++ = SMALL_INTEGER_EXT;
3326 put_int8(val, ep);
3327 ep++;
3328 } else if (sizeof(Sint) == 4 || IS_SSMALL32(val)) {
3329 *ep++ = INTEGER_EXT;
3330 put_int32(val, ep);
3331 ep += 4;
3332 } else {
3333 DeclareTmpHeapNoproc(tmp_big,2);
3334 Eterm big;
3335 UseTmpHeapNoproc(2);
3336 big = small_to_big(val, tmp_big);
3337 *ep++ = SMALL_BIG_EXT;
3338 n = big_bytes(big);
3339 ASSERT(n < 256);
3340 put_int8(n, ep);
3341 ep += 1;
3342 *ep++ = big_sign(big);
3343 ep = big_to_bytes(big, ep);
3344 UnUseTmpHeapNoproc(2);
3345 }
3346 }
3347 break;
3348
3349 case BIG_DEF:
3350 {
3351 int sign = big_sign(obj);
3352 n = big_bytes(obj);
3353 if (sizeof(Sint)==4 && n<=4) {
3354 Uint dig = big_digit(obj,0);
3355 Sint val = sign ? -dig : dig;
3356 if ((val<0) == sign) {
3357 *ep++ = INTEGER_EXT;
3358 put_int32(val, ep);
3359 ep += 4;
3360 break;
3361 }
3362 }
3363 if (n < 256) {
3364 *ep++ = SMALL_BIG_EXT;
3365 put_int8(n, ep);
3366 ep += 1;
3367 }
3368 else {
3369 *ep++ = LARGE_BIG_EXT;
3370 put_int32(n, ep);
3371 ep += 4;
3372 }
3373 *ep++ = sign;
3374 ep = big_to_bytes(obj, ep);
3375 }
3376 break;
3377
3378 case PID_DEF:
3379 case EXTERNAL_PID_DEF:
3380 ep = enc_pid(acmp, obj, ep, dflags);
3381 break;
3382
3383 case REF_DEF:
3384 case EXTERNAL_REF_DEF: {
3385 Uint32 *ref_num;
3386 Eterm sysname = (((dflags & DFLAG_ETS_COMPRESSED) && is_internal_ref(obj))
3387 ? INTERNAL_LOCAL_SYSNAME : ref_node_name(obj));
3388 Uint32 creation = ref_creation(obj);
3389
3390 ASSERT(dflags & DFLAG_EXTENDED_REFERENCES);
3391
3392 erts_magic_ref_save_bin(obj);
3393
3394 *ep++ = NEWER_REFERENCE_EXT;
3395 i = ref_no_numbers(obj);
3396 put_int16(i, ep);
3397 ep += 2;
3398 ep = enc_atom(acmp, sysname, ep, dflags);
3399 put_int32(creation, ep);
3400 ep += 4;
3401 ref_num = ref_numbers(obj);
3402 for (j = 0; j < i; j++) {
3403 put_int32(ref_num[j], ep);
3404 ep += 4;
3405 }
3406 break;
3407 }
3408 case PORT_DEF:
3409 case EXTERNAL_PORT_DEF: {
3410 Eterm sysname = (((dflags & DFLAG_ETS_COMPRESSED) && is_internal_port(obj))
3411 ? INTERNAL_LOCAL_SYSNAME : port_node_name(obj));
3412 Uint32 creation = port_creation(obj);
3413 byte *tagp = ep++;
3414 Uint64 num;
3415
3416 ep = enc_atom(acmp, sysname, ep, dflags);
3417 num = port_number(obj);
3418 if (num > ERTS_MAX_V3_PORT_NUMBER) {
3419 *tagp = V4_PORT_EXT;
3420 put_int64(num, ep);
3421 ep += 8;
3422 }
3423 else {
3424 *tagp = NEW_PORT_EXT;
3425 put_int32(num, ep);
3426 ep += 4;
3427 }
3428 put_int32(creation, ep);
3429 ep += 4;
3430 break;
3431 }
3432 case LIST_DEF:
3433 {
3434 if (is_external_string(obj, &i)) {
3435 *ep++ = STRING_EXT;
3436 put_int16(i, ep);
3437 ep += 2;
3438 while (is_list(obj)) {
3439 Eterm* cons = list_val(obj);
3440 *ep++ = unsigned_val(CAR(cons));
3441 obj = CDR(cons);
3442 }
3443 r -= i;
3444 } else {
3445 r -= i/2;
3446 *ep++ = LIST_EXT;
3447 /* Patch list length when we find end of list */
3448 WSTACK_PUSH2(s, (UWord)ep, 1);
3449 ep += 4;
3450 goto encode_one_cons;
3451 }
3452 }
3453 break;
3454
3455 case TUPLE_DEF:
3456 ptr = tuple_val(obj);
3457 i = arityval(*ptr);
3458 ptr++;
3459 if (i <= 0xff) {
3460 *ep++ = SMALL_TUPLE_EXT;
3461 put_int8(i, ep);
3462 ep += 1;
3463 } else {
3464 *ep++ = LARGE_TUPLE_EXT;
3465 put_int32(i, ep);
3466 ep += 4;
3467 }
3468 if (i > 0) {
3469 WSTACK_PUSH2(s, ENC_LAST_ARRAY_ELEMENT+i-1, (UWord)ptr);
3470 }
3471 break;
3472
3473 case MAP_DEF:
3474 if (is_flatmap(obj)) {
3475 flatmap_t *mp = (flatmap_t*)flatmap_val(obj);
3476 Uint size = flatmap_get_size(mp);
3477
3478 *ep++ = MAP_EXT;
3479 put_int32(size, ep); ep += 4;
3480
3481 if (size > 0) {
3482 Eterm *kptr = flatmap_get_keys(mp);
3483 Eterm *vptr = flatmap_get_values(mp);
3484
3485 WSTACK_PUSH4(s, (UWord)kptr, (UWord)vptr,
3486 ENC_MAP_PAIR, size);
3487 }
3488 } else {
3489 Eterm hdr;
3490 Uint node_sz;
3491 Eterm node_processor;
3492 ptr = boxed_val(obj);
3493 hdr = *ptr;
3494 ASSERT(is_header(hdr));
3495
3496 switch(hdr & _HEADER_MAP_SUBTAG_MASK) {
3497 case HAMT_SUBTAG_HEAD_ARRAY:
3498 *ep++ = MAP_EXT;
3499 ptr++;
3500 put_int32(*ptr, ep); ep += 4;
3501 if (dflags & DFLAG_DETERMINISTIC) {
3502 /* Option `deterministic`: Note that we
3503 * process large maps in a breadth-first
3504 * order, that is, we push all keys and values
3505 * to the stack and deallocate the map array
3506 * before encoding any of the keys and
3507 * values. That means that when we find a
3508 * large map in key or value of an outer map,
3509 * the map array for the outer map has already
3510 * been deallocated. */
3511
3512 ASSERT(map_array == NULL);
3513 next_map_element = map_array = alloc_map_array(*ptr);
3514 WSTACK_PUSH2(s, ENC_START_SORTING_MAP, THE_NON_VALUE);
3515 }
3516 node_sz = 16;
3517 break;
3518 case HAMT_SUBTAG_HEAD_BITMAP:
3519 *ep++ = MAP_EXT;
3520 ptr++;
3521 put_int32(*ptr, ep); ep += 4;
3522 if (dflags & DFLAG_DETERMINISTIC) {
3523 ASSERT(map_array == NULL);
3524 next_map_element = map_array = alloc_map_array(*ptr);
3525 WSTACK_PUSH2(s, ENC_START_SORTING_MAP, THE_NON_VALUE);
3526 }
3527 /*fall through*/
3528 case HAMT_SUBTAG_NODE_BITMAP:
3529 node_sz = hashmap_bitcount(MAP_HEADER_VAL(hdr));
3530 ASSERT(node_sz < 17);
3531 break;
3532 default:
3533 erts_exit(ERTS_ERROR_EXIT, "bad header\r\n");
3534 }
3535
3536 ptr++;
3537 node_processor = (dflags & DFLAG_DETERMINISTIC) ?
3538 ENC_STORE_MAP_ELEMENT : ENC_HASHMAP_NODE;
3539 WSTACK_RESERVE(s, node_sz*2);
3540 while(node_sz--) {
3541 WSTACK_FAST_PUSH(s, node_processor);
3542 WSTACK_FAST_PUSH(s, *ptr++);
3543 }
3544 }
3545 break;
3546 case FLOAT_DEF:
3547 GET_DOUBLE(obj, f);
3548 if (dflags & DFLAG_NEW_FLOATS) {
3549 *ep++ = NEW_FLOAT_EXT;
3550 #if defined(WORDS_BIGENDIAN) || defined(DOUBLE_MIDDLE_ENDIAN)
3551 put_int32(f.fw[0], ep);
3552 ep += 4;
3553 put_int32(f.fw[1], ep);
3554 #else
3555 put_int32(f.fw[1], ep);
3556 ep += 4;
3557 put_int32(f.fw[0], ep);
3558 #endif
3559 ep += 4;
3560 } else {
3561 *ep++ = FLOAT_EXT;
3562
3563 /* now the erts_snprintf which does the work */
3564 i = sys_double_to_chars(f.fd, (char*) ep, (size_t)31);
3565
3566 /* Don't leave garbage after the float */
3567 sys_memset(ep+i, 0, 31-i);
3568 ep += 31;
3569 }
3570 break;
3571
3572 case BINARY_DEF:
3573 {
3574 Uint bitoffs;
3575 Uint bitsize;
3576 byte* bytes;
3577 byte* data_dst;
3578 Uint off_heap_bytesize = 0;
3579 Uint off_heap_tail;
3580 Eterm pb_term;
3581 Binary *pb_val;
3582
3583 ASSERT(!(dflags & DFLAG_PENDING_CONNECT) || (ctx && ctx->iov));
3584
3585 ERTS_GET_BINARY_BYTES(obj, bytes, bitoffs, bitsize);
3586 if (use_iov) {
3587 if (bitoffs == 0) {
3588 ProcBin* pb = (ProcBin*) binary_val(obj);
3589 off_heap_bytesize = pb->size;
3590 if (off_heap_bytesize <= ERL_ONHEAP_BIN_LIMIT)
3591 off_heap_bytesize = 0;
3592 else {
3593 pb_term = obj;
3594 if (pb->thing_word == HEADER_SUB_BIN) {
3595 ErlSubBin* sub = (ErlSubBin*)pb;
3596 pb_term = sub->orig;
3597 pb = (ProcBin*) binary_val(pb_term);
3598 }
3599 if (pb->thing_word != HEADER_PROC_BIN)
3600 off_heap_bytesize = 0;
3601 else {
3602 if (pb->flags) {
3603 char* before_realloc = pb->val->orig_bytes;
3604 erts_emasculate_writable_binary(pb);
3605 bytes += (pb->val->orig_bytes - before_realloc);
3606 ASSERT((byte *) &pb->val->orig_bytes[0] <= bytes
3607 && bytes < ((byte *) &pb->val->orig_bytes[0]
3608 + pb->val->orig_size));
3609 }
3610 pb_val = pb->val;
3611 }
3612 }
3613 }
3614 }
3615 else if (dflags & DFLAG_ETS_COMPRESSED) {
3616 ProcBin* pb = (ProcBin*) binary_val(obj);
3617 Uint bytesize = pb->size;
3618 if (pb->thing_word == HEADER_SUB_BIN) {
3619 ErlSubBin* sub = (ErlSubBin*)pb;
3620 pb = (ProcBin*) binary_val(sub->orig);
3621 ASSERT(bytesize == sub->size);
3622 bytesize += (bitoffs + bitsize + 7) / 8;
3623 }
3624 if (pb->thing_word == HEADER_PROC_BIN
3625 && heap_bin_size(bytesize) > PROC_BIN_SIZE) {
3626 ProcBin tmp;
3627 if (bitoffs || bitsize) {
3628 *ep++ = BIT_BINARY_INTERNAL_REF;
3629 *ep++ = bitoffs;
3630 *ep++ = bitsize;
3631 }
3632 else {
3633 *ep++ = BINARY_INTERNAL_REF;
3634 }
3635 if (pb->flags) {
3636 char* before_realloc = pb->val->orig_bytes;
3637 erts_emasculate_writable_binary(pb);
3638 bytes += (pb->val->orig_bytes - before_realloc);
3639 }
3640 erts_refc_inc(&pb->val->intern.refc, 2);
3641
3642 sys_memcpy(&tmp, pb, sizeof(ProcBin));
3643 tmp.next = *off_heap;
3644 tmp.bytes = bytes;
3645 tmp.size = bytesize;
3646 sys_memcpy(ep, &tmp, sizeof(ProcBin));
3647 *off_heap = (struct erl_off_heap_header*) ep;
3648 ep += sizeof(ProcBin);
3649 break;
3650 }
3651 }
3652 if (bitsize == 0) {
3653 /* Plain old byte-sized binary. */
3654 *ep++ = BINARY_EXT;
3655 j = binary_size(obj);
3656 put_int32(j, ep);
3657 ep += 4;
3658 if (off_heap_bytesize)
3659 off_heap_tail = 0;
3660 else {
3661 data_dst = ep;
3662 ep += j;
3663 }
3664 } else if (dflags & DFLAG_BIT_BINARIES) {
3665 /* Bit-level binary. */
3666 if (dflags & DFLAG_PENDING_CONNECT) {
3667 ASSERT(ctx);
3668 j = off_heap_bytesize;
3669 if (!j) {
3670 pb_val = NULL;
3671 pb_term = THE_NON_VALUE;
3672 j = binary_size(obj);
3673 }
3674 data_dst = hopefull_bit_binary(ctx, &ep, pb_val, pb_term,
3675 bytes, bitoffs, bitsize, j);
3676 if (!data_dst)
3677 break; /* off heap binary referred... */
3678 ASSERT(!off_heap_bytesize);
3679 off_heap_tail = 0;
3680 /*
3681 * Trailing bits already written by hopefull_bit_binary();
3682 * now go copy all whole octets...
3683 */
3684 bitsize = 0;
3685 }
3686 else {
3687 *ep++ = BIT_BINARY_EXT;
3688 j = binary_size(obj);
3689 put_int32((j+1), ep);
3690 ep += 4;
3691 *ep++ = bitsize;
3692 if (off_heap_bytesize) {
3693 /* trailing bits */
3694 ep[0] = 0;
3695 copy_binary_to_buffer(ep, 0, bytes + j, 0, bitsize);
3696 off_heap_tail = 1;
3697 }
3698 else {
3699 ep[j] = 0; /* Zero unused bits at end of binary */
3700 data_dst = ep;
3701 ep += j + 1;
3702 }
3703 }
3704 } else {
3705 /*
3706 * Bit-level binary, but the receiver doesn't support it.
3707 * Build a tuple instead.
3708 */
3709 *ep++ = SMALL_TUPLE_EXT;
3710 *ep++ = 2;
3711 *ep++ = BINARY_EXT;
3712 j = binary_size(obj);
3713 put_int32((j+1), ep);
3714 ep += 4;
3715
3716 if (off_heap_bytesize) {
3717 /* trailing bits */
3718 ep[0] = 0;
3719 copy_binary_to_buffer(ep, 0, bytes + j, 0, bitsize);
3720 ep[1] = SMALL_INTEGER_EXT;
3721 ep[2] = bitsize;
3722 off_heap_tail = 3;
3723 }
3724 else {
3725 ep[j] = 0; /* Zero unused bits at end of binary */
3726 data_dst = ep;
3727 ep += j+1;
3728 *ep++ = SMALL_INTEGER_EXT;
3729 *ep++ = bitsize;
3730 }
3731 }
3732 if (off_heap_bytesize) {
3733 ASSERT(pb_val);
3734 store_in_vec(ctx, ep, pb_val, pb_term,
3735 bytes, off_heap_bytesize);
3736 ep += off_heap_tail;
3737 }
3738 else if (ctx && j > r * TERM_TO_BINARY_MEMCPY_FACTOR) {
3739 WSTACK_PUSH5(s, (UWord)data_dst, (UWord)bytes, bitoffs,
3740 ENC_BIN_COPY, 8*j + bitsize);
3741 } else {
3742 copy_binary_to_buffer(data_dst, 0, bytes, bitoffs,
3743 8 * j + bitsize);
3744 }
3745 }
3746 break;
3747 case EXPORT_DEF:
3748 {
3749 Export* exp = *((Export **) (export_val(obj) + 1));
3750 if (dflags & DFLAG_PENDING_CONNECT) {
3751 ASSERT(ctx);
3752 hopefull_export(ctx, &ep, exp, dflags, off_heap);
3753 }
3754 else if ((dflags & DFLAG_EXPORT_PTR_TAG) != 0) {
3755 *ep++ = EXPORT_EXT;
3756 ep = enc_atom(acmp, exp->info.mfa.module, ep, dflags);
3757 ep = enc_atom(acmp, exp->info.mfa.function, ep, dflags);
3758 ep = enc_term(acmp, make_small(exp->info.mfa.arity),
3759 ep, dflags, off_heap);
3760 } else {
3761 /* Tag, arity */
3762 *ep++ = SMALL_TUPLE_EXT;
3763 put_int8(2, ep);
3764 ep += 1;
3765
3766 /* Module name */
3767 ep = enc_atom(acmp, exp->info.mfa.module, ep, dflags);
3768
3769 /* Function name */
3770 ep = enc_atom(acmp, exp->info.mfa.function, ep, dflags);
3771 }
3772 break;
3773 }
3774 break;
3775 case FUN_DEF:
3776 {
3777 ErlFunThing* funp = (ErlFunThing *) fun_val(obj);
3778 int ei;
3779
3780 ASSERT(dflags & DFLAG_NEW_FUN_TAGS);
3781 *ep++ = NEW_FUN_EXT;
3782 WSTACK_PUSH2(s, ENC_PATCH_FUN_SIZE,
3783 (UWord) ep); /* Position for patching in size */
3784 ep += 4;
3785 *ep = funp->arity;
3786 ep += 1;
3787 sys_memcpy(ep, funp->fe->uniq, 16);
3788 ep += 16;
3789 put_int32(funp->fe->index, ep);
3790 ep += 4;
3791 put_int32(funp->num_free, ep);
3792 ep += 4;
3793 ep = enc_atom(acmp, funp->fe->module, ep, dflags);
3794 ep = enc_term(acmp, make_small(funp->fe->old_index), ep, dflags, off_heap);
3795 ep = enc_term(acmp, make_small(funp->fe->old_uniq), ep, dflags, off_heap);
3796 ep = enc_pid(acmp, funp->creator, ep, dflags);
3797
3798 for (ei = funp->num_free-1; ei >= 0; ei--) {
3799 WSTACK_PUSH2(s, ENC_TERM, (UWord) funp->env[ei]);
3800 }
3801 }
3802 break;
3803 }
3804 }
3805 DESTROY_WSTACK(s);
3806 if (ctx) {
3807 ASSERT(ctx->wstack.wstart == NULL);
3808 *reds = r;
3809 if (use_iov)
3810 store_in_vec(ctx, ep, NULL, THE_NON_VALUE, NULL, 0);
3811 }
3812 *res = ep;
3813 return 0;
3814 }
3815
/*
 * Append 'len' bytes starting at 'ptr' (which must lie inside binary
 * 'bin') to the I/O vector being built up in 'ctx'.
 *
 * The data is split over distribution fragments of at most
 * ctx->fragment_size bytes each and, within a fragment, over SysIOVec
 * entries of at most MAX_SYSIOVEC_IOVLEN bytes each. Each new fragment
 * reserves two leading iov slots (driver header and dist header) that
 * are filled in later.
 *
 * Reference counting: if ctx->termv is used, 'term' is saved so the
 * term keeps the binary alive; otherwise the binary's refc is bumped
 * once per iov entry referencing it.
 */
static ERTS_INLINE void
store_in_vec_aux(TTBEncodeContext *ctx,
                 Binary *bin,
                 Eterm term,
                 byte *ptr,
                 Uint len)
{
    ErlDrvBinary *dbin = Binary2ErlDrvBinary(bin);
    int vlen = ctx->vlen;
    Uint iov_len;
    ErlIOVec *feiovp;

    /* 'ptr'/'len' must address bytes owned by 'bin'... */
    ASSERT(((byte *) &bin->orig_bytes[0]) <= ptr);
    ASSERT(ptr + len <= ((byte *) &bin->orig_bytes[0]) + bin->orig_size);

    if (ctx->frag_ix >= 0) {
        feiovp = &ctx->fragment_eiovs[ctx->frag_ix];
        ASSERT(0 < feiovp->size);
        ASSERT(feiovp->size <= ctx->fragment_size);
        if (feiovp->size != ctx->fragment_size) {
            /* current fragment not full yet... fill it up first;
             * NOTE: jumps into the middle of the loop below */
            iov_len = ctx->fragment_size - feiovp->size;
            if (len < iov_len)
                iov_len = len;
            goto store_iov_data;
        }
    }

    while (len) {
        /* Start new fragment... */
        ctx->frag_ix++;
        feiovp = &ctx->fragment_eiovs[ctx->frag_ix];
        ASSERT(ctx->frag_ix >= 0);

        if (ctx->termv) {
            /* no terms associated with the two header slots */
            ctx->termv[vlen] = THE_NON_VALUE;
            ctx->termv[vlen+1] = THE_NON_VALUE;
        }

        feiovp->vsize = 2;
        feiovp->size = 0;
        feiovp->iov = &ctx->iov[vlen];
        feiovp->binv = &ctx->binv[vlen];

        /* entry for driver header */
        ctx->iov[vlen].iov_base = NULL;
        ctx->iov[vlen].iov_len = 0;
        ctx->binv[vlen] = NULL;
        vlen++;

        /* entry for dist header */
        ctx->iov[vlen].iov_base = NULL;
        ctx->iov[vlen].iov_len = 0;
        ctx->binv[vlen] = NULL;
        vlen++;

        iov_len = len < ctx->fragment_size ? len : ctx->fragment_size;

    store_iov_data:

        ASSERT(iov_len);

        /* Emit 'iov_len' bytes into the current fragment, splitting
         * into multiple iov entries if it exceeds what one SysIOVec
         * entry can carry... */
        do {
            Uint iov_len_left;

            if (iov_len <= MAX_SYSIOVEC_IOVLEN)
                iov_len_left = 0;
            else {
                iov_len_left = iov_len - MAX_SYSIOVEC_IOVLEN;
                iov_len = MAX_SYSIOVEC_IOVLEN;
            }

            ctx->iov[vlen].iov_base = ptr;
            ctx->iov[vlen].iov_len = iov_len;
            ctx->binv[vlen] = dbin;
            if (ctx->termv)
                ctx->termv[vlen] = term;
            else
                erts_refc_inc(&bin->intern.refc, 2);
            ctx->size += iov_len;
            len -= iov_len;
            ptr += iov_len;
            vlen++;
            feiovp->size += iov_len;
            feiovp->vsize++;

            iov_len = iov_len_left;
        } while (iov_len);
    }

    ctx->vlen = vlen;
}
3908
3909 static void
store_in_vec(TTBEncodeContext * ctx,byte * ep,Binary * ohbin,Eterm ohpb,byte * ohp,Uint ohsz)3910 store_in_vec(TTBEncodeContext *ctx,
3911 byte *ep,
3912 Binary *ohbin,
3913 Eterm ohpb,
3914 byte *ohp,
3915 Uint ohsz)
3916 {
3917 byte *cp = ctx->cptr;
3918 if (cp != ep) {
3919 /* save data in common binary... */
3920 store_in_vec_aux(ctx,
3921 ctx->result_bin,
3922 THE_NON_VALUE,
3923 cp,
3924 ep - cp);
3925 ASSERT(ctx->vlen <= ctx->debug_vlen);
3926 ASSERT(ctx->frag_ix <= ctx->debug_fragments);
3927 ctx->cptr = ep;
3928 }
3929 if (ohbin) {
3930 /* save off-heap binary... */
3931 store_in_vec_aux(ctx,
3932 ohbin,
3933 ohpb,
3934 ohp,
3935 ohsz);
3936 ASSERT(ctx->vlen <= ctx->debug_vlen);
3937 ASSERT(ctx->frag_ix <= ctx->debug_fragments);
3938 }
3939 }
3940
3941 static byte *
begin_hopefull_data(TTBEncodeContext * ctx,byte * ep)3942 begin_hopefull_data(TTBEncodeContext *ctx, byte *ep)
3943 {
3944 store_in_vec(ctx, ep, NULL, THE_NON_VALUE, NULL, 0);
3945 ASSERT(ERTS_NO_HIX == get_uint32(ctx->hopefull_ixp));
3946 put_int32(ctx->vlen, ctx->hopefull_ixp);
3947 ctx->hopefull_ixp = ep;
3948 put_int32(ERTS_NO_HIX, ep);
3949 ep += 4;
3950 ctx->cptr = ep;
3951 return ep;
3952 }
3953
3954 static byte *
end_hopefull_data(TTBEncodeContext * ctx,byte * ep,Uint fallback_size)3955 end_hopefull_data(TTBEncodeContext *ctx, byte *ep, Uint fallback_size)
3956 {
3957 Uint sz;
3958 store_in_vec(ctx, ep, NULL, THE_NON_VALUE, NULL, 0);
3959 /*
3960 * Reserve extra room for fallback if needed. The four
3961 * bytes used for hopefull index can be used for
3962 * fallback encoding...
3963 */
3964 sz = ep - ctx->hopefull_ixp;
3965 if (fallback_size > sz) {
3966 ep += fallback_size - sz;
3967 ctx->cptr = ep;
3968 }
3969 return ep;
3970 }
3971
/*
 * Encode a bit-level (not byte aligned) binary as "hopefull" data,
 * i.e. data the receiver may not support (DFLAG_BIT_BINARIES). The
 * prelude and trailing bits are emitted as separate hopefull elements
 * so they can later be rewritten into the fallback tuple encoding
 * documented below, if the connection setup reveals the peer lacks
 * support.
 *
 * Returns a pointer into the buffer where the caller should copy the
 * 'sz' whole octets, or NULL when the octets were referenced directly
 * as the off-heap binary 'pb_val'/'pb_term' in the I/O vector.
 */
static byte *
hopefull_bit_binary(TTBEncodeContext* ctx, byte **epp, Binary *pb_val, Eterm pb_term,
                    byte *bytes, byte bitoffs, byte bitsize, Uint sz)
{
    byte *octets, *ep = *epp;

    ctx->hopefull_flags |= DFLAG_BIT_BINARIES;

    /*
     * The fallback:
     *
     * SMALL_TUPLE_EXT		- 1 byte
     * 2			- 1 byte
     * BINARY_EXT		- 1 byte
     * whole octet size ('sz')	- 4 byte
     * whole octets		- 'sz' bytes
     * trailing bits		- 1 byte
     * SMALL_INTEGER_EXT	- 1 byte
     * bitsize			- 1 byte
     */

    /* bit binary prelude in one hopefull data element */
    ep = begin_hopefull_data(ctx, ep);
    *ep++ = BIT_BINARY_EXT;
    put_int32((sz+1), ep);
    ep += 4;
    *ep++ = bitsize;
    /* fallback needs SMALL_TUPLE_EXT + 2 + BINARY_EXT + 4-byte size */
    ep = end_hopefull_data(ctx, ep, 1+1+1+4);

    /* All whole octets... */
    if (pb_val) {
        /* ...referenced zero-copy from the off-heap binary */
        octets = NULL;
        store_in_vec(ctx, ep, pb_val, pb_term, bytes, sz);
    }
    else {
	/* ... will be copied here afterwards */
	octets = ep;
	ep += sz;
    }

    /* copy trailing bits into new hopefull data element */
    ep = begin_hopefull_data(ctx, ep);
    *ep = 0; /* Clear the bit in the byte */

    copy_binary_to_buffer(ep, 0, bytes + sz, bitoffs, bitsize);
    ep++;

    /* fallback needs trailing-bits byte + SMALL_INTEGER_EXT + bitsize */
    ep = end_hopefull_data(ctx, ep, 1+1+1);
    *epp = ep;

    return octets;
}
4024
4025 static void
hopefull_export(TTBEncodeContext * ctx,byte ** epp,Export * exp,Uint32 dflags,struct erl_off_heap_header ** off_heap)4026 hopefull_export(TTBEncodeContext* ctx, byte **epp, Export* exp, Uint32 dflags,
4027 struct erl_off_heap_header** off_heap)
4028 {
4029 Uint fallback_sz;
4030 byte *ep = *epp, *mod_start;
4031
4032 /*
4033 * The fallback:
4034 *
4035 * SMALL_TUPLE_EXT - 1 byte
4036 * 2 - 1 byte
4037 * module atom... - M bytes
4038 * function atom... - F bytes
4039 */
4040
4041 ctx->hopefull_flags |= DFLAG_EXPORT_PTR_TAG;
4042
4043 ep = begin_hopefull_data(ctx, ep);
4044
4045 *ep++ = EXPORT_EXT;
4046 mod_start = ep;
4047 ep = enc_atom(NULL, exp->info.mfa.module, ep, dflags);
4048 ep = enc_atom(NULL, exp->info.mfa.function, ep, dflags);
4049 fallback_sz = 2 + (ep - mod_start);
4050 ep = enc_term(NULL, make_small(exp->info.mfa.arity),
4051 ep, dflags, off_heap);
4052
4053 ep = end_hopefull_data(ctx, ep, fallback_sz);
4054
4055 *epp = ep;
4056 }
4057
4058 /** @brief Is it a list of bytes not longer than MAX_STRING_LEN?
4059 * @param lenp out: string length or number of list cells traversed
4060 * @return true/false
4061 */
4062 static
4063 int
is_external_string(Eterm list,Uint * lenp)4064 is_external_string(Eterm list, Uint* lenp)
4065 {
4066 Uint len = 0;
4067
4068 /*
4069 * Calculate the length of the list as long as all characters
4070 * are integers from 0 through 255.
4071 */
4072 while (is_list(list)) {
4073 Eterm* consp = list_val(list);
4074 Eterm hd = CAR(consp);
4075
4076 if (!is_byte(hd) || ++len > MAX_STRING_LEN) {
4077 *lenp = len;
4078 return 0;
4079 }
4080 list = CDR(consp);
4081 }
4082
4083 *lenp = len;
4084 return is_nil(list);
4085 }
4086
4087
/* Per-hashmap decode state kept on the 'hamt_array' PSTACK in
 * dec_term() while a hashmap's leafs are being decoded. */
struct dec_term_hamt
{
    Eterm* objp; /* write result here */
    Uint size;   /* nr of leafs */
    Eterm* leaf_array; /* decoded key/value leafs the map is built from */
};
4094
4095
4096 /* Decode term from external format into *objp.
4097 ** On failure calls erts_factory_undo() and returns NULL
4098 */
4099 static const byte*
dec_term(ErtsDistExternal * edep,ErtsHeapFactory * factory,const byte * ep,Eterm * objp,B2TContext * ctx,int ets_decode)4100 dec_term(ErtsDistExternal *edep,
4101 ErtsHeapFactory* factory,
4102 const byte* ep,
4103 Eterm* objp,
4104 B2TContext* ctx,
4105 int ets_decode)
4106 {
4107 #define PSTACK_TYPE struct dec_term_hamt
4108 PSTACK_DECLARE(hamt_array, 5);
4109 int n;
4110 ErtsAtomEncoding char_enc;
4111 register Eterm* hp; /* Please don't take the address of hp */
4112 DECLARE_WSTACK(flat_maps); /* for preprocessing of small maps */
4113 Eterm* next;
4114 SWord reds;
4115 #ifdef DEBUG
4116 Eterm* dbg_resultp = ctx ? &ctx->u.dc.res : objp;
4117 #endif
4118
4119 if (ctx) {
4120 reds = ctx->reds;
4121 next = ctx->u.dc.next;
4122 ep = ctx->u.dc.ep;
4123 factory = &ctx->u.dc.factory;
4124
4125 if (ctx->state != B2TDecode) {
4126 int n_limit = reds;
4127
4128 n = ctx->u.dc.remaining_n;
4129 if (ctx->state == B2TDecodeBinary) {
4130 n_limit *= B2T_MEMCPY_FACTOR;
4131 ASSERT(n_limit >= reds);
4132 reds -= n / B2T_MEMCPY_FACTOR;
4133 }
4134 else
4135 reds -= n;
4136
4137 if (n > n_limit) {
4138 ctx->u.dc.remaining_n -= n_limit;
4139 n = n_limit;
4140 reds = 0;
4141 }
4142 else {
4143 ctx->u.dc.remaining_n = 0;
4144 }
4145
4146 switch (ctx->state) {
4147 case B2TDecodeList:
4148 objp = next - 2;
4149 while (n > 0) {
4150 objp[0] = (Eterm) next;
4151 objp[1] = make_list(next);
4152 next = objp;
4153 objp -= 2;
4154 n--;
4155 }
4156 break;
4157
4158 case B2TDecodeTuple:
4159 objp = next - 1;
4160 while (n-- > 0) {
4161 objp[0] = (Eterm) next;
4162 next = objp;
4163 objp--;
4164 }
4165 break;
4166
4167 case B2TDecodeString:
4168 hp = factory->hp;
4169 hp[-1] = make_list(hp); /* overwrite the premature NIL */
4170 while (n-- > 0) {
4171 hp[0] = make_small(*ep++);
4172 hp[1] = make_list(hp+2);
4173 hp += 2;
4174 }
4175 hp[-1] = NIL;
4176 factory->hp = hp;
4177 break;
4178
4179 case B2TDecodeBinary:
4180 sys_memcpy(ctx->u.dc.remaining_bytes, ep, n);
4181 ctx->u.dc.remaining_bytes += n;
4182 ep += n;
4183 break;
4184
4185 default:
4186 ASSERT(!"Unknown state");
4187 }
4188 if (!ctx->u.dc.remaining_n) {
4189 ctx->state = B2TDecode;
4190 }
4191 if (reds <= 0) {
4192 ctx->u.dc.next = next;
4193 ctx->u.dc.ep = ep;
4194 ctx->reds = 0;
4195 return NULL;
4196 }
4197 }
4198 PSTACK_CHANGE_ALLOCATOR(hamt_array, ERTS_ALC_T_SAVED_ESTACK);
4199 WSTACK_CHANGE_ALLOCATOR(flat_maps, ERTS_ALC_T_SAVED_ESTACK);
4200 if (ctx->u.dc.hamt_array.pstart) {
4201 PSTACK_RESTORE(hamt_array, &ctx->u.dc.hamt_array);
4202 }
4203 if (ctx->u.dc.flat_maps.wstart) {
4204 WSTACK_RESTORE(flat_maps, &ctx->u.dc.flat_maps);
4205 }
4206 }
4207 else {
4208 reds = ERTS_SWORD_MAX;
4209 next = objp;
4210 *next = (Eterm) (UWord) NULL;
4211 }
4212 hp = factory->hp;
4213
4214 while (next != NULL) {
4215
4216 objp = next;
4217 next = (Eterm *) *objp;
4218
4219 switch (*ep++) {
4220 case INTEGER_EXT:
4221 {
4222 Sint sn = get_int32(ep);
4223
4224 ep += 4;
4225 #if defined(ARCH_64)
4226 *objp = make_small(sn);
4227 #else
4228 if (IS_SSMALL(sn)) {
4229 *objp = make_small(sn);
4230 } else {
4231 *objp = small_to_big(sn, hp);
4232 hp += BIG_UINT_HEAP_SIZE;
4233 }
4234 #endif
4235 break;
4236 }
4237 case SMALL_INTEGER_EXT:
4238 n = get_int8(ep);
4239 ep++;
4240 *objp = make_small(n);
4241 break;
4242 case SMALL_BIG_EXT:
4243 n = get_int8(ep);
4244 ep++;
4245 goto big_loop;
4246 case LARGE_BIG_EXT:
4247 n = get_int32(ep);
4248 ep += 4;
4249 big_loop:
4250 {
4251 Eterm big;
4252 const byte* first;
4253 const byte* last;
4254 Uint neg;
4255
4256 neg = get_int8(ep); /* Sign bit */
4257 ep++;
4258
4259 /*
4260 * Strip away leading zeroes to avoid creating illegal bignums.
4261 */
4262 first = ep;
4263 last = ep + n;
4264 ep += n;
4265 do {
4266 --last;
4267 } while (first <= last && *last == 0);
4268
4269 if ((n = last - first + 1) == 0) {
4270 /* Zero width bignum defaults to zero */
4271 big = make_small(0);
4272 } else {
4273 big = bytes_to_big(first, n, neg, hp);
4274 if (is_nil(big))
4275 goto error;
4276 if (is_big(big)) {
4277 hp += big_arity(big) + 1;
4278 }
4279 }
4280 *objp = big;
4281 break;
4282 }
4283 case ATOM_CACHE_REF:
4284 if (edep == 0 || (edep->flags & ERTS_DIST_EXT_ATOM_TRANS_TAB) == 0) {
4285 goto error;
4286 }
4287 n = get_int8(ep);
4288 ep++;
4289 if (n >= edep->attab.size)
4290 goto error;
4291 ASSERT(is_atom(edep->attab.atom[n]));
4292 *objp = edep->attab.atom[n];
4293 break;
4294 case ATOM_EXT:
4295 n = get_int16(ep);
4296 ep += 2;
4297 char_enc = ERTS_ATOM_ENC_LATIN1;
4298 goto dec_term_atom_common;
4299 case SMALL_ATOM_EXT:
4300 n = get_int8(ep);
4301 ep++;
4302 char_enc = ERTS_ATOM_ENC_LATIN1;
4303 goto dec_term_atom_common;
4304 case ATOM_UTF8_EXT:
4305 n = get_int16(ep);
4306 ep += 2;
4307 char_enc = ERTS_ATOM_ENC_UTF8;
4308 goto dec_term_atom_common;
4309 case SMALL_ATOM_UTF8_EXT:
4310 n = get_int8(ep);
4311 ep++;
4312 char_enc = ERTS_ATOM_ENC_UTF8;
4313 dec_term_atom_common:
4314 if (edep && (edep->flags & ERTS_DIST_EXT_BTT_SAFE)) {
4315 if (!erts_atom_get((char*)ep, n, objp, char_enc)) {
4316 goto error;
4317 }
4318 } else {
4319 Eterm atom = erts_atom_put(ep, n, char_enc, 0);
4320 if (is_non_value(atom))
4321 goto error;
4322 *objp = atom;
4323 }
4324 ep += n;
4325 break;
4326 case LARGE_TUPLE_EXT:
4327 n = get_int32(ep);
4328 ep += 4;
4329 goto tuple_loop;
4330 case SMALL_TUPLE_EXT:
4331 n = get_int8(ep);
4332 ep++;
4333 tuple_loop:
4334 *objp = make_tuple(hp);
4335 *hp++ = make_arityval(n);
4336 hp += n;
4337 objp = hp - 1;
4338 if (ctx) {
4339 if (reds < n) {
4340 ASSERT(reds > 0);
4341 ctx->state = B2TDecodeTuple;
4342 ctx->u.dc.remaining_n = n - reds;
4343 n = reds;
4344 }
4345 reds -= n;
4346 }
4347 while (n-- > 0) {
4348 objp[0] = (Eterm) next;
4349 next = objp;
4350 objp--;
4351 }
4352 break;
4353 case NIL_EXT:
4354 *objp = NIL;
4355 break;
4356 case LIST_EXT:
4357 n = get_int32(ep);
4358 ep += 4;
4359 if (n == 0) {
4360 next = objp;
4361 break;
4362 }
4363 *objp = make_list(hp);
4364 hp += 2 * n;
4365 objp = hp - 2;
4366 objp[0] = (Eterm) (objp+1);
4367 objp[1] = (Eterm) next;
4368 next = objp;
4369 objp -= 2;
4370 n--;
4371 if (ctx) {
4372 if (reds < n) {
4373 ASSERT(reds > 0);
4374 ctx->state = B2TDecodeList;
4375 ctx->u.dc.remaining_n = n - reds;
4376 n = reds;
4377 }
4378 reds -= n;
4379 }
4380 while (n > 0) {
4381 objp[0] = (Eterm) next;
4382 objp[1] = make_list(next);
4383 next = objp;
4384 objp -= 2;
4385 n--;
4386 }
4387 break;
4388 case STRING_EXT:
4389 n = get_int16(ep);
4390 ep += 2;
4391 if (n == 0) {
4392 *objp = NIL;
4393 break;
4394 }
4395 *objp = make_list(hp);
4396 if (ctx) {
4397 if (reds < n) {
4398 ctx->state = B2TDecodeString;
4399 ctx->u.dc.remaining_n = n - reds;
4400 n = reds;
4401 }
4402 reds -= n;
4403 }
4404 while (n-- > 0) {
4405 hp[0] = make_small(*ep++);
4406 hp[1] = make_list(hp+2);
4407 hp += 2;
4408 }
4409 hp[-1] = NIL;
4410 break;
4411 case FLOAT_EXT:
4412 {
4413 FloatDef ff;
4414
4415 if (sys_chars_to_double((char*)ep, &ff.fd) != 0) {
4416 goto error;
4417 }
4418 ep += 31;
4419 *objp = make_float(hp);
4420 PUT_DOUBLE(ff, hp);
4421 hp += FLOAT_SIZE_OBJECT;
4422 break;
4423 }
4424 case NEW_FLOAT_EXT:
4425 {
4426 FloatDef ff;
4427
4428 #if defined(WORDS_BIGENDIAN) || defined(DOUBLE_MIDDLE_ENDIAN)
4429 ff.fw[0] = get_int32(ep);
4430 ep += 4;
4431 ff.fw[1] = get_int32(ep);
4432 ep += 4;
4433 #else
4434 ff.fw[1] = get_int32(ep);
4435 ep += 4;
4436 ff.fw[0] = get_int32(ep);
4437 ep += 4;
4438 #endif
4439
4440 if (!erts_isfinite(ff.fd)) {
4441 goto error;
4442 }
4443
4444 *objp = make_float(hp);
4445 PUT_DOUBLE(ff, hp);
4446 hp += FLOAT_SIZE_OBJECT;
4447 break;
4448 }
4449 case PID_EXT:
4450 case NEW_PID_EXT:
4451 factory->hp = hp;
4452 ep = dec_pid(edep, factory, ep, objp, ep[-1]);
4453 hp = factory->hp;
4454 if (ep == NULL) {
4455 goto error;
4456 }
4457 break;
4458 case PORT_EXT:
4459 case NEW_PORT_EXT:
4460 case V4_PORT_EXT:
4461 {
4462 Eterm sysname;
4463 ErlNode *node;
4464 Uint64 num;
4465 Uint32 cre;
4466 byte tag = ep[-1];
4467
4468 if ((ep = dec_atom(edep, ep, &sysname)) == NULL) {
4469 goto error;
4470 }
4471 if (tag == V4_PORT_EXT) {
4472 num = get_int64(ep);
4473 ep += 8;
4474 }
4475 else {
4476 num = get_uint32(ep);
4477 ep += 4;
4478 }
4479 if (tag == PORT_EXT) {
4480 cre = get_int8(ep);
4481 ep++;
4482 if (!is_tiny_creation(cre)) {
4483 goto error;
4484 }
4485 }
4486 else {
4487 cre = get_int32(ep);
4488 ep += 4;
4489 }
4490 node = dec_get_node(sysname, cre, make_boxed(hp));
4491 if(node == erts_this_node) {
4492 if (num > ERTS_MAX_INTERNAL_PORT_NUMBER)
4493 goto error;
4494 *objp = make_internal_port((Uint) num);
4495 }
4496 else {
4497 ExternalThing *etp = (ExternalThing *) hp;
4498 hp += EXTERNAL_PORT_HEAP_SIZE;
4499
4500 etp->header = make_external_port_header();
4501 etp->next = factory->off_heap->first;
4502 etp->node = node;
4503 #ifdef ARCH_64
4504 etp->data.port.id = num;
4505 #else
4506 etp->data.port.low = (Uint32) (num & 0xffffffff);
4507 etp->data.port.high = (Uint32) ((num >> 32) & 0xffffffff);
4508 #endif
4509
4510 factory->off_heap->first = (struct erl_off_heap_header*)etp;
4511 *objp = make_external_port(etp);
4512 }
4513
4514 break;
4515 }
4516 case REFERENCE_EXT:
4517 {
4518 Eterm sysname;
4519 ErlNode *node;
4520 int i;
4521 Uint32 cre;
4522 Uint32 *ref_num;
4523 Uint32 r0;
4524 Uint ref_words;
4525
4526 ref_words = 1;
4527
4528 if ((ep = dec_atom(edep, ep, &sysname)) == NULL)
4529 goto error;
4530 if ((r0 = get_int32(ep)) >= MAX_REFERENCE )
4531 goto error;
4532 ep += 4;
4533
4534 cre = get_int8(ep);
4535 ep += 1;
4536 if (!is_tiny_creation(cre)) {
4537 goto error;
4538 }
4539 goto ref_ext_common;
4540
4541 case NEW_REFERENCE_EXT:
4542 ref_words = get_int16(ep);
4543 ep += 2;
4544
4545 if ((ep = dec_atom(edep, ep, &sysname)) == NULL)
4546 goto error;
4547
4548 cre = get_int8(ep);
4549 ep += 1;
4550 if (!is_tiny_creation(cre)) {
4551 goto error;
4552 }
4553 r0 = get_int32(ep);
4554 ep += 4;
4555 if (r0 >= MAX_REFERENCE)
4556 goto error;
4557 goto ref_ext_common;
4558
4559 case NEWER_REFERENCE_EXT:
4560 ref_words = get_int16(ep);
4561 ep += 2;
4562
4563 if ((ep = dec_atom(edep, ep, &sysname)) == NULL)
4564 goto error;
4565
4566 cre = get_int32(ep);
4567 ep += 4;
4568 r0 = get_int32(ep);
4569 ep += 4;
4570
4571 ref_ext_common:
4572
4573 if (ref_words > ERTS_MAX_REF_NUMBERS)
4574 goto error;
4575
4576 node = dec_get_node(sysname, cre, make_boxed(hp));
4577 if(node == erts_this_node) {
4578 Eterm *rtp = hp;
4579 Uint32 ref_num_buf[ERTS_MAX_REF_NUMBERS];
4580 if (r0 >= MAX_REFERENCE) {
4581 /*
4582 * Must reject local refs with more than 18 bits
4583 * in first word as magic ref table relies on it.
4584 */
4585 goto error;
4586 }
4587
4588 ref_num = &ref_num_buf[0];
4589 ref_num[0] = r0;
4590 for(i = 1; i < ref_words; i++) {
4591 ref_num[i] = get_int32(ep);
4592 ep += 4;
4593 }
4594 if (ref_words != ERTS_REF_NUMBERS) {
4595 int i;
4596 if (ref_words > ERTS_REF_NUMBERS)
4597 goto error; /* Not a ref that we created... */
4598 for (i = ref_words; i < ERTS_REF_NUMBERS; i++)
4599 ref_num[i] = 0;
4600 }
4601 if (erts_is_ordinary_ref_numbers(ref_num)) {
4602 make_ordinary_internal_ref:
4603 write_ref_thing(hp, ref_num[0], ref_num[1], ref_num[2]);
4604 hp += ERTS_REF_THING_SIZE;
4605 }
4606 else {
4607 /* Check if it is a pid reference... */
4608 Eterm pid = erts_pid_ref_lookup(ref_num);
4609 if (is_internal_pid(pid)) {
4610 write_pid_ref_thing(hp, ref_num[0], ref_num[1],
4611 ref_num[2], pid);
4612 hp += ERTS_PID_REF_THING_SIZE;
4613 }
4614 else {
4615 /* Check if it is a magic reference... */
4616 ErtsMagicBinary *mb = erts_magic_ref_lookup_bin(ref_num);
4617 if (!mb)
4618 goto make_ordinary_internal_ref;
4619 /* Refc on binary was increased by lookup above... */
4620 ASSERT(rtp);
4621 write_magic_ref_thing(hp, factory->off_heap, mb);
4622 OH_OVERHEAD(factory->off_heap,
4623 mb->orig_size / sizeof(Eterm));
4624 hp += ERTS_MAGIC_REF_THING_SIZE;
4625 }
4626 }
4627 *objp = make_internal_ref(rtp);
4628 }
4629 else {
4630 ExternalThing *etp = (ExternalThing *) hp;
4631 #if defined(ARCH_64)
4632 hp += EXTERNAL_THING_HEAD_SIZE + ref_words/2 + 1;
4633 #else
4634 hp += EXTERNAL_THING_HEAD_SIZE + ref_words;
4635 #endif
4636
4637 #if defined(ARCH_64)
4638 etp->header = make_external_ref_header(ref_words/2 + 1);
4639 #else
4640 etp->header = make_external_ref_header(ref_words);
4641 #endif
4642 etp->next = factory->off_heap->first;
4643 etp->node = node;
4644
4645 factory->off_heap->first = (struct erl_off_heap_header*)etp;
4646 *objp = make_external_ref(etp);
4647 ref_num = &(etp->data.ui32[0]);
4648 #if defined(ARCH_64)
4649 *(ref_num++) = ref_words /* 32-bit arity */;
4650 #endif
4651
4652 ref_num[0] = r0;
4653
4654 for(i = 1; i < ref_words; i++) {
4655 ref_num[i] = get_int32(ep);
4656 ep += 4;
4657 }
4658 #if defined(ARCH_64)
4659 if ((1 + ref_words) % 2)
4660 ref_num[ref_words] = 0;
4661 #endif
4662 }
4663 break;
4664 }
4665 case BINARY_EXT:
4666 {
4667 n = get_int32(ep);
4668 ep += 4;
4669
4670 if ((unsigned)n <= ERL_ONHEAP_BIN_LIMIT) {
4671 ErlHeapBin* hb = (ErlHeapBin *) hp;
4672
4673 hb->thing_word = header_heap_bin(n);
4674 hb->size = n;
4675 hp += heap_bin_size(n);
4676 sys_memcpy(hb->data, ep, n);
4677 *objp = make_binary(hb);
4678 } else if (edep && edep->data && edep->data->binp &&
4679 n > (edep->data->binp->orig_size / 4)) {
4680 /* If we decode a refc binary from a distribution data
4681 entry we know that it is a refc binary to begin with
4682 so we just increment it and use the reference. This
4683 means that the entire distribution data entry will
4684 remain until this binary is de-allocated so we only
4685 do it if a substantial part (> 25%) of the data
4686 is a binary. */
4687 ProcBin* pb = (ProcBin *) hp;
4688 Binary* bptr = edep->data->binp;
4689 erts_refc_inc(&bptr->intern.refc, 1);
4690 pb->thing_word = HEADER_PROC_BIN;
4691 pb->size = n;
4692 pb->next = factory->off_heap->first;
4693 factory->off_heap->first = (struct erl_off_heap_header*)pb;
4694 pb->val = bptr;
4695 pb->bytes = (byte*) ep;
4696 ERTS_ASSERT((byte*)(bptr->orig_bytes) < ep &&
4697 ep+n <= (byte*)(bptr->orig_bytes+bptr->orig_size));
4698 pb->flags = 0;
4699 OH_OVERHEAD(factory->off_heap, pb->size / sizeof(Eterm));
4700 hp += PROC_BIN_SIZE;
4701 *objp = make_binary(pb);
4702 } else {
4703 Binary* dbin = erts_bin_nrml_alloc(n);
4704
4705 *objp = erts_build_proc_bin(factory->off_heap, hp, dbin);
4706 hp += PROC_BIN_SIZE;
4707 if (ctx) {
4708 int n_limit = reds * B2T_MEMCPY_FACTOR;
4709 if (n > n_limit) {
4710 ctx->state = B2TDecodeBinary;
4711 ctx->u.dc.remaining_n = n - n_limit;
4712 ctx->u.dc.remaining_bytes = dbin->orig_bytes + n_limit;
4713 n = n_limit;
4714 reds = 0;
4715 }
4716 else
4717 reds -= n / B2T_MEMCPY_FACTOR;
4718 }
4719 sys_memcpy(dbin->orig_bytes, ep, n);
4720 }
4721 ep += n;
4722 break;
4723 }
4724 case BIT_BINARY_EXT:
4725 {
4726 Eterm bin;
4727 ErlSubBin* sb;
4728 Uint bitsize;
4729
4730 n = get_int32(ep);
4731 bitsize = ep[4];
4732 if (((bitsize==0) != (n==0)) || bitsize > 8)
4733 goto error;
4734 ep += 5;
4735 if ((unsigned)n <= ERL_ONHEAP_BIN_LIMIT) {
4736 ErlHeapBin* hb = (ErlHeapBin *) hp;
4737
4738 hb->thing_word = header_heap_bin(n);
4739 hb->size = n;
4740 sys_memcpy(hb->data, ep, n);
4741 bin = make_binary(hb);
4742 hp += heap_bin_size(n);
4743 ep += n;
4744 } else {
4745 Binary* dbin = erts_bin_nrml_alloc(n);
4746 Uint n_copy = n;
4747
4748 bin = erts_build_proc_bin(factory->off_heap, hp, dbin);
4749 hp += PROC_BIN_SIZE;
4750 if (ctx) {
4751 int n_limit = reds * B2T_MEMCPY_FACTOR;
4752 if (n > n_limit) {
4753 ctx->state = B2TDecodeBinary;
4754 ctx->u.dc.remaining_n = n - n_limit;
4755 ctx->u.dc.remaining_bytes = dbin->orig_bytes + n_limit;
4756 n_copy = n_limit;
4757 reds = 0;
4758 }
4759 else {
4760 reds -= n / B2T_MEMCPY_FACTOR;
4761 }
4762 }
4763 sys_memcpy(dbin->orig_bytes, ep, n_copy);
4764 ep += n_copy;
4765 }
4766
4767 if (bitsize == 8 || n == 0) {
4768 *objp = bin;
4769 } else {
4770 sb = (ErlSubBin *)hp;
4771 sb->thing_word = HEADER_SUB_BIN;
4772 sb->orig = bin;
4773 sb->size = n - 1;
4774 sb->bitsize = bitsize;
4775 sb->bitoffs = 0;
4776 sb->offs = 0;
4777 sb->is_writable = 0;
4778 *objp = make_binary(sb);
4779 hp += ERL_SUB_BIN_SIZE;
4780 }
4781 break;
4782 }
4783 case EXPORT_EXT:
4784 {
4785 Eterm mod;
4786 Eterm name;
4787 Eterm temp;
4788 Sint arity;
4789
4790 if ((ep = dec_atom(edep, ep, &mod)) == NULL) {
4791 goto error;
4792 }
4793 if ((ep = dec_atom(edep, ep, &name)) == NULL) {
4794 goto error;
4795 }
4796 factory->hp = hp;
4797 ep = dec_term(edep, factory, ep, &temp, NULL, 0);
4798 hp = factory->hp;
4799 if (ep == NULL) {
4800 goto error;
4801 }
4802 if (!is_small(temp)) {
4803 goto error;
4804 }
4805 arity = signed_val(temp);
4806 if (arity < 0) {
4807 goto error;
4808 }
4809 if (edep && (edep->flags & ERTS_DIST_EXT_BTT_SAFE)) {
4810 if (!erts_active_export_entry(mod, name, arity))
4811 goto error;
4812 }
4813 *objp = make_export(hp);
4814 *hp++ = HEADER_EXPORT;
4815 *hp++ = (Eterm) erts_export_get_or_make_stub(mod, name, arity);
4816 break;
4817 }
4818 break;
4819 case MAP_EXT:
4820 {
4821 Uint32 size,n;
4822 Eterm *kptr,*vptr;
4823 Eterm keys;
4824
4825 size = get_int32(ep); ep += 4;
4826
4827 if (size <= MAP_SMALL_MAP_LIMIT) {
4828 flatmap_t *mp;
4829
4830 keys = make_tuple(hp);
4831 *hp++ = make_arityval(size);
4832 hp += size;
4833 kptr = hp - 1;
4834
4835 mp = (flatmap_t*)hp;
4836 hp += MAP_HEADER_FLATMAP_SZ;
4837 hp += size;
4838 vptr = hp - 1;
4839
4840 /* kptr, last word for keys
4841 * vptr, last word for values
4842 */
4843
4844 WSTACK_PUSH(flat_maps, (UWord)mp);
4845 mp->thing_word = MAP_HEADER_FLATMAP;
4846 mp->size = size;
4847 mp->keys = keys;
4848 *objp = make_flatmap(mp);
4849
4850 for (n = size; n; n--) {
4851 *vptr = (Eterm) next;
4852 *kptr = (Eterm) vptr;
4853 next = kptr;
4854 vptr--;
4855 kptr--;
4856 }
4857 }
4858 else { /* Make hamt */
4859 struct dec_term_hamt* hamt = PSTACK_PUSH(hamt_array);
4860
4861 hamt->objp = objp;
4862 hamt->size = size;
4863 hamt->leaf_array = hp;
4864
4865 for (n = size; n; n--) {
4866 CDR(hp) = (Eterm) next;
4867 CAR(hp) = (Eterm) &CDR(hp);
4868 next = &CAR(hp);
4869 hp += 2;
4870 }
4871 }
4872 }
4873 break;
4874 case NEW_FUN_EXT:
4875 {
4876 ErlFunThing* funp = (ErlFunThing *) hp;
4877 Uint arity;
4878 Eterm module;
4879 const byte* uniq;
4880 int index;
4881 Sint old_uniq;
4882 Sint old_index;
4883 unsigned num_free;
4884 int i;
4885 Eterm temp;
4886
4887 ep += 4; /* Skip total size in bytes */
4888 arity = *ep++;
4889 uniq = ep;
4890 ep += 16;
4891 index = get_int32(ep);
4892 ep += 4;
4893 num_free = get_int32(ep);
4894 ep += 4;
4895 hp += ERL_FUN_SIZE;
4896 hp += num_free;
4897 funp->thing_word = HEADER_FUN;
4898 funp->num_free = num_free;
4899 *objp = make_fun(funp);
4900
4901 /* Module */
4902 if ((ep = dec_atom(edep, ep, &module)) == NULL) {
4903 goto error;
4904 }
4905 factory->hp = hp;
4906 /* Index */
4907 if ((ep = dec_term(edep, factory, ep, &temp, NULL, 0)) == NULL) {
4908 goto error;
4909 }
4910 if (!is_small(temp)) {
4911 goto error;
4912 }
4913 old_index = unsigned_val(temp);
4914
4915 /* Uniq */
4916 if ((ep = dec_term(edep, factory, ep, &temp, NULL, 0)) == NULL) {
4917 goto error;
4918 }
4919 if (!is_small(temp)) {
4920 goto error;
4921 }
4922 old_uniq = unsigned_val(temp);
4923
4924 /*
4925 * It is safe to link the fun into the fun list only when
4926 * no more validity tests can fail.
4927 */
4928 funp->next = factory->off_heap->first;
4929 factory->off_heap->first = (struct erl_off_heap_header*)funp;
4930
4931 funp->fe = erts_put_fun_entry2(module, old_uniq, old_index,
4932 uniq, index, arity);
4933 funp->arity = arity;
4934 hp = factory->hp;
4935
4936 /* Environment */
4937 for (i = num_free-1; i >= 0; i--) {
4938 funp->env[i] = (Eterm) next;
4939 next = funp->env + i;
4940 }
4941 /* Creator */
4942 funp->creator = (Eterm) next;
4943 next = &(funp->creator);
4944 break;
4945 }
4946 case ATOM_INTERNAL_REF2:
4947 n = get_int16(ep);
4948 ep += 2;
4949 /* If this is an ets_decode we know that
4950 the atom is valid, so we can skip the
4951 validation check */
4952 if (!ets_decode && n >= atom_table_size()) {
4953 goto error;
4954 }
4955 *objp = make_atom(n);
4956 break;
4957 case ATOM_INTERNAL_REF3:
4958 n = get_int24(ep);
4959 ep += 3;
4960 /* If this is an ets_decode we know that
4961 the atom is valid, so we can skip the
4962 validation check */
4963 if (!ets_decode && n >= atom_table_size()) {
4964 goto error;
4965 }
4966 *objp = make_atom(n);
4967 break;
4968
4969 case BINARY_INTERNAL_REF:
4970 {
4971 ProcBin* pb = (ProcBin*) hp;
4972 sys_memcpy(pb, ep, sizeof(ProcBin));
4973 ep += sizeof(ProcBin);
4974
4975 erts_refc_inc(&pb->val->intern.refc, 1);
4976 hp += PROC_BIN_SIZE;
4977 pb->next = factory->off_heap->first;
4978 factory->off_heap->first = (struct erl_off_heap_header*)pb;
4979 OH_OVERHEAD(factory->off_heap, pb->size / sizeof(Eterm));
4980 pb->flags = 0;
4981 *objp = make_binary(pb);
4982 break;
4983 }
4984 case BIT_BINARY_INTERNAL_REF:
4985 {
4986 Sint bitoffs = *ep++;
4987 Sint bitsize = *ep++;
4988 ProcBin* pb = (ProcBin*) hp;
4989 ErlSubBin* sub;
4990 sys_memcpy(pb, ep, sizeof(ProcBin));
4991 ep += sizeof(ProcBin);
4992
4993 erts_refc_inc(&pb->val->intern.refc, 1);
4994 hp += PROC_BIN_SIZE;
4995 pb->next = factory->off_heap->first;
4996 factory->off_heap->first = (struct erl_off_heap_header*)pb;
4997 OH_OVERHEAD(factory->off_heap, pb->size / sizeof(Eterm));
4998 pb->flags = 0;
4999
5000 sub = (ErlSubBin*)hp;
5001 sub->thing_word = HEADER_SUB_BIN;
5002 sub->size = pb->size - (bitoffs + bitsize + 7)/8;
5003 sub->offs = 0;
5004 sub->bitoffs = bitoffs;
5005 sub->bitsize = bitsize;
5006 sub->is_writable = 0;
5007 sub->orig = make_binary(pb);
5008
5009 hp += ERL_SUB_BIN_SIZE;
5010 *objp = make_binary(sub);
5011 break;
5012 }
5013
5014 default:
5015 goto error;
5016 }
5017
5018 if (--reds <= 0) {
5019 if (ctx) {
5020 if (next || ctx->state != B2TDecode) {
5021 ctx->u.dc.ep = ep;
5022 ctx->u.dc.next = next;
5023 ctx->u.dc.factory.hp = hp;
5024 if (!WSTACK_ISEMPTY(flat_maps)) {
5025 WSTACK_SAVE(flat_maps, &ctx->u.dc.flat_maps);
5026 }
5027 if (!PSTACK_IS_EMPTY(hamt_array)) {
5028 PSTACK_SAVE(hamt_array, &ctx->u.dc.hamt_array);
5029 }
5030 ctx->reds = 0;
5031 return NULL;
5032 }
5033 }
5034 else {
5035 reds = ERTS_SWORD_MAX;
5036 }
5037 }
5038 }
5039
5040 ASSERT(hp <= factory->hp_end
5041 || (factory->mode == FACTORY_CLOSED && is_immed(*dbg_resultp)));
5042 factory->hp = hp;
5043 /*
5044 * From here on factory may produce (more) heap fragments
5045 */
5046
5047 if (!PSTACK_IS_EMPTY(hamt_array)) {
5048 do {
5049 struct dec_term_hamt* hamt = PSTACK_TOP(hamt_array);
5050
5051 *hamt->objp = erts_hashmap_from_array(factory,
5052 hamt->leaf_array,
5053 hamt->size,
5054 1);
5055 if (is_non_value(*hamt->objp))
5056 goto error_hamt;
5057
5058 (void) PSTACK_POP(hamt_array);
5059 } while (!PSTACK_IS_EMPTY(hamt_array));
5060 }
5061
5062 /* Iterate through all the (flat)maps and check for validity and sort keys
5063 * - done here for when we know it is complete.
5064 */
5065
5066 while(!WSTACK_ISEMPTY(flat_maps)) {
5067 next = (Eterm *)WSTACK_POP(flat_maps);
5068 if (!erts_validate_and_sort_flatmap((flatmap_t*)next))
5069 goto error;
5070 }
5071
5072 /* Now that no more errors can occur, the stacks can be destroyed safely. */
5073 PSTACK_DESTROY(hamt_array);
5074 WSTACK_DESTROY(flat_maps);
5075
5076 ASSERT((Eterm*)*dbg_resultp != NULL);
5077
5078 if (ctx) {
5079 ctx->state = B2TDone;
5080 ctx->reds = reds;
5081 ctx->u.dc.ep = ep;
5082 }
5083
5084 return ep;
5085
5086 error:
5087 /* UNDO:
5088 * Must unlink all off-heap objects that may have been
5089 * linked into the process.
5090 */
5091 if (factory->mode != FACTORY_CLOSED) {
5092 if (factory->hp < hp) { /* Sometimes we used hp and sometimes factory->hp */
5093 factory->hp = hp; /* the largest must be the freshest */
5094 }
5095 }
5096 else ASSERT(!factory->hp || factory->hp == hp);
5097
5098 error_hamt:
5099 erts_factory_undo(factory);
5100 PSTACK_DESTROY(hamt_array);
5101 if (ctx) {
5102 ctx->state = B2TDecodeFail;
5103 ctx->reds = reds;
5104 }
5105 WSTACK_DESTROY(flat_maps);
5106
5107 return NULL;
5108 }
5109
/* Returns the number of bytes needed to encode an object
   as a sequence of bytes on the external term format.
   N.B. that this must agree with to_external2() above!!!
   (except for cached atoms) */
encode_size_struct2(ErtsAtomCacheMap * acmp,Eterm obj,Uint64 dflags)5114 static Uint encode_size_struct2(ErtsAtomCacheMap *acmp,
5115 Eterm obj,
5116 Uint64 dflags) {
5117 Uint size = 0;
5118 ErtsExtSzRes res = encode_size_struct_int(NULL, acmp, obj,
5119 dflags, NULL,
5120 &size);
5121 /*
5122 * encode_size_struct2() only allowed when
5123 * we know the result will always be OK!
5124 */
5125 ASSERT(res == ERTS_EXT_SZ_OK); (void) res;
5126 return (Uint) size;
5127 }
5128
/*
 * Compute the number of bytes needed to encode 'obj' on the external
 * term format, accumulating the size into *res.
 *
 * ctx    - yield context; when non-NULL the walk may suspend (state is
 *          saved in ctx) and later resume. NULL means run to completion.
 * acmp   - atom cache map; updated for every atom seen so that atoms can
 *          be sent as cache references over the distribution.
 * dflags - distribution flags selecting between encoding variants.
 * reds   - in/out reduction budget (used only when ctx != NULL).
 * res    - in/out accumulated size in bytes.
 *
 * Returns ERTS_EXT_SZ_OK when done, ERTS_EXT_SZ_YIELD when the budget
 * ran out (resume by calling again with the same ctx), or
 * ERTS_EXT_SZ_SYSTEM_LIMIT for binaries too large to encode.
 *
 * The term is traversed iteratively with an explicit work stack 's';
 * 'vlen' (>= 0 only when sizing for an I/O vector) counts the number of
 * iov entries the encoding will need.
 */
static ErtsExtSzRes
encode_size_struct_int(TTBSizeContext* ctx, ErtsAtomCacheMap *acmp, Eterm obj,
                       Uint64 dflags, Sint *reds, Uint *res)
{
    DECLARE_WSTACK(s);
    Uint m, i, arity;
    Uint result = *res;
    Sint r = 0;
    int vlen = -1;

    if (ctx) {
        WSTACK_CHANGE_ALLOCATOR(s, ERTS_ALC_T_SAVED_ESTACK);
        r = *reds;

        vlen = ctx->vlen;

        if (!ctx->wstack.wstart)
            ctx->last_result = result;
        else { /* restore saved stack */
            WSTACK_RESTORE(s, &ctx->wstack);
            result = ctx->result;
            obj = ctx->obj;
        }
    }

/* Work-stack operations; encoded as header words so they can be told
 * apart from ordinary (non-header) terms when popped. */
#define LIST_TAIL_OP ((0 << _TAG_PRIMARY_SIZE) | TAG_PRIMARY_HEADER)
#define TERM_ARRAY_OP(N) (((N) << _TAG_PRIMARY_SIZE) | TAG_PRIMARY_HEADER)
#define TERM_ARRAY_OP_DEC(OP) ((OP) - (1 << _TAG_PRIMARY_SIZE))


    for (;;) {
        ASSERT(!is_header(obj));

        if (ctx && --r <= 0) {
            /* Out of reductions; save traversal state and yield. */
            *reds = 0;
            ctx->obj = obj;
            ctx->result = result;
            ctx->vlen = vlen;
            WSTACK_SAVE(s, &ctx->wstack);
            return ERTS_EXT_SZ_YIELD;
        }
        switch (tag_val_def(obj)) {
        case NIL_DEF:
            result++; /* NIL_EXT: tag only */
            break;
        case ATOM_DEF:
            if (dflags & DFLAG_ETS_COMPRESSED) {
                /* ETS compressed format stores the atom index itself. */
                if (atom_val(obj) >= (1<<16)) {
                    result += 1 + 3; /* ATOM_INTERNAL_REF3 */
                }
                else {
                    result += 1 + 2; /* ATOM_INTERNAL_REF2 */
                }
            }
            else {
                Atom *a = atom_tab(atom_val(obj));
                int alen;
                if ((dflags & DFLAG_UTF8_ATOMS) || a->latin1_chars < 0) {
                    alen = a->len;
                    result += 1 + 1 + alen;
                    if (alen > 255) {
                        result++; /* ATOM_UTF8_EXT (not small) */
                    }
                }
                else {
                    alen = a->latin1_chars;
                    result += 1 + 1 + alen;
                    if (alen > 255 || !(dflags & DFLAG_SMALL_ATOM_TAGS))
                        result++; /* ATOM_EXT (not small) */
                }
                insert_acache_map(acmp, obj, dflags);
            }
            break;
        case SMALL_DEF:
            {
                Sint val = signed_val(obj);

                if ((Uint)val < 256)
                    result += 1 + 1; /* SMALL_INTEGER_EXT */
                else if (sizeof(Sint) == 4 || IS_SSMALL32(val))
                    result += 1 + 4; /* INTEGER_EXT */
                else {
                    /* 64-bit small that does not fit INTEGER_EXT;
                     * convert to a temporary bignum to size it. */
                    DeclareTmpHeapNoproc(tmp_big,2);
                    UseTmpHeapNoproc(2);
                    i = big_bytes(small_to_big(val, tmp_big));
                    result += 1 + 1 + 1 + i; /* SMALL_BIG_EXT */
                    UnUseTmpHeapNoproc(2);
                }
            }
            break;
        case BIG_DEF:
            i = big_bytes(obj);
            if (sizeof(Sint)==4 && i <= 4 && (big_digit(obj,0)-big_sign(obj)) < (1<<31))
                result += 1 + 4;          /* INTEGER_EXT */
            else if (i < 256)
                result += 1 + 1 + 1 + i;  /* tag,size,sign,digits */
            else
                result += 1 + 4 + 1 + i;  /* tag,size,sign,digits */
            break;
        case EXTERNAL_PID_DEF:
        case PID_DEF:
            /* NEW_PID_EXT: tag + node + id + serial + creation */
            result += (1 + encode_size_struct2(acmp, pid_node_name(obj), dflags) +
                       4 + 4 + 4);
            break;
        case EXTERNAL_REF_DEF:
        case REF_DEF:
            ASSERT(dflags & DFLAG_EXTENDED_REFERENCES);
            i = ref_no_numbers(obj);
            /* NEWER_REFERENCE_EXT: tag + len + node + creation + numbers */
            result += (1 + 2 + encode_size_struct2(acmp, ref_node_name(obj), dflags) +
                       4 + 4*i);
            break;
        case EXTERNAL_PORT_DEF:
        case PORT_DEF: {
            Uint64 num = port_number(obj);
            /* V4_PORT_EXT uses a 64-bit number, NEW_PORT_EXT 32-bit. */
            result += (num > ERTS_MAX_V3_PORT_NUMBER) ? 8 : 4;
            result += (1 + encode_size_struct2(acmp, port_node_name(obj), dflags)
                       /* num */ + 4);
            break;
        }
        case LIST_DEF: {
            int is_str = is_external_string(obj, &m);
            r -= m/2; /* charge reductions for the string scan */
            if (is_str) {
                result += m + 2 + 1; /* STRING_EXT: tag + len + chars */
            } else {
                result += 5; /* LIST_EXT: tag + 4 byte length */
                WSTACK_PUSH2(s, (UWord)CDR(list_val(obj)), (UWord)LIST_TAIL_OP);
                obj = CAR(list_val(obj));
                continue; /* big loop */
            }
            break;
        }
        case TUPLE_DEF:
            {
                Eterm* ptr = tuple_val(obj);
                arity = arityval(*ptr);
                if (arity <= 0xff) {
                    result += 1 + 1; /* SMALL_TUPLE_EXT */
                } else {
                    result += 1 + 4; /* LARGE_TUPLE_EXT */
                }
                if (arity > 1) {
                    /* Remaining elements are handled via the stack. */
                    WSTACK_PUSH2(s, (UWord) (ptr + 2),
                                 (UWord) TERM_ARRAY_OP(arity-1));
                }
                else if (arity == 0) {
                    break;
                }
                obj = ptr[1];
                continue; /* big loop */
            }
        case MAP_DEF:
            if (is_flatmap(obj)) {
                flatmap_t *mp = (flatmap_t*)flatmap_val(obj);
                Uint size = flatmap_get_size(mp);

                result += 1 + 4; /* tag + 4 bytes size */

                if (size) {
                    /* Push values first so keys are sized first (LIFO). */
                    WSTACK_PUSH4(s, (UWord) flatmap_get_values(mp),
                                    (UWord) TERM_ARRAY_OP(size),
                                    (UWord) flatmap_get_keys(mp),
                                    (UWord) TERM_ARRAY_OP(size));
                }
            } else {
                /* Hashmap (HAMT) node: push all children on the stack. */
                Eterm *ptr;
                Eterm hdr;
                Uint node_sz;
                ptr = boxed_val(obj);
                hdr = *ptr;
                ASSERT(is_header(hdr));
                switch(hdr & _HEADER_MAP_SUBTAG_MASK) {
                case HAMT_SUBTAG_HEAD_ARRAY:
                    ptr++;
                    node_sz = 16;
                    result += 1 + 4; /* tag + 4 bytes size */
                    break;
                case HAMT_SUBTAG_HEAD_BITMAP:
                    ptr++;
                    result += 1 + 4; /* tag + 4 bytes size */
                    /*fall through*/
                case HAMT_SUBTAG_NODE_BITMAP:
                    node_sz = hashmap_bitcount(MAP_HEADER_VAL(hdr));
                    ASSERT(node_sz < 17);
                    break;
                default:
                    erts_exit(ERTS_ERROR_EXIT, "bad header\r\n");
                }

                ptr++;
                WSTACK_RESERVE(s, node_sz*2);
                while(node_sz--) {
                    if (is_list(*ptr)) {
                        /* Leaf: a [key|value] cons cell. */
                        WSTACK_FAST_PUSH(s, CAR(list_val(*ptr)));
                        WSTACK_FAST_PUSH(s, CDR(list_val(*ptr)));
                    } else {
                        WSTACK_FAST_PUSH(s, *ptr);
                    }
                    ptr++;
                }
            }
            break;
        case FLOAT_DEF:
            if (dflags & DFLAG_NEW_FLOATS) {
                result += 9; /* NEW_FLOAT_EXT: tag + IEEE 754 double */
            } else {
                result += 32; /* Yes, including the tag */
            }
            break;
        case BINARY_DEF: {
            ProcBin* pb = (ProcBin*) binary_val(obj);
            Uint bin_size = pb->size;
            byte bitoffs = 0;
            byte bitsize = 0;
            if (dflags & DFLAG_ETS_COMPRESSED) {
                ProcBin* pb = (ProcBin*) binary_val(obj);
                Uint sub_extra = 0;
                if (pb->thing_word == HEADER_SUB_BIN) {
                    ErlSubBin* sub = (ErlSubBin*) pb;
                    bitoffs = sub->bitoffs;
                    bitsize = sub->bitsize;
                    pb = (ProcBin*) binary_val(sub->orig);
                    sub_extra = 2; /* bitoffs and bitsize */
                    bin_size += (bitoffs + bitsize + 7) / 8;
                }
                if (pb->thing_word == HEADER_PROC_BIN
                    && heap_bin_size(bin_size) > PROC_BIN_SIZE) {
                    /* Large refc binary: store the ProcBin itself. */
                    result += 1 + sub_extra + sizeof(ProcBin);
                    break;
                }
            }
            else {
#ifdef ARCH_64
                if (bin_size >= (Uint) 0xffffffff) {
                    if (pb->thing_word == HEADER_SUB_BIN) {
                        ErlSubBin* sub = (ErlSubBin*) pb;
                        bin_size += (sub->bitoffs + sub->bitsize+ 7) / 8;
                    }
                    if (bin_size > (Uint) 0xffffffff) {
                        /* External format has a 32-bit size field. */
                        WSTACK_DESTROY(s);
                        return ERTS_EXT_SZ_SYSTEM_LIMIT;
                    }
                }
#endif
                if (pb->thing_word == HEADER_SUB_BIN) {
                    ErlSubBin* sub = (ErlSubBin*) pb;
                    bitoffs = sub->bitoffs;
                    bitsize = sub->bitsize;
                    pb = (ProcBin*) binary_val(sub->orig);
                }
                if (vlen >= 0) {
                    Uint csz;
                    /* Sizing for an I/O vector: large aligned refc
                     * binaries are referenced in place instead of
                     * being copied into the encoding buffer. */
                    if (pb->thing_word == HEADER_PROC_BIN
                        && bitoffs == 0
                        && bin_size > ERL_ONHEAP_BIN_LIMIT) {
                        Uint trailing_result;
                        if (bitsize == 0) {
                            result += (1 /* BINARY_EXT */
                                       + 4 /* size */);
                            trailing_result = 0;
                        }
                        else if (dflags & DFLAG_BIT_BINARIES) {
                            result += (1 /* BIT_BINARY_EXT */
                                       + 4 /* size */
                                       + 1 /* trailing bitsize */);
                            trailing_result = 1 /* trailing bits */;
                        }
                        else {
                            /* sigh... */
                            result += (1 /* SMALL_TUPLE_EXT */
                                       + 1 /* 2 tuple size */
                                       + 1 /* BINARY_EXT */
                                       + 4 /* binary size */);
                            trailing_result = (1 /* SMALL_INTEGER_EXT */
                                               + 1 /* bitsize */);
                        }
                        csz = result - ctx->last_result;
                        ctx->last_result = result;
                        result += trailing_result;
                        vlen += 2; /* data leading up to binary and binary */

                        /* potentially multiple elements leading up to binary */
                        vlen += csz/MAX_SYSIOVEC_IOVLEN;
                        /* potentially multiple elements for binary */
                        vlen += bin_size/MAX_SYSIOVEC_IOVLEN;
                        ctx->extra_size += bin_size;

                        if (dflags & DFLAG_PENDING_CONNECT) {
                            ASSERT(dflags & DFLAG_BIT_BINARIES);
                            ASSERT(ctx);
                            vlen += 2; /* for hopefull prolog and epilog */
                            result += (4 /* for hopefull prolog (see below) */
                                       + 4); /* for hopefull epilog (see below) */
                            ctx->last_result = result;
                        }
                        break;
                    }
                }
            }

            /* Binary data is copied into the encoding buffer. */
            if (bitsize == 0) {
                result += (1 /* BINARY_EXT */
                           + 4 /* size */
                           + bin_size);
            }
            else if (dflags & DFLAG_PENDING_CONNECT) {
                /* This is the odd case when we have an un-aligned bit-string
                   during a pending connect. */
                Uint csz;
                ASSERT(dflags & DFLAG_BIT_BINARIES);
                ASSERT(ctx);
                csz = result - ctx->last_result;
                /* potentially multiple elements leading up to binary */
                vlen += (csz + MAX_SYSIOVEC_IOVLEN - 1)/MAX_SYSIOVEC_IOVLEN;

                vlen++; /* hopefull prolog */
                /*
                 * Size for hopefull prolog is max of
                 * - fallback: 1 + 1 + 1 + 4
                 * - hopefull index + bit binary prolog: 4 + 1 + 4 + 1
                 */
                result += 4 + 1 + 4 + 1;
                /* potentially multiple elements for binary */
                vlen += bin_size/MAX_SYSIOVEC_IOVLEN + 1;
                result += bin_size;
                vlen++; /* hopefull epilog */
                /*
                 * Size for hopefull epilog is max of
                 * - fallback: 1 + 1 + 1
                 * - hopefull index + bit binary epilog: 4 + 1
                 */
                result += 4 + 1;
                ctx->last_result = result;
            }
            else if (dflags & DFLAG_BIT_BINARIES) {
                result += 1 + 4 + 1 + bin_size + 1;
            }
            else {
                /* Sigh... */
                result += 1 + 1 + 1 + 4 + bin_size + 1 + 1 + 1;
            }
            break;
        }
        case FUN_DEF:
            {
                ErlFunThing* funp = (ErlFunThing *) fun_val(obj);

                ASSERT(dflags & DFLAG_NEW_FUN_TAGS);
                result += 20+1+1+4; /* New ID + Tag */
                result += 4; /* Length field (number of free variables */
                result += encode_size_struct2(acmp, funp->creator, dflags);
                result += encode_size_struct2(acmp, funp->fe->module, dflags);
                result += 2 * (1+4); /* Index, Uniq */
                if (funp->num_free > 1) {
                    WSTACK_PUSH2(s, (UWord) (funp->env + 1),
                                 (UWord) TERM_ARRAY_OP(funp->num_free-1));
                }
                if (funp->num_free != 0) {
                    obj = funp->env[0];
                    continue; /* big loop */
                }
                break;
            }

        case EXPORT_DEF:
            {
                Export* ep = *((Export **) (export_val(obj) + 1));
                Uint tmp_result = result;
                result += 1;
                result += encode_size_struct2(acmp, ep->info.mfa.module, dflags);
                result += encode_size_struct2(acmp, ep->info.mfa.function, dflags);
                result += encode_size_struct2(acmp, make_small(ep->info.mfa.arity), dflags);
                if (dflags & DFLAG_PENDING_CONNECT) {
                    Uint csz;
                    ASSERT(ctx);

                    /*
                     * Fallback is 1 + 1 + Module size + Function size, that is,
                     * the hopefull index + hopefull encoding is larger...
                     */
                    ASSERT(dflags & DFLAG_EXPORT_PTR_TAG);
                    csz = tmp_result - ctx->last_result;
                    /* potentially multiple elements leading up to hopefull entry */
                    vlen += (csz/MAX_SYSIOVEC_IOVLEN + 1
                             + 1); /* hopefull entry */
                    result += 4; /* hopefull index */
                    ctx->last_result = result;
                }
            }
            break;

        default:
            erts_exit(ERTS_ERROR_EXIT,"Internal data structure error (in encode_size_struct_int) %x\n",
                      obj);
        }

        if (WSTACK_ISEMPTY(s)) {
            break;
        }
        obj = (Eterm) WSTACK_POP(s);

        if (is_header(obj)) {
            /* A stack operation rather than a term; decode it. */
            switch (obj) {
            case LIST_TAIL_OP:
                obj = (Eterm) WSTACK_POP(s);
                if (is_list(obj)) {
                    Eterm* cons = list_val(obj);

                    WSTACK_PUSH2(s, (UWord)CDR(cons), (UWord)LIST_TAIL_OP);
                    obj = CAR(cons);
                }
                break;

            case TERM_ARRAY_OP(1):
                obj = *(Eterm*)WSTACK_POP(s);
                break;
            default: { /* TERM_ARRAY_OP(N) when N > 1 */
                Eterm* ptr = (Eterm*) WSTACK_POP(s);
                WSTACK_PUSH2(s, (UWord) (ptr+1),
                             (UWord) TERM_ARRAY_OP_DEC(obj));
                obj = *ptr;
            }
            }
        }
    }

    WSTACK_DESTROY(s);
    if (ctx) {
        ASSERT(ctx->wstack.wstart == NULL);
        *reds = r < 0 ? 0 : r;

        if (vlen >= 0) {
            /* Account iov entries for the trailing encoded data. */
            Uint csz;
            csz = result - ctx->last_result;
            if (csz)
                vlen += csz/MAX_SYSIOVEC_IOVLEN + 1;
            ctx->vlen = vlen;
        }
    }
    *res = result;
    return ERTS_EXT_SZ_OK;
}
5572
5573
5574
5575 static Sint
decoded_size(const byte * ep,const byte * endp,int internal_tags,B2TContext * ctx)5576 decoded_size(const byte *ep, const byte* endp, int internal_tags, B2TContext* ctx)
5577 {
5578 Sint heap_size;
5579 int terms;
5580 int atom_extra_skip;
5581 Uint n;
5582 SWord reds;
5583
5584 if (ctx) {
5585 reds = ctx->reds;
5586 if (ctx->u.sc.ep) {
5587 heap_size = ctx->u.sc.heap_size;
5588 terms = ctx->u.sc.terms;
5589 ep = ctx->u.sc.ep;
5590 atom_extra_skip = ctx->u.sc.atom_extra_skip;
5591 goto init_done;
5592 }
5593 }
5594 else
5595 ERTS_UNDEF(reds, 0);
5596
5597 heap_size = 0;
5598 terms = 1;
5599 atom_extra_skip = 0;
5600 init_done:
5601
5602 #define SKIP(sz) \
5603 do { \
5604 if ((sz) <= endp-ep) { \
5605 ep += (sz); \
5606 } else { goto error; }; \
5607 } while (0)
5608
5609 #define SKIP2(sz1, sz2) \
5610 do { \
5611 Uint sz = (sz1) + (sz2); \
5612 if (sz1 < sz && (sz) <= endp-ep) { \
5613 ep += (sz); \
5614 } else { goto error; } \
5615 } while (0)
5616
5617 #define CHKSIZE(sz) \
5618 do { \
5619 if ((sz) > endp-ep) { goto error; } \
5620 } while (0)
5621
5622 #define ADDTERMS(n) \
5623 do { \
5624 int before = terms; \
5625 terms += (n); \
5626 if (terms < before) goto error; \
5627 } while (0)
5628
5629 ASSERT(terms > 0);
5630 do {
5631 int tag;
5632 CHKSIZE(1);
5633 tag = ep++[0];
5634 switch (tag) {
5635 case INTEGER_EXT:
5636 SKIP(4);
5637 #if !defined(ARCH_64)
5638 heap_size += BIG_UINT_HEAP_SIZE;
5639 #endif
5640 break;
5641 case SMALL_INTEGER_EXT:
5642 SKIP(1);
5643 break;
5644 case SMALL_BIG_EXT:
5645 CHKSIZE(1);
5646 n = ep[0]; /* number of bytes */
5647 SKIP2(n, 1+1); /* skip size,sign,digits */
5648 heap_size += 1+(n+sizeof(Eterm)-1)/sizeof(Eterm); /* XXX: 1 too much? */
5649 break;
5650 case LARGE_BIG_EXT:
5651 CHKSIZE(4);
5652 n = get_uint32(ep);
5653 if (n > BIG_ARITY_MAX*sizeof(ErtsDigit)) {
5654 goto error;
5655 }
5656 SKIP2(n,4+1); /* skip, size,sign,digits */
5657 heap_size += 1+1+(n+sizeof(Eterm)-1)/sizeof(Eterm); /* XXX: 1 too much? */
5658 break;
5659 case ATOM_EXT:
5660 CHKSIZE(2);
5661 n = get_int16(ep);
5662 if (n > MAX_ATOM_CHARACTERS) {
5663 goto error;
5664 }
5665 SKIP(n+2+atom_extra_skip);
5666 atom_extra_skip = 0;
5667 break;
5668 case ATOM_UTF8_EXT:
5669 CHKSIZE(2);
5670 n = get_int16(ep);
5671 ep += 2;
5672 if (n > MAX_ATOM_SZ_LIMIT) {
5673 goto error;
5674 }
5675 SKIP(n+atom_extra_skip);
5676 atom_extra_skip = 0;
5677 break;
5678 case SMALL_ATOM_EXT:
5679 CHKSIZE(1);
5680 n = get_int8(ep);
5681 if (n > MAX_ATOM_CHARACTERS) {
5682 goto error;
5683 }
5684 SKIP(n+1+atom_extra_skip);
5685 atom_extra_skip = 0;
5686 break;
5687 case SMALL_ATOM_UTF8_EXT:
5688 CHKSIZE(1);
5689 n = get_int8(ep);
5690 ep++;
5691 if (n > MAX_ATOM_SZ_LIMIT) {
5692 goto error;
5693 }
5694 SKIP(n+atom_extra_skip);
5695 atom_extra_skip = 0;
5696 break;
5697 case ATOM_CACHE_REF:
5698 SKIP(1+atom_extra_skip);
5699 atom_extra_skip = 0;
5700 break;
5701 case NEW_PID_EXT:
5702 atom_extra_skip = 12;
5703 goto case_PID;
5704 case PID_EXT:
5705 atom_extra_skip = 9;
5706 case_PID:
5707 /* In case it is an external pid */
5708 heap_size += EXTERNAL_PID_HEAP_SIZE;
5709 terms++;
5710 break;
5711 case V4_PORT_EXT:
5712 atom_extra_skip = 12;
5713 goto case_PORT;
5714 case NEW_PORT_EXT:
5715 atom_extra_skip = 8;
5716 goto case_PORT;
5717 case PORT_EXT:
5718 atom_extra_skip = 5;
5719 case_PORT:
5720 /* In case it is an external port */
5721 heap_size += EXTERNAL_PORT_HEAP_SIZE;
5722 terms++;
5723 break;
5724 case NEWER_REFERENCE_EXT:
5725 atom_extra_skip = 4;
5726 goto case_NEW_REFERENCE;
5727 case NEW_REFERENCE_EXT:
5728 atom_extra_skip = 1;
5729 case_NEW_REFERENCE:
5730 {
5731 int id_words;
5732
5733 CHKSIZE(2);
5734 id_words = get_int16(ep);
5735
5736 if (id_words > ERTS_MAX_REF_NUMBERS)
5737 goto error;
5738
5739 ep += 2;
5740 atom_extra_skip += 4*id_words;
5741 /* In case it is an external ref */
5742 #if defined(ARCH_64)
5743 heap_size += EXTERNAL_THING_HEAD_SIZE + id_words/2 + 1;
5744 #else
5745 heap_size += EXTERNAL_THING_HEAD_SIZE + id_words;
5746 #endif
5747 terms++;
5748 break;
5749 }
5750 case REFERENCE_EXT:
5751 /* In case it is an external ref */
5752 heap_size += EXTERNAL_THING_HEAD_SIZE + 1;
5753 atom_extra_skip = 5;
5754 terms++;
5755 break;
5756 case NIL_EXT:
5757 break;
5758 case LIST_EXT:
5759 CHKSIZE(4);
5760 n = get_uint32(ep);
5761 ep += 4;
5762 ADDTERMS(n);
5763 terms++;
5764 heap_size += 2 * n;
5765 break;
5766 case SMALL_TUPLE_EXT:
5767 CHKSIZE(1);
5768 n = *ep++;
5769 terms += n;
5770 heap_size += n + 1;
5771 break;
5772 case LARGE_TUPLE_EXT:
5773 CHKSIZE(4);
5774 n = get_uint32(ep);
5775 ep += 4;
5776 ADDTERMS(n);
5777 heap_size += n + 1;
5778 break;
5779 case MAP_EXT:
5780 CHKSIZE(4);
5781 n = get_uint32(ep);
5782 ep += 4;
5783 ADDTERMS(2*n);
5784 if (n <= MAP_SMALL_MAP_LIMIT) {
5785 heap_size += 3 + n + 1 + n;
5786 } else {
5787 #if !defined(ARCH_64)
5788 if ((n >> 30) != 0) {
5789 /* Can't possibly fit in memory. */
5790 goto error;
5791 }
5792 #endif
5793 CHKSIZE(2*n); /* Conservative size check */
5794 heap_size += HASHMAP_ESTIMATED_HEAP_SIZE(n);
5795 }
5796 break;
5797 case STRING_EXT:
5798 CHKSIZE(2);
5799 n = get_int16(ep);
5800 SKIP(n+2);
5801 heap_size += 2 * n;
5802 break;
5803 case FLOAT_EXT:
5804 SKIP(31);
5805 heap_size += FLOAT_SIZE_OBJECT;
5806 break;
5807 case NEW_FLOAT_EXT:
5808 SKIP(8);
5809 heap_size += FLOAT_SIZE_OBJECT;
5810 break;
5811 case BINARY_EXT:
5812 CHKSIZE(4);
5813 n = get_uint32(ep);
5814 SKIP2(n, 4);
5815 if (n <= ERL_ONHEAP_BIN_LIMIT) {
5816 heap_size += heap_bin_size(n);
5817 } else {
5818 heap_size += PROC_BIN_SIZE;
5819 }
5820 break;
5821 case BIT_BINARY_EXT:
5822 {
5823 CHKSIZE(5);
5824 n = get_uint32(ep);
5825 SKIP2(n, 5);
5826 if (n <= ERL_ONHEAP_BIN_LIMIT) {
5827 heap_size += heap_bin_size(n) + ERL_SUB_BIN_SIZE;
5828 } else {
5829 heap_size += PROC_BIN_SIZE + ERL_SUB_BIN_SIZE;
5830 }
5831 }
5832 break;
5833 case EXPORT_EXT:
5834 terms += 3;
5835 heap_size += 2;
5836 break;
5837 case NEW_FUN_EXT:
5838 {
5839 unsigned num_free;
5840 Uint total_size;
5841
5842 CHKSIZE(1+16+4+4);
5843 total_size = get_uint32(ep);
5844 CHKSIZE(total_size);
5845 ep += 1+16+4+4;
5846 CHKSIZE(4);
5847 num_free = get_uint32(ep);
5848 ep += 4;
5849 if (num_free > MAX_ARG) {
5850 goto error;
5851 }
5852 terms += 4 + num_free;
5853 heap_size += ERL_FUN_SIZE + num_free;
5854 break;
5855 }
5856 case FUN_EXT:
5857 /*
5858 * OTP 23: No longer support decoding the old fun
5859 * representation.
5860 */
5861 goto error;
5862 case ATOM_INTERNAL_REF2:
5863 SKIP(2+atom_extra_skip);
5864 atom_extra_skip = 0;
5865 break;
5866 case ATOM_INTERNAL_REF3:
5867 SKIP(3+atom_extra_skip);
5868 atom_extra_skip = 0;
5869 break;
5870
5871 case BINARY_INTERNAL_REF:
5872 if (!internal_tags) {
5873 goto error;
5874 }
5875 SKIP(sizeof(ProcBin));
5876 heap_size += PROC_BIN_SIZE;
5877 break;
5878 case BIT_BINARY_INTERNAL_REF:
5879 if (!internal_tags) {
5880 goto error;
5881 }
5882 SKIP(2+sizeof(ProcBin));
5883 heap_size += PROC_BIN_SIZE + ERL_SUB_BIN_SIZE;
5884 break;
5885 default:
5886 goto error;
5887 }
5888 terms--;
5889
5890 if (ctx && --reds <= 0 && terms > 0) {
5891 ctx->u.sc.heap_size = heap_size;
5892 ctx->u.sc.terms = terms;
5893 ctx->u.sc.ep = ep;
5894 ctx->u.sc.atom_extra_skip = atom_extra_skip;
5895 ctx->reds = 0;
5896 return 0;
5897 }
5898 }while (terms > 0);
5899
5900 /* 'terms' may be non-zero if it has wrapped around */
5901 if (terms == 0) {
5902 if (ctx) {
5903 ctx->state = B2TDecodeInit;
5904 ctx->reds = reds;
5905 }
5906 return heap_size;
5907 }
5908
5909 error:
5910 if (ctx) {
5911 ctx->state = B2TBadArg;
5912 }
5913 return -1;
5914 #undef SKIP
5915 #undef SKIP2
5916 #undef CHKSIZE
5917 }
5918
5919 #define ERTS_TRANSCODE_REDS_FACT 4
/*
 * Temporary decode state used while transcoding an outgoing
 * distribution buffer: holds the tmp heap factory and the backing
 * heap allocation created by transcode_decode_ctl_msg(), so both
 * can be released together by transcode_decode_state_destroy().
 */
typedef struct {
    ErtsHeapFactory factory; /* factory managing the temporary heap */
    Eterm *hp;               /* ERTS_ALC_T_TMP heap backing the factory */
} ErtsTranscodeDecodeState;
5924
5925 static Eterm
transcode_decode_ctl_msg(ErtsTranscodeDecodeState * state,SysIOVec * iov,int end_ix)5926 transcode_decode_ctl_msg(ErtsTranscodeDecodeState *state,
5927 SysIOVec *iov,
5928 int end_ix)
5929 {
5930 Eterm ctl_msg, *hp;
5931 Uint buf_sz;
5932 byte *buf_start, *buf_end;
5933 const byte *ptr;
5934 Uint hsz;
5935
5936 if (end_ix == 3) {
5937 /* The whole control message is in iov[2].iov_base */
5938 buf_sz = (Uint) iov[2].iov_len;
5939 buf_start = (byte *) iov[2].iov_base;
5940 buf_end = buf_start + buf_sz;
5941 }
5942 else {
5943 /* Control message over multiple buffers... */
5944 int ix;
5945 buf_sz = 0;
5946 for (ix = 2; ix < end_ix; ix++)
5947 buf_sz += iov[ix].iov_len;
5948 ptr = buf_start = erts_alloc(ERTS_ALC_T_TMP, buf_sz);
5949 buf_end = buf_start + buf_sz;
5950 for (ix = 2; ix < end_ix; ix++) {
5951 sys_memcpy((void *) ptr,
5952 (void *) iov[ix].iov_base,
5953 iov[ix].iov_len);
5954 ptr += iov[ix].iov_len;
5955 }
5956 }
5957
5958 hsz = decoded_size(buf_start, buf_end, 0, NULL);
5959 state->hp = hp = erts_alloc(ERTS_ALC_T_TMP, hsz*sizeof(Eterm));
5960 erts_factory_tmp_init(&state->factory, hp, hsz, ERTS_ALC_T_TMP);
5961
5962 ptr = dec_term(NULL, &state->factory, buf_start, &ctl_msg, NULL, 0);
5963 ASSERT(ptr); (void)ptr;
5964 ASSERT(is_tuple(ctl_msg));
5965
5966 if (buf_start != (byte *) iov[2].iov_base)
5967 erts_free(ERTS_ALC_T_TMP, buf_start);
5968
5969 return ctl_msg;
5970 }
5971
/*
 * Release the temporary heap created by transcode_decode_ctl_msg().
 * The factory must be closed before the backing heap allocation is
 * freed; do not reorder these two calls.
 */
static void
transcode_decode_state_destroy(ErtsTranscodeDecodeState *state)
{
    erts_factory_close(&state->factory);
    erts_free(ERTS_ALC_T_TMP, state->hp);
}
5978
/*
 * Transcode an already-encoded outgoing distribution buffer 'ob' so
 * that it only contains constructs the receiver supports, as told by
 * the connection's distribution flags 'dflags'.
 *
 * Depending on the control message and the missing receiver flags this
 * either: drops the whole packet (turns it into an empty tick packet),
 * rewrites the control message in place (DOP_UNLINK_ID -> DOP_UNLINK),
 * or rewrites individual "hopefull" encodings (export funs, bit
 * binaries) into their fallback representations. Finally the
 * HOPEFUL_DATA header in iov[1] is replaced by a real protocol header.
 *
 * Returns remaining reductions (>= 0) when done, or -1 to request a
 * yield; the current hopefull index is then saved in the header so a
 * later call can resume where it left off.
 */
static
Sint transcode_dist_obuf(ErtsDistOutputBuf* ob,
                         DistEntry* dep,
                         Uint64 dflags,
                         Sint reds)
{
    ErlIOVec* eiov = ob->eiov;
    SysIOVec* iov = eiov->iov;
    byte *hdr;
    Uint64 hopefull_flags;
    Uint32 hopefull_ix, payload_ix;
    Sint start_r, r;
    Uint new_len;
    byte *ep;

    /* Caller is out of reductions; nothing is done yet. */
    if (reds < 0)
        return reds;

    /*
     * HOPEFUL_DATA header always present in io vector
     * element 1:
     *
     * +---+--------------+-----------+----------+
     * |'H'|Hopefull Flags|Hopefull IX|Payload IX|
     * +---+--------------+-----------+----------+
     *   1         8            4          4
     *
     * Hopefull flags: Flags corresponding to actual
     *                 hopefull encodings in this
     *                 buffer.
     * Hopefull IX:    Vector index of first hopefull
     *                 encoding. Each hopefull encoding
     *                 is preceeded by 4 bytes containing
     *                 next vector index of hopefull
     *                 encoding. ERTS_NO_HIX marks the
     *                 end.
     * Payload IX:     Vector index of the beginning
     *                 of the payload if there is
     *                 one; otherwise, zero.
     */
    hdr = (byte *) iov[1].iov_base;

    ASSERT(HOPEFUL_DATA == *((byte *)iov[1].iov_base));
    ASSERT(iov[1].iov_len == 1+8+4+4);

    /* Control message always begin in vector element 2 */
    ep = iov[2].iov_base;
    ASSERT(ep[0] == SMALL_TUPLE_EXT || ep[0] == LARGE_TUPLE_EXT);

    /* Inspect the raw encoded control-message tuple: ep[1] is the
     * tuple arity, ep[3] the small-integer operation code. */
    if (((~dflags & (DFLAG_DIST_MONITOR | DFLAG_DIST_MONITOR_NAME))
         && ep[0] == SMALL_TUPLE_EXT
         && ep[1] == 4
         && ep[2] == SMALL_INTEGER_EXT
         && (ep[3] == DOP_MONITOR_P ||
             ep[3] == DOP_MONITOR_P_EXIT ||
             ep[3] == DOP_DEMONITOR_P)
         /* The receiver does not support process monitoring.
            Suppress monitor control msg (see erts_dsig_send_monitor). */)
        || (!(dflags & DFLAG_ALIAS)
            && ep[0] == SMALL_TUPLE_EXT
            && (ep[1] == 3 || ep[1] == 4)
            && ep[2] == SMALL_INTEGER_EXT
            && ((ep[3] == DOP_ALIAS_SEND) || (ep[3] == DOP_ALIAS_SEND_TT))
            /* The receiver does not support alias, so the alias
               is obviously not present at the receiver. */)) {
        /*
         * Drop packet by converting it to an empty (tick) packet...
         */
        int i;
        for (i = 1; i < ob->eiov->vsize; i++) {
            if (ob->eiov->binv[i])
                driver_free_binary(ob->eiov->binv[i]);
        }
        ob->eiov->vsize = 1;
        ob->eiov->size = 0;
        return reds;
    }

    /* Step past the 'H' tag and read the hopefull header fields. */
    hdr++;
    hopefull_flags = get_int64(hdr);

    hdr += 8;
    hopefull_ix = get_int32(hdr);

    if ((~dflags & DFLAG_SPAWN)
        && ep[0] == SMALL_TUPLE_EXT
        && ((ep[1] == 6
             && ep[2] == SMALL_INTEGER_EXT
             && ep[3] == DOP_SPAWN_REQUEST)
            || (ep[1] == 8
                && ep[2] == SMALL_INTEGER_EXT
                && ep[3] == DOP_SPAWN_REQUEST_TT))) {
        /*
         * Receiver does not support distributed spawn. Convert
         * this packet to an empty (tick) packet, and inform
         * spawning process that this is not supported...
         */
        ErtsTranscodeDecodeState tds;
        Eterm ctl_msg, ref, pid, token, *tp;
        int i;

        hdr += 4;
        payload_ix = get_int32(hdr);
        ASSERT(payload_ix >= 3);

        /* Decode only the control message (iov[2..payload_ix-1]). */
        ctl_msg = transcode_decode_ctl_msg(&tds, iov, payload_ix);

        ASSERT(is_tuple_arity(ctl_msg, 6)
               || is_tuple_arity(ctl_msg, 8));
        tp = tuple_val(ctl_msg);
        ASSERT(tp[1] == make_small(DOP_SPAWN_REQUEST)
               || tp[1] == make_small(DOP_SPAWN_REQUEST_TT));

        ref = tp[2];
        pid = tp[3];
        if (tp[1] == make_small(DOP_SPAWN_REQUEST))
            token = NIL;
        else {
            /* 8-arity DOP_SPAWN_REQUEST_TT carries a seq-trace token. */
            token = tp[8];
            erts_seq_trace_update_node_token(token);
        }
        ASSERT(is_internal_ordinary_ref(tp[2]));
        ASSERT(is_internal_pid(tp[3]));

        /* Fail the spawn locally with reason 'notsup'... */
        (void) erts_proc_sig_send_dist_spawn_reply(dep->sysname,
                                                   ref, pid,
                                                   NULL, am_notsup,
                                                   token);

        transcode_decode_state_destroy(&tds);

        /* ... and turn the packet into an empty (tick) packet. */
        for (i = 1; i < ob->eiov->vsize; i++) {
            if (ob->eiov->binv[i])
                driver_free_binary(ob->eiov->binv[i]);
        }
        ob->eiov->vsize = 1;
        ob->eiov->size = 0;

        reds -= 4;

        if (reds < 0)
            return 0;
        return reds;
    }

    if ((~dflags & DFLAG_UNLINK_ID)
        && ep[0] == SMALL_TUPLE_EXT
        && ep[1] == 4
        && ep[2] == SMALL_INTEGER_EXT
        && (ep[3] == DOP_UNLINK_ID_ACK || ep[3] == DOP_UNLINK_ID)) {

        if (ep[3] == DOP_UNLINK_ID_ACK) {
            /* Drop DOP_UNLINK_ID_ACK signal... */
            int i;
            for (i = 1; i < ob->eiov->vsize; i++) {
                if (ob->eiov->binv[i])
                    driver_free_binary(ob->eiov->binv[i]);
            }
            ob->eiov->vsize = 1;
            ob->eiov->size = 0;
        }
        else {
            Eterm ctl_msg, remote, local, *tp;
            ErtsTranscodeDecodeState tds;
            Uint64 id;
            byte *ptr;
            ASSERT(ep[3] == DOP_UNLINK_ID);
            /*
             * Rewrite the DOP_UNLINK_ID signal into a
             * DOP_UNLINK signal and send an unlink ack
             * to the local sender.
             */

            /*
             * decode control message so we get info
             * needed for unlink ack signal to send...
             */
            ASSERT(get_int32(hdr + 4) == 0); /* No payload */
            ctl_msg = transcode_decode_ctl_msg(&tds, iov, eiov->vsize);

            ASSERT(is_tuple_arity(ctl_msg, 4));

            tp = tuple_val(ctl_msg);
            ASSERT(tp[1] == make_small(DOP_UNLINK_ID));

            if (!term_to_Uint64(tp[2], &id))
                ERTS_INTERNAL_ERROR("Invalid encoding of DOP_UNLINK_ID signal");

            local = tp[3];
            remote = tp[4];

            ASSERT(is_internal_pid(local));
            ASSERT(is_external_pid(remote));

            /*
             * Rewrite buffer to an unlink signal by removing
             * second element and change first element to
             * DOP_UNLINK. That is, to: {DOP_UNLINK, local, remote}
             */

            /* Skip past the encoded unlink id (element 2) to find
             * where the new, shorter tuple header must be written. */
            ptr = &ep[4];
            switch (*ptr) {
            case SMALL_INTEGER_EXT:
                ptr += 1;
                break;
            case INTEGER_EXT:
                ptr += 4;
                break;
            case SMALL_BIG_EXT:
                ptr += 1;
                ASSERT(*ptr <= 8);
                ptr += *ptr + 1;
                break;
            default:
                ERTS_INTERNAL_ERROR("Invalid encoding of DOP_UNLINK_ID signal");
                break;
            }

            ASSERT((ptr - ep) <= 16);
            ASSERT((ptr - ep) <= iov[2].iov_len);

            /* Write the 3-tuple header backwards, ending one byte
             * before the end of the old id encoding; 'local' and
             * 'remote' that follow are reused as-is. */
            *(ptr--) = DOP_UNLINK;
            *(ptr--) = SMALL_INTEGER_EXT;
            *(ptr--) = 3;
            *ptr = SMALL_TUPLE_EXT;

            iov[2].iov_base = ptr;
            iov[2].iov_len -= (ptr - ep);

#ifdef DEBUG
            {
                /* Verify the rewritten buffer decodes as expected. */
                ErtsTranscodeDecodeState dbg_tds;
                Eterm new_ctl_msg = transcode_decode_ctl_msg(&dbg_tds,
                                                             iov,
                                                             eiov->vsize);
                ASSERT(is_tuple_arity(new_ctl_msg, 3));
                tp = tuple_val(new_ctl_msg);
                ASSERT(tp[1] == make_small(DOP_UNLINK));
                ASSERT(tp[2] == local);
                ASSERT(eq(tp[3], remote));
                transcode_decode_state_destroy(&dbg_tds);
            }
#endif

            /* Send unlink ack to local sender... */
            erts_proc_sig_send_dist_unlink_ack(NULL, dep,
                                               dep->connection_id,
                                               remote, local, id);

            transcode_decode_state_destroy(&tds);

            reds -= 5;
        }
        if (reds < 0)
            return 0;
        return reds;
    }

    /* Work below is accounted in finer-grained units; r counts
     * ERTS_TRANSCODE_REDS_FACT units per reduction. */
    start_r = r = reds*ERTS_TRANSCODE_REDS_FACT;

    /* Only walk the hopefull-encoding chain if the receiver lacks at
     * least one flag that some hopefull encoding here relies on. */
    if (~dflags & hopefull_flags) {

        while (hopefull_ix != ERTS_NO_HIX) {
            Uint32 new_hopefull_ix;

            if (r <= 0) { /* yield... */
                /* save current hopefull_ix... */
                ep = (byte *) iov[1].iov_base;
                ep += 5;
                put_int32(hopefull_ix, ep);
                return -1;
            }

            /* Read next hopefull index */
            ep = (byte *) iov[hopefull_ix].iov_base;
            ep -= 4;
            new_hopefull_ix = get_int32(ep);
            ASSERT(new_hopefull_ix == ERTS_NO_HIX
                   || (hopefull_ix < new_hopefull_ix
                       && new_hopefull_ix < eiov->vsize));

            ep = (byte *) iov[hopefull_ix].iov_base;
            switch (*ep) {

            case EXPORT_EXT: {
                byte *start_ep, *end_ep;
                Eterm module, function;
                if (!(hopefull_flags & DFLAG_EXPORT_PTR_TAG))
                    break; /* receiver supports it; leave as-is */
                /* Read original encoding... */
                ep++;
                start_ep = ep;
                ep = (byte*)dec_atom(NULL, ep, &module);
                ASSERT(ep && is_atom(module));
                ep = (byte*)dec_atom(NULL, ep, &function);
                ASSERT(ep && is_atom(function));
                end_ep = ep;
                ASSERT(*ep == SMALL_INTEGER_EXT
                       || *ep == INTEGER_EXT
                       || *ep == SMALL_BIG_EXT
                       || *ep == LARGE_BIG_EXT);

                /*
                 * module and function atoms are encoded
                 * between start_ep and end_ep. Prepend a
                 * 2-tuple tag before the atoms and
                 * remove arity at end.
                 */

                /* write fallback {Module, Function} 2-tuple */

                ep = start_ep;
                ep--;
                put_int8(2, ep);
                ep--;
                *ep = SMALL_TUPLE_EXT;

                iov[hopefull_ix].iov_base = ep;

                /* Update iov sizes... */
                new_len = end_ep - ep;
                eiov->size -= iov[hopefull_ix].iov_len;
                eiov->size += new_len;
                iov[hopefull_ix].iov_len = new_len;
                r--;
                break;
            }

            case BIT_BINARY_EXT: {
                /* A bit binary is spread over several iov elements:
                 * a prolog here, data in between, and a 1-byte epilog
                 * at the next hopefull index. */
                Uint bin_sz;
                byte bitsize, epilog_byte;
                ASSERT(hopefull_ix != ERTS_NO_HIX);
                if (!(hopefull_flags & DFLAG_BIT_BINARIES)) {
                    /* skip to epilog... */
                    hopefull_ix = new_hopefull_ix;
                    ep = (byte *) iov[hopefull_ix].iov_base;
                    ep -= 4;
                    new_hopefull_ix = get_int32(ep);
                    ASSERT(new_hopefull_ix == ERTS_NO_HIX
                           || (hopefull_ix < new_hopefull_ix
                               && new_hopefull_ix < eiov->vsize));
                    break;
                }

                /* read original encoded prolog... */
                ep++;
                bin_sz = get_uint32(ep);
                ep += 4;
                bitsize = *ep++;

                /* write fallback prolog: {Binary, Bits} 2-tuple; the
                 * 4 bytes that held the next-index link are reused as
                 * extra room in front of the element. */
                iov[hopefull_ix].iov_base = &((byte*)iov[hopefull_ix].iov_base)[-4];
                ep = (byte *) iov[hopefull_ix].iov_base;

                *ep++ = SMALL_TUPLE_EXT;
                *ep++ = 2;
                *ep++ = BINARY_EXT;
                put_int32(bin_sz, ep);
                ep += 4;

                /* Update iov sizes... */
                new_len = ep - (byte *) iov[hopefull_ix].iov_base;
                eiov->size -= iov[hopefull_ix].iov_len;
                eiov->size += new_len;
                iov[hopefull_ix].iov_len = new_len;
                r--;
#ifdef DEBUG
                /*
                 * The binary data between the prolog and the
                 * epilog should be of size 'bin_sz - 1' and
                 * exists in the iov elements between prolog
                 * and epilog...
                 */
                {
                    Uint ix, debug_bin_sz = 0;
                    for (ix = hopefull_ix+1; ix < new_hopefull_ix; ix++)
                        debug_bin_sz += iov[ix].iov_len;
                    ASSERT(debug_bin_sz == bin_sz - 1);
                }
#endif
                /* jump to epilog... */
                hopefull_ix = new_hopefull_ix;
                ep = (byte *) iov[hopefull_ix].iov_base;

                /* read original encoded epilog... */
                epilog_byte = *ep;

                ASSERT(1 == iov[hopefull_ix].iov_len);

                iov[hopefull_ix].iov_base = &((byte*)iov[hopefull_ix].iov_base)[-4];
                ep = (byte *) iov[hopefull_ix].iov_base;
                new_hopefull_ix = get_int32(ep);
                ASSERT(new_hopefull_ix == ERTS_NO_HIX
                       || (hopefull_ix < new_hopefull_ix
                           && new_hopefull_ix < eiov->vsize));

                /* write fallback epilog... */

                *ep++ = epilog_byte;
                *ep++ = SMALL_INTEGER_EXT;
                *ep++ = bitsize;

                /* Update iov sizes... */
                new_len = ep - (byte *) iov[hopefull_ix].iov_base;
                eiov->size -= iov[hopefull_ix].iov_len;
                eiov->size += new_len;
                iov[hopefull_ix].iov_len = new_len;
                r--;
                break;
            }

            default:
                ERTS_INTERNAL_ERROR("Unexpected external tag");
                break;
            }

            hopefull_ix = new_hopefull_ix;
            r--;
        }
    }

    /*
     * Replace hopefull data header with actual header...
     */
    ep = (byte *) iov[1].iov_base;
    eiov->size -= iov[1].iov_len;

    if (dflags & (DFLAG_DIST_HDR_ATOM_CACHE|DFLAG_FRAGMENTS)) {
        /*
         * Encoding was done without atom caching but receiver expects
         * a dist header, so we prepend an empty one.
         */
        *ep++ = VERSION_MAGIC;
        *ep++ = DIST_HEADER;
        *ep++ = 0; /* NumberOfAtomCacheRefs */
    }
    else {
        hdr += 4;
        payload_ix = get_int32(hdr);

        if (payload_ix) {
            ASSERT(0 < payload_ix && payload_ix < eiov->vsize);
            /* Prepend version magic on payload. */
            iov[payload_ix].iov_base = &((byte*)iov[payload_ix].iov_base)[-1];
            *((byte *) iov[payload_ix].iov_base) = VERSION_MAGIC;
            iov[payload_ix].iov_len++;
            eiov->size++;
            r--;
        }

        *ep++ = PASS_THROUGH;
        *ep++ = VERSION_MAGIC;
    }

    iov[1].iov_len = ep - (byte *) iov[1].iov_base;
    eiov->size += iov[1].iov_len;

    r--;

    /* done... */

    /* Convert fine-grained work units back to reductions (round up). */
    reds -= (start_r - r)/ERTS_TRANSCODE_REDS_FACT + 1;
    if (reds < 0)
        return 0;
    return reds;
}
6445