1 /*
2 * %CopyrightBegin%
3 *
4 * Copyright Ericsson AB 1996-2020. All Rights Reserved.
5 *
6 * Licensed under the Apache License, Version 2.0 (the "License");
7 * you may not use this file except in compliance with the License.
8 * You may obtain a copy of the License at
9 *
10 * http://www.apache.org/licenses/LICENSE-2.0
11 *
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an "AS IS" BASIS,
14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
17 *
18 * %CopyrightEnd%
19 */
20
21 /* Implementation of the erlang external format
22 *
23 * And a nice cache mechanism which is used just to send a
24 * index indicating a specific atom to a remote node instead of the
25 * entire atom.
26 */
27
28 #ifdef HAVE_CONFIG_H
29 # include "config.h"
30 #endif
31
32 #define ERTS_WANT_EXTERNAL_TAGS
33
34 #include "sys.h"
35 #include "erl_vm.h"
36 #include "global.h"
37 #include "erl_process.h"
38 #include "error.h"
39 #include "external.h"
40 #define ERL_WANT_HIPE_BIF_WRAPPER__
41 #include "bif.h"
42 #undef ERL_WANT_HIPE_BIF_WRAPPER__
43 #include "big.h"
44 #include "dist.h"
45 #include "erl_binary.h"
46 #include "erl_bits.h"
47 #include "erl_zlib.h"
48 #include "erl_map.h"
49 #include "erl_proc_sig_queue.h"
50 #include "erl_trace.h"
51
/* Single-byte tag used when no distribution header is sent; the receiver
 * skips it before decoding (see erts_prepare_dist_ext()). */
#define PASS_THROUGH 'p'

/* True if 'ptr' points into the 'nbytes' bytes starting at 'start'.
 * Relies on unsigned wrap-around of the pointer difference. */
#define in_area(ptr,start,nbytes) ((UWord)((char*)(ptr) - (char*)(start)) < (nbytes))

/* Longest list of small integers encoded as STRING_EXT (16-bit length). */
#define MAX_STRING_LEN 0xffff

/*
 * MAX value for the creation field in pid, port and reference
 * for the old PID_EXT, PORT_EXT, REFERENCE_EXT and NEW_REFERENCE_EXT.
 * Older nodes (OTP 19-22) will send us these so we must be able to decode them.
 *
 * From OTP 23 DFLAG_BIG_CREATION is mandatory so this node will always
 * encode with new big 32-bit creations using NEW_PID_EXT, NEW_PORT_EXT
 * and NEWER_REFERENCE_EXT.
 */
#define ERTS_MAX_TINY_CREATION (3)
#define is_tiny_creation(Cre) ((unsigned)(Cre) <= ERTS_MAX_TINY_CREATION)

/*
 * When 0 is used as creation, the real creation
 * is unknown. Creation 0 on data will be changed to current
 * creation of the node which it belongs to when it enters
 * that node.
 * This typically happens when a remote pid is created with
 * list_to_pid/1 and then sent to the remote node. This behavior
 * has the undesirable effect that a pid can be passed between nodes,
 * and as a result of that not being equal to itself (the pid that
 * comes back isn't equal to the original pid).
 *
 */

#undef ERTS_DEBUG_USE_DIST_SEP
#ifdef DEBUG
#  if 0
/*
 * Enabling ERTS_DEBUG_USE_DIST_SEP can be useful when debugging, but the
 * result refuses to talk to nodes without it!
 */
#    define ERTS_DEBUG_USE_DIST_SEP
#  endif
#  define IF_DEBUG(X) X
#else
#  define IF_DEBUG(X)
#endif

/* Does Sint fit in Sint32?
 * (arithmetic shift collapses any in-range value to -1 or 0; +1 -> 0 or 1)
 */
#define IS_SSMALL32(x) (((Uint) (((x) >> (32-1)) + 1)) < 2)
100
101 static Export term_to_binary_trap_export;
102
103 static byte* enc_term(ErtsAtomCacheMap *, Eterm, byte*, Uint64, struct erl_off_heap_header** off_heap);
104 struct TTBEncodeContext_;
105 static int enc_term_int(struct TTBEncodeContext_*,ErtsAtomCacheMap *acmp, Eterm obj, byte* ep, Uint64 dflags,
106 struct erl_off_heap_header** off_heap, Sint *reds, byte **res);
107 static int is_external_string(Eterm obj, Uint* lenp);
108 static byte* enc_atom(ErtsAtomCacheMap *, Eterm, byte*, Uint64);
109 static byte* enc_pid(ErtsAtomCacheMap *, Eterm, byte*, Uint64);
110 struct B2TContext_t;
111 static byte* dec_term(ErtsDistExternal*, ErtsHeapFactory*, byte*, Eterm*, struct B2TContext_t*, int);
112 static byte* dec_atom(ErtsDistExternal *, byte*, Eterm*);
113 static byte* dec_pid(ErtsDistExternal *, ErtsHeapFactory*, byte*, Eterm*, byte tag);
114 static Sint decoded_size(byte *ep, byte* endp, int internal_tags, struct B2TContext_t*);
115 static BIF_RETTYPE term_to_binary_trap_1(BIF_ALIST_1);
116
117 static Eterm erts_term_to_binary_int(Process* p, Sint bif_ix, Eterm Term, Eterm opts, int level,
118 Uint64 dflags, Binary *context_b, int iovec,
119 Uint fragment_size);
120
121 static Uint encode_size_struct2(ErtsAtomCacheMap *, Eterm, Uint64);
122 static ErtsExtSzRes encode_size_struct_int(TTBSizeContext*, ErtsAtomCacheMap *acmp,
123 Eterm obj, Uint64 dflags, Sint *reds, Uint *res);
124
125 static Export binary_to_term_trap_export;
126 static BIF_RETTYPE binary_to_term_trap_1(BIF_ALIST_1);
127 static Sint transcode_dist_obuf(ErtsDistOutputBuf*, DistEntry*, Uint64 dflags, Sint reds);
128 static byte *hopefull_bit_binary(TTBEncodeContext* ctx, byte **epp, Binary *pb_val, Eterm pb_term,
129 byte *bytes, byte bitoffs, byte bitsize, Uint sz);
130 static void hopefull_export(TTBEncodeContext* ctx, byte **epp, Export* exp, Uint32 dflags,
131 struct erl_off_heap_header** off_heap);
132 static void store_in_vec(TTBEncodeContext *ctx, byte *ep, Binary *ohbin, Eterm ohpb,
133 byte *ohp, Uint ohsz);
134
erts_init_external(void)135 void erts_init_external(void) {
136 erts_init_trap_export(&term_to_binary_trap_export,
137 am_erts_internal, am_term_to_binary_trap, 1,
138 &term_to_binary_trap_1);
139
140 erts_init_trap_export(&binary_to_term_trap_export,
141 am_erts_internal, am_binary_to_term_trap, 1,
142 &binary_to_term_trap_1);
143 return;
144 }
145
/* Max number of internal atom cache entries referable from one
 * distribution header (must fit in one unsigned byte on the wire). */
#define ERTS_MAX_INTERNAL_ATOM_CACHE_ENTRIES 255

/* Each atom-cache reference in a dist header carries a 4-bit flag nibble;
 * two nibbles are packed per flag byte. These macros map a reference index
 * to its flag byte/bit position, and compute the number of flag bytes
 * needed for NO_ATOMS references (incl. the extra long-atoms flag nibble). */
#define ERTS_DIST_HDR_ATOM_CACHE_FLAG_BYTE_IX(IIX) \
  (((((Uint32) (IIX)) >> 1) & 0x7fffffff))
#define ERTS_DIST_HDR_ATOM_CACHE_FLAG_BIT_IX(IIX) \
  (((IIX) << 2) & 7)
#define ERTS_DIST_HDR_ATOM_CACHE_FLAG_BYTES(NO_ATOMS) \
  (((((Uint32) (NO_ATOMS)) >> 1) & 0x7fffffff)+1)

/* Dist-header flag bit: atom lengths are 2 bytes instead of 1. */
#define ERTS_DIST_HDR_LONG_ATOMS_FLG (1 << 0)

/* #define ERTS_ATOM_CACHE_HASH */
#define ERTS_USE_ATOM_CACHE_SIZE 2039
#if ERTS_ATOM_CACHE_SIZE < ERTS_USE_ATOM_CACHE_SIZE
#error "ERTS_USE_ATOM_CACHE_SIZE too large"
#endif
162
163 static ERTS_INLINE int
atom2cix(Eterm atom)164 atom2cix(Eterm atom)
165 {
166 Uint val;
167 ASSERT(is_atom(atom));
168 val = atom_val(atom);
169 #ifdef ERTS_ATOM_CACHE_HASH
170 val = atom_tab(val)->slot.bucket.hvalue;
171 #endif
172 #if ERTS_USE_ATOM_CACHE_SIZE == 256
173 return (int) (val & ((Uint) 0xff));
174 #else
175 return (int) (val % ERTS_USE_ATOM_CACHE_SIZE);
176 #endif
177 }
178
erts_debug_max_atom_out_cache_index(void)179 int erts_debug_max_atom_out_cache_index(void)
180 {
181 return ERTS_USE_ATOM_CACHE_SIZE-1;
182 }
183
184 int
erts_debug_atom_to_out_cache_index(Eterm atom)185 erts_debug_atom_to_out_cache_index(Eterm atom)
186 {
187 return atom2cix(atom);
188 }
189
190 void
erts_init_atom_cache_map(ErtsAtomCacheMap * acmp)191 erts_init_atom_cache_map(ErtsAtomCacheMap *acmp)
192 {
193 if (acmp) {
194 int ix;
195 acmp->long_atoms = 0;
196 for (ix = 0; ix < ERTS_ATOM_CACHE_SIZE; ix++)
197 acmp->cache[ix].iix = -1;
198 acmp->sz = 0;
199 acmp->hdr_sz = -1;
200 }
201 }
202
203 void
erts_reset_atom_cache_map(ErtsAtomCacheMap * acmp)204 erts_reset_atom_cache_map(ErtsAtomCacheMap *acmp)
205 {
206 if (acmp) {
207 int i;
208 acmp->long_atoms = 0;
209 for (i = 0; i < acmp->sz; i++) {
210 ASSERT(0 <= acmp->cix[i] && acmp->cix[i] < ERTS_ATOM_CACHE_SIZE);
211 acmp->cache[acmp->cix[i]].iix = -1;
212 }
213 acmp->sz = 0;
214 acmp->hdr_sz = -1;
215 #ifdef DEBUG
216 for (i = 0; i < ERTS_ATOM_CACHE_SIZE; i++) {
217 ASSERT(acmp->cache[i].iix < 0);
218 }
219 #endif
220 }
221 }
222
/* Destroy an atom cache map. Intentionally a no-op: the map owns no
 * dynamically allocated resources. Kept for interface symmetry with
 * erts_init_atom_cache_map(). */
void
erts_destroy_atom_cache_map(ErtsAtomCacheMap *acmp)
{

}
228
229 static ERTS_INLINE void
insert_acache_map(ErtsAtomCacheMap * acmp,Eterm atom,Uint64 dflags)230 insert_acache_map(ErtsAtomCacheMap *acmp, Eterm atom, Uint64 dflags)
231 {
232 if (acmp && acmp->sz < ERTS_MAX_INTERNAL_ATOM_CACHE_ENTRIES) {
233 int ix;
234 ASSERT(acmp->hdr_sz < 0);
235 ASSERT(dflags & DFLAG_UTF8_ATOMS);
236 ix = atom2cix(atom);
237 if (acmp->cache[ix].iix < 0) {
238 acmp->cache[ix].iix = acmp->sz;
239 acmp->cix[acmp->sz++] = ix;
240 acmp->cache[ix].atom = atom;
241 }
242 }
243 }
244
245 static ERTS_INLINE int
get_iix_acache_map(ErtsAtomCacheMap * acmp,Eterm atom,Uint64 dflags)246 get_iix_acache_map(ErtsAtomCacheMap *acmp, Eterm atom, Uint64 dflags)
247 {
248 if (!acmp)
249 return -1;
250 else {
251 int ix;
252 ASSERT(is_atom(atom));
253 ix = atom2cix(atom);
254 if (acmp->cache[ix].iix < 0) {
255 ASSERT(acmp->sz == ERTS_MAX_INTERNAL_ATOM_CACHE_ENTRIES);
256 return -1;
257 }
258 else {
259 ASSERT(acmp->cache[ix].iix < ERTS_ATOM_CACHE_SIZE);
260 return acmp->cache[ix].atom == atom ? acmp->cache[ix].iix : -1;
261 }
262 }
263 }
264
265 void
erts_finalize_atom_cache_map(ErtsAtomCacheMap * acmp,Uint64 dflags)266 erts_finalize_atom_cache_map(ErtsAtomCacheMap *acmp, Uint64 dflags)
267 {
268 if (acmp) {
269 int long_atoms = 0; /* !0 if one or more atoms are longer than 255. */
270 int i;
271 int sz = 0;
272 int min_sz;
273 ASSERT(dflags & DFLAG_UTF8_ATOMS);
274 ASSERT(acmp->hdr_sz < 0);
275 /* Make sure cache update instructions fit */
276 min_sz = (2+4)*acmp->sz;
277 for (i = 0; i < acmp->sz; i++) {
278 Atom *a;
279 Eterm atom;
280 int len;
281 atom = acmp->cache[acmp->cix[i]].atom;
282 ASSERT(is_atom(atom));
283 a = atom_tab(atom_val(atom));
284 len = (int) a->len;
285 ASSERT(len >= 0);
286 if (!long_atoms && len > 255)
287 long_atoms = 1;
288 /* Enough for a new atom cache value */
289 sz += 1 /* cix */ + 1 /* length */ + len /* text */;
290 }
291 if (long_atoms) {
292 acmp->long_atoms = 1;
293 sz += acmp->sz; /* we need 2 bytes per atom for length */
294 }
295 /* Dynamically sized flag field */
296 sz += ERTS_DIST_HDR_ATOM_CACHE_FLAG_BYTES(acmp->sz);
297 if (sz < min_sz)
298 sz = min_sz;
299 acmp->hdr_sz = sz;
300 }
301 }
302
303 Uint
erts_encode_ext_dist_header_size(TTBEncodeContext * ctx,ErtsAtomCacheMap * acmp,Uint fragments)304 erts_encode_ext_dist_header_size(TTBEncodeContext *ctx,
305 ErtsAtomCacheMap *acmp,
306 Uint fragments)
307 {
308 if (ctx->dflags & DFLAG_PENDING_CONNECT) {
309 /* HOPEFUL_DATA + hopefull flags + hopefull ix + payload ix */
310 return 1 + 8 + 4 + 4;
311 }
312 else if (!acmp && !(ctx->dflags & DFLAG_FRAGMENTS))
313 return 1; /* pass through */
314 else {
315 int fix_sz
316 = 1 /* VERSION_MAGIC */
317 + 1 /* DIST_HEADER */
318 + 1 /* dist header flags */
319 + 1 /* number of internal cache entries */
320 ;
321
322 if (fragments > 1)
323 fix_sz += 8 /* sequence id */
324 + 8 /* number of fragments */
325 ;
326 if (acmp) {
327 ASSERT(acmp->hdr_sz >= 0);
328 fix_sz += acmp->hdr_sz;
329 } else {
330 ASSERT(ctx->dflags & DFLAG_FRAGMENTS);
331 }
332
333 return fix_sz;
334 }
335 }
336
/* Write the (preliminary) distribution header backwards, ending just
 * before 'ctl_ext', and return a pointer to its first byte.
 * Three layouts are produced depending on connection state/flags:
 *   - pending connection: HOPEFUL_DATA + flag/index slots patched later,
 *   - no atom cache and no fragmentation: a single PASS_THROUGH byte,
 *   - otherwise: VERSION_MAGIC + DIST_HEADER/DIST_FRAG_HEADER + cache
 *     update instructions (internal format, rewritten by
 *     erts_encode_ext_dist_header_finalize() before hitting the wire). */
byte *erts_encode_ext_dist_header_setup(TTBEncodeContext *ctx,
                                        byte *ctl_ext, ErtsAtomCacheMap *acmp,
                                        Uint fragments, Eterm from)
{
    /* Maximum number of atom must be less than the maximum of a 32 bits
       unsigned integer. Check is done in erl_init.c, erl_start function. */
    if (ctx->dflags & DFLAG_PENDING_CONNECT) {
        byte *ep = ctl_ext;
        /* Reserve patch slots; ctx keeps pointers so the encoder can fill
         * them in once the payload position and hopefull flags are known. */
        ep -= 4;
        ctx->payload_ixp = ep;
        put_int32(0, ep);
        ep -= 4;
        ctx->hopefull_ixp = ep;
        put_int32(ERTS_NO_HIX, ep);
        ep -= 8;
        ctx->hopefull_flagsp = ep;
        put_int64(0, ep);
        *--ep = HOPEFUL_DATA;
        return ep;
    }
    else if (!acmp && !(ctx->dflags & DFLAG_FRAGMENTS)) {
        byte *ep = ctl_ext;
        *--ep = PASS_THROUGH;
        return ep;
    }
    else {
        int i;
        byte *ep = ctl_ext;
        byte dist_hdr_flags = acmp && acmp->long_atoms ? ERTS_DIST_HDR_LONG_ATOMS_FLG : 0;
        ASSERT(!acmp || acmp->hdr_sz >= 0);

        if (acmp) {
            /*
             * Write cache update instructions. Note that this is a purely
             * internal format, never seen on the wire. This section is later
             * rewritten by erts_encode_ext_dist_header_finalize() while updating
             * the cache. We write the header backwards just before the
             * actual term(s).
             */
            for (i = acmp->sz-1; i >= 0; i--) {
                Uint32 aval;
                ASSERT(0 <= acmp->cix[i] && acmp->cix[i] < ERTS_ATOM_CACHE_SIZE);
                ASSERT(i == acmp->cache[acmp->cix[i]].iix);
                ASSERT(is_atom(acmp->cache[acmp->cix[i]].atom));

                /* Each instruction: 2-byte cache index + 4-byte atom value. */
                aval = (Uint32) atom_val(acmp->cache[acmp->cix[i]].atom);
                ep -= 4;
                put_int32(aval, ep);
                ep -= 2;
                put_int16(acmp->cix[i], ep);
            }
            --ep;
            put_int8(acmp->sz, ep);
        } else {
            ASSERT(ctx->dflags & DFLAG_FRAGMENTS);
            /* If we don't have an atom cache but are using a dist header we just put 0
               in the atom cache size slot */
            --ep;
            put_int8(0, ep);
        }
        --ep;
        put_int8(dist_hdr_flags, ep);
        if (fragments > 1) {
            /* Fragmented message: sequence id (sender pid) + fragment count. */
            ASSERT(is_pid(from));
            ep -= 8;
            put_int64(fragments, ep);
            ep -= 8;
            put_int64(from, ep);
            *--ep = DIST_FRAG_HEADER;
        } else {
            *--ep = DIST_HEADER;
        }
        *--ep = VERSION_MAGIC;
        return ep;
    }
}
413
/* Write a fragment-continuation header (VERSION_MAGIC, DIST_FRAG_CONT,
 * 8-byte sequence id, 8-byte fragment number) at *hdrpp. Advances *hdrpp
 * past the header and returns where the header begins. */
byte *erts_encode_ext_dist_header_fragment(byte **hdrpp,
                                           Uint fragment,
                                           Eterm from)
{
    byte *start = *hdrpp;
    byte *p = start;

    ASSERT(is_pid(from));

    *p++ = VERSION_MAGIC;
    *p++ = DIST_FRAG_CONT;
    put_int64(from, p);
    p += 8;
    put_int64(fragment, p);
    p += 8;

    *hdrpp = p;
    return start;
}
429
430
/* Rewrite the preliminary header produced by
 * erts_encode_ext_dist_header_setup() into its on-wire form while updating
 * the outgoing atom cache in 'dep'. Returns remaining reductions (>= 0),
 * or delegates to transcode_dist_obuf() for pending-connection buffers. */
Sint erts_encode_ext_dist_header_finalize(ErtsDistOutputBuf* ob,
                                          DistEntry* dep,
                                          Uint64 dflags,
                                          Sint reds)
{
    byte *ip;
    byte instr_buf[(2+4)*ERTS_ATOM_CACHE_SIZE];
    int ci, sz;
    byte dist_hdr_flags;
    int long_atoms;
    Uint64 seq_id = 0, frag_id = 0;
    register byte *ep = ob->eiov->iov[1].iov_base;
    ASSERT(dflags & DFLAG_UTF8_ATOMS);

    /*
     * The buffer can have different layouts at this point depending on
     * what was known when encoded:
     *
     * Pending connection: HOPEFUL_DATA, HFlgs, HIX, PIX, CtrlTerm [, MsgTerm]
     * With atom cache : VERSION_MAGIC, DIST_HEADER, ..., CtrlTerm [, MsgTerm]
     * No atom cache : VERSION_MAGIC, CtrlTerm [, VERSION_MAGIC, MsgTerm]
     */

    if (ep[0] == HOPEFUL_DATA)
        return transcode_dist_obuf(ob, dep, dflags, reds);

    if (ep[0] == PASS_THROUGH) {
        /* Nothing to rewrite for pass-through buffers. */
        ASSERT(!(dflags & (DFLAG_DIST_HDR_ATOM_CACHE|DFLAG_FRAGMENTS)));
        ASSERT(ob->eiov->iov[1].iov_len == 1);
        return reds;
    }

    if (ep[1] == DIST_FRAG_CONT) {
        /* Continuation fragments carry no atom cache section. */
        ASSERT(ep[0] == VERSION_MAGIC);
        ASSERT(ob->eiov->iov[1].iov_len == 18);
        return reds;
    }

    if (ep[1] == DIST_FRAG_HEADER) {
        /* skip the seq id and frag id (re-emitted at the end below) */
        seq_id = get_int64(&ep[2]);
        ep += 8;
        frag_id = get_int64(&ep[2]);
        ep += 8;
    }

    dist_hdr_flags = ep[2];
    long_atoms = ERTS_DIST_HDR_LONG_ATOMS_FLG & ((int) dist_hdr_flags);

    /*
     * Update output atom cache and write the external version of
     * the dist header. We write the header backwards just
     * before the actual term(s).
     */
    ep += 3;
    ci = (int) get_int8(ep);
    ASSERT(0 <= ci && ci < ERTS_ATOM_CACHE_SIZE);
    ep += 1;
    /* Copy the internal cache update instructions aside; they are about to
     * be overwritten by the backwards-written wire header. */
    sz = (2+4)*ci;
    ip = &instr_buf[0];
    sys_memcpy((void *) ip, (void *) ep, sz);
    ep += sz;
    ASSERT(ep == (byte *) (ob->eiov->iov[1].iov_base + ob->eiov->iov[1].iov_len));
    if (ci > 0) {
        Uint32 flgs_buf[((ERTS_DIST_HDR_ATOM_CACHE_FLAG_BYTES(
                              ERTS_MAX_INTERNAL_ATOM_CACHE_ENTRIES)-1)
                         / sizeof(Uint32))+1];
        register Uint32 flgs;
        int iix, flgs_bytes, flgs_buf_ix, used_half_bytes;
        ErtsAtomCache* cache = dep->cache;
#ifdef DEBUG
        int tot_used_half_bytes;
#endif

        flgs_bytes = ERTS_DIST_HDR_ATOM_CACHE_FLAG_BYTES(ci);

        ASSERT(flgs_bytes <= sizeof(flgs_buf));
        /* The long-atoms flag occupies the first (highest) nibble. */
        flgs = (Uint32) dist_hdr_flags;
        flgs_buf_ix = 0;
        if ((ci & 1) == 0)
            used_half_bytes = 2;
        else
            used_half_bytes = 1;
#ifdef DEBUG
        tot_used_half_bytes = used_half_bytes;
#endif
        /* Walk the references backwards, accumulating one 4-bit flag nibble
         * per reference and writing the reference bytes back-to-front. */
        iix = ci-1;
        while (iix >= 0) {
            int cix;
            Eterm atom;

            if (used_half_bytes != 8)
                flgs <<= 4;
            else {
                flgs_buf[flgs_buf_ix++] = flgs;
                flgs = 0;
                used_half_bytes = 0;
            }

            ip = &instr_buf[0] + (2+4)*iix;
            cix = (int) get_int16(&ip[0]);
            ASSERT(0 <= cix && cix < ERTS_ATOM_CACHE_SIZE);
            atom = make_atom((Uint) get_int32(&ip[2]));
            if (cache->out_arr[cix] == atom) {
                /* Atom already in the peer's cache: emit low 8 bits of the
                 * index; high 3 bits go into the flag nibble. */
                --ep;
                put_int8(cix, ep);
                flgs |= ((cix >> 8) & 7);
            }
            else {
                /* New cache entry: emit atom text + length + index, and set
                 * the "new atom" bit (8) in the flag nibble. */
                Atom *a;
                cache->out_arr[cix] = atom;
                a = atom_tab(atom_val(atom));
                sz = a->len;
                ep -= sz;
                sys_memcpy((void *) ep, (void *) a->name, sz);
                if (long_atoms) {
                    ep -= 2;
                    put_int16(sz, ep);
                }
                else {
                    ASSERT(0 <= sz && sz <= 255);
                    --ep;
                    put_int8(sz, ep);
                }
                --ep;
                put_int8(cix, ep);
                flgs |= (8 | ((cix >> 8) & 7));
            }
            iix--;
            used_half_bytes++;
#ifdef DEBUG
            tot_used_half_bytes++;
#endif
        }
        ASSERT(tot_used_half_bytes == 2*flgs_bytes);
        flgs_buf[flgs_buf_ix] = flgs;
        /* Emit the accumulated flag bytes, again back-to-front. */
        flgs_buf_ix = 0;
        while (1) {
            flgs = flgs_buf[flgs_buf_ix];
            if (flgs_bytes > 4) {
                *--ep = (byte) ((flgs >> 24) & 0xff);
                *--ep = (byte) ((flgs >> 16) & 0xff);
                *--ep = (byte) ((flgs >> 8) & 0xff);
                *--ep = (byte) (flgs & 0xff);
                flgs_buf_ix++;
                flgs_bytes -= 4;
            }
            else {
                switch (flgs_bytes) {
                case 4:
                    *--ep = (byte) ((flgs >> 24) & 0xff);
                    /* fall through */
                case 3:
                    *--ep = (byte) ((flgs >> 16) & 0xff);
                    /* fall through */
                case 2:
                    *--ep = (byte) ((flgs >> 8) & 0xff);
                    /* fall through */
                case 1:
                    *--ep = (byte) (flgs & 0xff);
                }
                break;
            }
        }
        reds -= 3; /*was ERTS_PORT_REDS_DIST_CMD_FINALIZE*/
    }
    --ep;
    put_int8(ci, ep);
    if (seq_id) {
        ep -= 8;
        put_int64(frag_id, ep);
        ep -= 8;
        put_int64(seq_id, ep);
        *--ep = DIST_FRAG_HEADER;
    } else {
        *--ep = DIST_HEADER;
    }
    *--ep = VERSION_MAGIC;

    /* The header may have grown or shrunk; adjust the iov accordingly. */
    sz = ((byte *) ob->eiov->iov[1].iov_base) - ep;
    ob->eiov->size += sz;
    ob->eiov->iov[1].iov_len += sz;
    ob->eiov->iov[1].iov_base = ep;

    return reds < 0 ? 0 : reds;
}
614
/* Compute (possibly in several reduction-bounded steps via 'ctx') the
 * encoded size of 'term', plus the number of fragments and io-vector
 * elements needed. *szp accumulates across multiple terms; outputs are
 * only written once the size computation completes (ERTS_EXT_SZ_OK). */
ErtsExtSzRes
erts_encode_dist_ext_size(Eterm term,
                          ErtsAtomCacheMap *acmp,
                          TTBSizeContext* ctx,
                          Uint* szp, Sint *redsp,
                          Sint *vlenp, Uint *fragmentsp)
{
    Uint sz;
    ErtsExtSzRes res;

    ASSERT(ctx);
    ASSERT(szp);
    ASSERT(vlenp);
    ASSERT(fragmentsp);

    sz = *szp;

    if (!ctx->wstack.wstart) {
        /*
         * First call for this 'term'. We might however encode
         * multiple terms and this might not be the first term
         * in the sequence. 'ctx' should contain valid info about
         * about previous terms regarding fragments, and vlen.
         * 'szp' should contain valid info about the total size
         * of previous terms.
         */
        if (ctx->vlen < 0) {
            /* First term as well */
            ctx->vlen = 0;
            if (ctx->dflags & DFLAG_FRAGMENTS)
                ctx->fragment_size = ERTS_DIST_FRAGMENT_SIZE;
        }

#ifndef ERTS_DEBUG_USE_DIST_SEP
        if (!(ctx->dflags & (DFLAG_DIST_HDR_ATOM_CACHE|DFLAG_FRAGMENTS)))
#endif
            sz++ /* VERSION_MAGIC */;

    }

    res = encode_size_struct_int(ctx, acmp, term, ctx->dflags, redsp, &sz);

    if (res == ERTS_EXT_SZ_OK) {
        Uint total_size, fragments;

        /*
         * Each fragment use
         * - one element for driver header
         * - one element for fragment header
         * - and (at least) one for data
         */
        total_size = sz + ctx->extra_size;
        /* ceiling division: at least one fragment */
        fragments = (total_size - 1)/ctx->fragment_size + 1;

        *szp = sz;
        *fragmentsp = fragments;
        *vlenp = ctx->vlen + 3*fragments;
    }

    return res;
}
676
erts_encode_ext_size_2(Eterm term,unsigned dflags,Uint * szp)677 ErtsExtSzRes erts_encode_ext_size_2(Eterm term, unsigned dflags, Uint *szp)
678 {
679 ErtsExtSzRes res;
680 *szp = 0;
681 res = encode_size_struct_int(NULL, NULL, term, dflags, NULL, szp);
682 (*szp)++ /* VERSION_MAGIC */;
683 return res;
684 }
685
erts_encode_ext_size(Eterm term,Uint * szp)686 ErtsExtSzRes erts_encode_ext_size(Eterm term, Uint *szp)
687 {
688 return erts_encode_ext_size_2(term, TERM_TO_BINARY_DFLAGS, szp);
689 }
690
erts_encode_ext_size_ets(Eterm term)691 Uint erts_encode_ext_size_ets(Eterm term)
692 {
693 return encode_size_struct2(NULL, term,
694 TERM_TO_BINARY_DFLAGS|DFLAG_ETS_COMPRESSED);
695 }
696
697
/* Encode 'term' for distribution into *ext (reduction bounded via 'ctx';
 * returns non-zero when yielding). Advances *ext past the encoded data,
 * reports fragment count in *fragmentsp, and patches the hopefull
 * flag/payload slots reserved by the header setup for pending connects. */
int erts_encode_dist_ext(Eterm term, byte **ext, Uint64 flags, ErtsAtomCacheMap *acmp,
                         TTBEncodeContext* ctx, Uint *fragmentsp, Sint* reds)
{
    int res;
    ASSERT(ctx);

    if (!ctx->wstack.wstart) {
        /* First call for this term. */
        ctx->cptr = *ext;
#ifndef ERTS_DEBUG_USE_DIST_SEP
        if (!(flags & (DFLAG_DIST_HDR_ATOM_CACHE|DFLAG_PENDING_CONNECT|DFLAG_FRAGMENTS)))
#endif
            *(*ext)++ = VERSION_MAGIC;
#ifndef ERTS_DEBUG_USE_DIST_SEP
        if (flags & DFLAG_PENDING_CONNECT) {
            Sint payload_ix = ctx->vlen;
            ASSERT(ctx->payload_ixp);
            if (payload_ix) {
                /* we potentially need a version magic on the payload... */
                (*ext)++;
                ctx->cptr = *ext;
                put_int32(payload_ix, ctx->payload_ixp);
            }
        }
#endif
    }
    res = enc_term_int(ctx, acmp, term, *ext, flags, NULL, reds, ext);
    if (fragmentsp)
        *fragmentsp = res == 0 ? ctx->frag_ix + 1 : ctx->frag_ix;
    if (flags & DFLAG_PENDING_CONNECT) {
        /* Record which "hopefull" encodings were used so they can be
         * transcoded if the connection ends up not supporting them. */
        ASSERT(ctx->hopefull_flagsp);
        put_int64(ctx->hopefull_flags, ctx->hopefull_flagsp);
    }
    return res;
}
732
/* Encode 'term' (VERSION_MAGIC followed by the external format) at *ext,
 * advancing *ext past the result. Aborts the emulator on encoder failure,
 * which indicates a corrupt internal data structure. */
void erts_encode_ext(Eterm term, byte **ext)
{
    byte *p = *ext;

    *p++ = VERSION_MAGIC;
    p = enc_term(NULL, term, p, TERM_TO_BINARY_DFLAGS, NULL);
    if (p == NULL) {
        erts_exit(ERTS_ABORT_EXIT,
                  "%s:%d:erts_encode_ext(): Internal data structure error\n",
                  __FILE__, __LINE__);
    }
    *ext = p;
}
744
/* Encode 'term' for ETS storage (compressed, off-heap data kept as
 * references in 'off_heap'); returns the position after the encoding. */
byte* erts_encode_ext_ets(Eterm term, byte *ep, struct erl_off_heap_header** off_heap)
{
    const Uint64 dflags = TERM_TO_BINARY_DFLAGS|DFLAG_ETS_COMPRESSED;
    return enc_term(NULL, term, ep, dflags, off_heap);
}
750
751
752 static Uint
dist_ext_size(ErtsDistExternal * edep)753 dist_ext_size(ErtsDistExternal *edep)
754 {
755 Uint sz = sizeof(ErtsDistExternal);
756
757 ASSERT(edep->data->ext_endp && edep->data->extp);
758 ASSERT(edep->data->ext_endp >= edep->data->extp);
759
760 if (edep->flags & ERTS_DIST_EXT_ATOM_TRANS_TAB) {
761 ASSERT(0 <= edep->attab.size \
762 && edep->attab.size <= ERTS_ATOM_CACHE_SIZE);
763 sz -= sizeof(Eterm)*(ERTS_ATOM_CACHE_SIZE - edep->attab.size);
764 } else {
765 sz -= sizeof(ErtsAtomTranslationTable);
766 }
767 ASSERT(sz % 4 == 0);
768 return sz;
769 }
770
771 Uint
erts_dist_ext_size(ErtsDistExternal * edep)772 erts_dist_ext_size(ErtsDistExternal *edep)
773 {
774 Uint sz = dist_ext_size(edep);
775 sz += 4; /* may need to pad to 8-byte-align ErtsDistExternalData */
776 sz += edep->data[0].frag_id * sizeof(ErtsDistExternalData);
777 return sz;
778 }
779
780 Uint
erts_dist_ext_data_size(ErtsDistExternal * edep)781 erts_dist_ext_data_size(ErtsDistExternal *edep)
782 {
783 Uint sz = 0, i;
784 for (i = 0; i < edep->data->frag_id; i++)
785 sz += edep->data[i].ext_endp - edep->data[i].extp;
786 return sz;
787 }
788
789 void
erts_dist_ext_frag(ErtsDistExternalData * ede_datap,ErtsDistExternal * edep)790 erts_dist_ext_frag(ErtsDistExternalData *ede_datap, ErtsDistExternal *edep)
791 {
792 ErtsDistExternalData *new_ede_datap = &edep->data[edep->data->frag_id - ede_datap->frag_id];
793 sys_memcpy(new_ede_datap, ede_datap, sizeof(ErtsDistExternalData));
794
795 /* If the data is not backed by a binary, we create one here to keep
796 things simple. Only custom distribution drivers should use lists. */
797 if (new_ede_datap->binp == NULL) {
798 size_t ext_sz = ede_datap->ext_endp - ede_datap->extp;
799 new_ede_datap->binp = erts_bin_nrml_alloc(ext_sz);
800 sys_memcpy(new_ede_datap->binp->orig_bytes, (void *) ede_datap->extp, ext_sz);
801 new_ede_datap->extp = (byte*)new_ede_datap->binp->orig_bytes;
802 new_ede_datap->ext_endp = (byte*)new_ede_datap->binp->orig_bytes + ext_sz;
803 } else {
804 erts_refc_inc(&new_ede_datap->binp->intern.refc, 2);
805 }
806 }
807
808 void
erts_make_dist_ext_copy(ErtsDistExternal * edep,ErtsDistExternal * new_edep)809 erts_make_dist_ext_copy(ErtsDistExternal *edep, ErtsDistExternal *new_edep)
810 {
811 size_t dist_ext_sz = dist_ext_size(edep);
812 byte *ep;
813
814 ep = (byte *) new_edep;
815 sys_memcpy((void *) ep, (void *) edep, dist_ext_sz);
816 erts_ref_dist_entry(new_edep->dep);
817
818 ep += dist_ext_sz;
819 ep += (UWord)ep & 4; /* 8-byte alignment for ErtsDistExternalData */
820 ASSERT((UWord)ep % 8 == 0);
821
822 new_edep->data = (ErtsDistExternalData*)ep;
823 sys_memzero(new_edep->data, sizeof(ErtsDistExternalData) * edep->data->frag_id);
824 new_edep->data->frag_id = edep->data->frag_id;
825 erts_dist_ext_frag(edep->data, new_edep);
826 }
827
828 void
erts_free_dist_ext_copy(ErtsDistExternal * edep)829 erts_free_dist_ext_copy(ErtsDistExternal *edep)
830 {
831 int i;
832 erts_deref_dist_entry(edep->dep);
833 for (i = 0; i < edep->data->frag_id; i++)
834 if (edep->data[i].binp)
835 erts_bin_release(edep->data[i].binp);
836 }
837
838 ErtsPrepDistExtRes
erts_prepare_dist_ext(ErtsDistExternal * edep,byte * ext,Uint size,Binary * binp,DistEntry * dep,Uint32 conn_id,ErtsAtomCache * cache)839 erts_prepare_dist_ext(ErtsDistExternal *edep,
840 byte *ext,
841 Uint size,
842 Binary *binp,
843 DistEntry *dep,
844 Uint32 conn_id,
845 ErtsAtomCache *cache)
846 {
847 register byte *ep;
848
849 ASSERT(dep);
850 erts_de_rlock(dep);
851
852 ASSERT(dep->dflags & DFLAG_UTF8_ATOMS);
853
854
855 if ((dep->state != ERTS_DE_STATE_CONNECTED &&
856 dep->state != ERTS_DE_STATE_PENDING)
857 || dep->connection_id != conn_id) {
858 erts_de_runlock(dep);
859 return ERTS_PREP_DIST_EXT_CLOSED;
860 }
861
862 if (!(dep->dflags & (DFLAG_DIST_HDR_ATOM_CACHE|DFLAG_FRAGMENTS))) {
863 /* Skip PASS_THROUGH */
864 ext++;
865 size--;
866 }
867
868 ep = ext;
869
870 if (size < 2)
871 goto fail;
872
873 if (ep[0] != VERSION_MAGIC) {
874 erts_dsprintf_buf_t *dsbufp = erts_create_logger_dsbuf();
875 erts_dsprintf(dsbufp,
876 "** Got message from incompatible erlang on "
877 "channel %d\n",
878 dist_entry_channel_no(dep));
879 erts_send_error_to_logger_nogl(dsbufp);
880 goto fail;
881 }
882
883 edep->heap_size = -1;
884 edep->flags = 0;
885 edep->dep = dep;
886 edep->mld = dep->mld;
887 edep->connection_id = conn_id;
888 edep->data->ext_endp = ext+size;
889 edep->data->binp = binp;
890 edep->data->seq_id = 0;
891 edep->data->frag_id = 1;
892
893 if (dep->dflags & (DFLAG_DIST_HDR_ATOM_CACHE|DFLAG_FRAGMENTS))
894 edep->flags |= ERTS_DIST_EXT_DFLAG_HDR;
895
896 if (ep[1] != DIST_HEADER && ep[1] != DIST_FRAG_HEADER && ep[1] != DIST_FRAG_CONT) {
897 if (edep->flags & ERTS_DIST_EXT_DFLAG_HDR)
898 goto bad_hdr;
899 edep->attab.size = 0;
900 edep->data->extp = ext;
901 }
902 else if (ep[1] == DIST_FRAG_CONT) {
903 if (!(dep->dflags & DFLAG_FRAGMENTS))
904 goto bad_hdr;
905 edep->attab.size = 0;
906 edep->data->extp = ext + 1 + 1 + 8 + 8;
907 edep->data->seq_id = get_int64(&ep[2]);
908 edep->data->frag_id = get_int64(&ep[2+8]);
909 erts_de_runlock(dep);
910 return ERTS_PREP_DIST_EXT_FRAG_CONT;
911 }
912 else {
913 int tix;
914 int no_atoms;
915
916 if (!(edep->flags & ERTS_DIST_EXT_DFLAG_HDR))
917 goto bad_hdr;
918
919 if (ep[1] == DIST_FRAG_HEADER) {
920 if (!(dep->dflags & DFLAG_FRAGMENTS))
921 goto bad_hdr;
922 edep->data->seq_id = get_int64(&ep[2]);
923 edep->data->frag_id = get_int64(&ep[2+8]);
924 ep += 16;
925 }
926
927 #undef CHKSIZE
928 #define CHKSIZE(SZ) \
929 do { if ((SZ) > edep->data->ext_endp - ep) goto bad_hdr; } while(0)
930
931 CHKSIZE(1+1+1);
932 ep += 2;
933 no_atoms = (int) get_int8(ep);
934 if (no_atoms < 0 || ERTS_ATOM_CACHE_SIZE < no_atoms)
935 goto bad_hdr;
936 ep++;
937 if (no_atoms) {
938 int long_atoms = 0;
939 #ifdef DEBUG
940 byte *flgs_buf = ep;
941 #endif
942 byte *flgsp = ep;
943 int flgs_size = ERTS_DIST_HDR_ATOM_CACHE_FLAG_BYTES(no_atoms);
944 int byte_ix;
945 int bit_ix;
946 int got_flgs;
947 register Uint32 flgs = 0;
948
949 CHKSIZE(flgs_size);
950 ep += flgs_size;
951
952 /*
953 * Check long atoms flag
954 */
955 byte_ix = ERTS_DIST_HDR_ATOM_CACHE_FLAG_BYTE_IX(no_atoms);
956 bit_ix = ERTS_DIST_HDR_ATOM_CACHE_FLAG_BIT_IX(no_atoms);
957 if (flgsp[byte_ix] & (((byte) ERTS_DIST_HDR_LONG_ATOMS_FLG) << bit_ix))
958 long_atoms = 1;
959
960 #ifdef DEBUG
961 byte_ix = 0;
962 bit_ix = 0;
963 #endif
964 got_flgs = 0;
965 /*
966 * Setup the atom translation table.
967 */
968 edep->flags |= ERTS_DIST_EXT_ATOM_TRANS_TAB;
969 edep->attab.size = no_atoms;
970 for (tix = 0; tix < no_atoms; tix++) {
971 Eterm atom;
972 int cix;
973 int len;
974
975 if (!got_flgs) {
976 int left = no_atoms - tix;
977 if (left > 6) {
978 flgs = ((((Uint32) flgsp[3]) << 24)
979 | (((Uint32) flgsp[2]) << 16)
980 | (((Uint32) flgsp[1]) << 8)
981 | ((Uint32) flgsp[0]));
982 flgsp += 4;
983 }
984 else {
985 flgs = 0;
986 switch (left) {
987 case 6:
988 case 5:
989 flgs |= (((Uint32) flgsp[2]) << 16);
990 case 4:
991 case 3:
992 flgs |= (((Uint32) flgsp[1]) << 8);
993 case 2:
994 case 1:
995 flgs |= ((Uint32) flgsp[0]);
996 }
997 }
998 got_flgs = 8;
999 }
1000
1001 ASSERT(byte_ix == ERTS_DIST_HDR_ATOM_CACHE_FLAG_BYTE_IX(tix));
1002 ASSERT(bit_ix == ERTS_DIST_HDR_ATOM_CACHE_FLAG_BIT_IX(tix));
1003 ASSERT((flgs & 3)
1004 == (((flgs_buf[byte_ix]
1005 & (((byte) 3) << bit_ix)) >> bit_ix) & 3));
1006
1007 CHKSIZE(1);
1008 cix = (int) ((flgs & 7) << 8);
1009 if ((flgs & 8) == 0) {
1010 /* atom already cached */
1011 cix += (int) get_int8(ep);
1012 if (cix >= ERTS_ATOM_CACHE_SIZE)
1013 goto bad_hdr;
1014 ep++;
1015 atom = cache->in_arr[cix];
1016 if (!is_atom(atom))
1017 goto bad_hdr;
1018 edep->attab.atom[tix] = atom;
1019 }
1020 else {
1021 /* new cached atom */
1022 cix += (int) get_int8(ep);
1023 if (cix >= ERTS_ATOM_CACHE_SIZE)
1024 goto bad_hdr;
1025 ep++;
1026 if (long_atoms) {
1027 CHKSIZE(2);
1028 len = get_int16(ep);
1029 ep += 2;
1030 }
1031 else {
1032 CHKSIZE(1);
1033 len = get_int8(ep);
1034 ep++;
1035 }
1036 CHKSIZE(len);
1037 atom = erts_atom_put((byte *) ep,
1038 len,
1039 ERTS_ATOM_ENC_UTF8,
1040 0);
1041 if (is_non_value(atom))
1042 goto bad_hdr;
1043 ep += len;
1044 cache->in_arr[cix] = atom;
1045 edep->attab.atom[tix] = atom;
1046 }
1047 flgs >>= 4;
1048 got_flgs--;
1049 #ifdef DEBUG
1050 bit_ix += 4;
1051 if (bit_ix >= 8) {
1052 bit_ix = 0;
1053 flgs = (int) flgs_buf[++byte_ix];
1054 ASSERT(byte_ix < flgs_size);
1055 }
1056 #endif
1057 }
1058 }
1059 edep->data->extp = ep;
1060 #ifdef ERTS_DEBUG_USE_DIST_SEP
1061 if (*ep != VERSION_MAGIC)
1062 goto bad_hdr;
1063 #endif
1064 }
1065 #ifdef ERTS_DEBUG_USE_DIST_SEP
1066 if (*ep != VERSION_MAGIC)
1067 goto fail;
1068 #endif
1069
1070 erts_de_runlock(dep);
1071
1072 return ERTS_PREP_DIST_EXT_SUCCESS;
1073
1074 #undef CHKSIZE
1075
1076 bad_hdr: {
1077 erts_dsprintf_buf_t *dsbufp = erts_create_logger_dsbuf();
1078 erts_dsprintf(dsbufp,
1079 "%T got a corrupted distribution header from %T "
1080 "on distribution channel %d\n",
1081 erts_this_node->sysname,
1082 edep->dep->sysname,
1083 dist_entry_channel_no(edep->dep));
1084 for (ep = ext; ep < edep->data->ext_endp; ep++)
1085 erts_dsprintf(dsbufp, ep != ext ? ",%b8u" : "<<%b8u", *ep);
1086 erts_dsprintf(dsbufp, ">>");
1087 erts_send_warning_to_logger_nogl(dsbufp);
1088 }
1089 fail: {
1090 erts_de_runlock(dep);
1091 erts_kill_dist_connection(dep, conn_id);
1092 }
1093 return ERTS_PREP_DIST_EXT_FAILED;
1094 }
1095
1096 static void
bad_dist_ext(ErtsDistExternal * edep)1097 bad_dist_ext(ErtsDistExternal *edep)
1098 {
1099 if (edep->dep) {
1100 DistEntry *dep = edep->dep;
1101 erts_dsprintf_buf_t *dsbufp = erts_create_logger_dsbuf();
1102 byte *ep;
1103 erts_dsprintf(dsbufp,
1104 "%T got a corrupted external term from %T "
1105 "on distribution channel %d\n",
1106 erts_this_node->sysname,
1107 dep->sysname,
1108 dist_entry_channel_no(dep));
1109 for (ep = edep->data->extp; ep < edep->data->ext_endp; ep++)
1110 erts_dsprintf(dsbufp,
1111 ep != edep->data->extp ? ",%b8u" : "<<...,%b8u",
1112 *ep);
1113 erts_dsprintf(dsbufp, ">>\n");
1114 erts_dsprintf(dsbufp, "ATOM_CACHE_REF translations: ");
1115 if (!(edep->flags & ERTS_DIST_EXT_ATOM_TRANS_TAB) || !edep->attab.size)
1116 erts_dsprintf(dsbufp, "none");
1117 else {
1118 int i;
1119 erts_dsprintf(dsbufp, "0=%T", edep->attab.atom[0]);
1120 for (i = 1; i < edep->attab.size; i++)
1121 erts_dsprintf(dsbufp, ", %d=%T", i, edep->attab.atom[i]);
1122 }
1123 erts_send_warning_to_logger_nogl(dsbufp);
1124 erts_kill_dist_connection(dep, edep->connection_id);
1125 }
1126 }
1127
1128 Sint
erts_decode_dist_ext_size(ErtsDistExternal * edep,int kill_connection,int payload)1129 erts_decode_dist_ext_size(ErtsDistExternal *edep, int kill_connection, int payload)
1130 {
1131 Sint res;
1132 byte *ep;
1133
1134 if (edep->data->frag_id > 1 && payload) {
1135 Uint sz = 0;
1136 Binary *bin;
1137 int i;
1138 byte *ep;
1139
1140 for (i = 0; i < edep->data->frag_id; i++)
1141 sz += edep->data[i].ext_endp - edep->data[i].extp;
1142
1143 bin = erts_bin_nrml_alloc(sz);
1144 ep = (byte*)bin->orig_bytes;
1145
1146 for (i = 0; i < edep->data->frag_id; i++) {
1147 sys_memcpy(ep, edep->data[i].extp, edep->data[i].ext_endp - edep->data[i].extp);
1148 ep += edep->data[i].ext_endp - edep->data[i].extp;
1149 erts_bin_release(edep->data[i].binp);
1150 edep->data[i].binp = NULL;
1151 edep->data[i].extp = NULL;
1152 edep->data[i].ext_endp = NULL;
1153 }
1154
1155 edep->data->frag_id = 1;
1156 edep->data->extp = (byte*)bin->orig_bytes;
1157 edep->data->ext_endp = ep;
1158 edep->data->binp = bin;
1159 }
1160
1161 if (edep->data->extp >= edep->data->ext_endp)
1162 goto fail;
1163 #ifndef ERTS_DEBUG_USE_DIST_SEP
1164 if (edep->flags & ERTS_DIST_EXT_DFLAG_HDR) {
1165 if (*edep->data->extp == VERSION_MAGIC)
1166 goto fail;
1167 ep = edep->data->extp;
1168 }
1169 else
1170 #endif
1171 {
1172 if (*edep->data->extp != VERSION_MAGIC)
1173 goto fail;
1174 ep = edep->data->extp+1;
1175 }
1176 res = decoded_size(ep, edep->data->ext_endp, 0, NULL);
1177 if (res >= 0)
1178 return res;
1179 fail:
1180 if (kill_connection)
1181 bad_dist_ext(edep);
1182 return -1;
1183 }
1184
erts_decode_ext_size(byte * ext,Uint size)1185 Sint erts_decode_ext_size(byte *ext, Uint size)
1186 {
1187 if (size == 0 || *ext != VERSION_MAGIC)
1188 return -1;
1189 return decoded_size(ext+1, ext+size, 0, NULL);
1190 }
1191
erts_decode_ext_size_ets(byte * ext,Uint size)1192 Sint erts_decode_ext_size_ets(byte *ext, Uint size)
1193 {
1194 Sint sz = decoded_size(ext, ext+size, 1, NULL);
1195 ASSERT(sz >= 0);
1196 return sz;
1197 }
1198
1199
1200 /*
1201 ** hpp is set to either a &p->htop or
1202 ** a pointer to a memory pointer (form message buffers)
1203 ** on return hpp is updated to point after allocated data
1204 */
1205 Eterm
erts_decode_dist_ext(ErtsHeapFactory * factory,ErtsDistExternal * edep,int kill_connection)1206 erts_decode_dist_ext(ErtsHeapFactory* factory,
1207 ErtsDistExternal *edep,
1208 int kill_connection)
1209 {
1210 Eterm obj;
1211 byte* ep;
1212
1213 ep = edep->data->extp;
1214
1215 if (ep >= edep->data->ext_endp)
1216 goto error;
1217 #ifndef ERTS_DEBUG_USE_DIST_SEP
1218 if (edep->flags & ERTS_DIST_EXT_DFLAG_HDR) {
1219 if (*ep == VERSION_MAGIC)
1220 goto error;
1221 }
1222 else
1223 #endif
1224 {
1225 if (*ep != VERSION_MAGIC)
1226 goto error;
1227 ep++;
1228 }
1229 ep = dec_term(edep, factory, ep, &obj, NULL, 0);
1230 if (!ep)
1231 goto error;
1232
1233 edep->data->extp = ep;
1234
1235 return obj;
1236
1237 error:
1238 erts_factory_undo(factory);
1239
1240 if (kill_connection)
1241 bad_dist_ext(edep);
1242
1243 return THE_NON_VALUE;
1244 }
1245
erts_decode_ext(ErtsHeapFactory * factory,byte ** ext,Uint32 flags)1246 Eterm erts_decode_ext(ErtsHeapFactory* factory, byte **ext, Uint32 flags)
1247 {
1248 ErtsDistExternal ede, *edep;
1249 Eterm obj;
1250 byte *ep = *ext;
1251 if (*ep++ != VERSION_MAGIC) {
1252 erts_factory_undo(factory);
1253 return THE_NON_VALUE;
1254 }
1255 if (flags) {
1256 ASSERT(flags == ERTS_DIST_EXT_BTT_SAFE);
1257 ede.flags = flags; /* a dummy struct just for the flags */
1258 ede.data = NULL;
1259 edep = &ede;
1260 } else {
1261 edep = NULL;
1262 }
1263 ep = dec_term(edep, factory, ep, &obj, NULL, 0);
1264 if (!ep) {
1265 return THE_NON_VALUE;
1266 }
1267 *ext = ep;
1268 return obj;
1269 }
1270
erts_decode_ext_ets(ErtsHeapFactory * factory,byte * ext)1271 Eterm erts_decode_ext_ets(ErtsHeapFactory* factory, byte *ext)
1272 {
1273 Eterm obj;
1274 ext = dec_term(NULL, factory, ext, &obj, NULL, 1);
1275 ASSERT(ext);
1276 return obj;
1277 }
1278
1279 /**********************************************************************/
1280
/*
 * erts_debug:dist_ext_to_term/2.
 *
 * Arg 1: a tuple of atoms used as the ATOM_CACHE_REF translation
 *        table (position N in the tuple maps cache index N-1).
 * Arg 2: a binary holding external-format data as received over
 *        distribution (decoded with ERTS_DIST_EXT_ATOM_TRANS_TAB set).
 *
 * Returns the decoded term, or badarg on malformed input.
 */
BIF_RETTYPE erts_debug_dist_ext_to_term_2(BIF_ALIST_2)
{
    ErtsHeapFactory factory;
    Eterm res;
    Sint hsz;
    ErtsDistExternal ede;
    ErtsDistExternalData ede_data;
    Eterm *tp;
    Eterm real_bin;
    Uint offset;
    Uint size;
    Uint bitsize;
    Uint bitoffs;
    Uint arity;
    int i;

    /* Fake a dist-external with an atom translation table but no
     * distribution entry. */
    ede.flags = ERTS_DIST_EXT_ATOM_TRANS_TAB;
    ede.dep = NULL;
    ede.heap_size = -1;
    ede.data = &ede_data;

    if (is_not_tuple(BIF_ARG_1))
        goto badarg;
    tp = tuple_val(BIF_ARG_1);
    arity = arityval(tp[0]);
    if (arity > ERTS_MAX_INTERNAL_ATOM_CACHE_ENTRIES)
        goto badarg;

    /* Populate the translation table from the tuple of atoms. */
    ede.attab.size = arity;
    for (i = 1; i <= arity; i++) {
        if (is_not_atom(tp[i]))
            goto badarg;
        ede.attab.atom[i-1] = tp[i];
    }

    if (is_not_binary(BIF_ARG_2))
        goto badarg;

    size = binary_size(BIF_ARG_2);
    if (size == 0)
        goto badarg;
    ERTS_GET_REAL_BIN(BIF_ARG_2, real_bin, offset, bitoffs, bitsize);
    if (bitsize != 0)  /* bitstrings are not valid external data */
        goto badarg;

    ede.data->extp = binary_bytes(real_bin)+offset;
    ede.data->ext_endp = ede.data->extp + size;
    ede.data->frag_id = 1;
    ede.data->binp = NULL;

    /* Size pass, then decode pass, as for real incoming dist data. */
    hsz = erts_decode_dist_ext_size(&ede, 1, 1);
    if (hsz < 0)
        goto badarg;

    erts_factory_proc_prealloc_init(&factory, BIF_P, hsz);
    res = erts_decode_dist_ext(&factory, &ede, 1);
    erts_factory_close(&factory);

    if (is_value(res))
        BIF_RET(res);

 badarg:

    BIF_ERROR(BIF_P, BADARG);
}
1346
/*
 * Trap handler for term_to_binary/term_to_iovec.
 *
 * BIF_ARG_1 is a tuple {Term, Opts, MagicBin, BifIndex}: the original
 * arguments, the magic binary holding the exported encode state, and
 * the index of the BIF that originally trapped (used for error
 * reporting). Opts == am_undefined means the arity-1 entry point.
 *
 * Encoding runs with GC disabled (F_DISABLE_GC); it is re-enabled here
 * when encoding finishes or fails.
 */
static BIF_RETTYPE term_to_binary_trap_1(BIF_ALIST_1)
{
    Eterm *tp = tuple_val(BIF_ARG_1);
    Eterm Term = tp[1];
    Eterm Opts = tp[2];
    Eterm bt = tp[3];
    Eterm bix = tp[4];
    Sint bif_ix = signed_val(bix);
    Binary *bin = erts_magic_ref2bin(bt);
    Eterm res = erts_term_to_binary_int(BIF_P, bif_ix, Term, Opts,
                                        0, 0,bin, 0, ~((Uint) 0));
    if (is_non_value(res)) {
        /* Encoding failed: re-enable GC; yield all reds if binary heap
         * pressure says a GC is due. */
        if (erts_set_gc_state(BIF_P, 1)
            || MSO(BIF_P).overhead > BIN_VHEAP_SZ(BIF_P)) {
            ERTS_VBUMP_ALL_REDS(BIF_P);
        }
        /* Report the error as coming from the original BIF. */
        if (Opts == am_undefined)
            ERTS_BIF_ERROR_TRAPPED1(BIF_P, SYSTEM_LIMIT,
                                    &bif_trap_export[bif_ix], Term);
        else
            ERTS_BIF_ERROR_TRAPPED2(BIF_P, SYSTEM_LIMIT,
                                    &bif_trap_export[bif_ix], Term, Opts);
    }
    if (is_tuple(res)) {
        /* Not done yet: trap again with the new state tuple. */
        ASSERT(BIF_P->flags & F_DISABLE_GC);
        BIF_TRAP1(&term_to_binary_trap_export,BIF_P,res);
    } else {
        /* Done: re-enable GC and return (yielding if a GC is due). */
        if (erts_set_gc_state(BIF_P, 1)
            || MSO(BIF_P).overhead > BIN_VHEAP_SZ(BIF_P))
            ERTS_BIF_YIELD_RETURN(BIF_P, res);
        else
            BIF_RET(res);
    }
}
1381
1382 HIPE_WRAPPER_BIF_DISABLE_GC(term_to_binary, 1)
1383
term_to_binary_1(BIF_ALIST_1)1384 BIF_RETTYPE term_to_binary_1(BIF_ALIST_1)
1385 {
1386 Eterm res = erts_term_to_binary_int(BIF_P, BIF_term_to_binary_1,
1387 BIF_ARG_1, am_undefined,
1388 0, TERM_TO_BINARY_DFLAGS, NULL, 0,
1389 ~((Uint) 0));
1390 if (is_non_value(res)) {
1391 ASSERT(!(BIF_P->flags & F_DISABLE_GC));
1392 BIF_ERROR(BIF_P, SYSTEM_LIMIT);
1393 }
1394 if (is_tuple(res)) {
1395 erts_set_gc_state(BIF_P, 0);
1396 BIF_TRAP1(&term_to_binary_trap_export,BIF_P,res);
1397 } else {
1398 ASSERT(!(BIF_P->flags & F_DISABLE_GC));
1399 BIF_RET(res);
1400 }
1401 }
1402
1403 HIPE_WRAPPER_BIF_DISABLE_GC(term_to_iovec, 1)
1404
term_to_iovec_1(BIF_ALIST_1)1405 BIF_RETTYPE term_to_iovec_1(BIF_ALIST_1)
1406 {
1407 Eterm res = erts_term_to_binary_int(BIF_P, BIF_term_to_iovec_1,
1408 BIF_ARG_1, am_undefined,
1409 0, TERM_TO_BINARY_DFLAGS, NULL, !0,
1410 ~((Uint) 0));
1411 if (is_non_value(res)) {
1412 ASSERT(!(BIF_P->flags & F_DISABLE_GC));
1413 BIF_ERROR(BIF_P, SYSTEM_LIMIT);
1414 }
1415 if (is_tuple(res)) {
1416 erts_set_gc_state(BIF_P, 0);
1417 BIF_TRAP1(&term_to_binary_trap_export,BIF_P,res);
1418 } else {
1419 ASSERT(!(BIF_P->flags & F_DISABLE_GC));
1420 BIF_RET(res);
1421 }
1422 }
1423
/*
 * Parse the option list accepted by term_to_binary/2 and friends:
 *
 *   compressed | {compressed, 0..9} | {minor_version, 0..2}
 *   iovec              -- only accepted when 'iovecp' != NULL
 *   {fragment, Size}   -- only accepted when 'fsizep' != NULL
 *
 * Results are written through the out-parameters (iovecp/fsizep may
 * be NULL when the caller does not support those options).
 * Returns non-zero on success, 0 on badarg.
 */
static ERTS_INLINE int
parse_t2b_opts(Eterm opts, Uint *flagsp, int *levelp, int *iovecp, Uint *fsizep)
{
    int level = 0;                        /* zlib compression level */
    int iovec = 0;
    Uint flags = TERM_TO_BINARY_DFLAGS;
    Uint fsize = ~((Uint) 0); /* one fragment */

    while (is_list(opts)) {
        Eterm arg = CAR(list_val(opts));
        Eterm* tp;
        if (arg == am_compressed) {
            level = Z_DEFAULT_COMPRESSION;
        }
        else if (iovecp && arg == am_iovec) {
            iovec = !0;
        } else if (is_tuple(arg) && *(tp = tuple_val(arg)) == make_arityval(2)) {
            if (tp[1] == am_minor_version && is_small(tp[2])) {
                switch (signed_val(tp[2])) {
                case 0:
                    /* minor_version 0: disable DFLAG_NEW_FLOATS
                     * (older float encoding). */
                    flags = TERM_TO_BINARY_DFLAGS & ~DFLAG_NEW_FLOATS;
                    break;
                case 1: /* Current default... */
                    flags = TERM_TO_BINARY_DFLAGS;
                    break;
                case 2:
                    flags = TERM_TO_BINARY_DFLAGS | DFLAG_UTF8_ATOMS;
                    break;
                default:
                    return 0; /* badarg */
                }
            } else if (tp[1] == am_compressed && is_small(tp[2])) {
                level = signed_val(tp[2]);
                if (!(0 <= level && level < 10)) {
                    return 0; /* badarg */
                }
            } else if (fsizep) {
                if (ERTS_IS_ATOM_STR("fragment", tp[1])) {
                    if (!term_to_Uint(tp[2], &fsize))
                        return 0; /* badarg */
                }
                else {
                    return 0; /* badarg */
                }
            }
            else {
                return 0; /* badarg */
            }
        } else {
            return 0; /* badarg */
        }
        opts = CDR(list_val(opts));
    }
    if (is_not_nil(opts)) {
        return 0; /* badarg (improper list) */
    }

    *flagsp = flags;
    *levelp = level;
    if (iovecp)
        *iovecp = iovec;
    if (fsizep)
        *fsizep = fsize;

    return !0; /* ok */
}
1490
1491 HIPE_WRAPPER_BIF_DISABLE_GC(term_to_binary, 2)
1492
term_to_binary_2(BIF_ALIST_2)1493 BIF_RETTYPE term_to_binary_2(BIF_ALIST_2)
1494 {
1495 int level;
1496 Uint flags;
1497 Eterm res;
1498
1499 if (!parse_t2b_opts(BIF_ARG_2, &flags, &level, NULL, NULL)) {
1500 BIF_ERROR(BIF_P, BADARG);
1501 }
1502
1503 res = erts_term_to_binary_int(BIF_P, BIF_term_to_binary_2,
1504 BIF_ARG_1, BIF_ARG_2,
1505 level, flags, NULL, 0,
1506 ~((Uint) 0));
1507 if (is_non_value(res)) {
1508 ASSERT(!(BIF_P->flags & F_DISABLE_GC));
1509 BIF_ERROR(BIF_P, SYSTEM_LIMIT);
1510 }
1511 if (is_tuple(res)) {
1512 erts_set_gc_state(BIF_P, 0);
1513 BIF_TRAP1(&term_to_binary_trap_export,BIF_P,res);
1514 } else {
1515 ASSERT(!(BIF_P->flags & F_DISABLE_GC));
1516 BIF_RET(res);
1517 }
1518 }
1519
1520 HIPE_WRAPPER_BIF_DISABLE_GC(term_to_iovec, 2)
1521
term_to_iovec_2(BIF_ALIST_2)1522 BIF_RETTYPE term_to_iovec_2(BIF_ALIST_2)
1523 {
1524 int level;
1525 Uint flags;
1526 Eterm res;
1527
1528 if (!parse_t2b_opts(BIF_ARG_2, &flags, &level, NULL, NULL)) {
1529 BIF_ERROR(BIF_P, BADARG);
1530 }
1531
1532 res = erts_term_to_binary_int(BIF_P, BIF_term_to_iovec_2,
1533 BIF_ARG_1, BIF_ARG_2,
1534 level, flags, NULL, !0,
1535 ~((Uint) 0));
1536 if (is_non_value(res)) {
1537 ASSERT(!(BIF_P->flags & F_DISABLE_GC));
1538 BIF_ERROR(BIF_P, SYSTEM_LIMIT);
1539 }
1540 if (is_tuple(res)) {
1541 erts_set_gc_state(BIF_P, 0);
1542 BIF_TRAP1(&term_to_binary_trap_export,BIF_P,res);
1543 } else {
1544 ASSERT(!(BIF_P->flags & F_DISABLE_GC));
1545 BIF_RET(res);
1546 }
1547 }
1548
/*
 * Debug variant of term_to_binary/2 that additionally accepts the
 * 'iovec' and {fragment, Size} options (see parse_t2b_opts).
 * Follows the same trap protocol as term_to_binary_2.
 */
Eterm
erts_debug_term_to_binary(Process *p, Eterm term, Eterm opts)
{
    Eterm ret;
    int level, iovec;
    Uint flags;
    Uint fsize;

    if (!parse_t2b_opts(opts, &flags, &level, &iovec, &fsize)) {
        ERTS_BIF_PREP_ERROR(ret, p, BADARG);
    }
    else {
        Eterm res = erts_term_to_binary_int(p, BIF_term_to_binary_2,
                                            term, opts, level, flags,
                                            NULL, iovec, fsize);

        if (is_non_value(res)) {
            ASSERT(!(p->flags & F_DISABLE_GC));
            ERTS_BIF_PREP_ERROR(ret, p, SYSTEM_LIMIT);
        }
        else if (is_tuple(res)) {
            /* Yielded: keep GC off and trap with the state tuple. */
            erts_set_gc_state(p, 0);
            ERTS_BIF_PREP_TRAP1(ret, &term_to_binary_trap_export,p,res);
        }
        else {
            ASSERT(!(p->flags & F_DISABLE_GC));
            ERTS_BIF_PREP_RET(ret, res);
        }
    }
    return ret;
}
1580
1581
/*
 * States of the binary_to_term trap state machine.
 * Order is somewhat significant: states >= B2TDecode are treated as
 * decode states (see b2t_export_context / b2t_destroy_context) and
 * states >= B2TDone terminate the work loop in binary_to_term_int.
 */
enum B2TState { /* order is somewhat significant */
    B2TPrepare,          /* validate input; detect COMPRESSED header */
    B2TUncompressChunk,  /* chunked zlib inflation of compressed input */
    B2TSizeInit,
    B2TSize,             /* compute needed heap size (decoded_size) */
    B2TDecodeInit,
    B2TDecode,           /* dec_term main state... */
    B2TDecodeList,       /* ...and its resumable sub-states */
    B2TDecodeTuple,
    B2TDecodeString,
    B2TDecodeBinary,

    B2TDone,             /* success; build result (and used-bytes tuple) */
    B2TDecodeFail,
    B2TBadArg
};
1598
/* Trap context for the size-computation phase (used by decoded_size;
 * field semantics defined by that function, not shown here). */
typedef struct {
    Sint heap_size;        /* heap words accumulated so far */
    int terms;             /* presumably terms left to size -- see decoded_size */
    byte* ep;              /* current position in the external data */
    int atom_extra_skip;   /* presumably extra bytes to skip after an atom */
} B2TSizeContext;
1605
/* Trap context for the decode phase (used by dec_term). */
typedef struct {
    byte* ep;                  /* current position in the external data */
    Eterm res;                 /* the (partially built) result term */
    Eterm* next;               /* where the next decoded subterm is stored */
    ErtsHeapFactory factory;   /* heap allocation for the result */
    int remaining_n;           /* leftover element/byte count when trapping */
    char* remaining_bytes;     /* leftover raw bytes when trapping */
    ErtsWStack flat_maps;      /* pending flatmaps (keys fixed up later) */
    ErtsPStack hamt_array;     /* pending HAMT (large map) nodes */
} B2TDecodeContext;
1616
/* Trap context for chunked zlib inflation of COMPRESSED input. */
typedef struct {
    z_stream stream;  /* live zlib stream; finished via erl_zlib_inflate_finish */
    byte* dbytes;     /* next output position in the destination buffer */
    Uint dleft;       /* uncompressed bytes still expected */
} B2TUncompressContext;
1622
/*
 * Overall trap context for binary_to_term. Initially stack-allocated;
 * moved into a magic binary by b2t_export_context when the operation
 * must survive a trap.
 */
typedef struct B2TContext_t {
    Sint heap_size;           /* heap words needed for the decoded term */
    byte* aligned_alloc;      /* temp copy when input binary is unaligned */
    ErtsBinary2TermState b2ts;
    Uint32 flags;             /* e.g. ERTS_DIST_EXT_BTT_SAFE */
    SWord reds;               /* reduction budget for this slice */
    Uint used_bytes; /* In: boolean, Out: bytes */
    Eterm trap_bin; /* THE_NON_VALUE if not exported */
    Export *bif;              /* originating BIF, for error reporting */
    Eterm arg[2];             /* original BIF arguments */
    enum B2TState state;
    union {                   /* per-phase state; discriminated by 'state' */
        B2TSizeContext sc;
        B2TDecodeContext dc;
        B2TUncompressContext uc;
    } u;
} B2TContext;
1640
1641 static B2TContext* b2t_export_context(Process*, B2TContext* src);
1642
binary2term_uncomp_size(byte * data,Sint size)1643 static uLongf binary2term_uncomp_size(byte* data, Sint size)
1644 {
1645 z_stream stream;
1646 int err;
1647 const uInt chunk_size = 64*1024; /* Ask tmp-alloc about a suitable size? */
1648 void* tmp_buf = erts_alloc(ERTS_ALC_T_TMP, chunk_size);
1649 uLongf uncomp_size = 0;
1650
1651 stream.next_in = (Bytef*)data;
1652 stream.avail_in = (uInt)size;
1653 stream.next_out = tmp_buf;
1654 stream.avail_out = (uInt)chunk_size;
1655
1656 erl_zlib_alloc_init(&stream);
1657
1658 err = inflateInit(&stream);
1659 if (err == Z_OK) {
1660 do {
1661 stream.next_out = tmp_buf;
1662 stream.avail_out = chunk_size;
1663 err = inflate(&stream, Z_NO_FLUSH);
1664 uncomp_size += chunk_size - stream.avail_out;
1665 }while (err == Z_OK);
1666 inflateEnd(&stream);
1667 }
1668 erts_free(ERTS_ALC_T_TMP, tmp_buf);
1669 return err == Z_STREAM_END ? uncomp_size : 0;
1670 }
1671
1672 static ERTS_INLINE int
binary2term_prepare(ErtsBinary2TermState * state,byte * data,Sint data_size,B2TContext ** ctxp,Process * p)1673 binary2term_prepare(ErtsBinary2TermState *state, byte *data, Sint data_size,
1674 B2TContext** ctxp, Process* p)
1675 {
1676 byte *bytes = data;
1677 Sint size = data_size;
1678
1679 state->exttmp = 0;
1680
1681 if (size < 1 || *bytes != VERSION_MAGIC) {
1682 return -1;
1683 }
1684 bytes++;
1685 size--;
1686 if (size < 5 || *bytes != COMPRESSED) {
1687 state->extp = bytes;
1688 if (ctxp)
1689 (*ctxp)->state = B2TSizeInit;
1690 }
1691 else {
1692 uLongf dest_len = (Uint32) get_int32(bytes+1);
1693 bytes += 5;
1694 size -= 5;
1695 if (dest_len > 32*1024*1024
1696 || (state->extp = erts_alloc_fnf(ERTS_ALC_T_EXT_TERM_DATA, dest_len)) == NULL) {
1697 /*
1698 * Try avoid out-of-memory crash due to corrupted 'dest_len'
1699 * by checking the actual length of the uncompressed data.
1700 * The only way to do that is to uncompress it. Sad but true.
1701 */
1702 if (dest_len != binary2term_uncomp_size(bytes, size)) {
1703 return -1;
1704 }
1705 state->extp = erts_alloc(ERTS_ALC_T_EXT_TERM_DATA, dest_len);
1706 if (ctxp)
1707 (*ctxp)->reds -= dest_len;
1708 }
1709 state->exttmp = 1;
1710 if (ctxp) {
1711 /*
1712 * Start decompression by exporting trap context
1713 * so we don't have to deal with deep-copying z_stream.
1714 */
1715 B2TContext* ctx = b2t_export_context(p, *ctxp);
1716 ASSERT(state = &(*ctxp)->b2ts);
1717 state = &ctx->b2ts;
1718
1719 if (erl_zlib_inflate_start(&ctx->u.uc.stream, bytes, size) != Z_OK)
1720 return -1;
1721
1722 ctx->u.uc.dbytes = state->extp;
1723 ctx->u.uc.dleft = dest_len;
1724 if (ctx->used_bytes) {
1725 ASSERT(ctx->used_bytes == 1);
1726 /* to be subtracted by stream.avail_in when done */
1727 ctx->used_bytes = data_size;
1728 }
1729 ctx->state = B2TUncompressChunk;
1730 *ctxp = ctx;
1731 }
1732 else {
1733 uLongf dlen = dest_len;
1734 if (erl_zlib_uncompress(state->extp, &dlen, bytes, size) != Z_OK
1735 || dlen != dest_len) {
1736 return -1;
1737 }
1738 }
1739 size = (Sint) dest_len;
1740 }
1741 state->extsize = size;
1742 return 0;
1743 }
1744
1745 static ERTS_INLINE void
binary2term_abort(ErtsBinary2TermState * state)1746 binary2term_abort(ErtsBinary2TermState *state)
1747 {
1748 if (state->exttmp) {
1749 state->exttmp = 0;
1750 erts_free(ERTS_ALC_T_EXT_TERM_DATA, state->extp);
1751 }
1752 }
1753
1754 static ERTS_INLINE Eterm
binary2term_create(ErtsDistExternal * edep,ErtsBinary2TermState * state,ErtsHeapFactory * factory)1755 binary2term_create(ErtsDistExternal *edep, ErtsBinary2TermState *state,
1756 ErtsHeapFactory* factory)
1757 {
1758 Eterm res;
1759
1760 if (!dec_term(edep, factory, state->extp, &res, NULL, 0))
1761 res = THE_NON_VALUE;
1762 if (state->exttmp) {
1763 state->exttmp = 0;
1764 erts_free(ERTS_ALC_T_EXT_TERM_DATA, state->extp);
1765 }
1766 return res;
1767 }
1768
/*
 * Public, non-trapping prepare step: validate (and, if needed,
 * uncompress) the data and compute the heap size needed to decode it.
 * Returns the heap size in words, or -1 on corrupt data (in which
 * case 'state' is cleared and any temporary buffer is freed).
 */
Sint
erts_binary2term_prepare(ErtsBinary2TermState *state, byte *data, Sint data_size)
{
    Sint res;

    if (binary2term_prepare(state, data, data_size, NULL, NULL) < 0 ||
        (res=decoded_size(state->extp, state->extp + state->extsize, 0, NULL)) < 0) {

        if (state->exttmp)
            erts_free(ERTS_ALC_T_EXT_TERM_DATA, state->extp);
        state->extp = NULL;
        state->exttmp = 0;
        return -1;
    }
    return res;
}
1785
/* Public wrapper: abort a prepared binary2term, freeing temp buffers. */
void
erts_binary2term_abort(ErtsBinary2TermState *state)
{
    binary2term_abort(state);
}
1791
/* Public wrapper: decode a prepared binary2term (no dist context). */
Eterm
erts_binary2term_create(ErtsBinary2TermState *state, ErtsHeapFactory* factory)
{
    return binary2term_create(NULL,state, factory);
}
1797
/* Release all resources owned by a binary_to_term trap context
 * (alignment buffer, temp uncompress buffer, and any state-specific
 * resources such as a live zlib stream or the HAMT work stack). */
static void b2t_destroy_context(B2TContext* ctx)
{
    erts_free_aligned_binary_bytes_extra(ctx->aligned_alloc,
                                         ERTS_ALC_T_EXT_TERM_DATA);
    ctx->aligned_alloc = NULL;
    binary2term_abort(&ctx->b2ts);
    switch (ctx->state) {
    case B2TUncompressChunk:
        erl_zlib_inflate_finish(&ctx->u.uc.stream);
        break;
    case B2TDecode:
    case B2TDecodeList:
    case B2TDecodeTuple:
    case B2TDecodeString:
    case B2TDecodeBinary:
        if (ctx->u.dc.hamt_array.pstart) {
            erts_free(ctx->u.dc.hamt_array.alloc_type,
                      ctx->u.dc.hamt_array.pstart);
        }
        break;
    default:;
    }
}
1821
/* Magic-binary destructor: tear down the embedded B2TContext. */
static int b2t_context_destructor(Binary *context_bin)
{
    B2TContext* context = (B2TContext*) ERTS_MAGIC_BIN_DATA(context_bin);
    ASSERT(ERTS_MAGIC_BIN_DESTRUCTOR(context_bin) == b2t_context_destructor);

    b2t_destroy_context(context);
    return 1;
}
1830
1831 static BIF_RETTYPE binary_to_term_int(Process*, Eterm bin, B2TContext*);
1832
1833
binary_to_term_trap_1(BIF_ALIST_1)1834 static BIF_RETTYPE binary_to_term_trap_1(BIF_ALIST_1)
1835 {
1836 Binary *context_bin = erts_magic_ref2bin(BIF_ARG_1);
1837 ASSERT(ERTS_MAGIC_BIN_DESTRUCTOR(context_bin) == b2t_context_destructor);
1838
1839 return binary_to_term_int(BIF_P, THE_NON_VALUE, ERTS_MAGIC_BIN_DATA(context_bin));
1840 }
1841
1842
1843 #define B2T_BYTES_PER_REDUCTION 128
1844 #define B2T_MEMCPY_FACTOR 8
1845
1846 /* Define for testing */
1847 /*#define EXTREME_B2T_TRAPPING 1*/
1848
1849 #ifdef EXTREME_B2T_TRAPPING
/* Tiny deterministic LCG (MSVC rand() constants) used to force
 * frequent trapping when EXTREME_B2T_TRAPPING testing is enabled. */
static unsigned b2t_rand(void)
{
    static unsigned state = 17;
    state = state * 214013 + 2531011;
    return state;
}
1856 #endif
1857
1858
/*
 * Move a (typically stack-allocated) B2TContext into a magic binary so
 * it survives a trap; 'trap_bin' in the copy is set to a magic ref the
 * caller traps with. Note the pointer fixup: if the decode phase's
 * 'next' pointer referred to the old context's own 'res' field, it is
 * re-pointed into the copy.
 */
static B2TContext* b2t_export_context(Process* p, B2TContext* src)
{
    Binary* context_b = erts_create_magic_binary(sizeof(B2TContext),
                                                 b2t_context_destructor);
    B2TContext* ctx = ERTS_MAGIC_BIN_DATA(context_b);
    Eterm* hp;

    ASSERT(is_non_value(src->trap_bin));
    sys_memcpy(ctx, src, sizeof(B2TContext));
    if (ctx->state >= B2TDecode && ctx->u.dc.next == &src->u.dc.res) {
        /* 'next' pointed into the old context; re-point into the copy. */
        ctx->u.dc.next = &ctx->u.dc.res;
    }
    hp = HAlloc(p, ERTS_MAGIC_REF_THING_SIZE);
    ctx->trap_bin = erts_mk_magic_ref(&hp, &MSO(p), context_b);
    return ctx;
}
1875
/*
 * Driver for binary_to_term/1,2: a resumable state machine that runs
 * each phase (prepare, uncompress, size, decode) within a reduction
 * budget and traps between slices via a magic-binary context.
 *
 * 'bin' is the input binary on the first call and THE_NON_VALUE on
 * resumption (then 'ctx' is the exported context from the magic
 * binary). Returns the decoded term, a badarg/trap result, or traps
 * again with the exported context.
 */
static BIF_RETTYPE binary_to_term_int(Process* p, Eterm bin, B2TContext *ctx)
{
    BIF_RETTYPE ret_val;
#ifdef EXTREME_B2T_TRAPPING
    SWord initial_reds = 1 + b2t_rand() % 4;
#else
    SWord initial_reds = (Uint)(ERTS_BIF_REDS_LEFT(p) * B2T_BYTES_PER_REDUCTION);
#endif
    int is_first_call;

    if (is_value(bin)) {
        /* Setup enough to get started */
        is_first_call = 1;
        ctx->state = B2TPrepare;
        ctx->aligned_alloc = NULL;
    } else {
        /* Resumed after a trap: context was exported earlier. */
        ASSERT(is_value(ctx->trap_bin));
        ASSERT(ctx->state != B2TPrepare);
        is_first_call = 0;
    }
    ctx->reds = initial_reds;

    do {
        switch (ctx->state) {
        case B2TPrepare: {
            /* Get an aligned byte view of the input binary and detect
             * a COMPRESSED payload (may export ctx and switch state to
             * B2TUncompressChunk). */
            byte* bytes;
            Uint bin_size;
            bytes = erts_get_aligned_binary_bytes_extra(bin,
                                                        &ctx->aligned_alloc,
                                                        ERTS_ALC_T_EXT_TERM_DATA,
                                                        0);
            if (bytes == NULL) {
                ctx->b2ts.exttmp = 0;
                ctx->state = B2TBadArg;
                break;
            }
            bin_size = binary_size(bin);
            if (ctx->aligned_alloc) {
                /* Account for the copy made to align the bytes. */
                ctx->reds -= bin_size / 8;
            }
            if (binary2term_prepare(&ctx->b2ts, bytes, bin_size, &ctx, p) < 0) {
                ctx->state = B2TBadArg;
            }
            break;
        }
        case B2TUncompressChunk: {
            /* Inflate at most 'reds' bytes of compressed input. */
            uLongf chunk = ctx->reds;
            int zret;

            if (chunk > ctx->u.uc.dleft)
                chunk = ctx->u.uc.dleft;
            zret = erl_zlib_inflate_chunk(&ctx->u.uc.stream,
                                          ctx->u.uc.dbytes, &chunk);
            ctx->u.uc.dbytes += chunk;
            ctx->u.uc.dleft -= chunk;
            if (zret == Z_OK && ctx->u.uc.dleft > 0) {
                /* More to inflate: exhaust the budget to force a trap. */
                ctx->reds = 0;
            }
            else if (erl_zlib_inflate_finish(&ctx->u.uc.stream) == Z_OK
                     && zret == Z_STREAM_END
                     && ctx->u.uc.dleft == 0) {
                ctx->reds -= chunk;
                if (ctx->used_bytes) {
                    /* used_bytes held the full input size; subtract what
                     * the inflater did not consume. */
                    ASSERT(ctx->used_bytes > 5 + ctx->u.uc.stream.avail_in);
                    ctx->used_bytes -= ctx->u.uc.stream.avail_in;
                }
                ctx->state = B2TSizeInit;
            }
            else {
                ctx->state = B2TBadArg;
            }
            break;
        }
        case B2TSizeInit:
            ctx->u.sc.ep = NULL;
            ctx->state = B2TSize;
            /*fall through*/
        case B2TSize:
            /* Resumable size computation; advances state itself. */
            ctx->heap_size = decoded_size(ctx->b2ts.extp,
                                          ctx->b2ts.extp + ctx->b2ts.extsize,
                                          0, ctx);
            break;

        case B2TDecodeInit:
            if (is_non_value(ctx->trap_bin) && ctx->b2ts.extsize > ctx->reds) {
                /* dec_term will maybe trap, allocate space for magic bin
                   before result term to make it easy to trim with HRelease.
                */
                ctx = b2t_export_context(p, ctx);
            }
            ctx->u.dc.ep = ctx->b2ts.extp;
            ctx->u.dc.res = (Eterm) (UWord) NULL;
            ctx->u.dc.next = &ctx->u.dc.res;
            erts_factory_proc_prealloc_init(&ctx->u.dc.factory, p, ctx->heap_size);
            ctx->u.dc.flat_maps.wstart = NULL;
            ctx->u.dc.hamt_array.pstart = NULL;
            ctx->state = B2TDecode;
            /*fall through*/
        case B2TDecode:
        case B2TDecodeList:
        case B2TDecodeTuple:
        case B2TDecodeString:
        case B2TDecodeBinary: {
            /* Resumable decode; dec_term reads/updates ctx and advances
             * the state (to B2TDone or B2TDecodeFail when finished). */
            ErtsDistExternal fakedep;
            fakedep.flags = ctx->flags;
            fakedep.data = NULL;
            dec_term(&fakedep, NULL, NULL, NULL, ctx, 0);
            break;
        }
        case B2TDecodeFail:
            /*fall through*/
        case B2TBadArg:
            BUMP_REDS(p, (initial_reds - ctx->reds) / B2T_BYTES_PER_REDUCTION);

            ASSERT(ctx->bif == &bif_trap_export[BIF_binary_to_term_1]
                   || ctx->bif == &bif_trap_export[BIF_binary_to_term_2]);

            if (is_first_call)
                ERTS_BIF_PREP_ERROR(ret_val, p, BADARG);
            else {
                /* We trapped earlier: re-enable GC and report the error
                 * as coming from the original BIF with its arguments. */
                erts_set_gc_state(p, 1);
                if (is_non_value(ctx->arg[1]))
                    ERTS_BIF_PREP_ERROR_TRAPPED1(ret_val, p, BADARG, ctx->bif,
                                                 ctx->arg[0]);
                else
                    ERTS_BIF_PREP_ERROR_TRAPPED2(ret_val, p, BADARG, ctx->bif,
                                                 ctx->arg[0], ctx->arg[1]);
            }
            b2t_destroy_context(ctx);
            return ret_val;

        case B2TDone:
            if (ctx->used_bytes) {
                /* 'used' option: return {Term, BytesUsed}. */
                Eterm *hp;
                Eterm used;
                if (!ctx->b2ts.exttmp) {
                    /* Not compressed: compute bytes consumed directly. */
                    ASSERT(ctx->used_bytes == 1);
                    ctx->used_bytes = (ctx->u.dc.ep - ctx->b2ts.extp
                                       +1); /* VERSION_MAGIC */
                }
                if (IS_USMALL(0, ctx->used_bytes)) {
                    hp = erts_produce_heap(&ctx->u.dc.factory, 3, 0);
                    used = make_small(ctx->used_bytes);
                }
                else {
                    hp = erts_produce_heap(&ctx->u.dc.factory, 3+BIG_UINT_HEAP_SIZE, 0);
                    used = uint_to_big(ctx->used_bytes, hp);
                    hp += BIG_UINT_HEAP_SIZE;
                }
                ctx->u.dc.res = TUPLE2(hp, ctx->u.dc.res, used);
            }
            /* Frees buffers held by ctx, not ctx itself; the factory
             * and res fields remain usable below. */
            b2t_destroy_context(ctx);

            if (ctx->u.dc.factory.hp > ctx->u.dc.factory.hp_end) {
                erts_exit(ERTS_ERROR_EXIT, ":%s, line %d: heap overrun by %d words(s)\n",
                          __FILE__, __LINE__, ctx->u.dc.factory.hp - ctx->u.dc.factory.hp_end);
            }
            erts_factory_close(&ctx->u.dc.factory);

            if (!is_first_call) {
                erts_set_gc_state(p, 1);
            }
            BUMP_REDS(p, (initial_reds - ctx->reds) / B2T_BYTES_PER_REDUCTION);
            ERTS_BIF_PREP_RET(ret_val, ctx->u.dc.res);
            return ret_val;

        default:
            ASSERT(!"Unknown state in binary_to_term");
        }
    }while (ctx->reds > 0 || ctx->state >= B2TDone);

    /* Budget exhausted mid-phase: export the context (if not already
     * exported), disable GC on the first trap, and trap. */
    if (is_non_value(ctx->trap_bin)) {
        ctx = b2t_export_context(p, ctx);
        ASSERT(is_value(ctx->trap_bin));
    }

    if (is_first_call) {
        erts_set_gc_state(p, 0);
    }
    BUMP_ALL_REDS(p);

    ERTS_BIF_PREP_TRAP1(ret_val, &binary_to_term_trap_export,
                        p, ctx->trap_bin);

    return ret_val;
}
2062
2063 HIPE_WRAPPER_BIF_DISABLE_GC(binary_to_term, 1)
2064
binary_to_term_1(BIF_ALIST_1)2065 BIF_RETTYPE binary_to_term_1(BIF_ALIST_1)
2066 {
2067 B2TContext ctx;
2068
2069 ctx.flags = 0;
2070 ctx.used_bytes = 0;
2071 ctx.trap_bin = THE_NON_VALUE;
2072 ctx.bif = &bif_trap_export[BIF_binary_to_term_1];
2073 ctx.arg[0] = BIF_ARG_1;
2074 ctx.arg[1] = THE_NON_VALUE;
2075 return binary_to_term_int(BIF_P, BIF_ARG_1, &ctx);
2076 }
2077
2078 HIPE_WRAPPER_BIF_DISABLE_GC(binary_to_term, 2)
2079
binary_to_term_2(BIF_ALIST_2)2080 BIF_RETTYPE binary_to_term_2(BIF_ALIST_2)
2081 {
2082 B2TContext ctx;
2083 Eterm opts;
2084 Eterm opt;
2085
2086 ctx.flags = 0;
2087 ctx.used_bytes = 0;
2088 opts = BIF_ARG_2;
2089 while (is_list(opts)) {
2090 opt = CAR(list_val(opts));
2091 if (opt == am_safe) {
2092 ctx.flags |= ERTS_DIST_EXT_BTT_SAFE;
2093 }
2094 else if (opt == am_used) {
2095 ctx.used_bytes = 1;
2096 }
2097 else {
2098 goto error;
2099 }
2100 opts = CDR(list_val(opts));
2101 }
2102
2103 if (is_not_nil(opts))
2104 goto error;
2105
2106 ctx.trap_bin = THE_NON_VALUE;
2107 ctx.bif = &bif_trap_export[BIF_binary_to_term_2];
2108 ctx.arg[0] = BIF_ARG_1;
2109 ctx.arg[1] = BIF_ARG_2;
2110 return binary_to_term_int(BIF_P, BIF_ARG_1, &ctx);
2111
2112 error:
2113 BIF_ERROR(BIF_P, BADARG);
2114 }
2115
2116 Eterm
external_size_1(BIF_ALIST_1)2117 external_size_1(BIF_ALIST_1)
2118 {
2119 Process* p = BIF_P;
2120 Eterm Term = BIF_ARG_1;
2121 Uint size = 0;
2122
2123 switch (erts_encode_ext_size(Term, &size)) {
2124 case ERTS_EXT_SZ_SYSTEM_LIMIT:
2125 BIF_ERROR(BIF_P, SYSTEM_LIMIT);
2126 case ERTS_EXT_SZ_YIELD:
2127 ERTS_INTERNAL_ERROR("Unexpected yield");
2128 case ERTS_EXT_SZ_OK:
2129 break;
2130 }
2131
2132 if (IS_USMALL(0, size)) {
2133 BIF_RET(make_small(size));
2134 } else {
2135 Eterm* hp = HAlloc(p, BIG_UINT_HEAP_SIZE);
2136 BIF_RET(uint_to_big(size, hp));
2137 }
2138 }
2139
2140 Eterm
external_size_2(BIF_ALIST_2)2141 external_size_2(BIF_ALIST_2)
2142 {
2143 Uint size = 0;
2144 Uint flags = TERM_TO_BINARY_DFLAGS;
2145
2146 while (is_list(BIF_ARG_2)) {
2147 Eterm arg = CAR(list_val(BIF_ARG_2));
2148 Eterm* tp;
2149
2150 if (is_tuple(arg) && *(tp = tuple_val(arg)) == make_arityval(2)) {
2151 if (tp[1] == am_minor_version && is_small(tp[2])) {
2152 switch (signed_val(tp[2])) {
2153 case 0:
2154 flags &= ~DFLAG_NEW_FLOATS;
2155 break;
2156 case 1:
2157 break;
2158 default:
2159 goto error;
2160 }
2161 } else {
2162 goto error;
2163 }
2164 } else {
2165 error:
2166 BIF_ERROR(BIF_P, BADARG);
2167 }
2168 BIF_ARG_2 = CDR(list_val(BIF_ARG_2));
2169 }
2170 if (is_not_nil(BIF_ARG_2)) {
2171 goto error;
2172 }
2173
2174 switch (erts_encode_ext_size_2(BIF_ARG_1, flags, &size)) {
2175 case ERTS_EXT_SZ_SYSTEM_LIMIT:
2176 BIF_ERROR(BIF_P, SYSTEM_LIMIT);
2177 case ERTS_EXT_SZ_YIELD:
2178 ERTS_INTERNAL_ERROR("Unexpected yield");
2179 case ERTS_EXT_SZ_OK:
2180 break;
2181 }
2182
2183 if (IS_USMALL(0, size)) {
2184 BIF_RET(make_small(size));
2185 } else {
2186 Eterm* hp = HAlloc(BIF_P, BIG_UINT_HEAP_SIZE);
2187 BIF_RET(uint_to_big(size, hp));
2188 }
2189 }
2190
/*
 * Encode 'Term' into a new binary of (at most) 'size' bytes, with the
 * leading VERSION_MAGIC byte. When level != 0 the payload is
 * zlib-compressed at that level; the compressed layout is
 *   [VERSION_MAGIC, COMPRESSED, UncompressedSize:32, zdata...]
 * i.e. a 6-byte header. If compression fails or would not shrink the
 * data, the uncompressed encoding is used instead.
 */
static Eterm
erts_term_to_binary_simple(Process* p, Eterm Term, Uint size, int level, Uint64 dflags)
{
    Eterm bin;
    size_t real_size;
    byte* endp;

    if (level != 0) {
        byte buf[256];
        byte* bytes = buf;
        byte* out_bytes;
        uLongf dest_len;

        /* Encode into a temporary buffer first (stack buf if it fits). */
        if (sizeof(buf) < size) {
            bytes = erts_alloc(ERTS_ALC_T_TMP, size);
        }

        if ((endp = enc_term(NULL, Term, bytes, dflags, NULL))
            == NULL) {
            erts_exit(ERTS_ERROR_EXIT, "%s, line %d: bad term: %x\n",
                      __FILE__, __LINE__, Term);
        }
        real_size = endp - bytes;
        if (real_size > size) {
            erts_exit(ERTS_ERROR_EXIT, "%s, line %d: buffer overflow: %d word(s)\n",
                      __FILE__, __LINE__, real_size - size);
        }

        /*
         * We don't want to compress if compression actually increases the size.
         * Therefore, don't give zlib more out buffer than the size of the
         * uncompressed external format (minus the 5 bytes needed for the
         * COMPRESSED tag). If zlib returns any error, we'll revert to using
         * the original uncompressed external term format.
         */

        if (real_size < 5) {
            dest_len = 0;
        } else {
            dest_len = real_size - 5;
        }
        bin = new_binary(p, NULL, real_size+1);
        out_bytes = binary_bytes(bin);
        out_bytes[0] = VERSION_MAGIC;
        /* Compressed payload starts at offset 6 (after magic, tag and
         * the 32-bit uncompressed size). */
        if (erl_zlib_compress2(out_bytes+6, &dest_len, bytes, real_size, level) != Z_OK) {
            /* Fall back to uncompressed encoding. */
            sys_memcpy(out_bytes+1, bytes, real_size);
            bin = erts_realloc_binary(bin, real_size+1);
        } else {
            out_bytes[1] = COMPRESSED;
            put_int32(real_size, out_bytes+2);
            bin = erts_realloc_binary(bin, dest_len+6);
        }
        if (bytes != buf) {
            erts_free(ERTS_ALC_T_TMP, bytes);
        }
        return bin;
    } else {
        byte* bytes;

        /* Uncompressed: encode straight into the result binary and
         * trim it to the actual encoded size. */
        bin = new_binary(p, (byte *)NULL, size);
        bytes = binary_bytes(bin);
        bytes[0] = VERSION_MAGIC;
        if ((endp = enc_term(NULL, Term, bytes+1, dflags, NULL))
            == NULL) {
            erts_exit(ERTS_ERROR_EXIT, "%s, line %d: bad term: %x\n",
                      __FILE__, __LINE__, Term);
        }
        real_size = endp - bytes;
        if (real_size > size) {
            erts_exit(ERTS_ERROR_EXIT, "%s, line %d: buffer overflow: %d word(s)\n",
                      __FILE__, __LINE__, endp - (bytes + size));
        }
        return erts_realloc_binary(bin, real_size);
    }
}
2266
2267 Eterm
erts_term_to_binary(Process * p,Eterm Term,int level,Uint64 flags)2268 erts_term_to_binary(Process* p, Eterm Term, int level, Uint64 flags) {
2269 Uint size = 0;
2270 switch (encode_size_struct_int(NULL, NULL, Term, flags, NULL, &size)) {
2271 case ERTS_EXT_SZ_SYSTEM_LIMIT:
2272 return THE_NON_VALUE;
2273 case ERTS_EXT_SZ_YIELD:
2274 ERTS_INTERNAL_ERROR("Unexpected yield");
2275 case ERTS_EXT_SZ_OK:
2276 break;
2277 }
2278 size++; /* VERSION_MAGIC */;
2279 return erts_term_to_binary_simple(p, Term, size, level, flags);
2280 }
2281
2282 /* Define EXTREME_TTB_TRAPPING for testing in dist.h */
2283
2284 #ifndef EXTREME_TTB_TRAPPING
2285 #define TERM_TO_BINARY_COMPRESS_CHUNK (1 << 18)
2286 #else
2287 #define TERM_TO_BINARY_COMPRESS_CHUNK 10
2288 #endif
2289 #define TERM_TO_BINARY_MEMCPY_FACTOR 8
2290
ttb_context_destructor(Binary * context_bin)2291 static int ttb_context_destructor(Binary *context_bin)
2292 {
2293 TTBContext *context = ERTS_MAGIC_BIN_DATA(context_bin);
2294 if (context->alive) {
2295 context->alive = 0;
2296 switch (context->state) {
2297 case TTBSize:
2298 DESTROY_SAVED_WSTACK(&context->s.sc.wstack);
2299 break;
2300 case TTBEncode:
2301 DESTROY_SAVED_WSTACK(&context->s.ec.wstack);
2302 if (context->s.ec.result_bin != NULL) { /* Set to NULL if ever made alive! */
2303 ASSERT(erts_refc_read(&(context->s.ec.result_bin->intern.refc),1));
2304 erts_bin_free(context->s.ec.result_bin);
2305 context->s.ec.result_bin = NULL;
2306 }
2307 if (context->s.ec.iov)
2308 erts_free(ERTS_ALC_T_T2B_VEC, context->s.ec.iov);
2309 break;
2310 case TTBCompress:
2311 erl_zlib_deflate_finish(&(context->s.cc.stream));
2312
2313 if (context->s.cc.destination_bin != NULL) { /* Set to NULL if ever made alive! */
2314 ASSERT(erts_refc_read(&(context->s.cc.destination_bin->intern.refc),1));
2315 erts_bin_free(context->s.cc.destination_bin);
2316 context->s.cc.destination_bin = NULL;
2317 }
2318
2319 if (context->s.cc.result_bin != NULL) { /* Set to NULL if ever made alive! */
2320 ASSERT(erts_refc_read(&(context->s.cc.result_bin->intern.refc),1));
2321 erts_bin_free(context->s.cc.result_bin);
2322 context->s.cc.result_bin = NULL;
2323 }
2324 break;
2325 }
2326 }
2327 return 1;
2328 }
2329
2330 Uint
erts_ttb_iov_size(int use_termv,Sint vlen,Uint fragments)2331 erts_ttb_iov_size(int use_termv, Sint vlen, Uint fragments)
2332 {
2333 Uint sz;
2334 ASSERT(vlen > 0);
2335 ASSERT(fragments > 0);
2336 sz = sizeof(SysIOVec)*vlen;
2337 sz += sizeof(ErlDrvBinary *)*vlen;
2338 if (use_termv)
2339 sz += sizeof(Eterm)*vlen;
2340 sz += sizeof(ErlIOVec *)*fragments;
2341 sz += sizeof(ErlIOVec)*fragments;
2342 ASSERT(sz % sizeof(void*) == 0);
2343 return sz;
2344 }
2345
2346 void
erts_ttb_iov_init(TTBEncodeContext * ctx,int use_termv,char * ptr,Sint vlen,Uint fragments,Uint fragment_size)2347 erts_ttb_iov_init(TTBEncodeContext *ctx, int use_termv, char *ptr,
2348 Sint vlen, Uint fragments, Uint fragment_size)
2349 {
2350 ctx->vlen = 0;
2351 ctx->size = 0;
2352
2353 ctx->iov = (SysIOVec *) ptr;
2354 ptr += sizeof(SysIOVec)*vlen;
2355 ASSERT(((UWord) ptr) % sizeof(void *) == 0);
2356
2357 ctx->binv = (ErlDrvBinary **) ptr;
2358 ptr += sizeof(ErlDrvBinary *)*vlen;
2359
2360 if (!use_termv)
2361 ctx->termv = NULL;
2362 else {
2363 ctx->termv = (Eterm *) ptr;
2364 ptr += sizeof(Eterm)*vlen;
2365 }
2366
2367 ctx->fragment_eiovs = (ErlIOVec *) ptr;
2368 ptr += sizeof(ErlIOVec)*fragments;
2369 ASSERT(((UWord) ptr) % sizeof(void *) == 0);
2370
2371 ctx->frag_ix = -1;
2372 ctx->fragment_size = fragment_size;
2373
2374 #ifdef DEBUG
2375 ctx->cptr = NULL;
2376 ctx->debug_fragments = fragments;
2377 ctx->debug_vlen = vlen;
2378 #endif
2379 }
2380
erts_term_to_binary_int(Process * p,Sint bif_ix,Eterm Term,Eterm opts,int level,Uint64 dflags,Binary * context_b,int iovec,Uint fragment_size)2381 static Eterm erts_term_to_binary_int(Process* p, Sint bif_ix, Eterm Term, Eterm opts,
2382 int level, Uint64 dflags, Binary *context_b,
2383 int iovec, Uint fragment_size)
2384 {
2385 Eterm *hp;
2386 Eterm res;
2387 Eterm c_term;
2388 #ifndef EXTREME_TTB_TRAPPING
2389 Sint reds = (Sint) (ERTS_BIF_REDS_LEFT(p) * TERM_TO_BINARY_LOOP_FACTOR);
2390 #else
2391 Sint reds = 20; /* For testing */
2392 #endif
2393 Sint initial_reds = reds;
2394 TTBContext c_buff;
2395 TTBContext *context = &c_buff;
2396
2397 ASSERT(bif_ix > 0 && IS_USMALL(!0, bif_ix));
2398 ASSERT(bif_ix == BIF_term_to_binary_1 || bif_ix == BIF_term_to_binary_2
2399 || bif_ix == BIF_term_to_iovec_1 || bif_ix == BIF_term_to_iovec_2);
2400
2401 #define EXPORT_CONTEXT() \
2402 do { \
2403 if (context_b == NULL) { \
2404 context_b = erts_create_magic_binary(sizeof(TTBContext), \
2405 ttb_context_destructor);\
2406 context = ERTS_MAGIC_BIN_DATA(context_b); \
2407 sys_memcpy(context,&c_buff,sizeof(TTBContext)); \
2408 } \
2409 } while (0)
2410
2411 #define RETURN_STATE() \
2412 do { \
2413 hp = HAlloc(p, ERTS_MAGIC_REF_THING_SIZE + 1 + 4); \
2414 c_term = erts_mk_magic_ref(&hp, &MSO(p), context_b); \
2415 res = TUPLE4(hp, Term, opts, c_term, make_small(bif_ix)); \
2416 BUMP_ALL_REDS(p); \
2417 return res; \
2418 } while (0);
2419
2420 if (context_b == NULL) {
2421 /* Setup enough to get started */
2422 context->state = TTBSize;
2423 context->alive = 1;
2424 ERTS_INIT_TTBSizeContext(&context->s.sc, dflags);
2425 context->s.sc.level = level;
2426 context->s.sc.fragment_size = fragment_size;
2427 if (!level) {
2428 context->s.sc.vlen = iovec ? 0 : -1;
2429 context->s.sc.iovec = iovec;
2430 }
2431 else {
2432 context->s.sc.vlen = -1;
2433 context->s.sc.iovec = 0;
2434 }
2435 } else {
2436 context = ERTS_MAGIC_BIN_DATA(context_b);
2437 }
2438
2439 /* Initialization done, now we will go through the states */
2440 for (;;) {
2441 switch (context->state) {
2442 case TTBSize:
2443 {
2444 Uint size, fragments = 1;
2445 Binary *result_bin;
2446 int level = context->s.sc.level;
2447 Sint vlen;
2448 iovec = context->s.sc.iovec;
2449 fragment_size = context->s.sc.fragment_size;
2450 size = 1; /* VERSION_MAGIC */
2451 switch (encode_size_struct_int(&context->s.sc, NULL, Term,
2452 context->s.sc.dflags, &reds,
2453 &size)) {
2454 case ERTS_EXT_SZ_SYSTEM_LIMIT:
2455 BUMP_REDS(p, (initial_reds - reds) / TERM_TO_BINARY_LOOP_FACTOR);
2456 return THE_NON_VALUE;
2457 case ERTS_EXT_SZ_YIELD:
2458 EXPORT_CONTEXT();
2459 /* Same state */
2460 RETURN_STATE();
2461 case ERTS_EXT_SZ_OK:
2462 break;
2463 }
2464 /* Move these to next state */
2465 dflags = context->s.sc.dflags;
2466 vlen = context->s.sc.vlen;
2467 if (vlen >= 0) {
2468 Uint total_size = size + context->s.sc.extra_size;
2469 fragments = (total_size - 1)/fragment_size + 1;
2470 vlen += 3*fragments;
2471 ASSERT(vlen);
2472 }
2473 else if (size <= ERL_ONHEAP_BIN_LIMIT) {
2474 /* Finish in one go */
2475 res = erts_term_to_binary_simple(p, Term, size,
2476 level, dflags);
2477 if (iovec) {
2478 Eterm *hp = HAlloc(p, 2);
2479 res = CONS(hp, res, NIL);
2480 }
2481 BUMP_REDS(p, 1);
2482 return res;
2483 }
2484
2485 result_bin = erts_bin_nrml_alloc(size);
2486 result_bin->orig_bytes[0] = (byte)VERSION_MAGIC;
2487 /* Next state immediately, no need to export context */
2488 context->state = TTBEncode;
2489 ERTS_INIT_TTBEncodeContext(&context->s.ec, dflags);
2490 context->s.ec.level = level;
2491 context->s.ec.result_bin = result_bin;
2492 context->s.ec.iovec = iovec;
2493 if (vlen >= 0) {
2494 Uint sz = erts_ttb_iov_size(!0, vlen, fragments);
2495 char *ptr = (char *) erts_alloc(ERTS_ALC_T_T2B_VEC, sz);
2496 erts_ttb_iov_init(&context->s.ec, !0, ptr, vlen,
2497 fragments, fragment_size);
2498 context->s.ec.cptr = (byte *) &result_bin->orig_bytes[0];
2499 }
2500 break;
2501 }
2502 case TTBEncode:
2503 {
2504 byte *endp, *tmp;
2505 byte *bytes = (byte *) context->s.ec.result_bin->orig_bytes;
2506 size_t real_size;
2507 Binary *result_bin;
2508 Sint realloc_offset;
2509 Uint fragments;
2510
2511 dflags = context->s.ec.dflags;
2512 if (enc_term_int(&context->s.ec, NULL,Term, bytes+1, dflags,
2513 NULL, &reds, &endp) < 0) {
2514 EXPORT_CONTEXT();
2515 RETURN_STATE();
2516 }
2517 real_size = endp - bytes;
2518 tmp = (byte *) &context->s.ec.result_bin->orig_bytes[0];
2519 result_bin = erts_bin_realloc(context->s.ec.result_bin,real_size);
2520 realloc_offset = (byte *) &result_bin->orig_bytes[0] - tmp;
2521 level = context->s.ec.level;
2522 BUMP_REDS(p, (initial_reds - reds) / TERM_TO_BINARY_LOOP_FACTOR);
2523 if (level == 0 || real_size < 6) { /* We are done */
2524 Sint cbin_refc_diff;
2525 Eterm result, rb_term, *hp, *hp_end;
2526 Uint hsz;
2527 int ix;
2528 SysIOVec *iov;
2529 Eterm *termv;
2530 return_normal:
2531 fragments = context->s.ec.frag_ix + 1;
2532 context->s.ec.result_bin = NULL;
2533 context->alive = 0;
2534 if (context_b && erts_refc_read(&context_b->intern.refc,0) == 0) {
2535 erts_bin_free(context_b);
2536 }
2537 if (!context->s.ec.iov) {
2538 hsz = PROC_BIN_SIZE + (iovec ? 2 : 0);
2539 hp = HAlloc(p, hsz);
2540 result = erts_build_proc_bin(&MSO(p), hp, result_bin);
2541 if (iovec) {
2542 hp += PROC_BIN_SIZE;
2543 result = CONS(hp, result, NIL);
2544 }
2545 return result;
2546 }
2547 iovec = context->s.ec.iovec;
2548 ASSERT(iovec);
2549 iov = context->s.ec.iov;
2550 termv = context->s.ec.termv;
2551 ASSERT(context->s.ec.vlen <= context->s.ec.debug_vlen);
2552 ASSERT(fragments <= context->s.ec.debug_fragments);
2553 /* first two elements should be unused */
2554 ASSERT(context->s.ec.vlen >= 3*fragments);
2555 ASSERT(!iov[0].iov_base && !iov[0].iov_len);
2556 ASSERT(!iov[1].iov_base && !iov[1].iov_len);
2557
2558 hsz = (2 /* cons */
2559 + (PROC_BIN_SIZE > ERL_SUB_BIN_SIZE
2560 ? PROC_BIN_SIZE
2561 : ERL_SUB_BIN_SIZE)); /* max size per vec */
2562 hsz *= context->s.ec.vlen - 2*fragments; /* number of vecs */
2563 hp = HAlloc(p, hsz);
2564 hp_end = hp + hsz;
2565 rb_term = THE_NON_VALUE;
2566 result = NIL;
2567 ASSERT(erts_refc_read(&result_bin->intern.refc, 1) == 1);
2568 cbin_refc_diff = -1;
2569 for (ix = context->s.ec.vlen - 1; ix > 1; ix--) {
2570 Eterm bin_term, pb_term;
2571 Uint pb_size;
2572 ProcBin *pb;
2573 SysIOVec *iovp = &iov[ix];
2574 if (!iovp->iov_base)
2575 continue; /* empty slot for header */
2576 pb_term = termv[ix];
2577 if (is_value(pb_term)) {
2578 pb_size = binary_size(pb_term);
2579 pb = (ProcBin *) binary_val(pb_term);
2580 }
2581 else {
2582 iovp->iov_base = (void *) (((byte *) iovp->iov_base)
2583 + realloc_offset);
2584 pb_size = result_bin->orig_size;
2585 if (is_non_value(rb_term))
2586 pb = NULL;
2587 else {
2588 pb = (ProcBin *) binary_val(rb_term);
2589 pb_term = rb_term;
2590 }
2591 }
2592 /*
2593 * We intentionally avoid using sub binaries
2594 * since the GC might convert those to heap
2595 * binaries and by this ruin the nice preparation
2596 * for usage of this data as I/O vector in
2597 * nifs/drivers.
2598 */
2599 if (is_value(pb_term) && iovp->iov_len == pb_size)
2600 bin_term = pb_term;
2601 else {
2602 Binary *bin;
2603 if (is_value(pb_term)) {
2604 bin = ((ProcBin *) binary_val(pb_term))->val;
2605 erts_refc_inc(&bin->intern.refc, 2);
2606 }
2607 else {
2608 bin = result_bin;
2609 cbin_refc_diff++;
2610 }
2611 pb = (ProcBin *) (char *) hp;
2612 hp += PROC_BIN_SIZE;
2613 pb->thing_word = HEADER_PROC_BIN;
2614 pb->size = (Uint) iovp->iov_len;
2615 pb->next = MSO(p).first;
2616 MSO(p).first = (struct erl_off_heap_header*) pb;
2617 pb->val = bin;
2618 pb->bytes = (byte*) iovp->iov_base;
2619 pb->flags = 0;
2620 OH_OVERHEAD(&MSO(p), pb->size / sizeof(Eterm));
2621 bin_term = make_binary(pb);
2622 }
2623 result = CONS(hp, bin_term, result);
2624 hp += 2;
2625 }
2626 ASSERT(hp <= hp_end);
2627 HRelease(p, hp_end, hp);
2628 context->s.ec.iov = NULL;
2629 erts_free(ERTS_ALC_T_T2B_VEC, iov);
2630 if (cbin_refc_diff) {
2631 ASSERT(cbin_refc_diff >= -1);
2632 if (cbin_refc_diff > 0)
2633 erts_refc_add(&result_bin->intern.refc,
2634 cbin_refc_diff, 1);
2635 else
2636 erts_bin_free(result_bin);
2637 }
2638 return result;
2639 }
2640 /* Continue with compression... */
2641 /* To make absolutely sure that zlib does not barf on a reallocated context,
2642 we make sure it's "exported" before doing anything compession-like */
2643 EXPORT_CONTEXT();
2644 bytes = (byte *) result_bin->orig_bytes; /* result_bin is reallocated */
2645 if (erl_zlib_deflate_start(&(context->s.cc.stream),bytes+1,real_size-1,level)
2646 != Z_OK) {
2647 goto return_normal;
2648 }
2649 context->state = TTBCompress;
2650 context->s.cc.real_size = real_size;
2651 context->s.cc.result_bin = result_bin;
2652
2653 result_bin = erts_bin_nrml_alloc(real_size);
2654 result_bin->orig_bytes[0] = (byte) VERSION_MAGIC;
2655
2656 context->s.cc.destination_bin = result_bin;
2657 context->s.cc.dest_len = 0;
2658 context->s.cc.dbytes = (byte *) result_bin->orig_bytes+6;
2659 break;
2660 }
2661 case TTBCompress:
2662 {
2663 uLongf tot_dest_len = context->s.cc.real_size - 6;
2664 uLongf left = (tot_dest_len - context->s.cc.dest_len);
2665 uLongf this_time = (left > TERM_TO_BINARY_COMPRESS_CHUNK) ?
2666 TERM_TO_BINARY_COMPRESS_CHUNK :
2667 left;
2668 Binary *result_bin;
2669 ProcBin *pb;
2670 Uint max = (ERTS_BIF_REDS_LEFT(p) * TERM_TO_BINARY_COMPRESS_CHUNK) / CONTEXT_REDS;
2671
2672 if (max < this_time) {
2673 this_time = max + 1; /* do not set this_time to 0 */
2674 }
2675
2676 res = erl_zlib_deflate_chunk(&(context->s.cc.stream), context->s.cc.dbytes, &this_time);
2677 context->s.cc.dbytes += this_time;
2678 context->s.cc.dest_len += this_time;
2679 switch (res) {
2680 case Z_OK:
2681 if (context->s.cc.dest_len >= tot_dest_len) {
2682 goto no_use_compressing;
2683 }
2684 RETURN_STATE();
2685 case Z_STREAM_END:
2686 {
2687 byte *dbytes = (byte *) context->s.cc.destination_bin->orig_bytes + 1;
2688
2689 dbytes[0] = COMPRESSED;
2690 put_int32(context->s.cc.real_size-1,dbytes+1);
2691 erl_zlib_deflate_finish(&(context->s.cc.stream));
2692 result_bin = erts_bin_realloc(context->s.cc.destination_bin,
2693 context->s.cc.dest_len+6);
2694 context->s.cc.destination_bin = NULL;
2695 ASSERT(erts_refc_read(&result_bin->intern.refc, 1));
2696 erts_bin_free(context->s.cc.result_bin);
2697 context->s.cc.result_bin = NULL;
2698 context->alive = 0;
2699 BUMP_REDS(p, (this_time * CONTEXT_REDS) / TERM_TO_BINARY_COMPRESS_CHUNK);
2700 if (context_b && erts_refc_read(&context_b->intern.refc,0) == 0) {
2701 erts_bin_free(context_b);
2702 }
2703 return erts_build_proc_bin(&MSO(p),
2704 HAlloc(p, PROC_BIN_SIZE),
2705 result_bin);
2706 }
2707 default: /* Compression error, revert to uncompressed binary (still in
2708 context) */
2709 no_use_compressing:
2710 result_bin = context->s.cc.result_bin;
2711 context->s.cc.result_bin = NULL;
2712 pb = (ProcBin *) HAlloc(p, PROC_BIN_SIZE);
2713 pb->thing_word = HEADER_PROC_BIN;
2714 pb->size = context->s.cc.real_size;
2715 pb->next = MSO(p).first;
2716 MSO(p).first = (struct erl_off_heap_header*)pb;
2717 pb->val = result_bin;
2718 pb->bytes = (byte*) result_bin->orig_bytes;
2719 pb->flags = 0;
2720 OH_OVERHEAD(&(MSO(p)), pb->size / sizeof(Eterm));
2721 ASSERT(erts_refc_read(&result_bin->intern.refc, 1));
2722 erl_zlib_deflate_finish(&(context->s.cc.stream));
2723 erts_bin_free(context->s.cc.destination_bin);
2724 context->s.cc.destination_bin = NULL;
2725 context->alive = 0;
2726 BUMP_REDS(p, (this_time * CONTEXT_REDS) / TERM_TO_BINARY_COMPRESS_CHUNK);
2727 if (context_b && erts_refc_read(&context_b->intern.refc,0) == 0) {
2728 erts_bin_free(context_b);
2729 }
2730 return make_binary(pb);
2731 }
2732 }
2733 }
2734 }
2735 #undef EXPORT_CONTEXT
2736 #undef RETURN_STATE
2737 }
2738
2739
2740
2741
2742
2743
2744
2745
2746 /*
2747 * This function fills ext with the external format of atom.
2748 * If it's an old atom we just supply an index, otherwise
2749 * we insert the index _and_ the entire atom. This way the receiving side
2750 * does not have to perform an hash on the etom to locate it, and
2751 * we save a lot of space on the wire.
2752 */
2753
static byte*
enc_atom(ErtsAtomCacheMap *acmp, Eterm atom, byte *ep, Uint64 dflags)
{
    /* Encode 'atom' into the buffer at 'ep' and return the position just
     * past the encoding. The representation depends on dflags: a
     * node-local atom-table index (ETS compressed format), a cache
     * reference (distribution with atom cache), or the full atom text
     * in UTF-8 or Latin-1. */
    int iix;
    int len;
    const int utf8_atoms = (int) (dflags & DFLAG_UTF8_ATOMS);

    ASSERT(is_atom(atom));

    if (dflags & DFLAG_ETS_COMPRESSED) {
        /* Internal format: the data never leaves this node, so the raw
         * atom-table index is valid on decode. */
        Uint aval = atom_val(atom);
        ASSERT(aval < (1<<24));
        if (aval >= (1 << 16)) {
            *ep++ = ATOM_INTERNAL_REF3;
            put_int24(aval, ep);
            ep += 3;
        }
        else {
            *ep++ = ATOM_INTERNAL_REF2;
            put_int16(aval, ep);
            ep += 2;
        }
        return ep;
    }

    /*
     * term_to_binary/1,2 and the initial distribution message
     * don't use the cache.
     */

    iix = get_iix_acache_map(acmp, atom, dflags);
    if (iix < 0) {
        /* Not cached: emit the full atom text. */
        Atom *a = atom_tab(atom_val(atom));
        len = a->len;
        if (utf8_atoms || a->latin1_chars < 0) {
            /* UTF-8 encoding: either the receiver supports it, or the
             * atom cannot be represented in Latin-1 at all. */
            if (len > 255) {
                *ep++ = ATOM_UTF8_EXT;
                put_int16(len, ep);
                ep += 2;
            }
            else {
                *ep++ = SMALL_ATOM_UTF8_EXT;
                put_int8(len, ep);
                ep += 1;
            }
            sys_memcpy((char *) ep, (char *) a->name, len);
        }
        else {
            /* Legacy Latin-1 encoding. a->name is stored as UTF-8, so
             * convert unless the byte lengths already match (pure
             * ASCII); the length field is written AFTER the text since
             * conversion may change 'len'. */
            if (a->latin1_chars <= 255 && (dflags & DFLAG_SMALL_ATOM_TAGS)) {
                *ep++ = SMALL_ATOM_EXT;
                if (len == a->latin1_chars) {
                    sys_memcpy(ep+1, a->name, len);
                }
                else {
                    len = erts_utf8_to_latin1(ep+1, a->name, len);
                    ASSERT(len == a->latin1_chars);
                }
                put_int8(len, ep);
                ep++;
            }
            else {
                *ep++ = ATOM_EXT;
                if (len == a->latin1_chars) {
                    sys_memcpy(ep+2, a->name, len);
                }
                else {
                    len = erts_utf8_to_latin1(ep+2, a->name, len);
                    ASSERT(len == a->latin1_chars);
                }
                put_int16(len, ep);
                ep += 2;
            }
        }
        ep += len;
        return ep;
    }

    /* The atom is referenced in the cache. */
    *ep++ = ATOM_CACHE_REF;
    put_int8(iix, ep);
    ep++;
    return ep;
}
2837
2838 /*
2839 * We use this atom as sysname in local pid/port/refs
2840 * for the ETS compressed format
2841 *
2842 */
2843 #define INTERNAL_LOCAL_SYSNAME am_ErtsSecretAtom
2844
2845 static byte*
enc_pid(ErtsAtomCacheMap * acmp,Eterm pid,byte * ep,Uint64 dflags)2846 enc_pid(ErtsAtomCacheMap *acmp, Eterm pid, byte* ep, Uint64 dflags)
2847 {
2848 Uint on, os;
2849 Eterm sysname = ((is_internal_pid(pid) && (dflags & DFLAG_ETS_COMPRESSED))
2850 ? INTERNAL_LOCAL_SYSNAME : pid_node_name(pid));
2851 Uint32 creation = pid_creation(pid);
2852
2853 *ep++ = NEW_PID_EXT;
2854
2855 /* insert atom here containing host and sysname */
2856 ep = enc_atom(acmp, sysname, ep, dflags);
2857
2858 on = pid_number(pid);
2859 os = pid_serial(pid);
2860
2861 put_int32(on, ep);
2862 ep += 4;
2863 put_int32(os, ep);
2864 ep += 4;
2865 put_int32(creation, ep);
2866 ep += 4;
2867 return ep;
2868 }
2869
2870 /* Expect an atom in plain text or cached */
static byte*
dec_atom(ErtsDistExternal *edep, byte* ep, Eterm* objp)
{
    /* Decode one atom from 'ep' into *objp and return the position just
     * past it, or NULL on failure (in which case *objp is set to NIL so
     * no heap hole is left behind). Accepts cached references, plain
     * Latin-1/UTF-8 text, and node-local atom-table indices. */
    Uint len;
    int n;
    ErtsAtomEncoding char_enc;

    switch (*ep++) {
    case ATOM_CACHE_REF:
        /* Index into the per-connection atom cache; only valid when the
         * distribution header carried a translation table. */
        if (!(edep && (edep->flags & ERTS_DIST_EXT_ATOM_TRANS_TAB)))
            goto error;
        n = get_int8(ep);
        ep++;
        if (n >= edep->attab.size)
            goto error;
        ASSERT(is_atom(edep->attab.atom[n]));
        *objp = edep->attab.atom[n];
        break;
    case ATOM_EXT:
        len = get_int16(ep),
        ep += 2;
        char_enc = ERTS_ATOM_ENC_LATIN1;
        goto dec_atom_common;
    case SMALL_ATOM_EXT:
        len = get_int8(ep);
        ep++;
        char_enc = ERTS_ATOM_ENC_LATIN1;
        goto dec_atom_common;
    case ATOM_UTF8_EXT:
        len = get_int16(ep),
        ep += 2;
        char_enc = ERTS_ATOM_ENC_UTF8;
        goto dec_atom_common;
    case SMALL_ATOM_UTF8_EXT:
        len = get_int8(ep),
        ep++;
        char_enc = ERTS_ATOM_ENC_UTF8;
    dec_atom_common:
        /* Common text path: 'len' bytes of atom text follow. */
        if (edep && (edep->flags & ERTS_DIST_EXT_BTT_SAFE)) {
            /* binary_to_term(Bin, [safe]): only accept atoms that
             * already exist; never create new ones. */
            if (!erts_atom_get((char*)ep, len, objp, char_enc)) {
                goto error;
            }
        } else {
            Eterm atom = erts_atom_put(ep, len, char_enc, 0);
            if (is_non_value(atom))
                goto error;
            *objp = atom;
        }
        ep += len;
        break;
    case ATOM_INTERNAL_REF2:
        /* Node-local atom-table index (ETS compressed format). */
        n = get_int16(ep);
        ep += 2;
        if (n >= atom_table_size()) {
            goto error;
        }
        *objp = make_atom(n);
        break;
    case ATOM_INTERNAL_REF3:
        n = get_int24(ep);
        ep += 3;
        if (n >= atom_table_size()) {
            goto error;
        }
        *objp = make_atom(n);
        break;

    default:
    error:
        *objp = NIL; /* Don't leave a hole in the heap */
        return NULL;
    }
    return ep;
}
2945
dec_get_node(Eterm sysname,Uint32 creation,Eterm book)2946 static ERTS_INLINE ErlNode* dec_get_node(Eterm sysname, Uint32 creation, Eterm book)
2947 {
2948 if (sysname == INTERNAL_LOCAL_SYSNAME) /* && DFLAG_INTERNAL_TAGS */
2949 return erts_this_node;
2950
2951 if (sysname == erts_this_node->sysname
2952 && (creation == erts_this_node->creation || creation == ORIG_CREATION))
2953 return erts_this_node;
2954
2955 return erts_find_or_insert_node(sysname,creation,book);
2956 }
2957
2958 static byte*
dec_pid(ErtsDistExternal * edep,ErtsHeapFactory * factory,byte * ep,Eterm * objp,byte tag)2959 dec_pid(ErtsDistExternal *edep, ErtsHeapFactory* factory, byte* ep,
2960 Eterm* objp, byte tag)
2961 {
2962 Eterm sysname;
2963 Uint data;
2964 Uint num;
2965 Uint ser;
2966 Uint32 cre;
2967 ErlNode *node;
2968
2969 *objp = NIL; /* In case we fail, don't leave a hole in the heap */
2970
2971 /* eat first atom */
2972 if ((ep = dec_atom(edep, ep, &sysname)) == NULL)
2973 return NULL;
2974 num = get_int32(ep);
2975 ep += 4;
2976 if (num > ERTS_MAX_PID_NUMBER)
2977 return NULL;
2978 ser = get_int32(ep);
2979 ep += 4;
2980 if (ser > ERTS_MAX_PID_SERIAL)
2981 return NULL;
2982
2983 if (tag == PID_EXT) {
2984 cre = get_int8(ep);
2985 ep += 1;
2986 if (!is_tiny_creation(cre)) {
2987 return NULL;
2988 }
2989 } else {
2990 ASSERT(tag == NEW_PID_EXT);
2991 cre = get_int32(ep);
2992 ep += 4;
2993 }
2994
2995 data = make_pid_data(ser, num);
2996
2997 /*
2998 * We are careful to create the node entry only after all
2999 * validity tests are done.
3000 */
3001 node = dec_get_node(sysname, cre, make_boxed(factory->hp));
3002
3003 if(node == erts_this_node) {
3004 *objp = make_internal_pid(data);
3005 } else {
3006 ExternalThing *etp = (ExternalThing *) factory->hp;
3007 factory->hp += EXTERNAL_THING_HEAD_SIZE + 1;
3008
3009 etp->header = make_external_pid_header(1);
3010 etp->next = factory->off_heap->first;
3011 etp->node = node;
3012 etp->data.ui[0] = data;
3013
3014 factory->off_heap->first = (struct erl_off_heap_header*) etp;
3015 *objp = make_external_pid(etp);
3016 }
3017 return ep;
3018 }
3019
3020
3021 #define ENC_TERM ((Eterm) 0)
3022 #define ENC_ONE_CONS ((Eterm) 1)
3023 #define ENC_PATCH_FUN_SIZE ((Eterm) 2)
3024 #define ENC_BIN_COPY ((Eterm) 3)
3025 #define ENC_MAP_PAIR ((Eterm) 4)
3026 #define ENC_HASHMAP_NODE ((Eterm) 5)
3027 #define ENC_LAST_ARRAY_ELEMENT ((Eterm) 6)
3028
3029 static byte*
enc_term(ErtsAtomCacheMap * acmp,Eterm obj,byte * ep,Uint64 dflags,struct erl_off_heap_header ** off_heap)3030 enc_term(ErtsAtomCacheMap *acmp, Eterm obj, byte* ep, Uint64 dflags,
3031 struct erl_off_heap_header** off_heap)
3032 {
3033 byte *res;
3034 (void) enc_term_int(NULL, acmp, obj, ep, dflags, off_heap, NULL, &res);
3035 return res;
3036 }
3037
3038 static int
enc_term_int(TTBEncodeContext * ctx,ErtsAtomCacheMap * acmp,Eterm obj,byte * ep,Uint64 dflags,struct erl_off_heap_header ** off_heap,Sint * reds,byte ** res)3039 enc_term_int(TTBEncodeContext* ctx, ErtsAtomCacheMap *acmp, Eterm obj, byte* ep,
3040 Uint64 dflags,
3041 struct erl_off_heap_header** off_heap, Sint *reds, byte **res)
3042 {
3043 DECLARE_WSTACK(s);
3044 Uint n;
3045 Uint i;
3046 Uint j;
3047 Uint* ptr;
3048 Eterm val;
3049 FloatDef f;
3050 Sint r = 0;
3051 int use_iov = 0;
3052
3053 if (ctx) {
3054 WSTACK_CHANGE_ALLOCATOR(s, ERTS_ALC_T_SAVED_ESTACK);
3055 r = *reds;
3056 use_iov = !!ctx->iov;
3057
3058 if (ctx->wstack.wstart) { /* restore saved stacks and byte pointer */
3059 WSTACK_RESTORE(s, &ctx->wstack);
3060 ep = ctx->ep;
3061 obj = ctx->obj;
3062 if (is_non_value(obj)) {
3063 goto outer_loop;
3064 }
3065 }
3066 }
3067
3068 goto L_jump_start;
3069
3070 outer_loop:
3071 while (!WSTACK_ISEMPTY(s)) {
3072 obj = WSTACK_POP(s);
3073
3074 switch (val = WSTACK_POP(s)) {
3075 case ENC_TERM:
3076 break;
3077 case ENC_ONE_CONS:
3078 encode_one_cons:
3079 {
3080 Eterm* cons = list_val(obj);
3081 Eterm tl;
3082 Uint len_cnt = WSTACK_POP(s);
3083
3084 obj = CAR(cons);
3085 tl = CDR(cons);
3086 if (is_list(tl)) {
3087 len_cnt++;
3088 WSTACK_PUSH3(s, len_cnt, ENC_ONE_CONS, tl);
3089 }
3090 else {
3091 byte* list_lenp = (byte*) WSTACK_POP(s);
3092 ASSERT(list_lenp[-1] == LIST_EXT);
3093 put_int32(len_cnt, list_lenp);
3094
3095 WSTACK_PUSH2(s, ENC_TERM, tl);
3096 }
3097 }
3098 break;
3099 case ENC_PATCH_FUN_SIZE:
3100 {
3101 byte* size_p = (byte *) obj;
3102 put_int32(ep - size_p, size_p);
3103 }
3104 goto outer_loop;
3105 case ENC_BIN_COPY: {
3106 Uint bits = (Uint)obj;
3107 Uint bitoffs = WSTACK_POP(s);
3108 byte* bytes = (byte*) WSTACK_POP(s);
3109 byte* dst = (byte*) WSTACK_POP(s);
3110 if (bits > r * (TERM_TO_BINARY_MEMCPY_FACTOR * 8)) {
3111 Uint n = r * TERM_TO_BINARY_MEMCPY_FACTOR;
3112 WSTACK_PUSH5(s, (UWord)(dst + n), (UWord)(bytes + n), bitoffs,
3113 ENC_BIN_COPY, bits - 8*n);
3114 bits = 8*n;
3115 copy_binary_to_buffer(dst, 0, bytes, bitoffs, bits);
3116 obj = THE_NON_VALUE;
3117 r = 0; /* yield */
3118 break;
3119 } else {
3120 copy_binary_to_buffer(dst, 0, bytes, bitoffs, bits);
3121 r -= bits / (TERM_TO_BINARY_MEMCPY_FACTOR * 8);
3122 goto outer_loop;
3123 }
3124 }
3125 case ENC_MAP_PAIR: {
3126 Uint pairs_left = obj;
3127 Eterm *vptr = (Eterm*) WSTACK_POP(s);
3128 Eterm *kptr = (Eterm*) WSTACK_POP(s);
3129
3130 obj = *kptr;
3131 if (--pairs_left > 0) {
3132 WSTACK_PUSH4(s, (UWord)(kptr+1), (UWord)(vptr+1),
3133 ENC_MAP_PAIR, pairs_left);
3134 }
3135 WSTACK_PUSH2(s, ENC_TERM, *vptr);
3136 break;
3137 }
3138 case ENC_HASHMAP_NODE:
3139 if (is_list(obj)) { /* leaf node [K|V] */
3140 ptr = list_val(obj);
3141 WSTACK_PUSH2(s, ENC_TERM, CDR(ptr));
3142 obj = CAR(ptr);
3143 }
3144 break;
3145 case ENC_LAST_ARRAY_ELEMENT:
3146 /* obj is the tuple */
3147 {
3148 Eterm* ptr = (Eterm *) obj;
3149 obj = *ptr;
3150 }
3151 break;
3152 default: /* ENC_LAST_ARRAY_ELEMENT+1 and upwards */
3153 {
3154 Eterm* ptr = (Eterm *) obj;
3155 obj = *ptr++;
3156 WSTACK_PUSH2(s, val-1, (UWord)ptr);
3157 }
3158 break;
3159 }
3160
3161 if (ctx && --r <= 0) {
3162 *reds = 0;
3163 ctx->obj = obj;
3164 ctx->ep = ep;
3165 WSTACK_SAVE(s, &ctx->wstack);
3166 return -1;
3167 }
3168
3169 L_jump_start:
3170 switch(tag_val_def(obj)) {
3171 case NIL_DEF:
3172 *ep++ = NIL_EXT;
3173 break;
3174
3175 case ATOM_DEF:
3176 ep = enc_atom(acmp,obj,ep,dflags);
3177 break;
3178
3179 case SMALL_DEF:
3180 {
3181 /* From R14B we no longer restrict INTEGER_EXT to 28 bits,
3182 * as done earlier for backward compatibility reasons. */
3183 Sint val = signed_val(obj);
3184
3185 if ((Uint)val < 256) {
3186 *ep++ = SMALL_INTEGER_EXT;
3187 put_int8(val, ep);
3188 ep++;
3189 } else if (sizeof(Sint) == 4 || IS_SSMALL32(val)) {
3190 *ep++ = INTEGER_EXT;
3191 put_int32(val, ep);
3192 ep += 4;
3193 } else {
3194 DeclareTmpHeapNoproc(tmp_big,2);
3195 Eterm big;
3196 UseTmpHeapNoproc(2);
3197 big = small_to_big(val, tmp_big);
3198 *ep++ = SMALL_BIG_EXT;
3199 n = big_bytes(big);
3200 ASSERT(n < 256);
3201 put_int8(n, ep);
3202 ep += 1;
3203 *ep++ = big_sign(big);
3204 ep = big_to_bytes(big, ep);
3205 UnUseTmpHeapNoproc(2);
3206 }
3207 }
3208 break;
3209
3210 case BIG_DEF:
3211 {
3212 int sign = big_sign(obj);
3213 n = big_bytes(obj);
3214 if (sizeof(Sint)==4 && n<=4) {
3215 Uint dig = big_digit(obj,0);
3216 Sint val = sign ? -dig : dig;
3217 if ((val<0) == sign) {
3218 *ep++ = INTEGER_EXT;
3219 put_int32(val, ep);
3220 ep += 4;
3221 break;
3222 }
3223 }
3224 if (n < 256) {
3225 *ep++ = SMALL_BIG_EXT;
3226 put_int8(n, ep);
3227 ep += 1;
3228 }
3229 else {
3230 *ep++ = LARGE_BIG_EXT;
3231 put_int32(n, ep);
3232 ep += 4;
3233 }
3234 *ep++ = sign;
3235 ep = big_to_bytes(obj, ep);
3236 }
3237 break;
3238
3239 case PID_DEF:
3240 case EXTERNAL_PID_DEF:
3241 ep = enc_pid(acmp, obj, ep, dflags);
3242 break;
3243
3244 case REF_DEF:
3245 case EXTERNAL_REF_DEF: {
3246 Uint32 *ref_num;
3247 Eterm sysname = (((dflags & DFLAG_ETS_COMPRESSED) && is_internal_ref(obj))
3248 ? INTERNAL_LOCAL_SYSNAME : ref_node_name(obj));
3249 Uint32 creation = ref_creation(obj);
3250
3251 ASSERT(dflags & DFLAG_EXTENDED_REFERENCES);
3252
3253 erts_magic_ref_save_bin(obj);
3254
3255 *ep++ = NEWER_REFERENCE_EXT;
3256 i = ref_no_numbers(obj);
3257 put_int16(i, ep);
3258 ep += 2;
3259 ep = enc_atom(acmp, sysname, ep, dflags);
3260 put_int32(creation, ep);
3261 ep += 4;
3262 ref_num = ref_numbers(obj);
3263 for (j = 0; j < i; j++) {
3264 put_int32(ref_num[j], ep);
3265 ep += 4;
3266 }
3267 break;
3268 }
3269 case PORT_DEF:
3270 case EXTERNAL_PORT_DEF: {
3271 Eterm sysname = (((dflags & DFLAG_ETS_COMPRESSED) && is_internal_port(obj))
3272 ? INTERNAL_LOCAL_SYSNAME : port_node_name(obj));
3273 Uint32 creation = port_creation(obj);
3274
3275 *ep++ = NEW_PORT_EXT;
3276 ep = enc_atom(acmp, sysname, ep, dflags);
3277 j = port_number(obj);
3278 put_int32(j, ep);
3279 ep += 4;
3280 put_int32(creation, ep);
3281 ep += 4;
3282 break;
3283 }
3284 case LIST_DEF:
3285 {
3286 if (is_external_string(obj, &i)) {
3287 *ep++ = STRING_EXT;
3288 put_int16(i, ep);
3289 ep += 2;
3290 while (is_list(obj)) {
3291 Eterm* cons = list_val(obj);
3292 *ep++ = unsigned_val(CAR(cons));
3293 obj = CDR(cons);
3294 }
3295 r -= i;
3296 } else {
3297 r -= i/2;
3298 *ep++ = LIST_EXT;
3299 /* Patch list length when we find end of list */
3300 WSTACK_PUSH2(s, (UWord)ep, 1);
3301 ep += 4;
3302 goto encode_one_cons;
3303 }
3304 }
3305 break;
3306
3307 case TUPLE_DEF:
3308 ptr = tuple_val(obj);
3309 i = arityval(*ptr);
3310 ptr++;
3311 if (i <= 0xff) {
3312 *ep++ = SMALL_TUPLE_EXT;
3313 put_int8(i, ep);
3314 ep += 1;
3315 } else {
3316 *ep++ = LARGE_TUPLE_EXT;
3317 put_int32(i, ep);
3318 ep += 4;
3319 }
3320 if (i > 0) {
3321 WSTACK_PUSH2(s, ENC_LAST_ARRAY_ELEMENT+i-1, (UWord)ptr);
3322 }
3323 break;
3324
3325 case MAP_DEF:
3326 if (is_flatmap(obj)) {
3327 flatmap_t *mp = (flatmap_t*)flatmap_val(obj);
3328 Uint size = flatmap_get_size(mp);
3329
3330 *ep++ = MAP_EXT;
3331 put_int32(size, ep); ep += 4;
3332
3333 if (size > 0) {
3334 Eterm *kptr = flatmap_get_keys(mp);
3335 Eterm *vptr = flatmap_get_values(mp);
3336
3337 WSTACK_PUSH4(s, (UWord)kptr, (UWord)vptr,
3338 ENC_MAP_PAIR, size);
3339 }
3340 } else {
3341 Eterm hdr;
3342 Uint node_sz;
3343 ptr = boxed_val(obj);
3344 hdr = *ptr;
3345 ASSERT(is_header(hdr));
3346 switch(hdr & _HEADER_MAP_SUBTAG_MASK) {
3347 case HAMT_SUBTAG_HEAD_ARRAY:
3348 *ep++ = MAP_EXT;
3349 ptr++;
3350 put_int32(*ptr, ep); ep += 4;
3351 node_sz = 16;
3352 break;
3353 case HAMT_SUBTAG_HEAD_BITMAP:
3354 *ep++ = MAP_EXT;
3355 ptr++;
3356 put_int32(*ptr, ep); ep += 4;
3357 /*fall through*/
3358 case HAMT_SUBTAG_NODE_BITMAP:
3359 node_sz = hashmap_bitcount(MAP_HEADER_VAL(hdr));
3360 ASSERT(node_sz < 17);
3361 break;
3362 default:
3363 erts_exit(ERTS_ERROR_EXIT, "bad header\r\n");
3364 }
3365
3366 ptr++;
3367 WSTACK_RESERVE(s, node_sz*2);
3368 while(node_sz--) {
3369 WSTACK_FAST_PUSH(s, ENC_HASHMAP_NODE);
3370 WSTACK_FAST_PUSH(s, *ptr++);
3371 }
3372 }
3373 break;
3374 case FLOAT_DEF:
3375 GET_DOUBLE(obj, f);
3376 if (dflags & DFLAG_NEW_FLOATS) {
3377 *ep++ = NEW_FLOAT_EXT;
3378 #if defined(WORDS_BIGENDIAN) || defined(DOUBLE_MIDDLE_ENDIAN)
3379 put_int32(f.fw[0], ep);
3380 ep += 4;
3381 put_int32(f.fw[1], ep);
3382 #else
3383 put_int32(f.fw[1], ep);
3384 ep += 4;
3385 put_int32(f.fw[0], ep);
3386 #endif
3387 ep += 4;
3388 } else {
3389 *ep++ = FLOAT_EXT;
3390
3391 /* now the erts_snprintf which does the work */
3392 i = sys_double_to_chars(f.fd, (char*) ep, (size_t)31);
3393
3394 /* Don't leave garbage after the float! (Bad practice in general,
3395 * and Purify complains.)
3396 */
3397 sys_memset(ep+i, 0, 31-i);
3398 ep += 31;
3399 }
3400 break;
3401
3402 case BINARY_DEF:
3403 {
3404 Uint bitoffs;
3405 Uint bitsize;
3406 byte* bytes;
3407 byte* data_dst;
3408 Uint off_heap_bytesize = 0;
3409 Uint off_heap_tail;
3410 Eterm pb_term;
3411 Binary *pb_val;
3412
3413 ASSERT(!(dflags & DFLAG_PENDING_CONNECT) || (ctx && ctx->iov));
3414
3415 ERTS_GET_BINARY_BYTES(obj, bytes, bitoffs, bitsize);
3416 if (use_iov) {
3417 if (bitoffs == 0) {
3418 ProcBin* pb = (ProcBin*) binary_val(obj);
3419 off_heap_bytesize = pb->size;
3420 if (off_heap_bytesize <= ERL_ONHEAP_BIN_LIMIT)
3421 off_heap_bytesize = 0;
3422 else {
3423 pb_term = obj;
3424 if (pb->thing_word == HEADER_SUB_BIN) {
3425 ErlSubBin* sub = (ErlSubBin*)pb;
3426 pb_term = sub->orig;
3427 pb = (ProcBin*) binary_val(pb_term);
3428 }
3429 if (pb->thing_word != HEADER_PROC_BIN)
3430 off_heap_bytesize = 0;
3431 else {
3432 if (pb->flags) {
3433 char* before_realloc = pb->val->orig_bytes;
3434 erts_emasculate_writable_binary(pb);
3435 bytes += (pb->val->orig_bytes - before_realloc);
3436 ASSERT((byte *) &pb->val->orig_bytes[0] <= bytes
3437 && bytes < ((byte *) &pb->val->orig_bytes[0]
3438 + pb->val->orig_size));
3439 }
3440 pb_val = pb->val;
3441 }
3442 }
3443 }
3444 }
3445 else if (dflags & DFLAG_ETS_COMPRESSED) {
3446 ProcBin* pb = (ProcBin*) binary_val(obj);
3447 Uint bytesize = pb->size;
3448 if (pb->thing_word == HEADER_SUB_BIN) {
3449 ErlSubBin* sub = (ErlSubBin*)pb;
3450 pb = (ProcBin*) binary_val(sub->orig);
3451 ASSERT(bytesize == sub->size);
3452 bytesize += (bitoffs + bitsize + 7) / 8;
3453 }
3454 if (pb->thing_word == HEADER_PROC_BIN
3455 && heap_bin_size(bytesize) > PROC_BIN_SIZE) {
3456 ProcBin tmp;
3457 if (bitoffs || bitsize) {
3458 *ep++ = BIT_BINARY_INTERNAL_REF;
3459 *ep++ = bitoffs;
3460 *ep++ = bitsize;
3461 }
3462 else {
3463 *ep++ = BINARY_INTERNAL_REF;
3464 }
3465 if (pb->flags) {
3466 char* before_realloc = pb->val->orig_bytes;
3467 erts_emasculate_writable_binary(pb);
3468 bytes += (pb->val->orig_bytes - before_realloc);
3469 }
3470 erts_refc_inc(&pb->val->intern.refc, 2);
3471
3472 sys_memcpy(&tmp, pb, sizeof(ProcBin));
3473 tmp.next = *off_heap;
3474 tmp.bytes = bytes;
3475 tmp.size = bytesize;
3476 sys_memcpy(ep, &tmp, sizeof(ProcBin));
3477 *off_heap = (struct erl_off_heap_header*) ep;
3478 ep += sizeof(ProcBin);
3479 break;
3480 }
3481 }
3482 if (bitsize == 0) {
3483 /* Plain old byte-sized binary. */
3484 *ep++ = BINARY_EXT;
3485 j = binary_size(obj);
3486 put_int32(j, ep);
3487 ep += 4;
3488 if (off_heap_bytesize)
3489 off_heap_tail = 0;
3490 else {
3491 data_dst = ep;
3492 ep += j;
3493 }
3494 } else if (dflags & DFLAG_BIT_BINARIES) {
3495 /* Bit-level binary. */
3496 if (dflags & DFLAG_PENDING_CONNECT) {
3497 j = off_heap_bytesize;
3498 if (!j) {
3499 pb_val = NULL;
3500 pb_term = THE_NON_VALUE;
3501 j = binary_size(obj);
3502 }
3503 data_dst = hopefull_bit_binary(ctx, &ep, pb_val, pb_term,
3504 bytes, bitoffs, bitsize, j);
3505 if (!data_dst)
3506 break; /* off heap binary referred... */
3507 ASSERT(!off_heap_bytesize);
3508 off_heap_tail = 0;
3509 /*
3510 * Trailing bits already written by hopefull_bit_binary();
3511 * now go copy all whole octets...
3512 */
3513 bitsize = 0;
3514 }
3515 else {
3516 *ep++ = BIT_BINARY_EXT;
3517 j = binary_size(obj);
3518 put_int32((j+1), ep);
3519 ep += 4;
3520 *ep++ = bitsize;
3521 if (off_heap_bytesize) {
3522 /* trailing bits */
3523 ep[0] = 0;
3524 copy_binary_to_buffer(ep, 0, bytes + j, 0, bitsize);
3525 off_heap_tail = 1;
3526 }
3527 else {
3528 ep[j] = 0; /* Zero unused bits at end of binary */
3529 data_dst = ep;
3530 ep += j + 1;
3531 }
3532 }
3533 } else {
3534 /*
3535 * Bit-level binary, but the receiver doesn't support it.
3536 * Build a tuple instead.
3537 */
3538 *ep++ = SMALL_TUPLE_EXT;
3539 *ep++ = 2;
3540 *ep++ = BINARY_EXT;
3541 j = binary_size(obj);
3542 put_int32((j+1), ep);
3543 ep += 4;
3544
3545 if (off_heap_bytesize) {
3546 /* trailing bits */
3547 ep[0] = 0;
3548 copy_binary_to_buffer(ep, 0, bytes + j, 0, bitsize);
3549 ep[1] = SMALL_INTEGER_EXT;
3550 ep[2] = bitsize;
3551 off_heap_tail = 3;
3552 }
3553 else {
3554 ep[j] = 0; /* Zero unused bits at end of binary */
3555 data_dst = ep;
3556 ep += j+1;
3557 *ep++ = SMALL_INTEGER_EXT;
3558 *ep++ = bitsize;
3559 }
3560 }
3561 if (off_heap_bytesize) {
3562 ASSERT(pb_val);
3563 store_in_vec(ctx, ep, pb_val, pb_term,
3564 bytes, off_heap_bytesize);
3565 ep += off_heap_tail;
3566 }
3567 else if (ctx && j > r * TERM_TO_BINARY_MEMCPY_FACTOR) {
3568 WSTACK_PUSH5(s, (UWord)data_dst, (UWord)bytes, bitoffs,
3569 ENC_BIN_COPY, 8*j + bitsize);
3570 } else {
3571 copy_binary_to_buffer(data_dst, 0, bytes, bitoffs,
3572 8 * j + bitsize);
3573 }
3574 }
3575 break;
3576 case EXPORT_DEF:
3577 {
3578 Export* exp = *((Export **) (export_val(obj) + 1));
3579 ASSERT(!(dflags & DFLAG_PENDING_CONNECT) || (ctx && ctx->iov));
3580 if (dflags & DFLAG_PENDING_CONNECT)
3581 hopefull_export(ctx, &ep, exp, dflags, off_heap);
3582 else if ((dflags & DFLAG_EXPORT_PTR_TAG) != 0) {
3583 *ep++ = EXPORT_EXT;
3584 ep = enc_atom(acmp, exp->info.mfa.module, ep, dflags);
3585 ep = enc_atom(acmp, exp->info.mfa.function, ep, dflags);
3586 ep = enc_term(acmp, make_small(exp->info.mfa.arity),
3587 ep, dflags, off_heap);
3588 } else {
3589 /* Tag, arity */
3590 *ep++ = SMALL_TUPLE_EXT;
3591 put_int8(2, ep);
3592 ep += 1;
3593
3594 /* Module name */
3595 ep = enc_atom(acmp, exp->info.mfa.module, ep, dflags);
3596
3597 /* Function name */
3598 ep = enc_atom(acmp, exp->info.mfa.function, ep, dflags);
3599 }
3600 break;
3601 }
3602 break;
3603 case FUN_DEF:
3604 {
3605 ErlFunThing* funp = (ErlFunThing *) fun_val(obj);
3606 int ei;
3607
3608 ASSERT(dflags & DFLAG_NEW_FUN_TAGS);
3609 *ep++ = NEW_FUN_EXT;
3610 WSTACK_PUSH2(s, ENC_PATCH_FUN_SIZE,
3611 (UWord) ep); /* Position for patching in size */
3612 ep += 4;
3613 *ep = funp->arity;
3614 ep += 1;
3615 sys_memcpy(ep, funp->fe->uniq, 16);
3616 ep += 16;
3617 put_int32(funp->fe->index, ep);
3618 ep += 4;
3619 put_int32(funp->num_free, ep);
3620 ep += 4;
3621 ep = enc_atom(acmp, funp->fe->module, ep, dflags);
3622 ep = enc_term(acmp, make_small(funp->fe->old_index), ep, dflags, off_heap);
3623 ep = enc_term(acmp, make_small(funp->fe->old_uniq), ep, dflags, off_heap);
3624 ep = enc_pid(acmp, funp->creator, ep, dflags);
3625
3626 for (ei = funp->num_free-1; ei >= 0; ei--) {
3627 WSTACK_PUSH2(s, ENC_TERM, (UWord) funp->env[ei]);
3628 }
3629 }
3630 break;
3631 }
3632 }
3633 DESTROY_WSTACK(s);
3634 if (ctx) {
3635 ASSERT(ctx->wstack.wstart == NULL);
3636 *reds = r;
3637 if (use_iov)
3638 store_in_vec(ctx, ep, NULL, THE_NON_VALUE, NULL, 0);
3639 }
3640 *res = ep;
3641 return 0;
3642 }
3643
/*
 * Append 'len' bytes starting at 'ptr' (which must lie inside 'bin') to
 * the I/O vector being built in 'ctx'.
 *
 * The data is split over distribution fragments of at most
 * ctx->fragment_size bytes each. Every new fragment gets two leading
 * iov entries reserved (driver header and dist header). Within a
 * fragment, a chunk is further split into iov entries of at most
 * MAX_SYSIOVEC_IOVLEN bytes.
 *
 * If ctx->termv is set, 'term' (or THE_NON_VALUE) is recorded for each
 * data entry; otherwise the binary's refc is bumped once per entry so
 * it stays alive while referenced from the vector.
 */
static ERTS_INLINE void
store_in_vec_aux(TTBEncodeContext *ctx,
                 Binary *bin,
                 Eterm term,
                 byte *ptr,
                 Uint len)
{
    ErlDrvBinary *dbin = Binary2ErlDrvBinary(bin);
    int vlen = ctx->vlen;
    Uint iov_len;
    ErlIOVec *feiovp;

    /* 'ptr'..'ptr+len' must lie within the binary's data */
    ASSERT(((byte *) &bin->orig_bytes[0]) <= ptr);
    ASSERT(ptr + len <= ((byte *) &bin->orig_bytes[0]) + bin->orig_size);

    if (ctx->frag_ix >= 0) {
        feiovp = &ctx->fragment_eiovs[ctx->frag_ix];
        ASSERT(0 < feiovp->size);
        ASSERT(feiovp->size <= ctx->fragment_size);
        if (feiovp->size != ctx->fragment_size) {
            /* current fragment not full yet... */
            iov_len = ctx->fragment_size - feiovp->size;
            if (len < iov_len)
                iov_len = len;
            /* NOTE: jumps into the middle of the loop below to top up
             * the current fragment before starting any new one... */
            goto store_iov_data;
        }
    }

    while (len) {
        /* Start new fragment... */
        ctx->frag_ix++;
        feiovp = &ctx->fragment_eiovs[ctx->frag_ix];
        ASSERT(ctx->frag_ix >= 0);

        if (ctx->termv) {
            /* no terms correspond to the two header entries */
            ctx->termv[vlen] = THE_NON_VALUE;
            ctx->termv[vlen+1] = THE_NON_VALUE;
        }

        feiovp->vsize = 2;
        feiovp->size = 0;
        feiovp->iov = &ctx->iov[vlen];
        feiovp->binv = &ctx->binv[vlen];

        /* entry for driver header */
        ctx->iov[vlen].iov_base = NULL;
        ctx->iov[vlen].iov_len = 0;
        ctx->binv[vlen] = NULL;
        vlen++;

        /* entry for dist header */
        ctx->iov[vlen].iov_base = NULL;
        ctx->iov[vlen].iov_len = 0;
        ctx->binv[vlen] = NULL;
        vlen++;

        iov_len = len < ctx->fragment_size ? len : ctx->fragment_size;

    store_iov_data:

        ASSERT(iov_len);

        do {
            Uint iov_len_left;

            /* split chunks too large for a single SysIOVec entry */
            if (iov_len <= MAX_SYSIOVEC_IOVLEN)
                iov_len_left = 0;
            else {
                iov_len_left = iov_len - MAX_SYSIOVEC_IOVLEN;
                iov_len = MAX_SYSIOVEC_IOVLEN;
            }

            ctx->iov[vlen].iov_base = ptr;
            ctx->iov[vlen].iov_len = iov_len;
            ctx->binv[vlen] = dbin;
            if (ctx->termv)
                ctx->termv[vlen] = term;
            else
                erts_refc_inc(&bin->intern.refc, 2);
            ctx->size += iov_len;
            len -= iov_len;
            ptr += iov_len;
            vlen++;
            feiovp->size += iov_len;
            feiovp->vsize++;

            iov_len = iov_len_left;
        } while (iov_len);
    }

    ctx->vlen = vlen;
}
3736
3737 static void
store_in_vec(TTBEncodeContext * ctx,byte * ep,Binary * ohbin,Eterm ohpb,byte * ohp,Uint ohsz)3738 store_in_vec(TTBEncodeContext *ctx,
3739 byte *ep,
3740 Binary *ohbin,
3741 Eterm ohpb,
3742 byte *ohp,
3743 Uint ohsz)
3744 {
3745 byte *cp = ctx->cptr;
3746 if (cp != ep) {
3747 /* save data in common binary... */
3748 store_in_vec_aux(ctx,
3749 ctx->result_bin,
3750 THE_NON_VALUE,
3751 cp,
3752 ep - cp);
3753 ASSERT(ctx->vlen <= ctx->debug_vlen);
3754 ASSERT(ctx->frag_ix <= ctx->debug_fragments);
3755 ctx->cptr = ep;
3756 }
3757 if (ohbin) {
3758 /* save off-heap binary... */
3759 store_in_vec_aux(ctx,
3760 ohbin,
3761 ohpb,
3762 ohp,
3763 ohsz);
3764 ASSERT(ctx->vlen <= ctx->debug_vlen);
3765 ASSERT(ctx->frag_ix <= ctx->debug_fragments);
3766 }
3767 }
3768
3769 static byte *
begin_hopefull_data(TTBEncodeContext * ctx,byte * ep)3770 begin_hopefull_data(TTBEncodeContext *ctx, byte *ep)
3771 {
3772 store_in_vec(ctx, ep, NULL, THE_NON_VALUE, NULL, 0);
3773 ASSERT(ERTS_NO_HIX == (Uint32) get_int32(ctx->hopefull_ixp));
3774 put_int32(ctx->vlen, ctx->hopefull_ixp);
3775 ctx->hopefull_ixp = ep;
3776 put_int32(ERTS_NO_HIX, ep);
3777 ep += 4;
3778 ctx->cptr = ep;
3779 return ep;
3780 }
3781
3782 static byte *
end_hopefull_data(TTBEncodeContext * ctx,byte * ep,Uint fallback_size)3783 end_hopefull_data(TTBEncodeContext *ctx, byte *ep, Uint fallback_size)
3784 {
3785 Uint sz;
3786 store_in_vec(ctx, ep, NULL, THE_NON_VALUE, NULL, 0);
3787 /*
3788 * Reserve extra room for fallback if needed. The four
3789 * bytes used for hopefull index can be used for
3790 * fallback encoding...
3791 */
3792 sz = ep - ctx->hopefull_ixp;
3793 if (fallback_size > sz) {
3794 ep += fallback_size - sz;
3795 ctx->cptr = ep;
3796 }
3797 return ep;
3798 }
3799
/*
 * Encode a bit-level binary as "hopefull" data: we hope the receiver
 * supports BIT_BINARY_EXT, but reserve room so that a
 * {Binary, BitSize} tuple fallback can be written in place if it turns
 * out not to.
 *
 * Returns a pointer where the caller should copy the 'sz' whole
 * octets, or NULL when the octets came from an off-heap binary
 * ('pb_val' != NULL) and were referenced directly in the I/O vector.
 */
static byte *
hopefull_bit_binary(TTBEncodeContext* ctx, byte **epp, Binary *pb_val, Eterm pb_term,
                    byte *bytes, byte bitoffs, byte bitsize, Uint sz)
{
    byte *octets, *ep = *epp;

    ctx->hopefull_flags |= DFLAG_BIT_BINARIES;

    /*
     * The fallback:
     *
     * SMALL_TUPLE_EXT - 1 byte
     * 2 - 1 byte
     * BINARY_EXT - 1 byte
     * whole octet size ('sz') - 4 byte
     * whole octets - 'sz' bytes
     * trailing bits - 1 byte
     * SMALL_INTEGER_EXT - 1 byte
     * bitsize - 1 byte
     */

    /* bit binary prelude in one hopefull data element */
    ep = begin_hopefull_data(ctx, ep);
    *ep++ = BIT_BINARY_EXT;
    put_int32((sz+1), ep);
    ep += 4;
    *ep++ = bitsize;
    /* fallback prelude: tuple tag + arity + BINARY_EXT tag + 4 size bytes */
    ep = end_hopefull_data(ctx, ep, 1+1+1+4);

    /* All whole octets... */
    if (pb_val) {
        /* ...referenced directly from the I/O vector; nothing to copy */
        octets = NULL;
        store_in_vec(ctx, ep, pb_val, pb_term, bytes, sz);
    }
    else {
        /* ... will be copied here afterwards */
        octets = ep;
        ep += sz;
    }

    /* copy trailing bits into new hopefull data element */
    ep = begin_hopefull_data(ctx, ep);
    *ep = 0; /* Clear the bit in the byte */

    copy_binary_to_buffer(ep, 0, bytes + sz, bitoffs, bitsize);
    ep++;

    /* fallback tail: trailing-bits byte + SMALL_INTEGER_EXT + bitsize */
    ep = end_hopefull_data(ctx, ep, 1+1+1);
    *epp = ep;

    return octets;
}
3852
3853 static void
hopefull_export(TTBEncodeContext * ctx,byte ** epp,Export * exp,Uint32 dflags,struct erl_off_heap_header ** off_heap)3854 hopefull_export(TTBEncodeContext* ctx, byte **epp, Export* exp, Uint32 dflags,
3855 struct erl_off_heap_header** off_heap)
3856 {
3857 Uint fallback_sz;
3858 byte *ep = *epp, *mod_start;
3859
3860 /*
3861 * The fallback:
3862 *
3863 * SMALL_TUPLE_EXT - 1 byte
3864 * 2 - 1 byte
3865 * module atom... - M bytes
3866 * function atom... - F bytes
3867 */
3868
3869 ctx->hopefull_flags |= DFLAG_EXPORT_PTR_TAG;
3870
3871 ep = begin_hopefull_data(ctx, ep);
3872
3873 *ep++ = EXPORT_EXT;
3874 mod_start = ep;
3875 ep = enc_atom(NULL, exp->info.mfa.module, ep, dflags);
3876 ep = enc_atom(NULL, exp->info.mfa.function, ep, dflags);
3877 fallback_sz = 2 + (ep - mod_start);
3878 ep = enc_term(NULL, make_small(exp->info.mfa.arity),
3879 ep, dflags, off_heap);
3880
3881 ep = end_hopefull_data(ctx, ep, fallback_sz);
3882
3883 *epp = ep;
3884 }
3885
3886 /** @brief Is it a list of bytes not longer than MAX_STRING_LEN?
3887 * @param lenp out: string length or number of list cells traversed
3888 * @return true/false
3889 */
3890 static
3891 int
is_external_string(Eterm list,Uint * lenp)3892 is_external_string(Eterm list, Uint* lenp)
3893 {
3894 Uint len = 0;
3895
3896 /*
3897 * Calculate the length of the list as long as all characters
3898 * are integers from 0 through 255.
3899 */
3900 while (is_list(list)) {
3901 Eterm* consp = list_val(list);
3902 Eterm hd = CAR(consp);
3903
3904 if (!is_byte(hd) || ++len > MAX_STRING_LEN) {
3905 *lenp = len;
3906 return 0;
3907 }
3908 list = CDR(consp);
3909 }
3910
3911 *lenp = len;
3912 return is_nil(list);
3913 }
3914
3915
/* Per-map state pushed on a PSTACK by dec_term() when decoding a
 * MAP_EXT larger than MAP_SMALL_MAP_LIMIT; the key/value leafs are
 * collected in 'leaf_array' and later turned into a hashmap whose
 * result term is written to '*objp'.
 */
struct dec_term_hamt
{
    Eterm* objp; /* write result here */
    Uint size;   /* nr of leafs */
    Eterm* leaf_array; /* 2*size words of decoded key/value pairs */
};
3922
3923
3924 /* Decode term from external format into *objp.
3925 ** On failure calls erts_factory_undo() and returns NULL
3926 */
3927 static byte*
dec_term(ErtsDistExternal * edep,ErtsHeapFactory * factory,byte * ep,Eterm * objp,B2TContext * ctx,int ets_decode)3928 dec_term(ErtsDistExternal *edep,
3929 ErtsHeapFactory* factory,
3930 byte* ep,
3931 Eterm* objp,
3932 B2TContext* ctx,
3933 int ets_decode)
3934 {
3935 #define PSTACK_TYPE struct dec_term_hamt
3936 PSTACK_DECLARE(hamt_array, 5);
3937 int n;
3938 ErtsAtomEncoding char_enc;
3939 register Eterm* hp; /* Please don't take the address of hp */
3940 DECLARE_WSTACK(flat_maps); /* for preprocessing of small maps */
3941 Eterm* next;
3942 SWord reds;
3943 #ifdef DEBUG
3944 Eterm* dbg_resultp = ctx ? &ctx->u.dc.res : objp;
3945 #endif
3946
3947 if (ctx) {
3948 reds = ctx->reds;
3949 next = ctx->u.dc.next;
3950 ep = ctx->u.dc.ep;
3951 factory = &ctx->u.dc.factory;
3952
3953 if (ctx->state != B2TDecode) {
3954 int n_limit = reds;
3955
3956 n = ctx->u.dc.remaining_n;
3957 if (ctx->state == B2TDecodeBinary) {
3958 n_limit *= B2T_MEMCPY_FACTOR;
3959 ASSERT(n_limit >= reds);
3960 reds -= n / B2T_MEMCPY_FACTOR;
3961 }
3962 else
3963 reds -= n;
3964
3965 if (n > n_limit) {
3966 ctx->u.dc.remaining_n -= n_limit;
3967 n = n_limit;
3968 reds = 0;
3969 }
3970 else {
3971 ctx->u.dc.remaining_n = 0;
3972 }
3973
3974 switch (ctx->state) {
3975 case B2TDecodeList:
3976 objp = next - 2;
3977 while (n > 0) {
3978 objp[0] = (Eterm) next;
3979 objp[1] = make_list(next);
3980 next = objp;
3981 objp -= 2;
3982 n--;
3983 }
3984 break;
3985
3986 case B2TDecodeTuple:
3987 objp = next - 1;
3988 while (n-- > 0) {
3989 objp[0] = (Eterm) next;
3990 next = objp;
3991 objp--;
3992 }
3993 break;
3994
3995 case B2TDecodeString:
3996 hp = factory->hp;
3997 hp[-1] = make_list(hp); /* overwrite the premature NIL */
3998 while (n-- > 0) {
3999 hp[0] = make_small(*ep++);
4000 hp[1] = make_list(hp+2);
4001 hp += 2;
4002 }
4003 hp[-1] = NIL;
4004 factory->hp = hp;
4005 break;
4006
4007 case B2TDecodeBinary:
4008 sys_memcpy(ctx->u.dc.remaining_bytes, ep, n);
4009 ctx->u.dc.remaining_bytes += n;
4010 ep += n;
4011 break;
4012
4013 default:
4014 ASSERT(!"Unknown state");
4015 }
4016 if (!ctx->u.dc.remaining_n) {
4017 ctx->state = B2TDecode;
4018 }
4019 if (reds <= 0) {
4020 ctx->u.dc.next = next;
4021 ctx->u.dc.ep = ep;
4022 ctx->reds = 0;
4023 return NULL;
4024 }
4025 }
4026 PSTACK_CHANGE_ALLOCATOR(hamt_array, ERTS_ALC_T_SAVED_ESTACK);
4027 WSTACK_CHANGE_ALLOCATOR(flat_maps, ERTS_ALC_T_SAVED_ESTACK);
4028 if (ctx->u.dc.hamt_array.pstart) {
4029 PSTACK_RESTORE(hamt_array, &ctx->u.dc.hamt_array);
4030 }
4031 if (ctx->u.dc.flat_maps.wstart) {
4032 WSTACK_RESTORE(flat_maps, &ctx->u.dc.flat_maps);
4033 }
4034 }
4035 else {
4036 reds = ERTS_SWORD_MAX;
4037 next = objp;
4038 *next = (Eterm) (UWord) NULL;
4039 }
4040 hp = factory->hp;
4041
4042 while (next != NULL) {
4043
4044 objp = next;
4045 next = (Eterm *) *objp;
4046
4047 switch (*ep++) {
4048 case INTEGER_EXT:
4049 {
4050 Sint sn = get_int32(ep);
4051
4052 ep += 4;
4053 #if defined(ARCH_64)
4054 *objp = make_small(sn);
4055 #else
4056 if (IS_SSMALL(sn)) {
4057 *objp = make_small(sn);
4058 } else {
4059 *objp = small_to_big(sn, hp);
4060 hp += BIG_UINT_HEAP_SIZE;
4061 }
4062 #endif
4063 break;
4064 }
4065 case SMALL_INTEGER_EXT:
4066 n = get_int8(ep);
4067 ep++;
4068 *objp = make_small(n);
4069 break;
4070 case SMALL_BIG_EXT:
4071 n = get_int8(ep);
4072 ep++;
4073 goto big_loop;
4074 case LARGE_BIG_EXT:
4075 n = get_int32(ep);
4076 ep += 4;
4077 big_loop:
4078 {
4079 Eterm big;
4080 byte* first;
4081 byte* last;
4082 Uint neg;
4083
4084 neg = get_int8(ep); /* Sign bit */
4085 ep++;
4086
4087 /*
4088 * Strip away leading zeroes to avoid creating illegal bignums.
4089 */
4090 first = ep;
4091 last = ep + n;
4092 ep += n;
4093 do {
4094 --last;
4095 } while (first <= last && *last == 0);
4096
4097 if ((n = last - first + 1) == 0) {
4098 /* Zero width bignum defaults to zero */
4099 big = make_small(0);
4100 } else {
4101 big = bytes_to_big(first, n, neg, hp);
4102 if (is_nil(big))
4103 goto error;
4104 if (is_big(big)) {
4105 hp += big_arity(big) + 1;
4106 }
4107 }
4108 *objp = big;
4109 break;
4110 }
4111 case ATOM_CACHE_REF:
4112 if (edep == 0 || (edep->flags & ERTS_DIST_EXT_ATOM_TRANS_TAB) == 0) {
4113 goto error;
4114 }
4115 n = get_int8(ep);
4116 ep++;
4117 if (n >= edep->attab.size)
4118 goto error;
4119 ASSERT(is_atom(edep->attab.atom[n]));
4120 *objp = edep->attab.atom[n];
4121 break;
4122 case ATOM_EXT:
4123 n = get_int16(ep);
4124 ep += 2;
4125 char_enc = ERTS_ATOM_ENC_LATIN1;
4126 goto dec_term_atom_common;
4127 case SMALL_ATOM_EXT:
4128 n = get_int8(ep);
4129 ep++;
4130 char_enc = ERTS_ATOM_ENC_LATIN1;
4131 goto dec_term_atom_common;
4132 case ATOM_UTF8_EXT:
4133 n = get_int16(ep);
4134 ep += 2;
4135 char_enc = ERTS_ATOM_ENC_UTF8;
4136 goto dec_term_atom_common;
4137 case SMALL_ATOM_UTF8_EXT:
4138 n = get_int8(ep);
4139 ep++;
4140 char_enc = ERTS_ATOM_ENC_UTF8;
4141 dec_term_atom_common:
4142 if (edep && (edep->flags & ERTS_DIST_EXT_BTT_SAFE)) {
4143 if (!erts_atom_get((char*)ep, n, objp, char_enc)) {
4144 goto error;
4145 }
4146 } else {
4147 Eterm atom = erts_atom_put(ep, n, char_enc, 0);
4148 if (is_non_value(atom))
4149 goto error;
4150 *objp = atom;
4151 }
4152 ep += n;
4153 break;
4154 case LARGE_TUPLE_EXT:
4155 n = get_int32(ep);
4156 ep += 4;
4157 goto tuple_loop;
4158 case SMALL_TUPLE_EXT:
4159 n = get_int8(ep);
4160 ep++;
4161 tuple_loop:
4162 *objp = make_tuple(hp);
4163 *hp++ = make_arityval(n);
4164 hp += n;
4165 objp = hp - 1;
4166 if (ctx) {
4167 if (reds < n) {
4168 ASSERT(reds > 0);
4169 ctx->state = B2TDecodeTuple;
4170 ctx->u.dc.remaining_n = n - reds;
4171 n = reds;
4172 }
4173 reds -= n;
4174 }
4175 while (n-- > 0) {
4176 objp[0] = (Eterm) next;
4177 next = objp;
4178 objp--;
4179 }
4180 break;
4181 case NIL_EXT:
4182 *objp = NIL;
4183 break;
4184 case LIST_EXT:
4185 n = get_int32(ep);
4186 ep += 4;
4187 if (n == 0) {
4188 next = objp;
4189 break;
4190 }
4191 *objp = make_list(hp);
4192 hp += 2 * n;
4193 objp = hp - 2;
4194 objp[0] = (Eterm) (objp+1);
4195 objp[1] = (Eterm) next;
4196 next = objp;
4197 objp -= 2;
4198 n--;
4199 if (ctx) {
4200 if (reds < n) {
4201 ASSERT(reds > 0);
4202 ctx->state = B2TDecodeList;
4203 ctx->u.dc.remaining_n = n - reds;
4204 n = reds;
4205 }
4206 reds -= n;
4207 }
4208 while (n > 0) {
4209 objp[0] = (Eterm) next;
4210 objp[1] = make_list(next);
4211 next = objp;
4212 objp -= 2;
4213 n--;
4214 }
4215 break;
4216 case STRING_EXT:
4217 n = get_int16(ep);
4218 ep += 2;
4219 if (n == 0) {
4220 *objp = NIL;
4221 break;
4222 }
4223 *objp = make_list(hp);
4224 if (ctx) {
4225 if (reds < n) {
4226 ctx->state = B2TDecodeString;
4227 ctx->u.dc.remaining_n = n - reds;
4228 n = reds;
4229 }
4230 reds -= n;
4231 }
4232 while (n-- > 0) {
4233 hp[0] = make_small(*ep++);
4234 hp[1] = make_list(hp+2);
4235 hp += 2;
4236 }
4237 hp[-1] = NIL;
4238 break;
4239 case FLOAT_EXT:
4240 {
4241 FloatDef ff;
4242
4243 if (sys_chars_to_double((char*)ep, &ff.fd) != 0) {
4244 goto error;
4245 }
4246 ep += 31;
4247 *objp = make_float(hp);
4248 PUT_DOUBLE(ff, hp);
4249 hp += FLOAT_SIZE_OBJECT;
4250 break;
4251 }
4252 case NEW_FLOAT_EXT:
4253 {
4254 FloatDef ff;
4255 #ifndef NO_FPE_SIGNALS
4256 volatile unsigned long *fpexnp = erts_get_current_fp_exception();
4257 #endif
4258
4259 #if defined(WORDS_BIGENDIAN) || defined(DOUBLE_MIDDLE_ENDIAN)
4260 ff.fw[0] = get_int32(ep);
4261 ep += 4;
4262 ff.fw[1] = get_int32(ep);
4263 ep += 4;
4264 #else
4265 ff.fw[1] = get_int32(ep);
4266 ep += 4;
4267 ff.fw[0] = get_int32(ep);
4268 ep += 4;
4269 #endif
4270 __ERTS_FP_CHECK_INIT(fpexnp);
4271 __ERTS_FP_ERROR_THOROUGH(fpexnp, ff.fd, goto error);
4272 *objp = make_float(hp);
4273 PUT_DOUBLE(ff, hp);
4274 hp += FLOAT_SIZE_OBJECT;
4275 break;
4276 }
4277 case PID_EXT:
4278 case NEW_PID_EXT:
4279 factory->hp = hp;
4280 ep = dec_pid(edep, factory, ep, objp, ep[-1]);
4281 hp = factory->hp;
4282 if (ep == NULL) {
4283 goto error;
4284 }
4285 break;
4286 case PORT_EXT:
4287 case NEW_PORT_EXT:
4288 {
4289 Eterm sysname;
4290 ErlNode *node;
4291 Uint num;
4292 Uint32 cre;
4293 byte tag = ep[-1];
4294
4295 if ((ep = dec_atom(edep, ep, &sysname)) == NULL) {
4296 goto error;
4297 }
4298 if ((num = get_int32(ep)) > ERTS_MAX_PORT_NUMBER) {
4299 goto error;
4300 }
4301 ep += 4;
4302 if (tag == PORT_EXT) {
4303 cre = get_int8(ep);
4304 ep++;
4305 if (!is_tiny_creation(cre)) {
4306 goto error;
4307 }
4308 }
4309 else {
4310 cre = get_int32(ep);
4311 ep += 4;
4312 }
4313 node = dec_get_node(sysname, cre, make_boxed(hp));
4314 if(node == erts_this_node) {
4315 *objp = make_internal_port(num);
4316 }
4317 else {
4318 ExternalThing *etp = (ExternalThing *) hp;
4319 hp += EXTERNAL_THING_HEAD_SIZE + 1;
4320
4321 etp->header = make_external_port_header(1);
4322 etp->next = factory->off_heap->first;
4323 etp->node = node;
4324 etp->data.ui[0] = num;
4325
4326 factory->off_heap->first = (struct erl_off_heap_header*)etp;
4327 *objp = make_external_port(etp);
4328 }
4329
4330 break;
4331 }
4332 case REFERENCE_EXT:
4333 {
4334 Eterm sysname;
4335 ErlNode *node;
4336 int i;
4337 Uint32 cre;
4338 Uint32 *ref_num;
4339 Uint32 r0;
4340 Uint ref_words;
4341
4342 ref_words = 1;
4343
4344 if ((ep = dec_atom(edep, ep, &sysname)) == NULL)
4345 goto error;
4346 if ((r0 = get_int32(ep)) >= MAX_REFERENCE )
4347 goto error;
4348 ep += 4;
4349
4350 cre = get_int8(ep);
4351 ep += 1;
4352 if (!is_tiny_creation(cre)) {
4353 goto error;
4354 }
4355 goto ref_ext_common;
4356
4357 case NEW_REFERENCE_EXT:
4358 ref_words = get_int16(ep);
4359 ep += 2;
4360
4361 if ((ep = dec_atom(edep, ep, &sysname)) == NULL)
4362 goto error;
4363
4364 cre = get_int8(ep);
4365 ep += 1;
4366 if (!is_tiny_creation(cre)) {
4367 goto error;
4368 }
4369 r0 = get_int32(ep);
4370 ep += 4;
4371 if (r0 >= MAX_REFERENCE)
4372 goto error;
4373 goto ref_ext_common;
4374
4375 case NEWER_REFERENCE_EXT:
4376 ref_words = get_int16(ep);
4377 ep += 2;
4378
4379 if ((ep = dec_atom(edep, ep, &sysname)) == NULL)
4380 goto error;
4381
4382 cre = get_int32(ep);
4383 ep += 4;
4384 r0 = get_int32(ep);
4385 ep += 4;
4386
4387 ref_ext_common: {
4388 ErtsORefThing *rtp;
4389
4390 if (ref_words > ERTS_MAX_REF_NUMBERS)
4391 goto error;
4392
4393 node = dec_get_node(sysname, cre, make_boxed(hp));
4394 if(node == erts_this_node) {
4395 if (r0 >= MAX_REFERENCE) {
4396 /*
4397 * Must reject local refs with more than 18 bits
4398 * in first word as magic ref table relies on it.
4399 */
4400 goto error;
4401 }
4402
4403 rtp = (ErtsORefThing *) hp;
4404 ref_num = &rtp->num[0];
4405 if (ref_words != ERTS_REF_NUMBERS) {
4406 int i;
4407 if (ref_words > ERTS_REF_NUMBERS)
4408 goto error; /* Not a ref that we created... */
4409 for (i = ref_words; i < ERTS_REF_NUMBERS; i++)
4410 ref_num[i] = 0;
4411 }
4412
4413 #ifdef ERTS_ORDINARY_REF_MARKER
4414 rtp->marker = ERTS_ORDINARY_REF_MARKER;
4415 #endif
4416 hp += ERTS_REF_THING_SIZE;
4417 rtp->header = ERTS_REF_THING_HEADER;
4418 *objp = make_internal_ref(rtp);
4419 }
4420 else {
4421 ExternalThing *etp = (ExternalThing *) hp;
4422 rtp = NULL;
4423 #if defined(ARCH_64)
4424 hp += EXTERNAL_THING_HEAD_SIZE + ref_words/2 + 1;
4425 #else
4426 hp += EXTERNAL_THING_HEAD_SIZE + ref_words;
4427 #endif
4428
4429 #if defined(ARCH_64)
4430 etp->header = make_external_ref_header(ref_words/2 + 1);
4431 #else
4432 etp->header = make_external_ref_header(ref_words);
4433 #endif
4434 etp->next = factory->off_heap->first;
4435 etp->node = node;
4436
4437 factory->off_heap->first = (struct erl_off_heap_header*)etp;
4438 *objp = make_external_ref(etp);
4439 ref_num = &(etp->data.ui32[0]);
4440 #if defined(ARCH_64)
4441 *(ref_num++) = ref_words /* 32-bit arity */;
4442 #endif
4443 }
4444
4445 ref_num[0] = r0;
4446
4447 for(i = 1; i < ref_words; i++) {
4448 ref_num[i] = get_int32(ep);
4449 ep += 4;
4450 }
4451 #if defined(ARCH_64)
4452 if ((1 + ref_words) % 2)
4453 ref_num[ref_words] = 0;
4454 #endif
4455 if (node == erts_this_node) {
4456 /* Check if it was a magic reference... */
4457 ErtsMagicBinary *mb = erts_magic_ref_lookup_bin(ref_num);
4458 if (mb) {
4459 /*
4460 * Was a magic ref; adjust it...
4461 *
4462 * Refc on binary was increased by lookup above...
4463 */
4464 ASSERT(rtp);
4465 hp = (Eterm *) rtp;
4466 write_magic_ref_thing(hp, factory->off_heap, mb);
4467 OH_OVERHEAD(factory->off_heap,
4468 mb->orig_size / sizeof(Eterm));
4469 hp += ERTS_MAGIC_REF_THING_SIZE;
4470 }
4471 }
4472 break;
4473 }
4474 }
4475 case BINARY_EXT:
4476 {
4477 n = get_int32(ep);
4478 ep += 4;
4479
4480 if ((unsigned)n <= ERL_ONHEAP_BIN_LIMIT) {
4481 ErlHeapBin* hb = (ErlHeapBin *) hp;
4482
4483 hb->thing_word = header_heap_bin(n);
4484 hb->size = n;
4485 hp += heap_bin_size(n);
4486 sys_memcpy(hb->data, ep, n);
4487 *objp = make_binary(hb);
4488 } else if (edep && edep->data && edep->data->binp &&
4489 n > (edep->data->binp->orig_size / 4)) {
4490 /* If we decode a refc binary from a distribution data
4491 entry we know that it is a refc binary to begin with
4492 so we just increment it and use the reference. This
4493 means that the entire distribution data entry will
4494 remain until this binary is de-allocated so we only
4495 do it if a substantial part (> 25%) of the data
4496 is a binary. */
4497 ProcBin* pb = (ProcBin *) hp;
4498 Binary* bptr = edep->data->binp;
4499 erts_refc_inc(&bptr->intern.refc, 1);
4500 pb->thing_word = HEADER_PROC_BIN;
4501 pb->size = n;
4502 pb->next = factory->off_heap->first;
4503 factory->off_heap->first = (struct erl_off_heap_header*)pb;
4504 pb->val = bptr;
4505 pb->bytes = (byte*) ep;
4506 ERTS_ASSERT((byte*)(bptr->orig_bytes) < ep &&
4507 ep+n <= (byte*)(bptr->orig_bytes+bptr->orig_size));
4508 pb->flags = 0;
4509 OH_OVERHEAD(factory->off_heap, pb->size / sizeof(Eterm));
4510 hp += PROC_BIN_SIZE;
4511 *objp = make_binary(pb);
4512 } else {
4513 Binary* dbin = erts_bin_nrml_alloc(n);
4514
4515 *objp = erts_build_proc_bin(factory->off_heap, hp, dbin);
4516 hp += PROC_BIN_SIZE;
4517 if (ctx) {
4518 int n_limit = reds * B2T_MEMCPY_FACTOR;
4519 if (n > n_limit) {
4520 ctx->state = B2TDecodeBinary;
4521 ctx->u.dc.remaining_n = n - n_limit;
4522 ctx->u.dc.remaining_bytes = dbin->orig_bytes + n_limit;
4523 n = n_limit;
4524 reds = 0;
4525 }
4526 else
4527 reds -= n / B2T_MEMCPY_FACTOR;
4528 }
4529 sys_memcpy(dbin->orig_bytes, ep, n);
4530 }
4531 ep += n;
4532 break;
4533 }
4534 case BIT_BINARY_EXT:
4535 {
4536 Eterm bin;
4537 ErlSubBin* sb;
4538 Uint bitsize;
4539
4540 n = get_int32(ep);
4541 bitsize = ep[4];
4542 if (((bitsize==0) != (n==0)) || bitsize > 8)
4543 goto error;
4544 ep += 5;
4545 if ((unsigned)n <= ERL_ONHEAP_BIN_LIMIT) {
4546 ErlHeapBin* hb = (ErlHeapBin *) hp;
4547
4548 hb->thing_word = header_heap_bin(n);
4549 hb->size = n;
4550 sys_memcpy(hb->data, ep, n);
4551 bin = make_binary(hb);
4552 hp += heap_bin_size(n);
4553 ep += n;
4554 } else {
4555 Binary* dbin = erts_bin_nrml_alloc(n);
4556 Uint n_copy = n;
4557
4558 bin = erts_build_proc_bin(factory->off_heap, hp, dbin);
4559 hp += PROC_BIN_SIZE;
4560 if (ctx) {
4561 int n_limit = reds * B2T_MEMCPY_FACTOR;
4562 if (n > n_limit) {
4563 ctx->state = B2TDecodeBinary;
4564 ctx->u.dc.remaining_n = n - n_limit;
4565 ctx->u.dc.remaining_bytes = dbin->orig_bytes + n_limit;
4566 n_copy = n_limit;
4567 reds = 0;
4568 }
4569 else {
4570 reds -= n / B2T_MEMCPY_FACTOR;
4571 }
4572 }
4573 sys_memcpy(dbin->orig_bytes, ep, n_copy);
4574 ep += n_copy;
4575 }
4576
4577 if (bitsize == 8 || n == 0) {
4578 *objp = bin;
4579 } else {
4580 sb = (ErlSubBin *)hp;
4581 sb->thing_word = HEADER_SUB_BIN;
4582 sb->orig = bin;
4583 sb->size = n - 1;
4584 sb->bitsize = bitsize;
4585 sb->bitoffs = 0;
4586 sb->offs = 0;
4587 sb->is_writable = 0;
4588 *objp = make_binary(sb);
4589 hp += ERL_SUB_BIN_SIZE;
4590 }
4591 break;
4592 }
4593 case EXPORT_EXT:
4594 {
4595 Eterm mod;
4596 Eterm name;
4597 Eterm temp;
4598 Sint arity;
4599
4600 if ((ep = dec_atom(edep, ep, &mod)) == NULL) {
4601 goto error;
4602 }
4603 if ((ep = dec_atom(edep, ep, &name)) == NULL) {
4604 goto error;
4605 }
4606 factory->hp = hp;
4607 ep = dec_term(edep, factory, ep, &temp, NULL, 0);
4608 hp = factory->hp;
4609 if (ep == NULL) {
4610 goto error;
4611 }
4612 if (!is_small(temp)) {
4613 goto error;
4614 }
4615 arity = signed_val(temp);
4616 if (arity < 0) {
4617 goto error;
4618 }
4619 if (edep && (edep->flags & ERTS_DIST_EXT_BTT_SAFE)) {
4620 if (!erts_active_export_entry(mod, name, arity))
4621 goto error;
4622 }
4623 *objp = make_export(hp);
4624 *hp++ = HEADER_EXPORT;
4625 *hp++ = (Eterm) erts_export_get_or_make_stub(mod, name, arity);
4626 break;
4627 }
4628 break;
4629 case MAP_EXT:
4630 {
4631 Uint32 size,n;
4632 Eterm *kptr,*vptr;
4633 Eterm keys;
4634
4635 size = get_int32(ep); ep += 4;
4636
4637 if (size <= MAP_SMALL_MAP_LIMIT) {
4638 flatmap_t *mp;
4639
4640 keys = make_tuple(hp);
4641 *hp++ = make_arityval(size);
4642 hp += size;
4643 kptr = hp - 1;
4644
4645 mp = (flatmap_t*)hp;
4646 hp += MAP_HEADER_FLATMAP_SZ;
4647 hp += size;
4648 vptr = hp - 1;
4649
4650 /* kptr, last word for keys
4651 * vptr, last word for values
4652 */
4653
4654 WSTACK_PUSH(flat_maps, (UWord)mp);
4655 mp->thing_word = MAP_HEADER_FLATMAP;
4656 mp->size = size;
4657 mp->keys = keys;
4658 *objp = make_flatmap(mp);
4659
4660 for (n = size; n; n--) {
4661 *vptr = (Eterm) next;
4662 *kptr = (Eterm) vptr;
4663 next = kptr;
4664 vptr--;
4665 kptr--;
4666 }
4667 }
4668 else { /* Make hamt */
4669 struct dec_term_hamt* hamt = PSTACK_PUSH(hamt_array);
4670
4671 hamt->objp = objp;
4672 hamt->size = size;
4673 hamt->leaf_array = hp;
4674
4675 for (n = size; n; n--) {
4676 CDR(hp) = (Eterm) next;
4677 CAR(hp) = (Eterm) &CDR(hp);
4678 next = &CAR(hp);
4679 hp += 2;
4680 }
4681 }
4682 }
4683 break;
4684 case NEW_FUN_EXT:
4685 {
4686 ErlFunThing* funp = (ErlFunThing *) hp;
4687 Uint arity;
4688 Eterm module;
4689 byte* uniq;
4690 int index;
4691 Sint old_uniq;
4692 Sint old_index;
4693 unsigned num_free;
4694 int i;
4695 Eterm temp;
4696
4697 ep += 4; /* Skip total size in bytes */
4698 arity = *ep++;
4699 uniq = ep;
4700 ep += 16;
4701 index = get_int32(ep);
4702 ep += 4;
4703 num_free = get_int32(ep);
4704 ep += 4;
4705 hp += ERL_FUN_SIZE;
4706 hp += num_free;
4707 funp->thing_word = HEADER_FUN;
4708 funp->num_free = num_free;
4709 *objp = make_fun(funp);
4710
4711 /* Module */
4712 if ((ep = dec_atom(edep, ep, &module)) == NULL) {
4713 goto error;
4714 }
4715 factory->hp = hp;
4716 /* Index */
4717 if ((ep = dec_term(edep, factory, ep, &temp, NULL, 0)) == NULL) {
4718 goto error;
4719 }
4720 if (!is_small(temp)) {
4721 goto error;
4722 }
4723 old_index = unsigned_val(temp);
4724
4725 /* Uniq */
4726 if ((ep = dec_term(edep, factory, ep, &temp, NULL, 0)) == NULL) {
4727 goto error;
4728 }
4729 if (!is_small(temp)) {
4730 goto error;
4731 }
4732 old_uniq = unsigned_val(temp);
4733
4734 /*
4735 * It is safe to link the fun into the fun list only when
4736 * no more validity tests can fail.
4737 */
4738 funp->next = factory->off_heap->first;
4739 factory->off_heap->first = (struct erl_off_heap_header*)funp;
4740
4741 funp->fe = erts_put_fun_entry2(module, old_uniq, old_index,
4742 uniq, index, arity);
4743 funp->arity = arity;
4744 #ifdef HIPE
4745 if (funp->fe->native_address == NULL) {
4746 hipe_set_closure_stub(funp->fe);
4747 }
4748 #endif
4749 hp = factory->hp;
4750
4751 /* Environment */
4752 for (i = num_free-1; i >= 0; i--) {
4753 funp->env[i] = (Eterm) next;
4754 next = funp->env + i;
4755 }
4756 /* Creator */
4757 funp->creator = (Eterm) next;
4758 next = &(funp->creator);
4759 break;
4760 }
4761 case ATOM_INTERNAL_REF2:
4762 n = get_int16(ep);
4763 ep += 2;
4764 /* If this is an ets_decode we know that
4765 the atom is valid, so we can skip the
4766 validation check */
4767 if (!ets_decode && n >= atom_table_size()) {
4768 goto error;
4769 }
4770 *objp = make_atom(n);
4771 break;
4772 case ATOM_INTERNAL_REF3:
4773 n = get_int24(ep);
4774 ep += 3;
4775 /* If this is an ets_decode we know that
4776 the atom is valid, so we can skip the
4777 validation check */
4778 if (!ets_decode && n >= atom_table_size()) {
4779 goto error;
4780 }
4781 *objp = make_atom(n);
4782 break;
4783
4784 case BINARY_INTERNAL_REF:
4785 {
4786 ProcBin* pb = (ProcBin*) hp;
4787 sys_memcpy(pb, ep, sizeof(ProcBin));
4788 ep += sizeof(ProcBin);
4789
4790 erts_refc_inc(&pb->val->intern.refc, 1);
4791 hp += PROC_BIN_SIZE;
4792 pb->next = factory->off_heap->first;
4793 factory->off_heap->first = (struct erl_off_heap_header*)pb;
4794 OH_OVERHEAD(factory->off_heap, pb->size / sizeof(Eterm));
4795 pb->flags = 0;
4796 *objp = make_binary(pb);
4797 break;
4798 }
4799 case BIT_BINARY_INTERNAL_REF:
4800 {
4801 Sint bitoffs = *ep++;
4802 Sint bitsize = *ep++;
4803 ProcBin* pb = (ProcBin*) hp;
4804 ErlSubBin* sub;
4805 sys_memcpy(pb, ep, sizeof(ProcBin));
4806 ep += sizeof(ProcBin);
4807
4808 erts_refc_inc(&pb->val->intern.refc, 1);
4809 hp += PROC_BIN_SIZE;
4810 pb->next = factory->off_heap->first;
4811 factory->off_heap->first = (struct erl_off_heap_header*)pb;
4812 OH_OVERHEAD(factory->off_heap, pb->size / sizeof(Eterm));
4813 pb->flags = 0;
4814
4815 sub = (ErlSubBin*)hp;
4816 sub->thing_word = HEADER_SUB_BIN;
4817 sub->size = pb->size - (bitoffs + bitsize + 7)/8;
4818 sub->offs = 0;
4819 sub->bitoffs = bitoffs;
4820 sub->bitsize = bitsize;
4821 sub->is_writable = 0;
4822 sub->orig = make_binary(pb);
4823
4824 hp += ERL_SUB_BIN_SIZE;
4825 *objp = make_binary(sub);
4826 break;
4827 }
4828
4829 default:
4830 goto error;
4831 }
4832
4833 if (--reds <= 0) {
4834 if (ctx) {
4835 if (next || ctx->state != B2TDecode) {
4836 ctx->u.dc.ep = ep;
4837 ctx->u.dc.next = next;
4838 ctx->u.dc.factory.hp = hp;
4839 if (!WSTACK_ISEMPTY(flat_maps)) {
4840 WSTACK_SAVE(flat_maps, &ctx->u.dc.flat_maps);
4841 }
4842 if (!PSTACK_IS_EMPTY(hamt_array)) {
4843 PSTACK_SAVE(hamt_array, &ctx->u.dc.hamt_array);
4844 }
4845 ctx->reds = 0;
4846 return NULL;
4847 }
4848 }
4849 else {
4850 reds = ERTS_SWORD_MAX;
4851 }
4852 }
4853 }
4854
4855 ASSERT(hp <= factory->hp_end
4856 || (factory->mode == FACTORY_CLOSED && is_immed(*dbg_resultp)));
4857 factory->hp = hp;
4858 /*
4859 * From here on factory may produce (more) heap fragments
4860 */
4861
4862 if (!PSTACK_IS_EMPTY(hamt_array)) {
4863 do {
4864 struct dec_term_hamt* hamt = PSTACK_TOP(hamt_array);
4865
4866 *hamt->objp = erts_hashmap_from_array(factory,
4867 hamt->leaf_array,
4868 hamt->size,
4869 1);
4870 if (is_non_value(*hamt->objp))
4871 goto error_hamt;
4872
4873 (void) PSTACK_POP(hamt_array);
4874 } while (!PSTACK_IS_EMPTY(hamt_array));
4875 }
4876
4877 /* Iterate through all the (flat)maps and check for validity and sort keys
4878 * - done here for when we know it is complete.
4879 */
4880
4881 while(!WSTACK_ISEMPTY(flat_maps)) {
4882 next = (Eterm *)WSTACK_POP(flat_maps);
4883 if (!erts_validate_and_sort_flatmap((flatmap_t*)next))
4884 goto error;
4885 }
4886
4887 /* Now that no more errors can occur, the stacks can be destroyed safely. */
4888 PSTACK_DESTROY(hamt_array);
4889 WSTACK_DESTROY(flat_maps);
4890
4891 ASSERT((Eterm*)*dbg_resultp != NULL);
4892
4893 if (ctx) {
4894 ctx->state = B2TDone;
4895 ctx->reds = reds;
4896 ctx->u.dc.ep = ep;
4897 }
4898
4899 return ep;
4900
4901 error:
4902 /* UNDO:
4903 * Must unlink all off-heap objects that may have been
4904 * linked into the process.
4905 */
4906 if (factory->mode != FACTORY_CLOSED) {
4907 if (factory->hp < hp) { /* Sometimes we used hp and sometimes factory->hp */
4908 factory->hp = hp; /* the largest must be the freshest */
4909 }
4910 }
4911 else ASSERT(!factory->hp || factory->hp == hp);
4912
4913 error_hamt:
4914 erts_factory_undo(factory);
4915 PSTACK_DESTROY(hamt_array);
4916 if (ctx) {
4917 ctx->state = B2TDecodeFail;
4918 ctx->reds = reds;
4919 }
4920 WSTACK_DESTROY(flat_maps);
4921
4922 return NULL;
4923 }
4924
/* Returns the number of bytes needed to encode an object
   into a sequence of external-format bytes.
   N.B. this must agree with to_external2() above!!!
   (except for cached atoms) */
encode_size_struct2(ErtsAtomCacheMap * acmp,Eterm obj,Uint64 dflags)4929 static Uint encode_size_struct2(ErtsAtomCacheMap *acmp,
4930 Eterm obj,
4931 Uint64 dflags) {
4932 Uint size = 0;
4933 ErtsExtSzRes res = encode_size_struct_int(NULL, acmp, obj,
4934 dflags, NULL,
4935 &size);
4936 /*
4937 * encode_size_struct2() only allowed when
4938 * we know the result will always be OK!
4939 */
4940 ASSERT(res == ERTS_EXT_SZ_OK); (void) res;
4941 return (Uint) size;
4942 }
4943
/*
 * Calculate the number of bytes needed to encode 'obj' in the external
 * term format (must agree with the actual encoder).
 *
 * ctx    - optional size context. When non-NULL, the calculation is
 *          reduction counted and may yield (ERTS_EXT_SZ_YIELD) with all
 *          traversal state saved in ctx for later resumption.
 * acmp   - optional atom cache map; each atom encountered is inserted.
 * obj    - term whose encoded size is wanted.
 * dflags - distribution flags selecting between encoding variants.
 * reds   - in/out reductions left; only used when ctx != NULL.
 * res    - in/out accumulated size in bytes.
 *
 * Returns ERTS_EXT_SZ_OK, ERTS_EXT_SZ_YIELD, or ERTS_EXT_SZ_SYSTEM_LIMIT
 * (binary too large for a 32-bit size field).
 */
static ErtsExtSzRes
encode_size_struct_int(TTBSizeContext* ctx, ErtsAtomCacheMap *acmp, Eterm obj,
                       Uint64 dflags, Sint *reds, Uint *res)
{
    DECLARE_WSTACK(s);
    Uint m, i, arity;
    Uint result = *res;
    Sint r = 0;
    /* vlen >= 0 only when sizing for an I/O vector (ctx present);
     * it counts the number of iovec entries needed. */
    int vlen = -1;

    if (ctx) {
        WSTACK_CHANGE_ALLOCATOR(s, ERTS_ALC_T_SAVED_ESTACK);
        r = *reds;

        vlen = ctx->vlen;

        if (!ctx->wstack.wstart)
            ctx->last_result = result;
        else { /* restore saved stack */
            WSTACK_RESTORE(s, &ctx->wstack);
            result = ctx->result;
            obj = ctx->obj;
        }
    }

    /* Traversal operations pushed on the work stack, disguised as header
     * words so they can be told apart from real (non-header) terms. */
#define LIST_TAIL_OP ((0 << _TAG_PRIMARY_SIZE) | TAG_PRIMARY_HEADER)
#define TERM_ARRAY_OP(N) (((N) << _TAG_PRIMARY_SIZE) | TAG_PRIMARY_HEADER)
#define TERM_ARRAY_OP_DEC(OP) ((OP) - (1 << _TAG_PRIMARY_SIZE))


    for (;;) {
        ASSERT(!is_header(obj));

        if (ctx && --r <= 0) {
            /* Out of reductions: save traversal state and yield. */
            *reds = 0;
            ctx->obj = obj;
            ctx->result = result;
            ctx->vlen = vlen;
            WSTACK_SAVE(s, &ctx->wstack);
            return ERTS_EXT_SZ_YIELD;
        }
        switch (tag_val_def(obj)) {
        case NIL_DEF:
            result++;
            break;
        case ATOM_DEF:
            if (dflags & DFLAG_ETS_COMPRESSED) {
                /* Internal atom index ref: 2 or 3 byte index. */
                if (atom_val(obj) >= (1<<16)) {
                    result += 1 + 3;
                }
                else {
                    result += 1 + 2;
                }
            }
            else {
                Atom *a = atom_tab(atom_val(obj));
                int alen;
                if ((dflags & DFLAG_UTF8_ATOMS) || a->latin1_chars < 0) {
                    alen = a->len;
                    result += 1 + 1 + alen;
                    if (alen > 255) {
                        result++; /* ATOM_UTF8_EXT (not small) */
                    }
                }
                else {
                    alen = a->latin1_chars;
                    result += 1 + 1 + alen;
                    if (alen > 255 || !(dflags & DFLAG_SMALL_ATOM_TAGS))
                        result++; /* ATOM_EXT (not small) */
                }
                insert_acache_map(acmp, obj, dflags);
            }
            break;
        case SMALL_DEF:
            {
                Sint val = signed_val(obj);

                if ((Uint)val < 256)
                    result += 1 + 1;            /* SMALL_INTEGER_EXT */
                else if (sizeof(Sint) == 4 || IS_SSMALL32(val))
                    result += 1 + 4;            /* INTEGER_EXT */
                else {
                    /* Small that does not fit in 32 bits: encode as bignum. */
                    DeclareTmpHeapNoproc(tmp_big,2);
                    UseTmpHeapNoproc(2);
                    i = big_bytes(small_to_big(val, tmp_big));
                    result += 1 + 1 + 1 + i;    /* SMALL_BIG_EXT */
                    UnUseTmpHeapNoproc(2);
                }
            }
            break;
        case BIG_DEF:
            i = big_bytes(obj);
            if (sizeof(Sint)==4 && i <= 4 && (big_digit(obj,0)-big_sign(obj)) < (1<<31))
                result += 1 + 4;                /* INTEGER_EXT */
            else if (i < 256)
                result += 1 + 1 + 1 + i;        /* tag,size,sign,digits */
            else
                result += 1 + 4 + 1 + i;        /* tag,size,sign,digits */
            break;
        case EXTERNAL_PID_DEF:
        case PID_DEF:
            /* NEW_PID_EXT: node name + number, serial, creation. */
            result += (1 + encode_size_struct2(acmp, pid_node_name(obj), dflags) +
                       4 + 4 + 4);
            break;
        case EXTERNAL_REF_DEF:
        case REF_DEF:
            ASSERT(dflags & DFLAG_EXTENDED_REFERENCES);
            i = ref_no_numbers(obj);
            /* NEWER_REFERENCE_EXT: len + node name + creation + id words. */
            result += (1 + 2 + encode_size_struct2(acmp, ref_node_name(obj), dflags) +
                       4 + 4*i);
            break;
        case EXTERNAL_PORT_DEF:
        case PORT_DEF:
            /* NEW_PORT_EXT: node name + id, creation. */
            result += (1 + encode_size_struct2(acmp, port_node_name(obj), dflags) +
                       4 + 4);
            break;
        case LIST_DEF: {
            int is_str = is_external_string(obj, &m);
            r -= m/2;
            if (is_str) {
                /* STRING_EXT: tag + 2 byte length + chars + trailing NIL. */
                result += m + 2 + 1;
            } else {
                result += 5;  /* LIST_EXT tag + 4 byte length */
                WSTACK_PUSH2(s, (UWord)CDR(list_val(obj)), (UWord)LIST_TAIL_OP);
                obj = CAR(list_val(obj));
                continue; /* big loop */
            }
            break;
        }
        case TUPLE_DEF:
            {
                Eterm* ptr = tuple_val(obj);
                arity = arityval(*ptr);
                if (arity <= 0xff) {
                    result += 1 + 1;    /* SMALL_TUPLE_EXT */
                } else {
                    result += 1 + 4;    /* LARGE_TUPLE_EXT */
                }
                if (arity > 1) {
                    WSTACK_PUSH2(s, (UWord) (ptr + 2),
                                    (UWord) TERM_ARRAY_OP(arity-1));
                }
                else if (arity == 0) {
                    break;
                }
                obj = ptr[1];
                continue; /* big loop */
            }
        case MAP_DEF:
            if (is_flatmap(obj)) {
                flatmap_t *mp = (flatmap_t*)flatmap_val(obj);
                Uint size = flatmap_get_size(mp);

                result += 1 + 4; /* tag + 4 bytes size */

                if (size) {
                    /* Size keys then values as two term arrays. */
                    WSTACK_PUSH4(s, (UWord) flatmap_get_values(mp),
                                    (UWord) TERM_ARRAY_OP(size),
                                    (UWord) flatmap_get_keys(mp),
                                    (UWord) TERM_ARRAY_OP(size));
                }
            } else {
                /* Hashmap (HAMT): walk the tree, pushing children. */
                Eterm *ptr;
                Eterm hdr;
                Uint node_sz;
                ptr = boxed_val(obj);
                hdr = *ptr;
                ASSERT(is_header(hdr));
                switch(hdr & _HEADER_MAP_SUBTAG_MASK) {
                case HAMT_SUBTAG_HEAD_ARRAY:
                    ptr++;
                    node_sz = 16;
                    result += 1 + 4; /* tag + 4 bytes size */
                    break;
                case HAMT_SUBTAG_HEAD_BITMAP:
                    ptr++;
                    result += 1 + 4; /* tag + 4 bytes size */
                    /*fall through*/
                case HAMT_SUBTAG_NODE_BITMAP:
                    node_sz = hashmap_bitcount(MAP_HEADER_VAL(hdr));
                    ASSERT(node_sz < 17);
                    break;
                default:
                    erts_exit(ERTS_ERROR_EXIT, "bad header\r\n");
                }

                ptr++;
                WSTACK_RESERVE(s, node_sz*2);
                while(node_sz--) {
                    if (is_list(*ptr)) {
                        /* Leaf: push key and value. */
                        WSTACK_FAST_PUSH(s, CAR(list_val(*ptr)));
                        WSTACK_FAST_PUSH(s, CDR(list_val(*ptr)));
                    } else {
                        /* Interior node. */
                        WSTACK_FAST_PUSH(s, *ptr);
                    }
                    ptr++;
                }
            }
            break;
        case FLOAT_DEF:
            if (dflags & DFLAG_NEW_FLOATS) {
                result += 9;    /* NEW_FLOAT_EXT */
            } else {
                result += 32;   /* FLOAT_EXT; yes, including the tag */
            }
            break;
        case BINARY_DEF: {
            ProcBin* pb = (ProcBin*) binary_val(obj);
            Uint bin_size = pb->size;
            byte bitoffs = 0;
            byte bitsize = 0;
            if (dflags & DFLAG_ETS_COMPRESSED) {
                ProcBin* pb = (ProcBin*) binary_val(obj);
                Uint sub_extra = 0;
                if (pb->thing_word == HEADER_SUB_BIN) {
                    ErlSubBin* sub = (ErlSubBin*) pb;
                    bitoffs = sub->bitoffs;
                    bitsize = sub->bitsize;
                    pb = (ProcBin*) binary_val(sub->orig);
                    sub_extra = 2;  /* bitoffs and bitsize */
                    bin_size += (bitoffs + bitsize + 7) / 8;
                }
                if (pb->thing_word == HEADER_PROC_BIN
                    && heap_bin_size(bin_size) > PROC_BIN_SIZE) {

                    /* Off-heap binary: store the ProcBin itself. */
                    result += 1 + sub_extra + sizeof(ProcBin);
                    break;
                }
            }
            else {
#ifdef ARCH_64
                if (bin_size >= (Uint) 0xffffffff) {
                    if (pb->thing_word == HEADER_SUB_BIN) {
                        ErlSubBin* sub = (ErlSubBin*) pb;
                        bin_size += (sub->bitoffs + sub->bitsize+ 7) / 8;
                    }
                    if (bin_size > (Uint) 0xffffffff) {
                        /* Does not fit in the 4 byte size field. */
                        WSTACK_DESTROY(s);
                        return ERTS_EXT_SZ_SYSTEM_LIMIT;
                    }
                }
#endif
                if (pb->thing_word == HEADER_SUB_BIN) {
                    ErlSubBin* sub = (ErlSubBin*) pb;
                    bitoffs = sub->bitoffs;
                    bitsize = sub->bitsize;
                    pb = (ProcBin*) binary_val(sub->orig);
                }
                if (vlen >= 0) {
                    Uint csz;
                    if (pb->thing_word == HEADER_PROC_BIN
                        && bitoffs == 0
                        && bin_size > ERL_ONHEAP_BIN_LIMIT) {
                        /* Large byte-aligned off-heap binary: emit the
                         * binary data as its own iovec entry instead of
                         * copying it; only header bytes counted here. */
                        Uint trailing_result;
                        if (bitsize == 0) {
                            result += (1 /* BINARY_EXT */
                                       + 4 /* size */);
                            trailing_result = 0;
                        }
                        else if (dflags & DFLAG_BIT_BINARIES) {
                            result += (1 /* BIT_BINARY_EXT */
                                       + 4 /* size */
                                       + 1 /* trailing bitsize */);
                            trailing_result = 1 /* trailing bits */;
                        }
                        else {
                            /* sigh... receiver cannot handle bitstrings;
                             * fake it as {Binary, Bits} tuple. */
                            result += (1 /* SMALL_TUPLE_EXT */
                                       + 1 /* 2 tuple size */
                                       + 1 /* BINARY_EXT */
                                       + 4 /* binary size */);
                            trailing_result = (1 /* SMALL_INTEGER_EXT */
                                               + 1 /* bitsize */);
                        }
                        csz = result - ctx->last_result;
                        ctx->last_result = result;
                        result += trailing_result;
                        vlen += 2; /* data leading up to binary and binary */

                        /* potentially multiple elements leading up to binary */
                        vlen += csz/MAX_SYSIOVEC_IOVLEN;
                        /* potentially multiple elements for binary */
                        vlen += bin_size/MAX_SYSIOVEC_IOVLEN;
                        ctx->extra_size += bin_size;

                        if (dflags & DFLAG_PENDING_CONNECT) {
                            ASSERT(dflags & DFLAG_BIT_BINARIES);
                            vlen += 2; /* for hopefull prolog and epilog */
                            result += (4 /* for hopefull prolog (see below) */
                                       + 4); /* for hopefull epilog (see below) */
                            ctx->last_result = result;
                        }
                        break;
                    }
                }
            }

            if (bitsize == 0) {
                result += (1 /* BINARY_EXT */
                           + 4 /* size */
                           + bin_size);
            }
            else if (dflags & DFLAG_PENDING_CONNECT) {
                /* This is the odd case when we have an un-aligned bit-string
                   during a pending connect. */
                Uint csz = result - ctx->last_result;
                ASSERT(dflags & DFLAG_BIT_BINARIES);
                /* potentially multiple elements leading up to binary */
                vlen += (csz + MAX_SYSIOVEC_IOVLEN - 1)/MAX_SYSIOVEC_IOVLEN;

                vlen++; /* hopefull prolog */
                /*
                 * Size for hopefull prolog is max of
                 * - fallback: 1 + 1 + 1 + 4
                 * - hopfull index + bit binary prolog: 4 + 1 + 4 + 1
                 */
                result += 4 + 1 + 4 + 1;
                /* potentially multiple elements for binary */
                vlen += bin_size/MAX_SYSIOVEC_IOVLEN + 1;
                result += bin_size;
                vlen++; /* hopefull epiolog */
                /*
                 * Size for hopefull epiolog is max of
                 * - fallback: 1 + 1 + 1
                 * - hopfull index + bit binary epilog: 4 + 1
                 */
                result += 4 + 1;
                ctx->last_result = result;
            }
            else if (dflags & DFLAG_BIT_BINARIES) {
                /* BIT_BINARY_EXT: tag + size + bitsize + data + trailing. */
                result += 1 + 4 + 1 + bin_size + 1;
            }
            else {
                /* Sigh... receiver cannot handle bitstrings; encode as a
                 * {Binary, Bits} tuple instead. */
                result += 1 + 1 + 1 + 4 + bin_size + 1 + 1 + 1;
            }
            break;
        }
        case FUN_DEF:
            {
                ErlFunThing* funp = (ErlFunThing *) fun_val(obj);

                ASSERT(dflags & DFLAG_NEW_FUN_TAGS);
                result += 20+1+1+4; /* New ID + Tag */
                result += 4; /* Length field (number of free variables */
                result += encode_size_struct2(acmp, funp->creator, dflags);
                result += encode_size_struct2(acmp, funp->fe->module, dflags);
                result += 2 * (1+4); /* Index, Uniq */
                if (funp->num_free > 1) {
                    WSTACK_PUSH2(s, (UWord) (funp->env + 1),
                                    (UWord) TERM_ARRAY_OP(funp->num_free-1));
                }
                if (funp->num_free != 0) {
                    obj = funp->env[0];
                    continue; /* big loop */
                }
                break;
            }

        case EXPORT_DEF:
            {
                Export* ep = *((Export **) (export_val(obj) + 1));
                Uint tmp_result = result;
                result += 1;
                result += encode_size_struct2(acmp, ep->info.mfa.module, dflags);
                result += encode_size_struct2(acmp, ep->info.mfa.function, dflags);
                result += encode_size_struct2(acmp, make_small(ep->info.mfa.arity), dflags);
                if (dflags & DFLAG_PENDING_CONNECT) {
                    Uint csz;
                    /*
                     * Fallback is 1 + 1 + Module size + Function size, that is,
                     * the hopefull index + hopefull encoding is larger...
                     */
                    ASSERT(dflags & DFLAG_EXPORT_PTR_TAG);
                    csz = tmp_result - ctx->last_result;
                    /* potentially multiple elements leading up to hopefull entry */
                    vlen += (csz/MAX_SYSIOVEC_IOVLEN + 1
                             + 1); /* hopefull entry */
                    result += 4; /* hopefull index */
                    ctx->last_result = result;
                }
            }
            break;

        default:
            erts_exit(ERTS_ERROR_EXIT,"Internal data structure error (in encode_size_struct_int) %x\n",
                      obj);
        }

        if (WSTACK_ISEMPTY(s)) {
            break;
        }
        obj = (Eterm) WSTACK_POP(s);

        if (is_header(obj)) {
            /* Not a term: one of the traversal operations pushed above. */
            switch (obj) {
            case LIST_TAIL_OP:
                obj = (Eterm) WSTACK_POP(s);
                if (is_list(obj)) {
                    Eterm* cons = list_val(obj);

                    WSTACK_PUSH2(s, (UWord)CDR(cons), (UWord)LIST_TAIL_OP);
                    obj = CAR(cons);
                }
                break;

            case TERM_ARRAY_OP(1):
                obj = *(Eterm*)WSTACK_POP(s);
                break;
            default: { /* TERM_ARRAY_OP(N) when N > 1 */
                Eterm* ptr = (Eterm*) WSTACK_POP(s);
                WSTACK_PUSH2(s, (UWord) (ptr+1),
                                (UWord) TERM_ARRAY_OP_DEC(obj));
                obj = *ptr;
            }
            }
        }
    }

    WSTACK_DESTROY(s);
    if (ctx) {
        ASSERT(ctx->wstack.wstart == NULL);
        *reds = r < 0 ? 0 : r;

        if (vlen >= 0) {
            /* Account iovec entries for the trailing encoded data. */
            Uint csz;
            csz = result - ctx->last_result;
            if (csz)
                vlen += csz/MAX_SYSIOVEC_IOVLEN + 1;
            ctx->vlen = vlen;
        }
    }
    *res = result;
    return ERTS_EXT_SZ_OK;
}
5379
5380
5381
/*
 * Scan an external-format encoding, structurally validating it and
 * calculating the heap size (in words) needed to decode it.
 *
 * ep            - start of encoded data.
 * endp          - end of encoded data.
 * internal_tags - non-zero if node-internal tags (ETS compressed
 *                 format) are allowed.
 * ctx           - optional binary_to_term context. When non-NULL the
 *                 scan is reduction counted and may return 0 with its
 *                 state saved in ctx->u.sc for later resumption.
 *
 * Returns the needed heap size in words, 0 when yielding (ctx only),
 * or -1 on malformed input.
 */
static Sint
decoded_size(byte *ep, byte* endp, int internal_tags, B2TContext* ctx)
{
    Sint heap_size;
    int terms;          /* number of terms still expected */
    int atom_extra_skip; /* bytes following the next atom (pid/port/ref data) */
    Uint n;
    SWord reds;

    if (ctx) {
        reds = ctx->reds;
        if (ctx->u.sc.ep) {
            /* Resume a previously yielded scan. */
            heap_size = ctx->u.sc.heap_size;
            terms = ctx->u.sc.terms;
            ep = ctx->u.sc.ep;
            atom_extra_skip = ctx->u.sc.atom_extra_skip;
            goto init_done;
        }
    }
    else
        ERTS_UNDEF(reds, 0);

    heap_size = 0;
    terms = 1;
    atom_extra_skip = 0;
init_done:

#define SKIP(sz)                                \
    do {                                        \
        if ((sz) <= endp-ep) {                  \
            ep += (sz);                         \
        } else { goto error; };                 \
    } while (0)

/* Skip sz1+sz2 bytes; 'sz1 < sz' guards against overflow of the
 * addition (all call sites use sz2 > 0). */
#define SKIP2(sz1, sz2)                         \
    do {                                        \
        Uint sz = (sz1) + (sz2);                \
        if (sz1 < sz && (sz) <= endp-ep) {      \
            ep += (sz);                         \
        } else { goto error; }                  \
    } while (0)

#define CHKSIZE(sz)                             \
    do {                                        \
        if ((sz) > endp-ep) { goto error; }     \
    } while (0)

/* Add n expected terms, detecting wrap-around of the counter. */
#define ADDTERMS(n)                             \
    do {                                        \
        int before = terms;                     \
        terms += (n);                           \
        if (terms < before) goto error;         \
    } while (0)

    ASSERT(terms > 0);
    do {
        int tag;
        CHKSIZE(1);
        tag = ep++[0];
        switch (tag) {
        case INTEGER_EXT:
            SKIP(4);
#if !defined(ARCH_64)
            heap_size += BIG_UINT_HEAP_SIZE;
#endif
            break;
        case SMALL_INTEGER_EXT:
            SKIP(1);
            break;
        case SMALL_BIG_EXT:
            CHKSIZE(1);
            n = ep[0]; /* number of bytes */
            SKIP2(n, 1+1);              /* skip size,sign,digits */
            heap_size += 1+(n+sizeof(Eterm)-1)/sizeof(Eterm); /* XXX: 1 too much? */
            break;
        case LARGE_BIG_EXT:
            CHKSIZE(4);
            n = get_int32(ep);
            if (n > BIG_ARITY_MAX*sizeof(ErtsDigit)) {
                goto error;
            }
            SKIP2(n,4+1);               /* skip, size,sign,digits */
            heap_size += 1+1+(n+sizeof(Eterm)-1)/sizeof(Eterm); /* XXX: 1 too much? */
            break;
        case ATOM_EXT:
            CHKSIZE(2);
            n = get_int16(ep);
            if (n > MAX_ATOM_CHARACTERS) {
                goto error;
            }
            SKIP(n+2+atom_extra_skip);
            atom_extra_skip = 0;
            break;
        case ATOM_UTF8_EXT:
            CHKSIZE(2);
            n = get_int16(ep);
            ep += 2;
            if (n > MAX_ATOM_SZ_LIMIT) {
                goto error;
            }
            SKIP(n+atom_extra_skip);
            atom_extra_skip = 0;
            break;
        case SMALL_ATOM_EXT:
            CHKSIZE(1);
            n = get_int8(ep);
            if (n > MAX_ATOM_CHARACTERS) {
                goto error;
            }
            SKIP(n+1+atom_extra_skip);
            atom_extra_skip = 0;
            break;
        case SMALL_ATOM_UTF8_EXT:
            CHKSIZE(1);
            n = get_int8(ep);
            ep++;
            if (n > MAX_ATOM_SZ_LIMIT) {
                goto error;
            }
            SKIP(n+atom_extra_skip);
            atom_extra_skip = 0;
            break;
        case ATOM_CACHE_REF:
            SKIP(1+atom_extra_skip);
            atom_extra_skip = 0;
            break;
        case NEW_PID_EXT:
            atom_extra_skip = 12;   /* number, serial, 32-bit creation */
            goto case_PID;
        case PID_EXT:
            atom_extra_skip = 9;    /* number, serial, 8-bit creation */
        case_PID:
            /* In case it is an external pid */
            heap_size += EXTERNAL_THING_HEAD_SIZE + 1;
            terms++;    /* the node name atom follows */
            break;
        case NEW_PORT_EXT:
            atom_extra_skip = 8;    /* id, 32-bit creation */
            goto case_PORT;
        case PORT_EXT:
            atom_extra_skip = 5;    /* id, 8-bit creation */
        case_PORT:
            /* In case it is an external port */
            heap_size += EXTERNAL_THING_HEAD_SIZE + 1;
            terms++;    /* the node name atom follows */
            break;
        case NEWER_REFERENCE_EXT:
            atom_extra_skip = 4;    /* 32-bit creation */
            goto case_NEW_REFERENCE;
        case NEW_REFERENCE_EXT:
            atom_extra_skip = 1;    /* 8-bit creation */
        case_NEW_REFERENCE:
            {
                int id_words;

                CHKSIZE(2);
                id_words = get_int16(ep);

                if (id_words > ERTS_MAX_REF_NUMBERS)
                    goto error;

                ep += 2;
                atom_extra_skip += 4*id_words;
                /* In case it is an external ref */
#if defined(ARCH_64)
                heap_size += EXTERNAL_THING_HEAD_SIZE + id_words/2 + 1;
#else
                heap_size += EXTERNAL_THING_HEAD_SIZE + id_words;
#endif
                terms++;    /* the node name atom follows */
                break;
            }
        case REFERENCE_EXT:
            /* In case it is an external ref */
            heap_size += EXTERNAL_THING_HEAD_SIZE + 1;
            atom_extra_skip = 5;
            terms++;
            break;
        case NIL_EXT:
            break;
        case LIST_EXT:
            CHKSIZE(4);
            n = get_int32(ep);
            ep += 4;
            ADDTERMS(n);
            terms++;    /* the tail */
            heap_size += 2 * n;
            break;
        case SMALL_TUPLE_EXT:
            CHKSIZE(1);
            n = *ep++;
            terms += n;
            heap_size += n + 1;
            break;
        case LARGE_TUPLE_EXT:
            CHKSIZE(4);
            n = get_int32(ep);
            ep += 4;
            ADDTERMS(n);
            heap_size += n + 1;
            break;
        case MAP_EXT:
            CHKSIZE(4);
            n = get_int32(ep);
            ep += 4;
            ADDTERMS(2*n);      /* n keys + n values */
            if (n <= MAP_SMALL_MAP_LIMIT) {
                heap_size += 3 + n + 1 + n;     /* flatmap + keys tuple */
            } else {
#if !defined(ARCH_64)
                if ((n >> 30) != 0) {
                    /* Can't possibly fit in memory. */
                    goto error;
                }
#endif
                CHKSIZE(2*n);   /* Conservative size check */
                heap_size += HASHMAP_ESTIMATED_HEAP_SIZE(n);
            }
            break;
        case STRING_EXT:
            CHKSIZE(2);
            n = get_int16(ep);
            SKIP(n+2);
            heap_size += 2 * n;     /* decodes to a list of small ints */
            break;
        case FLOAT_EXT:
            SKIP(31);
            heap_size += FLOAT_SIZE_OBJECT;
            break;
        case NEW_FLOAT_EXT:
            SKIP(8);
            heap_size += FLOAT_SIZE_OBJECT;
            break;
        case BINARY_EXT:
            CHKSIZE(4);
            n = get_int32(ep);
            SKIP2(n, 4);
            if (n <= ERL_ONHEAP_BIN_LIMIT) {
                heap_size += heap_bin_size(n);
            } else {
                heap_size += PROC_BIN_SIZE;
            }
            break;
        case BIT_BINARY_EXT:
            {
                CHKSIZE(5);
                n = get_int32(ep);
                SKIP2(n, 5);
                if (n <= ERL_ONHEAP_BIN_LIMIT) {
                    heap_size += heap_bin_size(n) + ERL_SUB_BIN_SIZE;
                } else {
                    heap_size += PROC_BIN_SIZE + ERL_SUB_BIN_SIZE;
                }
            }
            break;
        case EXPORT_EXT:
            terms += 3;     /* module, function, arity */
            heap_size += 2;
            break;
        case NEW_FUN_EXT:
            {
                unsigned num_free;
                Uint total_size;

                CHKSIZE(1+16+4+4);
                total_size = get_int32(ep);
                CHKSIZE(total_size);
                ep += 1+16+4+4;     /* arity, uniq, index, num_free fields */
                CHKSIZE(4);
                num_free = get_int32(ep);
                ep += 4;
                if (num_free > MAX_ARG) {
                    goto error;
                }
                terms += 4 + num_free;  /* module, index, uniq, creator + env */
                heap_size += ERL_FUN_SIZE + num_free;
                break;
            }
        case FUN_EXT:
            /*
             * OTP 23: No longer support decoding the old fun
             * representation.
             */
            goto error;
        case ATOM_INTERNAL_REF2:
            SKIP(2+atom_extra_skip);
            atom_extra_skip = 0;
            break;
        case ATOM_INTERNAL_REF3:
            SKIP(3+atom_extra_skip);
            atom_extra_skip = 0;
            break;

        case BINARY_INTERNAL_REF:
            if (!internal_tags) {
                goto error;
            }
            SKIP(sizeof(ProcBin));
            heap_size += PROC_BIN_SIZE;
            break;
        case BIT_BINARY_INTERNAL_REF:
            if (!internal_tags) {
                goto error;
            }
            SKIP(2+sizeof(ProcBin));
            heap_size += PROC_BIN_SIZE + ERL_SUB_BIN_SIZE;
            break;
        default:
            goto error;
        }
        terms--;

        if (ctx && --reds <= 0 && terms > 0) {
            /* Out of reductions: save scan state and yield. */
            ctx->u.sc.heap_size = heap_size;
            ctx->u.sc.terms = terms;
            ctx->u.sc.ep = ep;
            ctx->u.sc.atom_extra_skip = atom_extra_skip;
            ctx->reds = 0;
            return 0;
        }
    }while (terms > 0);

    /* 'terms' may be non-zero if it has wrapped around */
    if (terms == 0) {
        if (ctx) {
            ctx->state = B2TDecodeInit;
            ctx->reds = reds;
        }
        return heap_size;
    }

error:
    if (ctx) {
        ctx->state = B2TBadArg;
    }
    return -1;
#undef SKIP
#undef SKIP2
#undef CHKSIZE
}
5722
/* Reduction cost factor used when transcoding distribution output buffers. */
#define ERTS_TRANSCODE_REDS_FACT 4
/* State of a temporary control-message decode during transcoding;
 * 'hp' is the ERTS_ALC_T_TMP allocated heap that 'factory' builds on. */
typedef struct {
    ErtsHeapFactory factory;
    Eterm *hp;
} ErtsTranscodeDecodeState;
5728
/*
 * Decode the distribution control message found in iov[2 .. end_ix-1]
 * into a temporary heap owned by 'state'. The returned term is valid
 * until transcode_decode_state_destroy(state) is called.
 *
 * The encoding is locally produced, so it is assumed to be well formed
 * (only ASSERTed, not error checked).
 */
static Eterm
transcode_decode_ctl_msg(ErtsTranscodeDecodeState *state,
                         SysIOVec *iov,
                         int end_ix)
{
    Eterm ctl_msg, *hp;
    Uint buf_sz;
    byte *buf_start, *buf_end;
    byte *ptr;
    Uint hsz;

    if (end_ix == 3) {
        /* The whole control message is in iov[2].iov_base */
        buf_sz = (Uint) iov[2].iov_len;
        buf_start = (byte *) iov[2].iov_base;
        buf_end = buf_start + buf_sz;
    }
    else {
        /* Control message over multiple buffers...
         * Flatten into one temporary buffer before decoding. */
        int ix;
        buf_sz = 0;
        for (ix = 2; ix < end_ix; ix++)
            buf_sz += iov[ix].iov_len;
        ptr = buf_start = erts_alloc(ERTS_ALC_T_TMP, buf_sz);
        buf_end = buf_start + buf_sz;
        for (ix = 2; ix < end_ix; ix++) {
            sys_memcpy((void *) ptr,
                       (void *) iov[ix].iov_base,
                       iov[ix].iov_len);
            ptr += iov[ix].iov_len;
        }
    }

    /* Size the needed heap, then decode onto a tmp factory heap. */
    hsz = decoded_size(buf_start, buf_end, 0, NULL);
    state->hp = hp = erts_alloc(ERTS_ALC_T_TMP, hsz*sizeof(Eterm));
    erts_factory_tmp_init(&state->factory, hp, hsz, ERTS_ALC_T_TMP);

    ptr = dec_term(NULL, &state->factory, buf_start, &ctl_msg, NULL, 0);
    ASSERT(ptr); (void)ptr;
    ASSERT(is_tuple(ctl_msg));

    /* Free the flattening buffer, if one was allocated above. */
    if (buf_start != (byte *) iov[2].iov_base)
        erts_free(ERTS_ALC_T_TMP, buf_start);

    return ctl_msg;
}
5775
5776 static void
transcode_decode_state_destroy(ErtsTranscodeDecodeState * state)5777 transcode_decode_state_destroy(ErtsTranscodeDecodeState *state)
5778 {
5779 erts_factory_close(&state->factory);
5780 erts_free(ERTS_ALC_T_TMP, state->hp);
5781 }
5782
5783 static
transcode_dist_obuf(ErtsDistOutputBuf * ob,DistEntry * dep,Uint64 dflags,Sint reds)5784 Sint transcode_dist_obuf(ErtsDistOutputBuf* ob,
5785 DistEntry* dep,
5786 Uint64 dflags,
5787 Sint reds)
5788 {
5789 ErlIOVec* eiov = ob->eiov;
5790 SysIOVec* iov = eiov->iov;
5791 byte *hdr;
5792 Uint64 hopefull_flags;
5793 Uint32 hopefull_ix, payload_ix;
5794 Sint start_r, r;
5795 Uint new_len;
5796 byte *ep;
5797
5798 if (reds < 0)
5799 return reds;
5800
5801 /*
5802 * HOPEFUL_DATA header always present in io vector
5803 * element 1:
5804 *
5805 * +---+--------------+-----------+----------+
5806 * |'H'|Hopefull Flags|Hopefull IX|Payload IX|
5807 * +---+--------------+-----------+----------+
5808 * 1 8 4 4
5809 *
5810 * Hopefull flags: Flags corresponding to actual
5811 * hopefull encodings in this
5812 * buffer.
5813 * Hopefull IX: Vector index of first hopefull
5814 * encoding. Each hopefull encoding
5815 * is preceeded by 4 bytes containing
5816 * next vector index of hopefull
5817 * encoding. ERTS_NO_HIX marks the
5818 * end.
5819 * Payload IX: Vector index of the beginning
5820 * of the payload if there is
5821 * one; otherwise, zero.
5822 */
5823 hdr = (byte *) iov[1].iov_base;
5824
5825 ASSERT(HOPEFUL_DATA == *((byte *)iov[1].iov_base));
5826 ASSERT(iov[1].iov_len == 1+8+4+4);
5827
5828 /* Control message always begin in vector element 2 */
5829 ep = iov[2].iov_base;
5830 ASSERT(ep[0] == SMALL_TUPLE_EXT || ep[0] == LARGE_TUPLE_EXT);
5831
5832 if (~dflags & (DFLAG_DIST_MONITOR | DFLAG_DIST_MONITOR_NAME)
5833 && ep[0] == SMALL_TUPLE_EXT
5834 && ep[1] == 4
5835 && ep[2] == SMALL_INTEGER_EXT
5836 && (ep[3] == DOP_MONITOR_P ||
5837 ep[3] == DOP_MONITOR_P_EXIT ||
5838 ep[3] == DOP_DEMONITOR_P)) {
5839 /*
5840 * Receiver does not support process monitoring.
5841 * Suppress monitor control msg (see erts_dsig_send_monitor)
5842 * by converting it to an empty (tick) packet.
5843 */
5844 int i;
5845 for (i = 1; i < ob->eiov->vsize; i++) {
5846 if (ob->eiov->binv[i])
5847 driver_free_binary(ob->eiov->binv[i]);
5848 }
5849 ob->eiov->vsize = 1;
5850 ob->eiov->size = 0;
5851 return reds;
5852 }
5853
5854 hdr++;
5855 hopefull_flags = get_int64(hdr);
5856
5857 hdr += 8;
5858 hopefull_ix = get_int32(hdr);
5859
5860 if ((~dflags & DFLAG_SPAWN)
5861 && ep[0] == SMALL_TUPLE_EXT
5862 && ((ep[1] == 6
5863 && ep[2] == SMALL_INTEGER_EXT
5864 && ep[3] == DOP_SPAWN_REQUEST)
5865 || (ep[1] == 8
5866 && ep[2] == SMALL_INTEGER_EXT
5867 && ep[3] == DOP_SPAWN_REQUEST_TT))) {
5868 /*
5869 * Receiver does not support distributed spawn. Convert
5870 * this packet to an empty (tick) packet, and inform
5871 * spawning process that this is not supported...
5872 */
5873 ErtsTranscodeDecodeState tds;
5874 Eterm ctl_msg, ref, pid, token, *tp;
5875 int i;
5876
5877 hdr += 4;
5878 payload_ix = get_int32(hdr);
5879 ASSERT(payload_ix >= 3);
5880
5881 ctl_msg = transcode_decode_ctl_msg(&tds, iov, payload_ix);
5882
5883 ASSERT(is_tuple_arity(ctl_msg, 6)
5884 || is_tuple_arity(ctl_msg, 8));
5885 tp = tuple_val(ctl_msg);
5886 ASSERT(tp[1] == make_small(DOP_SPAWN_REQUEST)
5887 || tp[1] == make_small(DOP_SPAWN_REQUEST_TT));
5888
5889 ref = tp[2];
5890 pid = tp[3];
5891 if (tp[1] == make_small(DOP_SPAWN_REQUEST))
5892 token = NIL;
5893 else {
5894 token = tp[8];
5895 erts_seq_trace_update_node_token(token);
5896 }
5897 ASSERT(is_internal_ordinary_ref(tp[2]));
5898 ASSERT(is_internal_pid(tp[3]));
5899
5900 (void) erts_proc_sig_send_dist_spawn_reply(dep->sysname,
5901 ref, pid,
5902 NULL, am_notsup,
5903 token);
5904
5905 transcode_decode_state_destroy(&tds);
5906
5907 for (i = 1; i < ob->eiov->vsize; i++) {
5908 if (ob->eiov->binv[i])
5909 driver_free_binary(ob->eiov->binv[i]);
5910 }
5911 ob->eiov->vsize = 1;
5912 ob->eiov->size = 0;
5913
5914 reds -= 4;
5915
5916 if (reds < 0)
5917 return 0;
5918 return reds;
5919 }
5920
5921 if ((~dflags & DFLAG_UNLINK_ID)
5922 && ep[0] == SMALL_TUPLE_EXT
5923 && ep[1] == 4
5924 && ep[2] == SMALL_INTEGER_EXT
5925 && (ep[3] == DOP_UNLINK_ID_ACK || ep[3] == DOP_UNLINK_ID)) {
5926
5927 if (ep[3] == DOP_UNLINK_ID_ACK) {
5928 /* Drop DOP_UNLINK_ID_ACK signal... */
5929 int i;
5930 for (i = 1; i < ob->eiov->vsize; i++) {
5931 if (ob->eiov->binv[i])
5932 driver_free_binary(ob->eiov->binv[i]);
5933 }
5934 ob->eiov->vsize = 1;
5935 ob->eiov->size = 0;
5936 }
5937 else {
5938 Eterm ctl_msg, remote, local, *tp;
5939 ErtsTranscodeDecodeState tds;
5940 Uint64 id;
5941 byte *ptr;
5942 ASSERT(ep[3] == DOP_UNLINK_ID);
5943 /*
5944 * Rewrite the DOP_UNLINK_ID signal into a
5945 * DOP_UNLINK signal and send an unlink ack
5946 * to the local sender.
5947 */
5948
5949 /*
5950 * Decode the control message to obtain the info
5951 * needed for the unlink ack signal we must send...
5952 */
5953 ASSERT(get_int32(hdr + 4) == 0); /* No payload */
5954 ctl_msg = transcode_decode_ctl_msg(&tds, iov, eiov->vsize);
5955
5956 ASSERT(is_tuple_arity(ctl_msg, 4));
5957
5958 tp = tuple_val(ctl_msg);
5959 ASSERT(tp[1] == make_small(DOP_UNLINK_ID));
5960
5961 if (!term_to_Uint64(tp[2], &id))
5962 ERTS_INTERNAL_ERROR("Invalid encoding of DOP_UNLINK_ID signal");
5963
5964 local = tp[3];
5965 remote = tp[4];
5966
5967 ASSERT(is_internal_pid(local));
5968 ASSERT(is_external_pid(remote));
5969
5970 /*
5971 * Rewrite buffer to an unlink signal by removing
5972 * second element and change first element to
5973 * DOP_UNLINK. That is, to: {DOP_UNLINK, local, remote}
5974 */
5975
5976 ptr = &ep[4];
5977 switch (*ptr) {
5978 case SMALL_INTEGER_EXT:
5979 ptr += 1;
5980 break;
5981 case INTEGER_EXT:
5982 ptr += 4;
5983 break;
5984 case SMALL_BIG_EXT:
5985 ptr += 1;
5986 ASSERT(*ptr <= 8);
5987 ptr += *ptr + 1;
5988 break;
5989 default:
5990 ERTS_INTERNAL_ERROR("Invalid encoding of DOP_UNLINK_ID signal");
5991 break;
5992 }
5993
5994 ASSERT((ptr - ep) <= 16);
5995 ASSERT((ptr - ep) <= iov[2].iov_len);
5996
5997 *(ptr--) = DOP_UNLINK;
5998 *(ptr--) = SMALL_INTEGER_EXT;
5999 *(ptr--) = 3;
6000 *ptr = SMALL_TUPLE_EXT;
6001
6002 iov[2].iov_base = ptr;
6003 iov[2].iov_len -= (ptr - ep);
6004
6005 #ifdef DEBUG
6006 {
6007 ErtsTranscodeDecodeState dbg_tds;
6008 Eterm new_ctl_msg = transcode_decode_ctl_msg(&dbg_tds,
6009 iov,
6010 eiov->vsize);
6011 ASSERT(is_tuple_arity(new_ctl_msg, 3));
6012 tp = tuple_val(new_ctl_msg);
6013 ASSERT(tp[1] == make_small(DOP_UNLINK));
6014 ASSERT(tp[2] == local);
6015 ASSERT(eq(tp[3], remote));
6016 transcode_decode_state_destroy(&dbg_tds);
6017 }
6018 #endif
6019
6020 /* Send unlink ack to local sender... */
6021 erts_proc_sig_send_dist_unlink_ack(NULL, dep,
6022 dep->connection_id,
6023 remote, local, id);
6024
6025 transcode_decode_state_destroy(&tds);
6026
6027 reds -= 5;
6028 }
6029 if (reds < 0)
6030 return 0;
6031 return reds;
6032 }
6033
6034 start_r = r = reds*ERTS_TRANSCODE_REDS_FACT;
6035
6036 if (~dflags & hopefull_flags) {
6037
6038 while (hopefull_ix != ERTS_NO_HIX) {
6039 Uint32 new_hopefull_ix;
6040
6041 if (r <= 0) { /* yield... */
6042 /* save current hopefull_ix... */
6043 ep = (byte *) iov[1].iov_base;
6044 ep += 5;
6045 put_int32(hopefull_ix, ep);
6046 return -1;
6047 }
6048
6049 /* Read next hopefull index */
6050 ep = (byte *) iov[hopefull_ix].iov_base;
6051 ep -= 4;
6052 new_hopefull_ix = get_int32(ep);
6053 ASSERT(new_hopefull_ix == ERTS_NO_HIX
6054 || (hopefull_ix < new_hopefull_ix
6055 && new_hopefull_ix < eiov->vsize));
6056
6057 ep = (byte *) iov[hopefull_ix].iov_base;
6058 switch (*ep) {
6059
6060 case EXPORT_EXT: {
6061 byte *start_ep, *end_ep;
6062 Eterm module, function;
6063 if (!(hopefull_flags & DFLAG_EXPORT_PTR_TAG))
6064 break;
6065 /* Read original encoding... */
6066 ep++;
6067 start_ep = ep;
6068 ep = dec_atom(NULL, ep, &module);
6069 ASSERT(ep && is_atom(module));
6070 ep = dec_atom(NULL, ep, &function);
6071 ASSERT(ep && is_atom(function));
6072 end_ep = ep;
6073 ASSERT(*ep == SMALL_INTEGER_EXT
6074 || *ep == INTEGER_EXT
6075 || *ep == SMALL_BIG_EXT
6076 || *ep == LARGE_BIG_EXT);
6077
6078 /*
6079 * module and function atoms are encoded
6080 * between start_ep and end_ep. Prepend a
6081 * 2-tuple tag before the atoms and
6082 * remove arity at end.
6083 */
6084
6085 /* write fallback */
6086
6087 ep = start_ep;
6088 ep--;
6089 put_int8(2, ep);
6090 ep--;
6091 *ep = SMALL_TUPLE_EXT;
6092
6093 iov[hopefull_ix].iov_base = ep;
6094
6095 /* Update iov sizes... */
6096 new_len = end_ep - ep;
6097 eiov->size -= iov[hopefull_ix].iov_len;
6098 eiov->size += new_len;
6099 iov[hopefull_ix].iov_len = new_len;
6100 r--;
6101 break;
6102 }
6103
6104 case BIT_BINARY_EXT: {
6105 Uint bin_sz;
6106 byte bitsize, epilog_byte;
6107 ASSERT(hopefull_ix != ERTS_NO_HIX);
6108 if (!(hopefull_flags & DFLAG_BIT_BINARIES)) {
6109 /* skip to epilog... */
6110 hopefull_ix = new_hopefull_ix;
6111 ep = (byte *) iov[hopefull_ix].iov_base;
6112 ep -= 4;
6113 new_hopefull_ix = get_int32(ep);
6114 ASSERT(new_hopefull_ix == ERTS_NO_HIX
6115 || (hopefull_ix < new_hopefull_ix
6116 && new_hopefull_ix < eiov->vsize));
6117 break;
6118 }
6119
6120 /* read original encoded prolog... */
6121 ep++;
6122 bin_sz = get_int32(ep);
6123 ep += 4;
6124 bitsize = *ep++;
6125
6126 /* write fallback prolog... */
6127 iov[hopefull_ix].iov_base -= 4;
6128 ep = (byte *) iov[hopefull_ix].iov_base;
6129
6130 *ep++ = SMALL_TUPLE_EXT;
6131 *ep++ = 2;
6132 *ep++ = BINARY_EXT;
6133 put_int32(bin_sz, ep);
6134 ep += 4;
6135
6136 /* Update iov sizes... */
6137 new_len = ep - (byte *) iov[hopefull_ix].iov_base;
6138 eiov->size -= iov[hopefull_ix].iov_len;
6139 eiov->size += new_len;
6140 iov[hopefull_ix].iov_len = new_len;
6141 r--;
6142 #ifdef DEBUG
6143 /*
6144 * The binary data between the prolog and the
6145 * epilog should be of size 'bin_sz - 1' and
6146 * exists in the iov elements between prolog
6147 * and epilog...
6148 */
6149 {
6150 Uint ix, debug_bin_sz = 0;
6151 for (ix = hopefull_ix+1; ix < new_hopefull_ix; ix++)
6152 debug_bin_sz += iov[ix].iov_len;
6153 ASSERT(debug_bin_sz == bin_sz - 1);
6154 }
6155 #endif
6156 /* jump to epilog... */
6157 hopefull_ix = new_hopefull_ix;
6158 ep = (byte *) iov[hopefull_ix].iov_base;
6159
6160 /* read original encoded epilog... */
6161 epilog_byte = *ep;
6162
6163 ASSERT(1 == iov[hopefull_ix].iov_len);
6164
6165 iov[hopefull_ix].iov_base -= 4;
6166 ep = (byte *) iov[hopefull_ix].iov_base;
6167 new_hopefull_ix = get_int32(ep);
6168 ASSERT(new_hopefull_ix == ERTS_NO_HIX
6169 || (hopefull_ix < new_hopefull_ix
6170 && new_hopefull_ix < eiov->vsize));
6171
6172 /* write fallback epilog... */
6173
6174 *ep++ = epilog_byte;
6175 *ep++ = SMALL_INTEGER_EXT;
6176 *ep++ = bitsize;
6177
6178 /* Update iov sizes... */
6179 new_len = ep - (byte *) iov[hopefull_ix].iov_base;
6180 eiov->size -= iov[hopefull_ix].iov_len;
6181 eiov->size += new_len;
6182 iov[hopefull_ix].iov_len = new_len;
6183 r--;
6184 break;
6185 }
6186
6187 default:
6188 ERTS_INTERNAL_ERROR("Unexpected external tag");
6189 break;
6190 }
6191
6192 hopefull_ix = new_hopefull_ix;
6193 r--;
6194 }
6195 }
6196
6197 /*
6198 * Replace hopefull data header with actual header...
6199 */
6200 ep = (byte *) iov[1].iov_base;
6201 eiov->size -= iov[1].iov_len;
6202
6203 if (dflags & (DFLAG_DIST_HDR_ATOM_CACHE|DFLAG_FRAGMENTS)) {
6204 /*
6205 * Encoding was done without atom caching but receiver expects
6206 * a dist header, so we prepend an empty one.
6207 */
6208 *ep++ = VERSION_MAGIC;
6209 *ep++ = DIST_HEADER;
6210 *ep++ = 0; /* NumberOfAtomCacheRefs */
6211 }
6212 else {
6213 hdr += 4;
6214 payload_ix = get_int32(hdr);
6215
6216 if (payload_ix) {
6217 ASSERT(0 < payload_ix && payload_ix < eiov->vsize);
6218 /* Prepend version magic on payload. */
6219 iov[payload_ix].iov_base--;
6220 *((byte *) iov[payload_ix].iov_base) = VERSION_MAGIC;
6221 iov[payload_ix].iov_len++;
6222 eiov->size++;
6223 r--;
6224 }
6225
6226 *ep++ = PASS_THROUGH;
6227 *ep++ = VERSION_MAGIC;
6228 }
6229
6230 iov[1].iov_len = ep - (byte *) iov[1].iov_base;
6231 eiov->size += iov[1].iov_len;
6232
6233 r--;
6234
6235 /* done... */
6236
6237 reds -= (start_r - r)/ERTS_TRANSCODE_REDS_FACT + 1;
6238 if (reds < 0)
6239 return 0;
6240 return reds;
6241 }
6242