1 /*
2  *  R : A Computer Language for Statistical Data Analysis
3  *  Copyright (C) 1998--2021  The R Core Team.
4  *  Copyright (C) 1995, 1996  Robert Gentleman and Ross Ihaka
5  *
6  *  This program is free software; you can redistribute it and/or modify
7  *  it under the terms of the GNU General Public License as published by
8  *  the Free Software Foundation; either version 2 of the License, or
9  *  (at your option) any later version.
10  *
11  *  This program is distributed in the hope that it will be useful,
12  *  but WITHOUT ANY WARRANTY; without even the implied warranty of
13  *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14  *  GNU General Public License for more details.
15  *
16  *  You should have received a copy of the GNU General Public License
17  *  along with this program; if not, a copy is available at
18  *  https://www.R-project.org/Licenses/
19  */
20 
21 /*
22  *	This code implements a non-moving generational collector
23  *      with two or three generations.
24  *
25  *	Memory allocated by R_alloc is maintained in a stack.  Code
26  *	that R_allocs memory must use vmaxget and vmaxset to obtain
27  *	and reset the stack pointer.
28  */
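
/* Illustrative sketch (not part of this file): the usual calling
   pattern for code that R_allocs memory, using the documented
   vmaxget/vmaxset/R_alloc API; 'n' and 'work' are hypothetical.

       void *vmax = vmaxget();          // save the R_alloc stack pointer
       double *work = (double *) R_alloc(n, sizeof(double));
       // ... use work; no explicit free is needed ...
       vmaxset(vmax);                   // pop everything allocated since vmaxget
*/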
29 
30 #define USE_RINTERNALS
31 #define COMPILING_MEMORY_C
32 
33 #ifdef HAVE_CONFIG_H
34 #include <config.h>
35 #endif
36 
37 #include <stdarg.h>
38 
39 #include <R_ext/RS.h> /* for S4 allocation */
40 #include <R_ext/Print.h>
41 
42 /* Declarations for Valgrind.
43 
44    These are controlled by the
45      --with-valgrind-instrumentation=
46    option to configure, which sets VALGRIND_LEVEL to the
47    supplied value (default 0) and defines NVALGRIND if
48    the value is 0.
49 
50    level 0 is no additional instrumentation
51    level 1 marks uninitialized numeric, logical, integer, raw,
52 	   complex vectors and R_alloc memory
53    level 2 marks the data section of vector nodes as inaccessible
54 	   when they are freed.
55 
56    level 3 was withdrawn in R 3.2.0.
57 
58    It may be necessary to define NVALGRIND for a non-gcc
59    compiler on a supported architecture if it has different
60    syntax for inline assembly language from gcc.
61 
62    For Win32, Valgrind is useful only if running under Wine.
63 */
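
/* Illustrative usage (assumes a standard source build): enable level 2
   instrumentation at configure time and run under Valgrind, e.g.

       ./configure --with-valgrind-instrumentation=2
       make
       R -d valgrind -f test.R

   where test.R is a hypothetical script to check. */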
64 #ifdef Win32
65 # ifndef USE_VALGRIND_FOR_WINE
66 # define NVALGRIND 1
67 #endif
68 #endif
69 
70 
71 #ifndef VALGRIND_LEVEL
72 # define VALGRIND_LEVEL 0
73 #endif
74 
75 #ifndef NVALGRIND
76 # ifdef HAVE_VALGRIND_MEMCHECK_H
77 #  include "valgrind/memcheck.h"
78 # else
79 // internal version of headers
80 #  include "vg/memcheck.h"
81 # endif
82 #endif
83 
84 
85 #define R_USE_SIGNALS 1
86 #include <Defn.h>
87 #include <Internal.h>
88 #include <R_ext/GraphicsEngine.h> /* GEDevDesc, GEgetDevice */
89 #include <R_ext/Rdynload.h>
90 #include <R_ext/Rallocators.h> /* for R_allocator_t structure */
91 #include <Rmath.h> // R_pow_di
92 #include <Print.h> // R_print
93 
94 #if defined(Win32)
95 extern void *Rm_malloc(size_t n);
96 extern void *Rm_calloc(size_t n_elements, size_t element_size);
97 extern void Rm_free(void * p);
98 extern void *Rm_realloc(void * p, size_t n);
99 #define calloc Rm_calloc
100 #define malloc Rm_malloc
101 #define realloc Rm_realloc
102 #define free Rm_free
103 #endif
104 
105 /* malloc uses size_t.  We are assuming here that size_t is at least
106    as large as unsigned long.  Changed from int at 1.6.0 to (i) allow
107    2-4Gb objects on a 32-bit system and (ii) objects limited only by
108    length on a 64-bit system.
109 */
110 
111 static int gc_reporting = 0;
112 static int gc_count = 0;
113 
114 /* Report an error encountered during garbage collection.  For detecting
115    problems it is better to abort, but for debugging (or for some production
116    runs, where external validation of results is possible) it may be
117    preferable to continue. Configurable via _R_GC_FAIL_ON_ERROR_. Typically
118    these problems are due to memory corruption.
119 */
120 static Rboolean gc_fail_on_error = FALSE;
121 static void gc_error(const char *msg)
122 {
123     if (gc_fail_on_error)
124 	R_Suicide(msg);
125     else if (R_in_gc)
126 	REprintf(msg);
127     else
128 	error(msg);
129 }
130 
131 /* These are used in profiling to separate out time in GC */
132 int R_gc_running() { return R_in_gc; }
133 
134 #ifdef TESTING_WRITE_BARRIER
135 # define PROTECTCHECK
136 #endif
137 
138 #ifdef PROTECTCHECK
139 /* This is used to help detect unprotected SEXP values.  It is most
140    useful if the strict barrier is enabled as well. The strategy is:
141 
142        All GCs are full GCs
143 
144        New nodes are marked as NEWSXP
145 
146        After a GC all free nodes that are not of type NEWSXP are
147        marked as type FREESXP
148 
149        Most calls to accessor functions check their SEXP inputs and
150        SEXP outputs with CHK() to see if a reachable node is a
151        FREESXP and signal an error if a FREESXP is found.
152 
153    Combined with GC torture this can help locate where an unprotected
154    SEXP is being used.
155 
156    This approach will miss cases where an unprotected node has been
157    re-allocated.  For these cases it is possible to set
158    gc_inhibit_release to TRUE.  FREESXP nodes will not be reallocated,
159    or large ones released, until gc_inhibit_release is set to FALSE
160    again.  This will of course result in memory growth and should be
161    used with care and typically in combination with OS mechanisms to
162    limit process memory usage.  LT */
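
/* Illustrative sketch (R level): the torture settings defined further
   below (gc_force_wait, gc_force_gap, gc_inhibit_release) are normally
   driven via gctorture2(), e.g.

       gctorture2(step = 1, wait = 1, inhibit_release = TRUE)

   which forces a collection at every allocation opportunity and keeps
   FREESXP nodes from being reused, so stale unprotected pointers
   remain detectable. */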
163 
164 /* Before a node is marked as a FREESXP by the collector the previous
165    type is recorded.  For now using the LEVELS field seems
166    reasonable.  */
167 #define OLDTYPE(s) LEVELS(s)
168 #define SETOLDTYPE(s, t) SETLEVELS(s, t)
169 
170 static R_INLINE SEXP CHK(SEXP x)
171 {
172     /* **** NULL check because of R_CurrentExpr */
173     if (x != NULL && TYPEOF(x) == FREESXP)
174 	error("unprotected object (%p) encountered (was %s)",
175 	      x, sexptype2char(OLDTYPE(x)));
176     return x;
177 }
178 #else
179 #define CHK(x) x
180 #endif
181 
182 /* The following variable definitions are used to record the
183    address and type of the first bad type seen during a collection,
184    and for FREESXP nodes they record the old type as well. */
185 static SEXPTYPE bad_sexp_type_seen = 0;
186 static SEXP bad_sexp_type_sexp = NULL;
187 #ifdef PROTECTCHECK
188 static SEXPTYPE bad_sexp_type_old_type = 0;
189 #endif
190 static int bad_sexp_type_line = 0;
191 
192 static R_INLINE void register_bad_sexp_type(SEXP s, int line)
193 {
194     if (bad_sexp_type_seen == 0) {
195 	bad_sexp_type_seen = TYPEOF(s);
196 	bad_sexp_type_sexp = s;
197 	bad_sexp_type_line = line;
198 #ifdef PROTECTCHECK
199 	if (TYPEOF(s) == FREESXP)
200 	    bad_sexp_type_old_type = OLDTYPE(s);
201 #endif
202     }
203 }
204 
205 /* also called from typename() in inspect.c */
206 attribute_hidden
207 const char *sexptype2char(SEXPTYPE type) {
208     switch (type) {
209     case NILSXP:	return "NILSXP";
210     case SYMSXP:	return "SYMSXP";
211     case LISTSXP:	return "LISTSXP";
212     case CLOSXP:	return "CLOSXP";
213     case ENVSXP:	return "ENVSXP";
214     case PROMSXP:	return "PROMSXP";
215     case LANGSXP:	return "LANGSXP";
216     case SPECIALSXP:	return "SPECIALSXP";
217     case BUILTINSXP:	return "BUILTINSXP";
218     case CHARSXP:	return "CHARSXP";
219     case LGLSXP:	return "LGLSXP";
220     case INTSXP:	return "INTSXP";
221     case REALSXP:	return "REALSXP";
222     case CPLXSXP:	return "CPLXSXP";
223     case STRSXP:	return "STRSXP";
224     case DOTSXP:	return "DOTSXP";
225     case ANYSXP:	return "ANYSXP";
226     case VECSXP:	return "VECSXP";
227     case EXPRSXP:	return "EXPRSXP";
228     case BCODESXP:	return "BCODESXP";
229     case EXTPTRSXP:	return "EXTPTRSXP";
230     case WEAKREFSXP:	return "WEAKREFSXP";
231     case S4SXP:		return "S4SXP";
232     case RAWSXP:	return "RAWSXP";
233     case NEWSXP:	return "NEWSXP"; /* should never happen */
234     case FREESXP:	return "FREESXP";
235     default:		return "<unknown>";
236     }
237 }
238 
239 #define GC_TORTURE
240 
241 static int gc_pending = 0;
242 #ifdef GC_TORTURE
243 /* **** if the user specified a wait before starting to force
244    **** collections it might make sense to also wait before starting
245    **** to inhibit releases */
246 static int gc_force_wait = 0;
247 static int gc_force_gap = 0;
248 static Rboolean gc_inhibit_release = FALSE;
249 #define FORCE_GC (gc_pending || (gc_force_wait > 0 ? (--gc_force_wait > 0 ? 0 : (gc_force_wait = gc_force_gap, 1)) : 0))
250 #else
251 # define FORCE_GC gc_pending
252 #endif
253 
254 #ifdef R_MEMORY_PROFILING
255 static void R_ReportAllocation(R_size_t);
256 static void R_ReportNewPage();
257 #endif
258 
259 #define GC_PROT(X) do { \
260     int __wait__ = gc_force_wait; \
261     int __gap__ = gc_force_gap;			   \
262     Rboolean __release__ = gc_inhibit_release;	   \
263     X;						   \
264     gc_force_wait = __wait__;			   \
265     gc_force_gap = __gap__;			   \
266     gc_inhibit_release = __release__;		   \
267 }  while(0)
268 
269 static void R_gc_internal(R_size_t size_needed);
270 static void R_gc_no_finalizers(R_size_t size_needed);
271 static void R_gc_lite();
272 static void mem_err_heap(R_size_t size);
273 static void mem_err_malloc(R_size_t size);
274 
275 static SEXPREC UnmarkedNodeTemplate;
276 #define NODE_IS_MARKED(s) (MARK(s)==1)
277 #define MARK_NODE(s) (MARK(s)=1)
278 #define UNMARK_NODE(s) (MARK(s)=0)
279 
280 
281 /* Tuning Constants. Most of these could be made settable from R,
282    within some reasonable constraints at least.  Since there are quite
283    a lot of constants it would probably make sense to put together
284    several "packages" representing different space/speed tradeoffs
285    (e.g. very aggressive freeing and small increments to conserve
286    memory; much less frequent releasing and larger increments to
287    increase speed). */
288 
289 /* There are three levels of collections.  Level 0 collects only the
290    youngest generation, level 1 collects the two youngest generations,
291    and level 2 collects all generations.  Higher level collections
292    occur at least after specified numbers of lower level ones.  After
293    LEVEL_0_FREQ level zero collections a level 1 collection is done;
294    after every LEVEL_1_FREQ level 1 collections a level 2 collection
295    occurs.  Thus, roughly, every LEVEL_0_FREQ-th collection is a level
296    1 collection and every (LEVEL_0_FREQ * LEVEL_1_FREQ)-th collection
297    is a level 2 collection.  */
298 #define LEVEL_0_FREQ 20
299 #define LEVEL_1_FREQ 5
300 static int collect_counts_max[] = { LEVEL_0_FREQ, LEVEL_1_FREQ };
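
/* Worked example with the defaults above: collections 1..19 are level 0,
   every 20th collection is at least level 1, and every 100th
   (LEVEL_0_FREQ * LEVEL_1_FREQ) is a full level 2 collection, subject to
   the escalation rules described below. */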
301 
302 /* When a level N collection fails to produce at least MinFreeFrac *
303    R_NSize free nodes and MinFreeFrac * R_VSize free vector space, the
304    next collection will be a level N + 1 collection.
305 
306    This constant is also used in heap size adjustment as a minimal
307    fraction of the minimal heap size levels that should be available
308    for allocation. */
309 static double R_MinFreeFrac = 0.2;
310 
311 /* When pages are released, a number of free nodes equal to
312    R_MaxKeepFrac times the number of allocated nodes for each class is
313    retained.  Pages not needed to meet this requirement are released.
314    An attempt to release pages is made every R_PageReleaseFreq level 1
315    or level 2 collections. */
316 static double R_MaxKeepFrac = 0.5;
317 static int R_PageReleaseFreq = 1;
318 
319 /* The heap size constants R_NSize and R_VSize are used for triggering
320    collections.  The initial values set by defaults or command line
321    arguments are used as minimal values.  After full collections these
322    levels are adjusted up or down, though not below the minimal values
323    or above the maximum values, toward maintaining heap occupancy within
324    a specified range.  When the number of nodes in use reaches
325    R_NGrowFrac * R_NSize, the value of R_NSize is incremented by
326    R_NGrowIncrMin + R_NGrowIncrFrac * R_NSize.  When the number of
327    nodes in use falls below R_NShrinkFrac, R_NSize is decremented by
328    R_NShrinkIncrMin + R_NShrinkFrac * R_NSize.  Analogous adjustments
329    are made to R_VSize.
330 
331    This mechanism for adjusting the heap size constants is very
332    primitive but hopefully adequate for now.  Some modeling and
333    experimentation would be useful.  We want the heap sizes to get set
334    at levels adequate for the current computations.  The present
335    mechanism uses only the size of the current live heap to provide
336    information about the current needs; since the current live heap
337    size can be very volatile, the adjustment mechanism only makes
338    gradual adjustments.  A more sophisticated strategy would use more
339    of the live heap history.
340 
341    Some of the settings can now be adjusted by environment variables.
342 */
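
/* Worked example (assuming the usual default of R_NSize = 350000 and the
   non-SMALL_MEMORY settings below): node growth triggers once more than
   0.70 * 350000 = 245000 nodes are in use, and R_NSize is then raised by
   at least 40000 + 0.2 * 350000 = 110000 nodes, capped at R_MaxNSize. */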
343 static double R_NGrowFrac = 0.70;
344 static double R_NShrinkFrac = 0.30;
345 
346 static double R_VGrowFrac = 0.70;
347 static double R_VShrinkFrac = 0.30;
348 
349 #ifdef SMALL_MEMORY
350 /* On machines with only 32M of memory (or on a classic Mac OS port)
351    it might be a good idea to use settings like these that are more
352    aggressive at keeping memory usage down. */
353 static double R_NGrowIncrFrac = 0.0, R_NShrinkIncrFrac = 0.2;
354 static int R_NGrowIncrMin = 50000, R_NShrinkIncrMin = 0;
355 static double R_VGrowIncrFrac = 0.0, R_VShrinkIncrFrac = 0.2;
356 static int R_VGrowIncrMin = 100000, R_VShrinkIncrMin = 0;
357 #else
358 static double R_NGrowIncrFrac = 0.2, R_NShrinkIncrFrac = 0.2;
359 static int R_NGrowIncrMin = 40000, R_NShrinkIncrMin = 0;
360 static double R_VGrowIncrFrac = 0.2, R_VShrinkIncrFrac = 0.2;
361 static int R_VGrowIncrMin = 80000, R_VShrinkIncrMin = 0;
362 #endif
363 
364 static void init_gc_grow_settings()
365 {
366     char *arg;
367 
368     arg = getenv("R_GC_MEM_GROW");
369     if (arg != NULL) {
370 	int which = (int) atof(arg);
371 	switch (which) {
372 	case 0: /* very conservative -- the SMALL_MEMORY settings */
373 	    R_NGrowIncrFrac = 0.0;
374 	    R_VGrowIncrFrac = 0.0;
375 	    break;
376 	case 1: /* default */
377 	    break;
378 	case 2: /* somewhat aggressive */
379 	    R_NGrowIncrFrac = 0.3;
380 	    R_VGrowIncrFrac = 0.3;
381 	    break;
382 	case 3: /* more aggressive */
383 	    R_NGrowIncrFrac = 0.4;
384 	    R_VGrowIncrFrac = 0.4;
385 	    R_NGrowFrac = 0.5;
386 	    R_VGrowFrac = 0.5;
387 	    break;
388 	}
389     }
390     arg = getenv("R_GC_GROWFRAC");
391     if (arg != NULL) {
392 	double frac = atof(arg);
393 	if (0.35 <= frac && frac <= 0.75) {
394 	    R_NGrowFrac = frac;
395 	    R_VGrowFrac = frac;
396 	}
397     }
398     arg = getenv("R_GC_GROWINCRFRAC");
399     if (arg != NULL) {
400 	double frac = atof(arg);
401 	if (0.05 <= frac && frac <= 0.80) {
402 	    R_NGrowIncrFrac = frac;
403 	    R_VGrowIncrFrac = frac;
404 	}
405     }
406     arg = getenv("R_GC_NGROWINCRFRAC");
407     if (arg != NULL) {
408 	double frac = atof(arg);
409 	if (0.05 <= frac && frac <= 0.80)
410 	    R_NGrowIncrFrac = frac;
411     }
412     arg = getenv("R_GC_VGROWINCRFRAC");
413     if (arg != NULL) {
414 	double frac = atof(arg);
415 	if (0.05 <= frac && frac <= 0.80)
416 	    R_VGrowIncrFrac = frac;
417     }
418 }
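
/* Illustrative usage (shell): the settings read above can be supplied in
   the environment when starting R, e.g.

       R_GC_MEM_GROW=3 R_GC_GROWFRAC=0.6 R --vanilla

   Values outside the accepted ranges are silently ignored. */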
419 
420 /* Maximal Heap Limits.  These variables contain upper limits on the
421    heap sizes.  They could be made adjustable from the R level,
422    perhaps by a handler for a recoverable error.
423 
424    Access to these values is provided with reader and writer
425    functions; the writer function insures that the maximal values are
426    never set below the current ones. */
427 static R_size_t R_MaxVSize = R_SIZE_T_MAX;
428 static R_size_t R_MaxNSize = R_SIZE_T_MAX;
429 static int vsfac = 1; /* current units for vsize: changes at initialization */
430 
431 R_size_t attribute_hidden R_GetMaxVSize(void)
432 {
433     if (R_MaxVSize == R_SIZE_T_MAX) return R_SIZE_T_MAX;
434     return R_MaxVSize * vsfac;
435 }
436 
437 void attribute_hidden R_SetMaxVSize(R_size_t size)
438 {
439     if (size == R_SIZE_T_MAX) return;
440     if (size / vsfac >= R_VSize) R_MaxVSize = (size + 1) / vsfac;
441 }
442 
443 R_size_t attribute_hidden R_GetMaxNSize(void)
444 {
445     return R_MaxNSize;
446 }
447 
448 void attribute_hidden R_SetMaxNSize(R_size_t size)
449 {
450     if (size >= R_NSize) R_MaxNSize = size;
451 }
452 
453 void attribute_hidden R_SetPPSize(R_size_t size)
454 {
455     R_PPStackSize = (int) size;
456 }
457 
458 SEXP attribute_hidden do_maxVSize(SEXP call, SEXP op, SEXP args, SEXP rho)
459 {
460     const double MB = 1048576.0;
461     double newval = asReal(CAR(args));
462 
463     if (newval > 0) {
464 	if (newval == R_PosInf)
465 	    R_MaxVSize = R_SIZE_T_MAX;
466 	else
467 	    R_SetMaxVSize((R_size_t) (newval * MB));
468     }
469 
470     if (R_MaxVSize == R_SIZE_T_MAX)
471 	return ScalarReal(R_PosInf);
472     else
473 	return ScalarReal(R_GetMaxVSize() / MB);
474 }
475 
476 SEXP attribute_hidden do_maxNSize(SEXP call, SEXP op, SEXP args, SEXP rho)
477 {
478     double newval = asReal(CAR(args));
479 
480     if (newval > 0) {
481 	if (newval == R_PosInf)
482 	    R_MaxNSize = R_SIZE_T_MAX;
483 	else
484 	    R_SetMaxNSize((R_size_t) newval);
485     }
486 
487     if (R_MaxNSize == R_SIZE_T_MAX)
488 	return ScalarReal(R_PosInf);
489     else
490 	return ScalarReal(R_GetMaxNSize());
491 }
492 
493 
494 /* Miscellaneous Globals. */
495 
496 static SEXP R_VStack = NULL;		/* R_alloc stack pointer */
497 static SEXP R_PreciousList = NULL;      /* List of Persistent Objects */
498 static R_size_t R_LargeVallocSize = 0;
499 static R_size_t R_SmallVallocSize = 0;
500 static R_size_t orig_R_NSize;
501 static R_size_t orig_R_VSize;
502 
503 static R_size_t R_N_maxused=0;
504 static R_size_t R_V_maxused=0;
505 
506 /* Node Classes.  Non-vector nodes are of class zero. Small vector
507    nodes are in classes 1, ..., NUM_SMALL_NODE_CLASSES, and large
508    vector nodes are in class LARGE_NODE_CLASS. Vectors with
509    custom allocators are in CUSTOM_NODE_CLASS. For vector nodes the
510    node header is followed in memory by the vector data, offset from
511    the header by SEXPREC_ALIGN. */
512 
513 #define NUM_NODE_CLASSES 8
514 
515 /* sxpinfo allocates 3 bits for the node class, so at most 8 are allowed */
516 #if NUM_NODE_CLASSES > 8
517 # error NUM_NODE_CLASSES must be at most 8
518 #endif
519 
520 #define LARGE_NODE_CLASS  (NUM_NODE_CLASSES - 1)
521 #define CUSTOM_NODE_CLASS (NUM_NODE_CLASSES - 2)
522 #define NUM_SMALL_NODE_CLASSES (NUM_NODE_CLASSES - 2)
523 
524 /* the number of VECREC's in nodes of the small node classes */
525 static int NodeClassSize[NUM_SMALL_NODE_CLASSES] = { 0, 1, 2, 4, 8, 16 };
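
/* Illustrative example (assuming 8-byte VECRECs, as on typical 64-bit
   builds): an INTSXP of length 5 needs 20 bytes, i.e. 3 VECRECs, so it is
   allocated from the class with NodeClassSize 4; vectors needing more
   than 16 VECRECs go to LARGE_NODE_CLASS. */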
526 
527 #define NODE_CLASS(s) ((s)->sxpinfo.gccls)
528 #define SET_NODE_CLASS(s,v) (((s)->sxpinfo.gccls) = (v))
529 
530 
531 /* Node Generations. */
532 
533 #define NUM_OLD_GENERATIONS 2
534 
535 /* sxpinfo allocates one bit for the old generation count, so only 1
536    or 2 is allowed */
537 #if NUM_OLD_GENERATIONS > 2 || NUM_OLD_GENERATIONS < 1
538 # error number of old generations must be 1 or 2
539 #endif
540 
541 #define NODE_GENERATION(s) ((s)->sxpinfo.gcgen)
542 #define SET_NODE_GENERATION(s,g) ((s)->sxpinfo.gcgen=(g))
543 
544 #define NODE_GEN_IS_YOUNGER(s,g) \
545   (! NODE_IS_MARKED(s) || NODE_GENERATION(s) < (g))
546 #define NODE_IS_OLDER(x, y) \
547     (NODE_IS_MARKED(x) && (y) && \
548    (! NODE_IS_MARKED(y) || NODE_GENERATION(x) > NODE_GENERATION(y)))
549 
550 static int num_old_gens_to_collect = 0;
551 static int gen_gc_counts[NUM_OLD_GENERATIONS + 1];
552 static int collect_counts[NUM_OLD_GENERATIONS];
553 
554 
555 /* Node Pages.  Non-vector nodes and small vector nodes are allocated
556    from fixed size pages.  The pages for each node class are kept in a
557    linked list. */
558 
559 typedef union PAGE_HEADER {
560   union PAGE_HEADER *next;
561   double align;
562 } PAGE_HEADER;
563 
564 #if ( SIZEOF_SIZE_T > 4 )
565 # define BASE_PAGE_SIZE 8000
566 #else
567 # define BASE_PAGE_SIZE 2000
568 #endif
569 #define R_PAGE_SIZE \
570   (((BASE_PAGE_SIZE - sizeof(PAGE_HEADER)) / sizeof(SEXPREC)) \
571    * sizeof(SEXPREC) \
572    + sizeof(PAGE_HEADER))
573 #define NODE_SIZE(c) \
574   ((c) == 0 ? sizeof(SEXPREC) : \
575    sizeof(SEXPREC_ALIGN) + NodeClassSize[c] * sizeof(VECREC))
576 
577 #define PAGE_DATA(p) ((void *) (p + 1))
578 #define VHEAP_FREE() (R_VSize - R_LargeVallocSize - R_SmallVallocSize)
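
/* Worked example (assuming sizeof(SEXPREC) == 56 and sizeof(PAGE_HEADER)
   == 8, as on a typical 64-bit build): R_PAGE_SIZE is
   ((8000 - 8) / 56) * 56 + 8 = 142 * 56 + 8 = 7960 bytes, i.e. each page
   holds a whole number of class-0 nodes plus its header. */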
579 
580 
581 /* The Heap Structure.  Nodes for each class/generation combination
582    are arranged in circular doubly-linked lists.  The double linking
583    allows nodes to be removed in constant time; this is used by the
584    collector to move reachable nodes out of free space and into the
585    appropriate generation.  The circularity eliminates the need for
586    end checks.  In addition, each link is anchored at an artificial
587    node, the Peg SEXPREC's in the structure below, which simplifies
588    pointer maintenance.  The circular doubly-linked arrangement is
589    taken from Baker's in-place incremental collector design; see
590    ftp://ftp.netcom.com/pub/hb/hbaker/NoMotionGC.html or the Jones and
591    Lins GC book.  The linked lists are implemented by adding two
592    pointer fields to the SEXPREC structure, which increases its size
593    from 5 to 7 words. Other approaches are possible but don't seem
594    worth pursuing for R.
595 
596    There are two options for dealing with old-to-new pointers.  The
597    first option is to make sure they never occur by transferring all
598    referenced younger objects to the generation of the referrer when a
599    reference to a newer object is assigned to an older one.  This is
600    enabled by defining EXPEL_OLD_TO_NEW.  The second alternative is to
601    keep track of all nodes that may contain references to newer nodes
602    and to "age" the nodes they refer to at the beginning of each
603    collection.  This is the default.  The first option is simpler in
604    some ways, but will create more floating garbage and add a bit to
605    the execution time, though the difference is probably marginal on
606    both counts.*/
607 /*#define EXPEL_OLD_TO_NEW*/
608 static struct {
609     SEXP Old[NUM_OLD_GENERATIONS], New, Free;
610     SEXPREC OldPeg[NUM_OLD_GENERATIONS], NewPeg;
611 #ifndef EXPEL_OLD_TO_NEW
612     SEXP OldToNew[NUM_OLD_GENERATIONS];
613     SEXPREC OldToNewPeg[NUM_OLD_GENERATIONS];
614 #endif
615     int OldCount[NUM_OLD_GENERATIONS], AllocCount, PageCount;
616     PAGE_HEADER *pages;
617 } R_GenHeap[NUM_NODE_CLASSES];
618 
619 static R_size_t R_NodesInUse = 0;
620 
621 #define NEXT_NODE(s) (s)->gengc_next_node
622 #define PREV_NODE(s) (s)->gengc_prev_node
623 #define SET_NEXT_NODE(s,t) (NEXT_NODE(s) = (t))
624 #define SET_PREV_NODE(s,t) (PREV_NODE(s) = (t))
625 
626 
627 /* Node List Manipulation */
628 
629 /* unsnap node s from its list */
630 #define UNSNAP_NODE(s) do { \
631   SEXP un__n__ = (s); \
632   SEXP next = NEXT_NODE(un__n__); \
633   SEXP prev = PREV_NODE(un__n__); \
634   SET_NEXT_NODE(prev, next); \
635   SET_PREV_NODE(next, prev); \
636 } while(0)
637 
638 /* snap in node s before node t */
639 #define SNAP_NODE(s,t) do { \
640   SEXP sn__n__ = (s); \
641   SEXP next = (t); \
642   SEXP prev = PREV_NODE(next); \
643   SET_NEXT_NODE(sn__n__, next); \
644   SET_PREV_NODE(next, sn__n__); \
645   SET_NEXT_NODE(prev, sn__n__); \
646   SET_PREV_NODE(sn__n__, prev); \
647 } while (0)
648 
649 /* move all nodes on from_peg to to_peg */
650 #define BULK_MOVE(from_peg,to_peg) do { \
651   SEXP __from__ = (from_peg); \
652   SEXP __to__ = (to_peg); \
653   SEXP first_old = NEXT_NODE(__from__); \
654   SEXP last_old = PREV_NODE(__from__); \
655   SEXP first_new = NEXT_NODE(__to__); \
656   SET_PREV_NODE(first_old, __to__); \
657   SET_NEXT_NODE(__to__, first_old); \
658   SET_PREV_NODE(first_new, last_old); \
659   SET_NEXT_NODE(last_old, first_new); \
660   SET_NEXT_NODE(__from__, __from__); \
661   SET_PREV_NODE(__from__, __from__); \
662 } while (0);
663 
664 
665 /* Processing Node Children */
666 
667 /* This macro calls dc__action__ for each child of __n__, passing
668    dc__extra__ as a second argument for each call. */
669 /* When the CHARSXP hash chains are maintained through the ATTRIB
670    field it is important that we NOT trace those fields otherwise too
671    many CHARSXPs will be kept alive artificially. As a safety we don't
672    ignore all non-NULL ATTRIB values for CHARSXPs but only those that
673    are themselves CHARSXPs, which is what they will be if they are
674    part of a hash chain.  Theoretically, for CHARSXPs the ATTRIB field
675    should always be either R_NilValue or a CHARSXP. */
676 #ifdef PROTECTCHECK
677 # define HAS_GENUINE_ATTRIB(x) \
678     (TYPEOF(x) != FREESXP && ATTRIB(x) != R_NilValue && \
679      (TYPEOF(x) != CHARSXP || TYPEOF(ATTRIB(x)) != CHARSXP))
680 #else
681 # define HAS_GENUINE_ATTRIB(x) \
682     (ATTRIB(x) != R_NilValue && \
683      (TYPEOF(x) != CHARSXP || TYPEOF(ATTRIB(x)) != CHARSXP))
684 #endif
685 
686 #ifdef PROTECTCHECK
687 #define FREE_FORWARD_CASE case FREESXP: if (gc_inhibit_release) break;
688 #else
689 #define FREE_FORWARD_CASE
690 #endif
691 /*** assume for now all ALTREP nodes are based on CONS nodes */
692 #define DO_CHILDREN(__n__,dc__action__,dc__extra__) do { \
693   if (HAS_GENUINE_ATTRIB(__n__)) \
694     dc__action__(ATTRIB(__n__), dc__extra__); \
695   if (ALTREP(__n__)) {					\
696 	  dc__action__(TAG(__n__), dc__extra__);	\
697 	  dc__action__(CAR(__n__), dc__extra__);	\
698 	  dc__action__(CDR(__n__), dc__extra__);	\
699       }							\
700   else \
701   switch (TYPEOF(__n__)) { \
702   case NILSXP: \
703   case BUILTINSXP: \
704   case SPECIALSXP: \
705   case CHARSXP: \
706   case LGLSXP: \
707   case INTSXP: \
708   case REALSXP: \
709   case CPLXSXP: \
710   case WEAKREFSXP: \
711   case RAWSXP: \
712   case S4SXP: \
713     break; \
714   case STRSXP: \
715   case EXPRSXP: \
716   case VECSXP: \
717     { \
718       R_xlen_t i; \
719       for (i = 0; i < XLENGTH(__n__); i++) \
720 	dc__action__(VECTOR_ELT(__n__, i), dc__extra__); \
721     } \
722     break; \
723   case ENVSXP: \
724     dc__action__(FRAME(__n__), dc__extra__); \
725     dc__action__(ENCLOS(__n__), dc__extra__); \
726     dc__action__(HASHTAB(__n__), dc__extra__); \
727     break; \
728   case LISTSXP: \
729     dc__action__(TAG(__n__), dc__extra__); \
730     if (BOXED_BINDING_CELLS || BNDCELL_TAG(__n__) == 0) \
731       dc__action__(CAR0(__n__), dc__extra__); \
732     dc__action__(CDR(__n__), dc__extra__); \
733     break; \
734   case CLOSXP: \
735   case PROMSXP: \
736   case LANGSXP: \
737   case DOTSXP: \
738   case SYMSXP: \
739   case BCODESXP: \
740     dc__action__(TAG(__n__), dc__extra__); \
741     dc__action__(CAR0(__n__), dc__extra__); \
742     dc__action__(CDR(__n__), dc__extra__); \
743     break; \
744   case EXTPTRSXP: \
745     dc__action__(EXTPTR_PROT(__n__), dc__extra__); \
746     dc__action__(EXTPTR_TAG(__n__), dc__extra__); \
747     break; \
748   FREE_FORWARD_CASE \
749   default: \
750     register_bad_sexp_type(__n__, __LINE__);		\
751   } \
752 } while(0)
753 
754 
755 /* Forwarding Nodes.  These macros mark nodes or children of nodes and
756    place them on the forwarding list.  The forwarding list is assumed
757    to be in a local variable of the caller named
758    forwarded_nodes. */
759 
760 #define FORWARD_NODE(s) do { \
761   SEXP fn__n__ = (s); \
762   if (fn__n__ && ! NODE_IS_MARKED(fn__n__)) { \
763     CHECK_FOR_FREE_NODE(fn__n__) \
764     MARK_NODE(fn__n__); \
765     UNSNAP_NODE(fn__n__); \
766     SET_NEXT_NODE(fn__n__, forwarded_nodes); \
767     forwarded_nodes = fn__n__; \
768   } \
769 } while (0)
770 
771 #define FC_FORWARD_NODE(__n__,__dummy__) FORWARD_NODE(__n__)
772 #define FORWARD_CHILDREN(__n__) DO_CHILDREN(__n__,FC_FORWARD_NODE, 0)
773 
774 /* This macro should help localize where a FREESXP node is encountered
775    in the GC */
776 #ifdef PROTECTCHECK
777 #define CHECK_FOR_FREE_NODE(s) { \
778     SEXP cf__n__ = (s); \
779     if (TYPEOF(cf__n__) == FREESXP && ! gc_inhibit_release) \
780 	register_bad_sexp_type(cf__n__, __LINE__); \
781 }
782 #else
783 #define CHECK_FOR_FREE_NODE(s)
784 #endif
785 
786 
787 /* Node Allocation. */
788 
789 #define CLASS_GET_FREE_NODE(c,s) do { \
790   SEXP __n__ = R_GenHeap[c].Free; \
791   if (__n__ == R_GenHeap[c].New) { \
792     GetNewPage(c); \
793     __n__ = R_GenHeap[c].Free; \
794   } \
795   R_GenHeap[c].Free = NEXT_NODE(__n__); \
796   R_NodesInUse++; \
797   (s) = __n__; \
798 } while (0)
799 
800 #define NO_FREE_NODES() (R_NodesInUse >= R_NSize)
801 #define GET_FREE_NODE(s) CLASS_GET_FREE_NODE(0,s)
802 
803 /* versions that assume nodes are available without adding a new page */
804 #define CLASS_QUICK_GET_FREE_NODE(c,s) do {		\
805 	SEXP __n__ = R_GenHeap[c].Free;			\
806 	if (__n__ == R_GenHeap[c].New)			\
807 	    error("need new page - should not happen");	\
808 	R_GenHeap[c].Free = NEXT_NODE(__n__);		\
809 	R_NodesInUse++;					\
810 	(s) = __n__;					\
811     } while (0)
812 
813 #define QUICK_GET_FREE_NODE(s) CLASS_QUICK_GET_FREE_NODE(0,s)
814 
815 /* QUICK versions can be used if (CLASS_)NEED_NEW_PAGE returns FALSE */
816 #define CLASS_NEED_NEW_PAGE(c) (R_GenHeap[c].Free == R_GenHeap[c].New)
817 #define NEED_NEW_PAGE() CLASS_NEED_NEW_PAGE(0)
818 
819 
820 /* Debugging Routines. */
821 
822 #ifdef DEBUG_GC
823 static void CheckNodeGeneration(SEXP x, int g)
824 {
825     if (x && NODE_GENERATION(x) < g) {
826 	gc_error("untraced old-to-new reference\n");
827     }
828 }
829 
830 static void DEBUG_CHECK_NODE_COUNTS(char *where)
831 {
832     int i, OldCount, NewCount, OldToNewCount, gen;
833     SEXP s;
834 
835     REprintf("Node counts %s:\n", where);
836     for (i = 0; i < NUM_NODE_CLASSES; i++) {
837 	for (s = NEXT_NODE(R_GenHeap[i].New), NewCount = 0;
838 	     s != R_GenHeap[i].New;
839 	     s = NEXT_NODE(s)) {
840 	    NewCount++;
841 	    if (i != NODE_CLASS(s))
842 		gc_error("Inconsistent class assignment for node!\n");
843 	}
844 	for (gen = 0, OldCount = 0, OldToNewCount = 0;
845 	     gen < NUM_OLD_GENERATIONS;
846 	     gen++) {
847 	    for (s = NEXT_NODE(R_GenHeap[i].Old[gen]);
848 		 s != R_GenHeap[i].Old[gen];
849 		 s = NEXT_NODE(s)) {
850 		OldCount++;
851 		if (i != NODE_CLASS(s))
852 		    gc_error("Inconsistent class assignment for node!\n");
853 		if (gen != NODE_GENERATION(s))
854 		    gc_error("Inconsistent node generation\n");
855 		DO_CHILDREN(s, CheckNodeGeneration, gen);
856 	    }
857 	    for (s = NEXT_NODE(R_GenHeap[i].OldToNew[gen]);
858 		 s != R_GenHeap[i].OldToNew[gen];
859 		 s = NEXT_NODE(s)) {
860 		OldToNewCount++;
861 		if (i != NODE_CLASS(s))
862 		    gc_error("Inconsistent class assignment for node!\n");
863 		if (gen != NODE_GENERATION(s))
864 		    gc_error("Inconsistent node generation\n");
865 	    }
866 	}
867 	REprintf("Class: %d, New = %d, Old = %d, OldToNew = %d, Total = %d\n",
868 		 i,
869 		 NewCount, OldCount, OldToNewCount,
870 		 NewCount + OldCount + OldToNewCount);
871     }
872 }
873 
874 static void DEBUG_GC_SUMMARY(int full_gc)
875 {
876     int i, gen, OldCount;
877     REprintf("\n%s, VSize = %lu", full_gc ? "Full" : "Minor",
878 	     R_SmallVallocSize + R_LargeVallocSize);
879     for (i = 1; i < NUM_NODE_CLASSES; i++) {
880 	for (gen = 0, OldCount = 0; gen < NUM_OLD_GENERATIONS; gen++)
881 	    OldCount += R_GenHeap[i].OldCount[gen];
882 	REprintf(", class %d: %d", i, OldCount);
883     }
884 }
885 #else
886 #define DEBUG_CHECK_NODE_COUNTS(s)
887 #define DEBUG_GC_SUMMARY(x)
888 #endif /* DEBUG_GC */
889 
890 #ifdef DEBUG_ADJUST_HEAP
891 static void DEBUG_ADJUST_HEAP_PRINT(double node_occup, double vect_occup)
892 {
893     int i;
894     R_size_t alloc;
895     REprintf("Node occupancy: %.0f%%\nVector occupancy: %.0f%%\n",
896 	     100.0 * node_occup, 100.0 * vect_occup);
897     alloc = R_LargeVallocSize +
898 	sizeof(SEXPREC_ALIGN) * R_GenHeap[LARGE_NODE_CLASS].AllocCount;
899     for (i = 0; i < NUM_SMALL_NODE_CLASSES; i++)
900 	alloc += R_PAGE_SIZE * R_GenHeap[i].PageCount;
901     REprintf("Total allocation: %lu\n", alloc);
902     REprintf("Ncells %lu\nVcells %lu\n", R_NSize, R_VSize);
903 }
904 #else
905 #define DEBUG_ADJUST_HEAP_PRINT(node_occup, vect_occup)
906 #endif /* DEBUG_ADJUST_HEAP */
907 
908 #ifdef DEBUG_RELEASE_MEM
909 static void DEBUG_RELEASE_PRINT(int rel_pages, int maxrel_pages, int i)
910 {
911     if (maxrel_pages > 0) {
912 	int gen, n;
913 	REprintf("Class: %d, pages = %d, maxrel = %d, released = %d\n", i,
914 		 R_GenHeap[i].PageCount, maxrel_pages, rel_pages);
915 	for (gen = 0, n = 0; gen < NUM_OLD_GENERATIONS; gen++)
916 	    n += R_GenHeap[i].OldCount[gen];
917 	REprintf("Allocated = %d, in use = %d\n", R_GenHeap[i].AllocCount, n);
918     }
919 }
920 #else
921 #define DEBUG_RELEASE_PRINT(rel_pages, maxrel_pages, i)
922 #endif /* DEBUG_RELEASE_MEM */
923 
924 #ifdef COMPUTE_REFCNT_VALUES
925 #define INIT_REFCNT(x) do {			\
926 	SEXP __x__ = (x);			\
927 	SET_REFCNT(__x__, 0);			\
928 	SET_TRACKREFS(__x__, TRUE);		\
929     } while (0)
930 #else
931 #define INIT_REFCNT(x) do {} while (0)
932 #endif
933 
934 /* Page Allocation and Release. */
935 
936 static void GetNewPage(int node_class)
937 {
938     SEXP s, base;
939     char *data;
940     PAGE_HEADER *page;
941     int node_size, page_count, i;  // FIXME: longer type?
942 
943     node_size = NODE_SIZE(node_class);
944     page_count = (R_PAGE_SIZE - sizeof(PAGE_HEADER)) / node_size;
945 
946     page = malloc(R_PAGE_SIZE);
947     if (page == NULL) {
948 	R_gc_no_finalizers(0);
949 	page = malloc(R_PAGE_SIZE);
950 	if (page == NULL)
951 	    mem_err_malloc((R_size_t) R_PAGE_SIZE);
952     }
953 #ifdef R_MEMORY_PROFILING
954     R_ReportNewPage();
955 #endif
956     page->next = R_GenHeap[node_class].pages;
957     R_GenHeap[node_class].pages = page;
958     R_GenHeap[node_class].PageCount++;
959 
960     data = PAGE_DATA(page);
961     base = R_GenHeap[node_class].New;
962     for (i = 0; i < page_count; i++, data += node_size) {
963 	s = (SEXP) data;
964 	R_GenHeap[node_class].AllocCount++;
965 	SNAP_NODE(s, base);
966 #if  VALGRIND_LEVEL > 1
967 	if (NodeClassSize[node_class] > 0)
968 	    VALGRIND_MAKE_MEM_NOACCESS(STDVEC_DATAPTR(s), NodeClassSize[node_class]*sizeof(VECREC));
969 #endif
970 	s->sxpinfo = UnmarkedNodeTemplate.sxpinfo;
971 	INIT_REFCNT(s);
972 	SET_NODE_CLASS(s, node_class);
973 #ifdef PROTECTCHECK
974 	SET_TYPEOF(s, NEWSXP);
975 #endif
976 	base = s;
977 	R_GenHeap[node_class].Free = s;
978     }
979 }
980 
981 static void ReleasePage(PAGE_HEADER *page, int node_class)
982 {
983     SEXP s;
984     char *data;
985     int node_size, page_count, i;
986 
987     node_size = NODE_SIZE(node_class);
988     page_count = (R_PAGE_SIZE - sizeof(PAGE_HEADER)) / node_size;
989     data = PAGE_DATA(page);
990 
991     for (i = 0; i < page_count; i++, data += node_size) {
992 	s = (SEXP) data;
993 	UNSNAP_NODE(s);
994 	R_GenHeap[node_class].AllocCount--;
995     }
996     R_GenHeap[node_class].PageCount--;
997     free(page);
998 }
999 
1000 static void TryToReleasePages(void)
1001 {
1002     SEXP s;
1003     int i;
1004     static int release_count = 0;
1005 
1006     if (release_count == 0) {
1007 	release_count = R_PageReleaseFreq;
1008 	for (i = 0; i < NUM_SMALL_NODE_CLASSES; i++) {
1009 	    int pages_free = 0;
1010 	    PAGE_HEADER *page, *last, *next;
1011 	    int node_size = NODE_SIZE(i);
1012 	    int page_count = (R_PAGE_SIZE - sizeof(PAGE_HEADER)) / node_size;
1013 	    int maxrel, maxrel_pages, rel_pages, gen;
1014 
1015 	    maxrel = R_GenHeap[i].AllocCount;
1016 	    for (gen = 0; gen < NUM_OLD_GENERATIONS; gen++)
1017 		maxrel -= (int)((1.0 + R_MaxKeepFrac) *
1018 				R_GenHeap[i].OldCount[gen]);
1019 	    maxrel_pages = maxrel > 0 ? maxrel / page_count : 0;
1020 
1021 	    /* all nodes in New space should be both free and unmarked */
1022 	    for (page = R_GenHeap[i].pages, rel_pages = 0, last = NULL;
1023 		 rel_pages < maxrel_pages && page != NULL;) {
1024 		int j, in_use;
1025 		char *data = PAGE_DATA(page);
1026 
1027 		next = page->next;
1028 		for (in_use = 0, j = 0; j < page_count;
1029 		     j++, data += node_size) {
1030 		    s = (SEXP) data;
1031 		    if (NODE_IS_MARKED(s)) {
1032 			in_use = 1;
1033 			break;
1034 		    }
1035 		}
1036 		if (! in_use) {
1037 		    ReleasePage(page, i);
1038 		    if (last == NULL)
1039 			R_GenHeap[i].pages = next;
1040 		    else
1041 			last->next = next;
1042 		    pages_free++;
1043 		    rel_pages++;
1044 		}
1045 		else last = page;
1046 		page = next;
1047 	    }
1048 	    DEBUG_RELEASE_PRINT(rel_pages, maxrel_pages, i);
1049 	    R_GenHeap[i].Free = NEXT_NODE(R_GenHeap[i].New);
1050 	}
1051     }
1052     else release_count--;
1053 }
1054 
1055 /* compute size in VEC units so result will fit in LENGTH field for FREESXPs */
1056 static R_INLINE R_size_t getVecSizeInVEC(SEXP s)
1057 {
1058     if (IS_GROWABLE(s))
1059 	SET_STDVEC_LENGTH(s, XTRUELENGTH(s));
1060 
1061     R_size_t size;
1062     switch (TYPEOF(s)) {	/* get size in bytes */
1063     case CHARSXP:
1064 	size = XLENGTH(s) + 1;
1065 	break;
1066     case RAWSXP:
1067 	size = XLENGTH(s);
1068 	break;
1069     case LGLSXP:
1070     case INTSXP:
1071 	size = XLENGTH(s) * sizeof(int);
1072 	break;
1073     case REALSXP:
1074 	size = XLENGTH(s) * sizeof(double);
1075 	break;
1076     case CPLXSXP:
1077 	size = XLENGTH(s) * sizeof(Rcomplex);
1078 	break;
1079     case STRSXP:
1080     case EXPRSXP:
1081     case VECSXP:
1082 	size = XLENGTH(s) * sizeof(SEXP);
1083 	break;
1084     default:
1085 	register_bad_sexp_type(s, __LINE__);
1086 	size = 0;
1087     }
1088     return BYTE2VEC(size);
1089 }
1090 
1091 static void custom_node_free(void *ptr);
1092 
1093 static void ReleaseLargeFreeVectors()
1094 {
1095     for (int node_class = CUSTOM_NODE_CLASS; node_class <= LARGE_NODE_CLASS; node_class++) {
1096 	SEXP s = NEXT_NODE(R_GenHeap[node_class].New);
1097 	while (s != R_GenHeap[node_class].New) {
1098 	    SEXP next = NEXT_NODE(s);
1099 	    if (CHAR(s) != NULL) {
1100 		R_size_t size;
1101 #ifdef PROTECTCHECK
1102 		if (TYPEOF(s) == FREESXP)
1103 		    size = STDVEC_LENGTH(s);
1104 		else
1105 		    /* should not get here -- arrange for a warning/error? */
1106 		    size = getVecSizeInVEC(s);
1107 #else
1108 		size = getVecSizeInVEC(s);
1109 #endif
1110 		UNSNAP_NODE(s);
1111 		R_GenHeap[node_class].AllocCount--;
1112 		if (node_class == LARGE_NODE_CLASS) {
1113 		    R_LargeVallocSize -= size;
1114 		    free(s);
1115 		} else {
1116 		    custom_node_free(s);
1117 		}
1118 	    }
1119 	    s = next;
1120 	}
1121     }
1122 }
1123 
1124 /* Heap Size Adjustment. */
1125 
1126 static void AdjustHeapSize(R_size_t size_needed)
1127 {
1128     R_size_t R_MinNFree = (R_size_t)(orig_R_NSize * R_MinFreeFrac);
1129     R_size_t R_MinVFree = (R_size_t)(orig_R_VSize * R_MinFreeFrac);
1130     R_size_t NNeeded = R_NodesInUse + R_MinNFree;
1131     R_size_t VNeeded = R_SmallVallocSize + R_LargeVallocSize
1132 	+ size_needed + R_MinVFree;
1133     double node_occup = ((double) NNeeded) / R_NSize;
1134     double vect_occup =	((double) VNeeded) / R_VSize;
1135 
1136     if (node_occup > R_NGrowFrac) {
1137 	R_size_t change =
1138 	    (R_size_t)(R_NGrowIncrMin + R_NGrowIncrFrac * R_NSize);
1139 
1140 	/* for early adjustments grow more aggressively */
1141 	static R_size_t last_in_use = 0;
1142 	static int adjust_count = 1;
1143 	if (adjust_count < 50) {
1144 	    adjust_count++;
1145 
1146 	    /* estimate next in-use count by assuming linear growth */
1147 	    R_size_t next_in_use = R_NodesInUse + (R_NodesInUse - last_in_use);
1148 	    last_in_use = R_NodesInUse;
1149 
1150 	    /* try to achieve an occupancy rate of R_NGrowFrac */
1151 	    R_size_t next_nsize = (R_size_t) (next_in_use / R_NGrowFrac);
1152 	    if (next_nsize > R_NSize + change)
1153 		change = next_nsize - R_NSize;
1154 	}
1155 
1156 	if (R_MaxNSize >= R_NSize + change)
1157 	    R_NSize += change;
1158     }
1159     else if (node_occup < R_NShrinkFrac) {
1160 	R_NSize -= (R_size_t)(R_NShrinkIncrMin + R_NShrinkIncrFrac * R_NSize);
1161 	if (R_NSize < NNeeded)
1162 	    R_NSize = (NNeeded < R_MaxNSize) ? NNeeded: R_MaxNSize;
1163 	if (R_NSize < orig_R_NSize)
1164 	    R_NSize = orig_R_NSize;
1165     }
1166 
1167     if (vect_occup > 1.0 && VNeeded < R_MaxVSize)
1168 	R_VSize = VNeeded;
1169     if (vect_occup > R_VGrowFrac) {
1170 	R_size_t change = (R_size_t)(R_VGrowIncrMin + R_VGrowIncrFrac * R_VSize);
1171 	if (R_MaxVSize - R_VSize >= change)
1172 	    R_VSize += change;
1173     }
1174     else if (vect_occup < R_VShrinkFrac) {
1175 	R_VSize -= (R_size_t)(R_VShrinkIncrMin + R_VShrinkIncrFrac * R_VSize);
1176 	if (R_VSize < VNeeded)
1177 	    R_VSize = VNeeded;
1178 	if (R_VSize < orig_R_VSize)
1179 	    R_VSize = orig_R_VSize;
1180     }
1181 
1182     DEBUG_ADJUST_HEAP_PRINT(node_occup, vect_occup);
1183 }
1184 
1185 
1186 /* Managing Old-to-New References. */
1187 
1188 #define AGE_NODE(s,g) do { \
1189   SEXP an__n__ = (s); \
1190   int an__g__ = (g); \
1191   if (an__n__ && NODE_GEN_IS_YOUNGER(an__n__, an__g__)) { \
1192     if (NODE_IS_MARKED(an__n__)) \
1193        R_GenHeap[NODE_CLASS(an__n__)].OldCount[NODE_GENERATION(an__n__)]--; \
1194     else \
1195       MARK_NODE(an__n__); \
1196     SET_NODE_GENERATION(an__n__, an__g__); \
1197     UNSNAP_NODE(an__n__); \
1198     SET_NEXT_NODE(an__n__, forwarded_nodes); \
1199     forwarded_nodes = an__n__; \
1200   } \
1201 } while (0)
1202 
1203 static void AgeNodeAndChildren(SEXP s, int gen)
1204 {
1205     SEXP forwarded_nodes = NULL;
1206     AGE_NODE(s, gen);
1207     while (forwarded_nodes != NULL) {
1208 	s = forwarded_nodes;
1209 	forwarded_nodes = NEXT_NODE(forwarded_nodes);
1210 	if (NODE_GENERATION(s) != gen)
1211 	    gc_error("****snapping into wrong generation\n");
1212 	SNAP_NODE(s, R_GenHeap[NODE_CLASS(s)].Old[gen]);
1213 	R_GenHeap[NODE_CLASS(s)].OldCount[gen]++;
1214 	DO_CHILDREN(s, AGE_NODE, gen);
1215     }
1216 }
1217 
1218 static void old_to_new(SEXP x, SEXP y)
1219 {
1220 #ifdef EXPEL_OLD_TO_NEW
1221     AgeNodeAndChildren(y, NODE_GENERATION(x));
1222 #else
1223     UNSNAP_NODE(x);
1224     SNAP_NODE(x, R_GenHeap[NODE_CLASS(x)].OldToNew[NODE_GENERATION(x)]);
1225 #endif
1226 }
1227 
1228 #ifdef COMPUTE_REFCNT_VALUES
1229 #define FIX_REFCNT_EX(x, old, new, chkpnd) do {				\
1230 	SEXP __x__ = (x);						\
1231 	if (TRACKREFS(__x__)) {						\
1232 	    SEXP __old__ = (old);					\
1233 	    SEXP __new__ = (new);					\
1234 	    if (__old__ != __new__) {					\
1235 		if (__old__) {						\
1236 		    if ((chkpnd) && ASSIGNMENT_PENDING(__x__))		\
1237 			SET_ASSIGNMENT_PENDING(__x__, FALSE);		\
1238 		    else						\
1239 			DECREMENT_REFCNT(__old__);			\
1240 		}							\
1241 		if (__new__) INCREMENT_REFCNT(__new__);			\
1242 	    }								\
1243 	}								\
1244     } while (0)
1245 #define FIX_REFCNT(x, old, new) FIX_REFCNT_EX(x, old, new, FALSE)
1246 #define FIX_BINDING_REFCNT(x, old, new)		\
1247     FIX_REFCNT_EX(x, old, new, TRUE)
1248 #else
1249 #define FIX_REFCNT(x, old, new) do {} while (0)
1250 #define FIX_BINDING_REFCNT(x, old, new) do {\
1251 	SEXP __x__ = (x);						\
1252 	SEXP __old__ = (old);						\
1253 	SEXP __new__ = (new);						\
1254 	if (ASSIGNMENT_PENDING(__x__) && __old__ &&			\
1255 	    __old__ != __new__)						\
1256 	    SET_ASSIGNMENT_PENDING(__x__, FALSE);			\
1257     } while (0)
1258 #endif
1259 
1260 #define CHECK_OLD_TO_NEW(x,y) do { \
1261   if (NODE_IS_OLDER(CHK(x), CHK(y))) old_to_new(x,y);  } while (0)
1262 
1263 
1264 /* Node Sorting.  SortNodes attempts to improve locality of reference
1265    by rearranging the free list to place nodes on the same page
1266    together and order nodes within pages.  This involves a sweep of the
1267    heap, so it should not be done too often, but doing it at least
1268    occasionally does seem essential.  Sorting on each full collection is
1269    probably sufficient.
1270 */
1271 
1272 #define SORT_NODES
1273 #ifdef SORT_NODES
1274 static void SortNodes(void)
1275 {
1276     SEXP s;
1277     int i;
1278 
1279     for (i = 0; i < NUM_SMALL_NODE_CLASSES; i++) {
1280 	PAGE_HEADER *page;
1281 	int node_size = NODE_SIZE(i);
1282 	int page_count = (R_PAGE_SIZE - sizeof(PAGE_HEADER)) / node_size;
1283 
1284 	SET_NEXT_NODE(R_GenHeap[i].New, R_GenHeap[i].New);
1285 	SET_PREV_NODE(R_GenHeap[i].New, R_GenHeap[i].New);
1286 	for (page = R_GenHeap[i].pages; page != NULL; page = page->next) {
1287 	    int j;
1288 	    char *data = PAGE_DATA(page);
1289 
1290 	    for (j = 0; j < page_count; j++, data += node_size) {
1291 		s = (SEXP) data;
1292 		if (! NODE_IS_MARKED(s))
1293 		    SNAP_NODE(s, R_GenHeap[i].New);
1294 	    }
1295 	}
1296 	R_GenHeap[i].Free = NEXT_NODE(R_GenHeap[i].New);
1297     }
1298 }
1299 #endif
1300 
1301 
1302 /* Finalization and Weak References */
1303 
1304 /* The design of this mechanism is very close to the one described in
1305    "Stretching the storage manager: weak pointers and stable names in
1306    Haskell" by Peyton Jones, Marlow, and Elliott (at
1307    www.research.microsoft.com/Users/simonpj/papers/weak.ps.gz). --LT */
1308 
1309 static SEXP R_weak_refs = NULL;
1310 
1311 #define READY_TO_FINALIZE_MASK 1
1312 
1313 #define SET_READY_TO_FINALIZE(s) ((s)->sxpinfo.gp |= READY_TO_FINALIZE_MASK)
1314 #define CLEAR_READY_TO_FINALIZE(s) ((s)->sxpinfo.gp &= ~READY_TO_FINALIZE_MASK)
1315 #define IS_READY_TO_FINALIZE(s) ((s)->sxpinfo.gp & READY_TO_FINALIZE_MASK)
1316 
1317 #define FINALIZE_ON_EXIT_MASK 2
1318 
1319 #define SET_FINALIZE_ON_EXIT(s) ((s)->sxpinfo.gp |= FINALIZE_ON_EXIT_MASK)
1320 #define CLEAR_FINALIZE_ON_EXIT(s) ((s)->sxpinfo.gp &= ~FINALIZE_ON_EXIT_MASK)
1321 #define FINALIZE_ON_EXIT(s) ((s)->sxpinfo.gp & FINALIZE_ON_EXIT_MASK)
1322 
1323 #define WEAKREF_SIZE 4
1324 #define WEAKREF_KEY(w) VECTOR_ELT(w, 0)
1325 #define SET_WEAKREF_KEY(w, k) SET_VECTOR_ELT(w, 0, k)
1326 #define WEAKREF_VALUE(w) VECTOR_ELT(w, 1)
1327 #define SET_WEAKREF_VALUE(w, v) SET_VECTOR_ELT(w, 1, v)
1328 #define WEAKREF_FINALIZER(w) VECTOR_ELT(w, 2)
1329 #define SET_WEAKREF_FINALIZER(w, f) SET_VECTOR_ELT(w, 2, f)
1330 #define WEAKREF_NEXT(w) VECTOR_ELT(w, 3)
1331 #define SET_WEAKREF_NEXT(w, n) SET_VECTOR_ELT(w, 3, n)
1332 
1333 static SEXP MakeCFinalizer(R_CFinalizer_t cfun);
1334 
1335 static SEXP NewWeakRef(SEXP key, SEXP val, SEXP fin, Rboolean onexit)
1336 {
1337     SEXP w;
1338 
1339     switch (TYPEOF(key)) {
1340     case NILSXP:
1341     case ENVSXP:
1342     case EXTPTRSXP:
1343     case BCODESXP:
1344 	break;
1345     default: error(_("can only weakly reference/finalize reference objects"));
1346     }
1347 
1348     PROTECT(key);
1349     PROTECT(val = MAYBE_REFERENCED(val) ? duplicate(val) : val);
1350     PROTECT(fin);
1351     w = allocVector(VECSXP, WEAKREF_SIZE);
1352     SET_TYPEOF(w, WEAKREFSXP);
1353     if (key != R_NilValue) {
1354 	/* If the key is R_NilValue we don't register the weak reference.
1355 	   This is used in loading saved images. */
1356 	SET_WEAKREF_KEY(w, key);
1357 	SET_WEAKREF_VALUE(w, val);
1358 	SET_WEAKREF_FINALIZER(w, fin);
1359 	SET_WEAKREF_NEXT(w, R_weak_refs);
1360 	CLEAR_READY_TO_FINALIZE(w);
1361 	if (onexit)
1362 	    SET_FINALIZE_ON_EXIT(w);
1363 	else
1364 	    CLEAR_FINALIZE_ON_EXIT(w);
1365 	R_weak_refs = w;
1366     }
1367     UNPROTECT(3);
1368     return w;
1369 }
1370 
1371 SEXP R_MakeWeakRef(SEXP key, SEXP val, SEXP fin, Rboolean onexit)
1372 {
1373     switch (TYPEOF(fin)) {
1374     case NILSXP:
1375     case CLOSXP:
1376     case BUILTINSXP:
1377     case SPECIALSXP:
1378 	break;
1379     default: error(_("finalizer must be a function or NULL"));
1380     }
1381     return NewWeakRef(key, val, fin, onexit);
1382 }
1383 
1384 SEXP R_MakeWeakRefC(SEXP key, SEXP val, R_CFinalizer_t fin, Rboolean onexit)
1385 {
1386     SEXP w;
1387     PROTECT(key);
1388     PROTECT(val);
1389     w = NewWeakRef(key, val, MakeCFinalizer(fin), onexit);
1390     UNPROTECT(2);
1391     return w;
1392 }
1393 
1394 static Rboolean R_finalizers_pending = FALSE;
1395 static void CheckFinalizers(void)
1396 {
1397     SEXP s;
1398     R_finalizers_pending = FALSE;
1399     for (s = R_weak_refs; s != R_NilValue; s = WEAKREF_NEXT(s)) {
1400 	if (! NODE_IS_MARKED(WEAKREF_KEY(s)) && ! IS_READY_TO_FINALIZE(s))
1401 	    SET_READY_TO_FINALIZE(s);
1402 	if (IS_READY_TO_FINALIZE(s))
1403 	    R_finalizers_pending = TRUE;
1404     }
1405 }
1406 
1407 /* C finalizers are stored in a CHARSXP.  It would be nice if we could
1408    use EXTPTRSXP's but these only hold a void *, and function pointers
1409    are not guaranteed to be compatible with a void *.  There should be
1410    a cleaner way of doing this, but this will do for now. --LT */
1411 /* Changed to RAWSXP in 2.8.0 */
1412 static Rboolean isCFinalizer(SEXP fun)
1413 {
1414     return TYPEOF(fun) == RAWSXP;
1415     /*return TYPEOF(fun) == EXTPTRSXP;*/
1416 }
1417 
1418 static SEXP MakeCFinalizer(R_CFinalizer_t cfun)
1419 {
1420     SEXP s = allocVector(RAWSXP, sizeof(R_CFinalizer_t));
1421     *((R_CFinalizer_t *) RAW(s)) = cfun;
1422     return s;
1423     /*return R_MakeExternalPtr((void *) cfun, R_NilValue, R_NilValue);*/
1424 }
1425 
1426 static R_CFinalizer_t GetCFinalizer(SEXP fun)
1427 {
1428     return *((R_CFinalizer_t *) RAW(fun));
1429     /*return (R_CFinalizer_t) R_ExternalPtrAddr(fun);*/
1430 }
1431 
1432 SEXP R_WeakRefKey(SEXP w)
1433 {
1434     if (TYPEOF(w) != WEAKREFSXP)
1435 	error(_("not a weak reference"));
1436     return WEAKREF_KEY(w);
1437 }
1438 
1439 SEXP R_WeakRefValue(SEXP w)
1440 {
1441     SEXP v;
1442     if (TYPEOF(w) != WEAKREFSXP)
1443 	error(_("not a weak reference"));
1444     v = WEAKREF_VALUE(w);
1445     if (v != R_NilValue)
1446 	ENSURE_NAMEDMAX(v);
1447     return v;
1448 }
1449 
1450 void R_RunWeakRefFinalizer(SEXP w)
1451 {
1452     SEXP key, fun, e;
1453     if (TYPEOF(w) != WEAKREFSXP)
1454 	error(_("not a weak reference"));
1455     key = WEAKREF_KEY(w);
1456     fun = WEAKREF_FINALIZER(w);
1457     SET_WEAKREF_KEY(w, R_NilValue);
1458     SET_WEAKREF_VALUE(w, R_NilValue);
1459     SET_WEAKREF_FINALIZER(w, R_NilValue);
1460     if (! IS_READY_TO_FINALIZE(w))
1461 	SET_READY_TO_FINALIZE(w); /* insures removal from list on next gc */
1462     PROTECT(key);
1463     PROTECT(fun);
1464     int oldintrsusp = R_interrupts_suspended;
1465     R_interrupts_suspended = TRUE;
1466     if (isCFinalizer(fun)) {
1467 	/* Must be a C finalizer. */
1468 	R_CFinalizer_t cfun = GetCFinalizer(fun);
1469 	cfun(key);
1470     }
1471     else if (fun != R_NilValue) {
1472 	/* An R finalizer. */
1473 	PROTECT(e = LCONS(fun, LCONS(key, R_NilValue)));
1474 	eval(e, R_GlobalEnv);
1475 	UNPROTECT(1);
1476     }
1477     R_interrupts_suspended = oldintrsusp;
1478     UNPROTECT(2);
1479 }
1480 
1481 static Rboolean RunFinalizers(void)
1482 {
1483     R_CHECK_THREAD;
1484     /* Prevent this function from running again when already in
1485        progress. Jumps can only occur inside the top level context
1486        where they will be caught, so the flag is guaranteed to be
1487        reset at the end. */
1488     static Rboolean running = FALSE;
1489     if (running) return FALSE;
1490     running = TRUE;
1491 
1492     volatile SEXP s, last;
1493     volatile Rboolean finalizer_run = FALSE;
1494 
1495     for (s = R_weak_refs, last = R_NilValue; s != R_NilValue;) {
1496 	SEXP next = WEAKREF_NEXT(s);
1497 	if (IS_READY_TO_FINALIZE(s)) {
1498 	    /**** use R_ToplevelExec here? */
1499 	    RCNTXT thiscontext;
1500 	    RCNTXT * volatile saveToplevelContext;
1501 	    volatile int savestack;
1502 	    volatile SEXP topExp, oldHStack, oldRStack, oldRVal;
1503 	    volatile Rboolean oldvis;
1504 	    PROTECT(oldHStack = R_HandlerStack);
1505 	    PROTECT(oldRStack = R_RestartStack);
1506 	    PROTECT(oldRVal = R_ReturnedValue);
1507 	    oldvis = R_Visible;
1508 	    R_HandlerStack = R_NilValue;
1509 	    R_RestartStack = R_NilValue;
1510 
1511 	    finalizer_run = TRUE;
1512 
1513 	    /* A top level context is established for the finalizer to
1514 	       insure that any errors that might occur do not spill
1515 	       into the call that triggered the collection. */
1516 	    begincontext(&thiscontext, CTXT_TOPLEVEL, R_NilValue, R_GlobalEnv,
1517 			 R_BaseEnv, R_NilValue, R_NilValue);
1518 	    saveToplevelContext = R_ToplevelContext;
1519 	    PROTECT(topExp = R_CurrentExpr);
1520 	    savestack = R_PPStackTop;
1521 	    /* The value of 'next' is protected to make it safe
1522 	       for this routine to be called recursively from a
1523 	       gc triggered by a finalizer. */
1524 	    PROTECT(next);
1525 	    if (! SETJMP(thiscontext.cjmpbuf)) {
1526 		R_GlobalContext = R_ToplevelContext = &thiscontext;
1527 
1528 		/* The entry in the weak reference list is removed
1529 		   before running the finalizer.  This insures that a
1530 		   finalizer is run only once, even if running it
1531 		   raises an error. */
1532 		if (last == R_NilValue)
1533 		    R_weak_refs = next;
1534 		else
1535 		    SET_WEAKREF_NEXT(last, next);
1536 		R_RunWeakRefFinalizer(s);
1537 	    }
1538 	    endcontext(&thiscontext);
1539 	    UNPROTECT(1); /* next */
1540 	    R_ToplevelContext = saveToplevelContext;
1541 	    R_PPStackTop = savestack;
1542 	    R_CurrentExpr = topExp;
1543 	    R_HandlerStack = oldHStack;
1544 	    R_RestartStack = oldRStack;
1545 	    R_ReturnedValue = oldRVal;
1546 	    R_Visible = oldvis;
1547 	    UNPROTECT(4);/* topExp, oldRVal, oldRStack, oldHStack */
1548 	}
1549 	else last = s;
1550 	s = next;
1551     }
1552     running = FALSE;
1553     R_finalizers_pending = FALSE;
1554     return finalizer_run;
1555 }
1556 
1557 void R_RunExitFinalizers(void)
1558 {
1559     SEXP s;
1560 
1561     R_checkConstants(TRUE);
1562 
1563     for (s = R_weak_refs; s != R_NilValue; s = WEAKREF_NEXT(s))
1564 	if (FINALIZE_ON_EXIT(s))
1565 	    SET_READY_TO_FINALIZE(s);
1566     RunFinalizers();
1567 }
1568 
1569 void R_RunPendingFinalizers(void)
1570 {
1571     if (R_finalizers_pending)
1572 	RunFinalizers();
1573 }
1574 
1575 void R_RegisterFinalizerEx(SEXP s, SEXP fun, Rboolean onexit)
1576 {
1577     R_MakeWeakRef(s, R_NilValue, fun, onexit);
1578 }
1579 
1580 void R_RegisterFinalizer(SEXP s, SEXP fun)
1581 {
1582     R_RegisterFinalizerEx(s, fun, FALSE);
1583 }
1584 
1585 void R_RegisterCFinalizerEx(SEXP s, R_CFinalizer_t fun, Rboolean onexit)
1586 {
1587     R_MakeWeakRefC(s, R_NilValue, fun, onexit);
1588 }
1589 
1590 void R_RegisterCFinalizer(SEXP s, R_CFinalizer_t fun)
1591 {
1592     R_RegisterCFinalizerEx(s, fun, FALSE);
1593 }
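
/* Illustrative sketch (not part of this file): typical use of the
   registration API above from package C code, tying malloc'd memory to
   an external pointer; 'finalize_buf' and 'buf' are hypothetical.

       static void finalize_buf(SEXP p)
       {
	   void *buf = R_ExternalPtrAddr(p);
	   if (buf) { free(buf); R_ClearExternalPtr(p); }
       }
       ...
       SEXP ptr = PROTECT(R_MakeExternalPtr(buf, R_NilValue, R_NilValue));
       R_RegisterCFinalizerEx(ptr, finalize_buf, TRUE);
       UNPROTECT(1);
*/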
1594 
1595 /* R interface function */
1596 
1597 SEXP attribute_hidden do_regFinaliz(SEXP call, SEXP op, SEXP args, SEXP rho)
1598 {
1599     int onexit;
1600 
1601     checkArity(op, args);
1602 
1603     if (TYPEOF(CAR(args)) != ENVSXP && TYPEOF(CAR(args)) != EXTPTRSXP)
1604 	error(_("first argument must be environment or external pointer"));
1605     if (TYPEOF(CADR(args)) != CLOSXP)
1606 	error(_("second argument must be a function"));
1607 
1608     onexit = asLogical(CADDR(args));
1609     if(onexit == NA_LOGICAL)
1610 	error(_("third argument must be 'TRUE' or 'FALSE'"));
1611 
1612     R_RegisterFinalizerEx(CAR(args), CADR(args), onexit);
1613     return R_NilValue;
1614 }
1615 
1616 
1617 /* The Generational Collector. */
1618 
1619 #define PROCESS_NODES() do { \
1620     while (forwarded_nodes != NULL) { \
1621 	s = forwarded_nodes; \
1622 	forwarded_nodes = NEXT_NODE(forwarded_nodes); \
1623 	SNAP_NODE(s, R_GenHeap[NODE_CLASS(s)].Old[NODE_GENERATION(s)]); \
1624 	R_GenHeap[NODE_CLASS(s)].OldCount[NODE_GENERATION(s)]++; \
1625 	FORWARD_CHILDREN(s); \
1626     } \
1627 } while (0)
1628 
1629 static int RunGenCollect(R_size_t size_needed)
1630 {
1631     int i, gen, gens_collected;
1632     RCNTXT *ctxt;
1633     SEXP s;
1634     SEXP forwarded_nodes;
1635 
1636     bad_sexp_type_seen = 0;
1637 
1638     /* determine number of generations to collect */
1639     while (num_old_gens_to_collect < NUM_OLD_GENERATIONS) {
1640 	if (collect_counts[num_old_gens_to_collect]-- <= 0) {
1641 	    collect_counts[num_old_gens_to_collect] =
1642 		collect_counts_max[num_old_gens_to_collect];
1643 	    num_old_gens_to_collect++;
1644 	}
1645 	else break;
1646     }
1647 
1648 #ifdef PROTECTCHECK
1649     num_old_gens_to_collect = NUM_OLD_GENERATIONS;
1650 #endif
1651 
1652  again:
1653     gens_collected = num_old_gens_to_collect;
1654 
1655 #ifndef EXPEL_OLD_TO_NEW
1656     /* eliminate old-to-new references in generations to collect by
1657        transferring referenced nodes to referring generation */
1658     for (gen = 0; gen < num_old_gens_to_collect; gen++) {
1659 	for (i = 0; i < NUM_NODE_CLASSES; i++) {
1660 	    s = NEXT_NODE(R_GenHeap[i].OldToNew[gen]);
1661 	    while (s != R_GenHeap[i].OldToNew[gen]) {
1662 		SEXP next = NEXT_NODE(s);
1663 		DO_CHILDREN(s, AgeNodeAndChildren, gen);
1664 		UNSNAP_NODE(s);
1665 		if (NODE_GENERATION(s) != gen)
1666 		    gc_error("****snapping into wrong generation\n");
1667 		SNAP_NODE(s, R_GenHeap[i].Old[gen]);
1668 		s = next;
1669 	    }
1670 	}
1671     }
1672 #endif
1673 
1674     DEBUG_CHECK_NODE_COUNTS("at start");
1675 
1676     /* unmark all marked nodes in old generations to be collected and
1677        move to New space */
1678     for (gen = 0; gen < num_old_gens_to_collect; gen++) {
1679 	for (i = 0; i < NUM_NODE_CLASSES; i++) {
1680 	    R_GenHeap[i].OldCount[gen] = 0;
1681 	    s = NEXT_NODE(R_GenHeap[i].Old[gen]);
1682 	    while (s != R_GenHeap[i].Old[gen]) {
1683 		SEXP next = NEXT_NODE(s);
1684 		if (gen < NUM_OLD_GENERATIONS - 1)
1685 		    SET_NODE_GENERATION(s, gen + 1);
1686 		UNMARK_NODE(s);
1687 		s = next;
1688 	    }
1689 	    if (NEXT_NODE(R_GenHeap[i].Old[gen]) != R_GenHeap[i].Old[gen])
1690 		BULK_MOVE(R_GenHeap[i].Old[gen], R_GenHeap[i].New);
1691 	}
1692     }
1693 
1694     forwarded_nodes = NULL;
1695 
1696 #ifndef EXPEL_OLD_TO_NEW
1697     /* scan nodes in uncollected old generations with old-to-new pointers */
1698     for (gen = num_old_gens_to_collect; gen < NUM_OLD_GENERATIONS; gen++)
1699 	for (i = 0; i < NUM_NODE_CLASSES; i++)
1700 	    for (s = NEXT_NODE(R_GenHeap[i].OldToNew[gen]);
1701 		 s != R_GenHeap[i].OldToNew[gen];
1702 		 s = NEXT_NODE(s))
1703 		FORWARD_CHILDREN(s);
1704 #endif
1705 
1706     /* forward all roots */
1707     FORWARD_NODE(R_NilValue);	           /* Builtin constants */
1708     FORWARD_NODE(NA_STRING);
1709     FORWARD_NODE(R_BlankString);
1710     FORWARD_NODE(R_BlankScalarString);
1711     FORWARD_NODE(R_CurrentExpression);
1712     FORWARD_NODE(R_UnboundValue);
1713     FORWARD_NODE(R_RestartToken);
1714     FORWARD_NODE(R_MissingArg);
1715     FORWARD_NODE(R_InBCInterpreter);
1716 
1717     FORWARD_NODE(R_GlobalEnv);	           /* Global environment */
1718     FORWARD_NODE(R_BaseEnv);
1719     FORWARD_NODE(R_EmptyEnv);
1720     FORWARD_NODE(R_Warnings);	           /* Warnings, if any */
1721     FORWARD_NODE(R_ReturnedValue);
1722 
1723     FORWARD_NODE(R_HandlerStack);          /* Condition handler stack */
1724     FORWARD_NODE(R_RestartStack);          /* Available restarts stack */
1725 
1726     FORWARD_NODE(R_BCbody);                /* Current byte code object */
1727     FORWARD_NODE(R_Srcref);                /* Current source reference */
1728 
1729     FORWARD_NODE(R_TrueValue);
1730     FORWARD_NODE(R_FalseValue);
1731     FORWARD_NODE(R_LogicalNAValue);
1732 
1733     FORWARD_NODE(R_print.na_string);
1734     FORWARD_NODE(R_print.na_string_noquote);
1735 
1736     if (R_SymbolTable != NULL)             /* in case of GC during startup */
1737 	for (i = 0; i < HSIZE; i++) {      /* Symbol table */
1738 	    FORWARD_NODE(R_SymbolTable[i]);
1739 	    SEXP s;
1740 	    for (s = R_SymbolTable[i]; s != R_NilValue; s = CDR(s))
1741 		if (ATTRIB(CAR(s)) != R_NilValue)
1742 		    gc_error("****found a symbol with attributes\n");
1743 	}
1744 
1745     if (R_CurrentExpr != NULL)	           /* Current expression */
1746 	FORWARD_NODE(R_CurrentExpr);
1747 
1748     for (i = 0; i < R_MaxDevices; i++) {   /* Device display lists */
1749 	pGEDevDesc gdd = GEgetDevice(i);
1750 	if (gdd) {
1751 	    FORWARD_NODE(gdd->displayList);
1752 	    FORWARD_NODE(gdd->savedSnapshot);
1753 	    if (gdd->dev)
1754 		FORWARD_NODE(gdd->dev->eventEnv);
1755 	}
1756     }
1757 
1758     for (ctxt = R_GlobalContext ; ctxt != NULL ; ctxt = ctxt->nextcontext) {
1759 	FORWARD_NODE(ctxt->conexit);       /* on.exit expressions */
1760 	FORWARD_NODE(ctxt->promargs);	   /* promises supplied to closure */
1761 	FORWARD_NODE(ctxt->callfun);       /* the closure called */
1762 	FORWARD_NODE(ctxt->sysparent);     /* calling environment */
1763 	FORWARD_NODE(ctxt->call);          /* the call */
1764 	FORWARD_NODE(ctxt->cloenv);        /* the closure environment */
1765 	FORWARD_NODE(ctxt->bcbody);        /* the current byte code object */
1766 	FORWARD_NODE(ctxt->handlerstack);  /* the condition handler stack */
1767 	FORWARD_NODE(ctxt->restartstack);  /* the available restarts stack */
1768 	FORWARD_NODE(ctxt->srcref);	   /* the current source reference */
1769 	FORWARD_NODE(ctxt->returnValue);   /* For on.exit calls */
1770     }
1771 
1772     FORWARD_NODE(R_PreciousList);
1773 
1774     for (i = 0; i < R_PPStackTop; i++)	   /* Protected pointers */
1775 	FORWARD_NODE(R_PPStack[i]);
1776 
1777     FORWARD_NODE(R_VStack);		   /* R_alloc stack */
1778 
1779     for (R_bcstack_t *sp = R_BCNodeStackBase; sp < R_BCNodeStackTop; sp++) {
1780 	if (sp->tag == RAWMEM_TAG)
1781 	    sp += sp->u.ival;
1782 	else if (sp->tag == 0 || IS_PARTIAL_SXP_TAG(sp->tag))
1783 	    FORWARD_NODE(sp->u.sxpval);
1784     }
1785 
1786     /* main processing loop */
1787     PROCESS_NODES();
1788 
1789     /* identify weakly reachable nodes */
1790     {
1791 	Rboolean recheck_weak_refs;
1792 	do {
1793 	    recheck_weak_refs = FALSE;
1794 	    for (s = R_weak_refs; s != R_NilValue; s = WEAKREF_NEXT(s)) {
1795 		if (NODE_IS_MARKED(WEAKREF_KEY(s))) {
1796 		    if (! NODE_IS_MARKED(WEAKREF_VALUE(s))) {
1797 			recheck_weak_refs = TRUE;
1798 			FORWARD_NODE(WEAKREF_VALUE(s));
1799 		    }
1800 		    if (! NODE_IS_MARKED(WEAKREF_FINALIZER(s))) {
1801 			recheck_weak_refs = TRUE;
1802 			FORWARD_NODE(WEAKREF_FINALIZER(s));
1803 		    }
1804 		}
1805 	    }
1806 	    PROCESS_NODES();
1807 	} while (recheck_weak_refs);
1808     }
1809 
1810     /* mark nodes ready for finalizing */
1811     CheckFinalizers();
1812 
1813     /* process the weak reference chain */
1814     for (s = R_weak_refs; s != R_NilValue; s = WEAKREF_NEXT(s)) {
1815 	FORWARD_NODE(s);
1816 	FORWARD_NODE(WEAKREF_KEY(s));
1817 	FORWARD_NODE(WEAKREF_VALUE(s));
1818 	FORWARD_NODE(WEAKREF_FINALIZER(s));
1819     }
1820     PROCESS_NODES();
1821 
1822     DEBUG_CHECK_NODE_COUNTS("after processing forwarded list");
1823 
1824     /* process CHARSXP cache */
1825     if (R_StringHash != NULL) /* in case of GC during initialization */
1826     {
1827 	SEXP t;
1828 	int nc = 0;
1829 	for (i = 0; i < LENGTH(R_StringHash); i++) {
1830 	    s = VECTOR_ELT(R_StringHash, i);
1831 	    t = R_NilValue;
1832 	    while (s != R_NilValue) {
1833 		if (! NODE_IS_MARKED(CXHEAD(s))) { /* remove unused CHARSXP and cons cell */
1834 		    if (t == R_NilValue) /* head of list */
1835 			VECTOR_ELT(R_StringHash, i) = CXTAIL(s);
1836 		    else
1837 			CXTAIL(t) = CXTAIL(s);
1838 		    s = CXTAIL(s);
1839 		    continue;
1840 		}
1841 		FORWARD_NODE(s);
1842 		FORWARD_NODE(CXHEAD(s));
1843 		t = s;
1844 		s = CXTAIL(s);
1845 	    }
1846 	    if(VECTOR_ELT(R_StringHash, i) != R_NilValue) nc++;
1847 	}
1848 	SET_TRUELENGTH(R_StringHash, nc); /* SET_HASHPRI, really */
1849     }
1850     FORWARD_NODE(R_StringHash);
1851     PROCESS_NODES();
1852 
1853 #ifdef PROTECTCHECK
1854     for (i = 0; i < NUM_SMALL_NODE_CLASSES; i++) {
1855 	s = NEXT_NODE(R_GenHeap[i].New);
1856 	while (s != R_GenHeap[i].New) {
1857 	    SEXP next = NEXT_NODE(s);
1858 	    if (TYPEOF(s) != NEWSXP) {
1859 		if (TYPEOF(s) != FREESXP) {
1860 		    SETOLDTYPE(s, TYPEOF(s));
1861 		    SET_TYPEOF(s, FREESXP);
1862 		}
1863 		if (gc_inhibit_release)
1864 		    FORWARD_NODE(s);
1865 	    }
1866 	    s = next;
1867 	}
1868     }
1869     for (i = CUSTOM_NODE_CLASS; i <= LARGE_NODE_CLASS; i++) {
1870 	s = NEXT_NODE(R_GenHeap[i].New);
1871 	while (s != R_GenHeap[i].New) {
1872 	    SEXP next = NEXT_NODE(s);
1873 	    if (TYPEOF(s) != NEWSXP) {
1874 		if (TYPEOF(s) != FREESXP) {
1875 		    /**** could also leave this alone and restore the old
1876 			  node type in ReleaseLargeFreeVectors before
1877 			  calculating size */
1878 		    if (CHAR(s) != NULL) {
1879 			R_size_t size = getVecSizeInVEC(s);
1880 			SET_STDVEC_LENGTH(s, size);
1881 		    }
1882 		    SETOLDTYPE(s, TYPEOF(s));
1883 		    SET_TYPEOF(s, FREESXP);
1884 		}
1885 		if (gc_inhibit_release)
1886 		    FORWARD_NODE(s);
1887 	    }
1888 	    s = next;
1889 	}
1890     }
1891     if (gc_inhibit_release)
1892 	PROCESS_NODES();
1893 #endif
1894 
1895     /* release large vector allocations */
1896     ReleaseLargeFreeVectors();
1897 
1898     DEBUG_CHECK_NODE_COUNTS("after releasing large allocated nodes");
1899 
1900     /* tell Valgrind about free nodes */
1901 #if VALGRIND_LEVEL > 1
1902     for(i = 1; i< NUM_NODE_CLASSES; i++) {
1903 	for(s = NEXT_NODE(R_GenHeap[i].New);
1904 	    s != R_GenHeap[i].Free;
1905 	    s = NEXT_NODE(s)) {
1906 	    VALGRIND_MAKE_MEM_NOACCESS(STDVEC_DATAPTR(s),
1907 				       NodeClassSize[i]*sizeof(VECREC));
1908 	}
1909     }
1910 #endif
1911 
1912     /* reset Free pointers */
1913     for (i = 0; i < NUM_NODE_CLASSES; i++)
1914 	R_GenHeap[i].Free = NEXT_NODE(R_GenHeap[i].New);
1915 
1916 
1917     /* update heap statistics */
1918     R_Collected = R_NSize;
1919     R_SmallVallocSize = 0;
1920     for (gen = 0; gen < NUM_OLD_GENERATIONS; gen++) {
1921 	for (i = 1; i < NUM_SMALL_NODE_CLASSES; i++)
1922 	    R_SmallVallocSize += R_GenHeap[i].OldCount[gen] * NodeClassSize[i];
1923 	for (i = 0; i < NUM_NODE_CLASSES; i++)
1924 	    R_Collected -= R_GenHeap[i].OldCount[gen];
1925     }
1926     R_NodesInUse = R_NSize - R_Collected;
1927 
1928     if (num_old_gens_to_collect < NUM_OLD_GENERATIONS) {
1929 	if (R_Collected < R_MinFreeFrac * R_NSize ||
1930 	    VHEAP_FREE() < size_needed + R_MinFreeFrac * R_VSize) {
1931 	    num_old_gens_to_collect++;
1932 	    if (R_Collected <= 0 || VHEAP_FREE() < size_needed)
1933 		goto again;
1934 	}
1935 	else num_old_gens_to_collect = 0;
1936     }
1937     else num_old_gens_to_collect = 0;
1938 
1939     gen_gc_counts[gens_collected]++;
1940 
1941     if (gens_collected == NUM_OLD_GENERATIONS) {
1942 	/**** do some adjustment for intermediate collections? */
1943 	AdjustHeapSize(size_needed);
1944 	TryToReleasePages();
1945 	DEBUG_CHECK_NODE_COUNTS("after heap adjustment");
1946     }
1947     else if (gens_collected > 0) {
1948 	TryToReleasePages();
1949 	DEBUG_CHECK_NODE_COUNTS("after heap adjustment");
1950     }
1951 #ifdef SORT_NODES
1952     if (gens_collected == NUM_OLD_GENERATIONS)
1953 	SortNodes();
1954 #endif
1955 
1956     return gens_collected;
1957 }
1958 
1959 
1960 /* public interface for controlling GC torture settings */
1961 /* (intended as a public interface, but not declared in any header) */
1962 void R_gc_torture(int gap, int wait, Rboolean inhibit)
1963 {
1964     if (gap != NA_INTEGER && gap >= 0)
1965 	gc_force_wait = gc_force_gap = gap;
1966     if (gap > 0) {
1967 	if (wait != NA_INTEGER && wait > 0)
1968 	    gc_force_wait = wait;
1969     }
1970 #ifdef PROTECTCHECK
1971     if (gap > 0) {
1972 	if (inhibit != NA_LOGICAL)
1973 	    gc_inhibit_release = inhibit;
1974     }
1975     else gc_inhibit_release = FALSE;
1976 #endif
1977 }
1978 
1979 SEXP attribute_hidden do_gctorture(SEXP call, SEXP op, SEXP args, SEXP rho)
1980 {
1981     int gap;
1982     SEXP old = ScalarLogical(gc_force_wait > 0);
1983 
1984     checkArity(op, args);
1985 
1986     if (isLogical(CAR(args))) {
1987 	Rboolean on = asLogical(CAR(args));
1988 	if (on == NA_LOGICAL) gap = NA_INTEGER;
1989 	else if (on) gap = 1;
1990 	else gap = 0;
1991     }
1992     else gap = asInteger(CAR(args));
1993 
1994     R_gc_torture(gap, 0, FALSE);
1995 
1996     return old;
1997 }
1998 
1999 SEXP attribute_hidden do_gctorture2(SEXP call, SEXP op, SEXP args, SEXP rho)
2000 {
2001     int gap, wait;
2002     Rboolean inhibit;
2003     int old = gc_force_gap;
2004 
2005     checkArity(op, args);
2006     gap = asInteger(CAR(args));
2007     wait = asInteger(CADR(args));
2008     inhibit = asLogical(CADDR(args));
2009     R_gc_torture(gap, wait, inhibit);
2010 
2011     return ScalarInteger(old);
2012 }
2013 
2014 /* initialize gctorture settings from environment variables */
2015 static void init_gctorture(void)
2016 {
2017     char *arg = getenv("R_GCTORTURE");
2018     if (arg != NULL) {
2019 	int gap = atoi(arg);
2020 	if (gap > 0) {
2021 	    gc_force_wait = gc_force_gap = gap;
2022 	    arg = getenv("R_GCTORTURE_WAIT");
2023 	    if (arg != NULL) {
2024 		int wait = atoi(arg);
2025 		if (wait > 0)
2026 		    gc_force_wait = wait;
2027 	    }
2028 #ifdef PROTECTCHECK
2029 	    arg = getenv("R_GCTORTURE_INHIBIT_RELEASE");
2030 	    if (arg != NULL) {
2031 		int inhibit = atoi(arg);
2032 		if (inhibit > 0) gc_inhibit_release = TRUE;
2033 		else gc_inhibit_release = FALSE;
2034 	    }
2035 #endif
2036 	}
2037     }
2038 }
2039 
2040 SEXP attribute_hidden do_gcinfo(SEXP call, SEXP op, SEXP args, SEXP rho)
2041 {
2042     int i;
2043     SEXP old = ScalarLogical(gc_reporting);
2044     checkArity(op, args);
2045     i = asLogical(CAR(args));
2046     if (i != NA_LOGICAL)
2047 	gc_reporting = i;
2048     return old;
2049 }
2050 
2051 /* reports memory use to profiler in eval.c */
2052 
2053 void attribute_hidden get_current_mem(size_t *smallvsize,
2054 				      size_t *largevsize,
2055 				      size_t *nodes)
2056 {
2057     *smallvsize = R_SmallVallocSize;
2058     *largevsize = R_LargeVallocSize;
2059     *nodes = R_NodesInUse * sizeof(SEXPREC);
2060     return;
2061 }
2062 
2063 SEXP attribute_hidden do_gc(SEXP call, SEXP op, SEXP args, SEXP rho)
2064 {
2065     SEXP value;
2066     int ogc, reset_max, full;
2067     R_size_t onsize = R_NSize /* can change during collection */;
2068 
2069     checkArity(op, args);
2070     ogc = gc_reporting;
2071     gc_reporting = asLogical(CAR(args));
2072     reset_max = asLogical(CADR(args));
2073     full = asLogical(CADDR(args));
2074     if (full)
2075 	R_gc();
2076     else
2077 	R_gc_lite();
2078 
2079     gc_reporting = ogc;
2080     /*- now return the [used , gc trigger size] for cells and heap */
2081     PROTECT(value = allocVector(REALSXP, 14));
2082     REAL(value)[0] = onsize - R_Collected;
2083     REAL(value)[1] = R_VSize - VHEAP_FREE();
2084     REAL(value)[4] = R_NSize;
2085     REAL(value)[5] = R_VSize;
2086     /* next four are in 0.1Mb, rounded up */
2087     REAL(value)[2] = 0.1*ceil(10. * (onsize - R_Collected)/Mega * sizeof(SEXPREC));
2088     REAL(value)[3] = 0.1*ceil(10. * (R_VSize - VHEAP_FREE())/Mega * vsfac);
2089     REAL(value)[6] = 0.1*ceil(10. * R_NSize/Mega * sizeof(SEXPREC));
2090     REAL(value)[7] = 0.1*ceil(10. * R_VSize/Mega * vsfac);
2091     REAL(value)[8] = (R_MaxNSize < R_SIZE_T_MAX) ?
2092 	0.1*ceil(10. * R_MaxNSize/Mega * sizeof(SEXPREC)) : NA_REAL;
2093     REAL(value)[9] = (R_MaxVSize < R_SIZE_T_MAX) ?
2094 	0.1*ceil(10. * R_MaxVSize/Mega * vsfac) : NA_REAL;
2095     if (reset_max){
2096 	    R_N_maxused = onsize - R_Collected;
2097 	    R_V_maxused = R_VSize - VHEAP_FREE();
2098     }
2099     REAL(value)[10] = R_N_maxused;
2100     REAL(value)[11] = R_V_maxused;
2101     REAL(value)[12] = 0.1*ceil(10. * R_N_maxused/Mega*sizeof(SEXPREC));
2102     REAL(value)[13] = 0.1*ceil(10. * R_V_maxused/Mega*vsfac);
2103     UNPROTECT(1);
2104     return value;
2105 }
2106 
2107 
2108 static void NORET mem_err_heap(R_size_t size)
2109 {
2110     errorcall(R_NilValue, _("vector memory exhausted (limit reached?)"));
2111 }
2112 
2113 
2114 static void NORET mem_err_cons(void)
2115 {
2116     errorcall(R_NilValue, _("cons memory exhausted (limit reached?)"));
2117 }
2118 
2119 static void NORET mem_err_malloc(R_size_t size)
2120 {
2121     errorcall(R_NilValue, _("memory exhausted (limit reached?)"));
2122 }
2123 
2124 /* InitMemory : Initialise the memory to be used in R. */
2125 /* This includes: stack space, node space and vector space */
2126 
2127 #define PP_REDZONE_SIZE 1000L
2128 static int R_StandardPPStackSize, R_RealPPStackSize;
2129 
2130 void attribute_hidden InitMemory()
2131 {
2132     int i;
2133     int gen;
2134     char *arg;
2135 
2136     init_gctorture();
2137     init_gc_grow_settings();
2138 
2139     arg = getenv("_R_GC_FAIL_ON_ERROR_");
2140     if (arg != NULL && StringTrue(arg))
2141 	gc_fail_on_error = TRUE;
2142     else if (arg != NULL && StringFalse(arg))
2143 	gc_fail_on_error = FALSE;
2144 
2145     gc_reporting = R_Verbose;
2146     R_StandardPPStackSize = R_PPStackSize;
2147     R_RealPPStackSize = R_PPStackSize + PP_REDZONE_SIZE;
2148     if (!(R_PPStack = (SEXP *) malloc(R_RealPPStackSize * sizeof(SEXP))))
2149 	R_Suicide("couldn't allocate memory for pointer stack");
2150     R_PPStackTop = 0;
2151 #if VALGRIND_LEVEL > 1
2152     VALGRIND_MAKE_MEM_NOACCESS(R_PPStack+R_PPStackSize, PP_REDZONE_SIZE);
2153 #endif
2154     vsfac = sizeof(VECREC);
2155     R_VSize = (R_VSize + 1)/vsfac;
2156     if (R_MaxVSize < R_SIZE_T_MAX) R_MaxVSize = (R_MaxVSize + 1)/vsfac;
2157 
2158     UNMARK_NODE(&UnmarkedNodeTemplate);
2159 
2160     for (i = 0; i < NUM_NODE_CLASSES; i++) {
2161       for (gen = 0; gen < NUM_OLD_GENERATIONS; gen++) {
2162 	R_GenHeap[i].Old[gen] = &R_GenHeap[i].OldPeg[gen];
2163 	SET_PREV_NODE(R_GenHeap[i].Old[gen], R_GenHeap[i].Old[gen]);
2164 	SET_NEXT_NODE(R_GenHeap[i].Old[gen], R_GenHeap[i].Old[gen]);
2165 
2166 #ifndef EXPEL_OLD_TO_NEW
2167 	R_GenHeap[i].OldToNew[gen] = &R_GenHeap[i].OldToNewPeg[gen];
2168 	SET_PREV_NODE(R_GenHeap[i].OldToNew[gen], R_GenHeap[i].OldToNew[gen]);
2169 	SET_NEXT_NODE(R_GenHeap[i].OldToNew[gen], R_GenHeap[i].OldToNew[gen]);
2170 #endif
2171 
2172 	R_GenHeap[i].OldCount[gen] = 0;
2173       }
2174       R_GenHeap[i].New = &R_GenHeap[i].NewPeg;
2175       SET_PREV_NODE(R_GenHeap[i].New, R_GenHeap[i].New);
2176       SET_NEXT_NODE(R_GenHeap[i].New, R_GenHeap[i].New);
2177     }
2178 
2179     for (i = 0; i < NUM_NODE_CLASSES; i++)
2180 	R_GenHeap[i].Free = NEXT_NODE(R_GenHeap[i].New);
2181 
2182     SET_NODE_CLASS(&UnmarkedNodeTemplate, 0);
2183     orig_R_NSize = R_NSize;
2184     orig_R_VSize = R_VSize;
2185 
2186     /* R_NilValue */
2187     /* THIS MUST BE THE FIRST CONS CELL ALLOCATED */
2188     /* OR ARMAGEDDON HAPPENS. */
2189     /* Field assignments for R_NilValue must not go through the write
2190        barrier, since the write barrier prevents assignments to
2191        R_NilValue's fields because of its checks for nil. */
2192     GET_FREE_NODE(R_NilValue);
2193     R_NilValue->sxpinfo = UnmarkedNodeTemplate.sxpinfo;
2194     INIT_REFCNT(R_NilValue);
2195     SET_REFCNT(R_NilValue, REFCNTMAX);
2196     SET_TYPEOF(R_NilValue, NILSXP);
2197     CAR0(R_NilValue) = R_NilValue;
2198     CDR(R_NilValue) = R_NilValue;
2199     TAG(R_NilValue) = R_NilValue;
2200     ATTRIB(R_NilValue) = R_NilValue;
2201     MARK_NOT_MUTABLE(R_NilValue);
2202 
2203     R_BCNodeStackBase =
2204 	(R_bcstack_t *) malloc(R_BCNODESTACKSIZE * sizeof(R_bcstack_t));
2205     if (R_BCNodeStackBase == NULL)
2206 	R_Suicide("couldn't allocate node stack");
2207     R_BCNodeStackTop = R_BCNodeStackBase;
2208     R_BCNodeStackEnd = R_BCNodeStackBase + R_BCNODESTACKSIZE;
2209     R_BCProtTop = R_BCNodeStackTop;
2210 
2211     R_weak_refs = R_NilValue;
2212 
2213     R_HandlerStack = R_RestartStack = R_NilValue;
2214 
2215     /*  Unbound values which are to be preserved through GCs */
2216     R_PreciousList = R_NilValue;
2217 
2218     /*  The current source line */
2219     R_Srcref = R_NilValue;
2220 
2221     /* R_TrueValue and R_FalseValue */
2222     R_TrueValue = mkTrue();
2223     MARK_NOT_MUTABLE(R_TrueValue);
2224     R_FalseValue = mkFalse();
2225     MARK_NOT_MUTABLE(R_FalseValue);
2226     R_LogicalNAValue = allocVector(LGLSXP, 1);
2227     LOGICAL(R_LogicalNAValue)[0] = NA_LOGICAL;
2228     MARK_NOT_MUTABLE(R_LogicalNAValue);
2229 }
2230 
2231 /* Since memory allocated from the heap is non-moving, R_alloc just
2232    allocates off the heap as RAWSXP/REALSXP and maintains the stack of
2233    allocations through the ATTRIB pointer.  The stack pointer R_VStack
2234    is traced by the collector. */
2235 void *vmaxget(void)
2236 {
2237     return (void *) R_VStack;
2238 }
2239 
2240 void vmaxset(const void *ovmax)
2241 {
2242     R_VStack = (SEXP) ovmax;
2243 }
2244 
2245 char *R_alloc(size_t nelem, int eltsize)
2246 {
2247     R_size_t size = nelem * eltsize;
2248     /* doubles are a precaution against integer overflow on 32-bit */
2249     double dsize = (double) nelem * eltsize;
2250     if (dsize > 0) {
2251 	SEXP s;
2252 #ifdef LONG_VECTOR_SUPPORT
2253 	/* 64-bit platform: previous version used REALSXPs */
2254 	if(dsize > R_XLEN_T_MAX)  /* currently 4096 TB */
2255 	    error(_("cannot allocate memory block of size %0.f Tb"),
2256 		  dsize/R_pow_di(1024.0, 4));
2257 	s = allocVector(RAWSXP, size + 1);
2258 #else
2259 	if(dsize > R_LEN_T_MAX) /* must be in the Gb range */
2260 	    error(_("cannot allocate memory block of size %0.1f Gb"),
2261 		  dsize/R_pow_di(1024.0, 3));
2262 	s = allocVector(RAWSXP, size + 1);
2263 #endif
2264 	ATTRIB(s) = R_VStack;
2265 	R_VStack = s;
2266 	return (char *) DATAPTR(s);
2267     }
2268     /* One programmer has relied on this, but it is undocumented! */
2269     else return NULL;
2270 }
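
/* Illustrative sketch (not part of the R sources): the standard way
   client code pairs R_alloc with vmaxget/vmaxset so that transient
   allocations are released on exit from the routine.  The function
   name and workspace use are hypothetical. */
static double example_sum_with_workspace(double *x, R_xlen_t n)
{
    const void *vmax = vmaxget();   /* remember the R_alloc stack position */
    double *work = (double *) R_alloc(n, sizeof(double));
    double s = 0.0;
    for (R_xlen_t i = 0; i < n; i++) {
	work[i] = x[i];             /* scratch copy */
	s += work[i];
    }
    vmaxset(vmax);                  /* release the scratch allocation */
    return s;
}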
2271 
2272 #ifdef HAVE_STDALIGN_H
2273 # include <stdalign.h>
2274 #endif
2275 
2276 #include <stdint.h>
2277 
2278 long double *R_allocLD(size_t nelem)
2279 {
2280 #if __alignof_is_defined
2281     // This is C11: picky compilers may warn.
2282     size_t ld_align = alignof(long double);
2283 #elif __GNUC__
2284     // This is C99, but do not rely on it.
2285     size_t ld_align = offsetof(struct { char __a; long double __b; }, __b);
2286 #else
2287     size_t ld_align = 0x0F; // value of x86_64, known others are 4 or 8
2288 #endif
2289     if (ld_align > 8) {
2290 	uintptr_t tmp = (uintptr_t) R_alloc(nelem + 1, sizeof(long double));
2291 	tmp = (tmp + ld_align - 1) & ~((uintptr_t)ld_align - 1);
2292 	return (long double *) tmp;
2293     } else {
2294 	return (long double *) R_alloc(nelem, sizeof(long double));
2295     }
2296 }
2297 
2298 
2299 /* S COMPATIBILITY */
2300 
2301 char *S_alloc(long nelem, int eltsize)
2302 {
2303     R_size_t size  = nelem * eltsize;
2304     char *p = R_alloc(nelem, eltsize);
2305 
2306     if(p) memset(p, 0, size);
2307     return p;
2308 }
2309 
2310 
2311 char *S_realloc(char *p, long new, long old, int size)
2312 {
2313     size_t nold;
2314     char *q;
2315     /* shrinking is a no-op */
2316     if(new <= old) return p; // so new > 0 below
2317     q = R_alloc((size_t)new, size);
2318     nold = (size_t)old * size;
2319     memcpy(q, p, nold);
2320     memset(q + nold, 0, (size_t)new*size - nold);
2321     return q;
2322 }
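
/* Illustrative sketch (not part of the R sources): the usual
   S-compatibility pattern, where S_alloc hands back a zero-filled
   buffer and S_realloc grows it, zero-filling only the new tail
   (shrinking just returns the old pointer).  The function name and
   sizes are hypothetical. */
static int *example_grow_buffer(void)
{
    long n = 16;
    int *buf = (int *) S_alloc(n, sizeof(int));   /* all zero */
    /* ... later, more room is needed ... */
    buf = (int *) S_realloc((char *) buf, 2 * n, n, sizeof(int));
    return buf;     /* still managed by the R_alloc stack */
}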
2323 
2324 
2325 /* Allocation functions that GC on initial failure */
2326 
2327 void *R_malloc_gc(size_t n)
2328 {
2329     void *np = malloc(n);
2330     if (np == NULL) {
2331 	R_gc();
2332 	np = malloc(n);
2333     }
2334     return np;
2335 }
2336 
2337 void *R_calloc_gc(size_t n, size_t s)
2338 {
2339     void *np = calloc(n, s);
2340     if (np == NULL) {
2341 	R_gc();
2342 	np = calloc(n, s);
2343     }
2344     return np;
2345 }
2346 
2347 void *R_realloc_gc(void *p, size_t n)
2348 {
2349     void *np = realloc(p, n);
2350     if (np == NULL) {
2351 	R_gc();
2352 	np = realloc(p, n);
2353     }
2354     return np;
2355 }
2356 
2357 
2358 /* "allocSExp" allocate a SEXPREC */
2359 /* call gc if necessary */
2360 
2361 SEXP allocSExp(SEXPTYPE t)
2362 {
2363     SEXP s;
2364     if (FORCE_GC || NO_FREE_NODES()) {
2365 	R_gc_internal(0);
2366 	if (NO_FREE_NODES())
2367 	    mem_err_cons();
2368     }
2369     GET_FREE_NODE(s);
2370     s->sxpinfo = UnmarkedNodeTemplate.sxpinfo;
2371     INIT_REFCNT(s);
2372     SET_TYPEOF(s, t);
2373     CAR0(s) = R_NilValue;
2374     CDR(s) = R_NilValue;
2375     TAG(s) = R_NilValue;
2376     ATTRIB(s) = R_NilValue;
2377     return s;
2378 }
2379 
2380 static SEXP allocSExpNonCons(SEXPTYPE t)
2381 {
2382     SEXP s;
2383     if (FORCE_GC || NO_FREE_NODES()) {
2384 	R_gc_internal(0);
2385 	if (NO_FREE_NODES())
2386 	    mem_err_cons();
2387     }
2388     GET_FREE_NODE(s);
2389     s->sxpinfo = UnmarkedNodeTemplate.sxpinfo;
2390     INIT_REFCNT(s);
2391     SET_TYPEOF(s, t);
2392     TAG(s) = R_NilValue;
2393     ATTRIB(s) = R_NilValue;
2394     return s;
2395 }
2396 
2397 /* cons is defined directly to avoid the need to protect its arguments
2398    unless a GC will actually occur. */
2399 SEXP cons(SEXP car, SEXP cdr)
2400 {
2401     SEXP s;
2402     if (FORCE_GC || NO_FREE_NODES()) {
2403 	PROTECT(car);
2404 	PROTECT(cdr);
2405 	R_gc_internal(0);
2406 	UNPROTECT(2);
2407 	if (NO_FREE_NODES())
2408 	    mem_err_cons();
2409     }
2410 
2411     if (NEED_NEW_PAGE()) {
2412 	PROTECT(car);
2413 	PROTECT(cdr);
2414 	GET_FREE_NODE(s);
2415 	UNPROTECT(2);
2416     }
2417     else
2418 	QUICK_GET_FREE_NODE(s);
2419 
2420     s->sxpinfo = UnmarkedNodeTemplate.sxpinfo;
2421     INIT_REFCNT(s);
2422     SET_TYPEOF(s, LISTSXP);
2423     CAR0(s) = CHK(car); if (car) INCREMENT_REFCNT(car);
2424     CDR(s) = CHK(cdr); if (cdr) INCREMENT_REFCNT(cdr);
2425     TAG(s) = R_NilValue;
2426     ATTRIB(s) = R_NilValue;
2427     return s;
2428 }
2429 
2430 SEXP attribute_hidden CONS_NR(SEXP car, SEXP cdr)
2431 {
2432     SEXP s;
2433     if (FORCE_GC || NO_FREE_NODES()) {
2434 	PROTECT(car);
2435 	PROTECT(cdr);
2436 	R_gc_internal(0);
2437 	UNPROTECT(2);
2438 	if (NO_FREE_NODES())
2439 	    mem_err_cons();
2440     }
2441 
2442     if (NEED_NEW_PAGE()) {
2443 	PROTECT(car);
2444 	PROTECT(cdr);
2445 	GET_FREE_NODE(s);
2446 	UNPROTECT(2);
2447     }
2448     else
2449 	QUICK_GET_FREE_NODE(s);
2450 
2451     s->sxpinfo = UnmarkedNodeTemplate.sxpinfo;
2452     INIT_REFCNT(s);
2453     DISABLE_REFCNT(s);
2454     SET_TYPEOF(s, LISTSXP);
2455     CAR0(s) = CHK(car);
2456     CDR(s) = CHK(cdr);
2457     TAG(s) = R_NilValue;
2458     ATTRIB(s) = R_NilValue;
2459     return s;
2460 }
2461 
2462 /*----------------------------------------------------------------------
2463 
2464   NewEnvironment
2465 
2466   Create an environment by extending "rho" with a frame obtained by
2467   pairing the variable names given by the tags on "namelist" with
2468   the values given by the elements of "valuelist".
2469 
2470   NewEnvironment is defined directly to avoid the need to protect its
2471   arguments unless a GC will actually occur.  This definition allows
2472   the namelist argument to be shorter than the valuelist; in this
2473   case the remaining values must be named already.  (This is useful
2474   in cases where the entire valuelist is already named--namelist can
2475   then be R_NilValue.)
2476 
2477   The valuelist is destructively modified and used as the
2478   environment's frame.
2479 */
2480 SEXP NewEnvironment(SEXP namelist, SEXP valuelist, SEXP rho)
2481 {
2482     SEXP v, n, newrho;
2483 
2484     if (FORCE_GC || NO_FREE_NODES()) {
2485 	PROTECT(namelist);
2486 	PROTECT(valuelist);
2487 	PROTECT(rho);
2488 	R_gc_internal(0);
2489 	UNPROTECT(3);
2490 	if (NO_FREE_NODES())
2491 	    mem_err_cons();
2492     }
2493 
2494     if (NEED_NEW_PAGE()) {
2495 	PROTECT(namelist);
2496 	PROTECT(valuelist);
2497 	PROTECT(rho);
2498 	GET_FREE_NODE(newrho);
2499 	UNPROTECT(3);
2500     }
2501     else
2502 	QUICK_GET_FREE_NODE(newrho);
2503 
2504     newrho->sxpinfo = UnmarkedNodeTemplate.sxpinfo;
2505     INIT_REFCNT(newrho);
2506     SET_TYPEOF(newrho, ENVSXP);
2507     FRAME(newrho) = valuelist; INCREMENT_REFCNT(valuelist);
2508     ENCLOS(newrho) = CHK(rho); if (rho != NULL) INCREMENT_REFCNT(rho);
2509     HASHTAB(newrho) = R_NilValue;
2510     ATTRIB(newrho) = R_NilValue;
2511 
2512     v = CHK(valuelist);
2513     n = CHK(namelist);
2514     while (v != R_NilValue && n != R_NilValue) {
2515 	SET_TAG(v, TAG(n));
2516 	v = CDR(v);
2517 	n = CDR(n);
2518     }
2519     return (newrho);
2520 }
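
/* Illustrative sketch (not part of the R sources): building an
   environment that binds the single variable `x` in enclosure rho.
   The helper name is hypothetical; note that valuelist becomes the
   new frame destructively, exactly as described above. */
static SEXP example_env_with_x(SEXP rho)
{
    SEXP names = PROTECT(CONS(R_NilValue, R_NilValue));
    SET_TAG(names, install("x"));
    SEXP vals = PROTECT(CONS(ScalarReal(1.0), R_NilValue));
    SEXP env = PROTECT(NewEnvironment(names, vals, rho));
    UNPROTECT(3);
    return env;     /* caller is expected to protect as needed */
}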
2521 
2522 /* mkPROMISE is defined directly to avoid the need to protect its arguments
2523    unless a GC will actually occur. */
2524 SEXP attribute_hidden mkPROMISE(SEXP expr, SEXP rho)
2525 {
2526     SEXP s;
2527     if (FORCE_GC || NO_FREE_NODES()) {
2528 	PROTECT(expr);
2529 	PROTECT(rho);
2530 	R_gc_internal(0);
2531 	UNPROTECT(2);
2532 	if (NO_FREE_NODES())
2533 	    mem_err_cons();
2534     }
2535 
2536     if (NEED_NEW_PAGE()) {
2537 	PROTECT(expr);
2538 	PROTECT(rho);
2539 	GET_FREE_NODE(s);
2540 	UNPROTECT(2);
2541     }
2542     else
2543 	QUICK_GET_FREE_NODE(s);
2544 
2545     /* precaution to ensure code does not get modified via
2546        substitute() and the like */
2547     ENSURE_NAMEDMAX(expr);
2548 
2549     s->sxpinfo = UnmarkedNodeTemplate.sxpinfo;
2550     INIT_REFCNT(s);
2551     SET_TYPEOF(s, PROMSXP);
2552     PRCODE(s) = CHK(expr); INCREMENT_REFCNT(expr);
2553     PRENV(s) = CHK(rho); INCREMENT_REFCNT(rho);
2554     PRVALUE(s) = R_UnboundValue;
2555     PRSEEN(s) = 0;
2556     ATTRIB(s) = R_NilValue;
2557     return s;
2558 }
2559 
2560 SEXP R_mkEVPROMISE(SEXP expr, SEXP val)
2561 {
2562     SEXP prom = mkPROMISE(expr, R_NilValue);
2563     SET_PRVALUE(prom, val);
2564     return prom;
2565 }
2566 
2567 SEXP attribute_hidden R_mkEVPROMISE_NR(SEXP expr, SEXP val)
2568 {
2569     SEXP prom = mkPROMISE(expr, R_NilValue);
2570     DISABLE_REFCNT(prom);
2571     SET_PRVALUE(prom, val);
2572     return prom;
2573 }
2574 
2575 /* support for custom allocators that allow vectors to be allocated
2576    using non-standard means such as COW mmap() */
2577 
2578 static void *custom_node_alloc(R_allocator_t *allocator, size_t size) {
2579     if (!allocator || !allocator->mem_alloc) return NULL;
2580     void *ptr = allocator->mem_alloc(allocator, size + sizeof(R_allocator_t));
2581     if (ptr) {
2582 	R_allocator_t *ca = (R_allocator_t*) ptr;
2583 	*ca = *allocator;
2584 	return (void*) (ca + 1);
2585     }
2586     return NULL;
2587 }
2588 
2589 static void custom_node_free(void *ptr) {
2590     if (ptr) {
2591 	R_allocator_t *allocator = ((R_allocator_t*) ptr) - 1;
2592 	allocator->mem_free(allocator, (void*)allocator);
2593     }
2594 }
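
/* Illustrative sketch (not part of the R sources): a minimal custom
   allocator backed by plain malloc/free, handed to allocVector3.  The
   names are hypothetical; real uses would typically wrap mmap() or
   another special-purpose memory source.  Passing a stack-allocated
   R_allocator_t is fine because custom_node_alloc copies it into the
   header of the allocation. */
static void *example_custom_alloc(R_allocator_t *allocator, size_t size)
{
    return malloc(size);
}

static void example_custom_free(R_allocator_t *allocator, void *ptr)
{
    free(ptr);
}

static SEXP example_alloc_custom_real(R_xlen_t n)
{
    R_allocator_t allocator = { example_custom_alloc, example_custom_free,
				NULL, NULL };
    return allocVector3(REALSXP, n, &allocator);
}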
2595 
2596 /* All vector objects must be a multiple of sizeof(SEXPREC_ALIGN)
2597    bytes so that alignment is preserved for all objects */
2598 
2599 /* Allocate a vector object (and also list-like objects).
2600    Only list-like (LISTSXP, VECSXP, EXPRSXP), STRSXP and CHARSXP types
2601    are guaranteed to be valid here; atomic types remain uninitialized
2602    and must be initialized upstream, e.g., in do_makevector().
2603 */
2604 #define intCHARSXP 73
2605 
2606 SEXP allocVector3(SEXPTYPE type, R_xlen_t length, R_allocator_t *allocator)
2607 {
2608     SEXP s;     /* For the generational collector it would be safer to
2609 		   work in terms of a VECSEXP here, but that would
2610 		   require several casts below... */
2611     R_size_t size = 0, alloc_size, old_R_VSize;
2612     int node_class;
2613 #if VALGRIND_LEVEL > 0
2614     R_size_t actual_size = 0;
2615 #endif
2616 
2617     /* Handle some scalars directly to improve speed. */
2618     if (length == 1) {
2619 	switch(type) {
2620 	case REALSXP:
2621 	case INTSXP:
2622 	case LGLSXP:
2623 	    node_class = 1;
2624 	    alloc_size = NodeClassSize[1];
2625 	    if (FORCE_GC || NO_FREE_NODES() || VHEAP_FREE() < alloc_size) {
2626 		R_gc_internal(alloc_size);
2627 		if (NO_FREE_NODES())
2628 		    mem_err_cons();
2629 		if (VHEAP_FREE() < alloc_size)
2630 		    mem_err_heap(size);
2631 	    }
2632 
2633 	    CLASS_GET_FREE_NODE(node_class, s);
2634 #if VALGRIND_LEVEL > 1
2635 	    switch(type) {
2636 	    case REALSXP: actual_size = sizeof(double); break;
2637 	    case INTSXP: actual_size = sizeof(int); break;
2638 	    case LGLSXP: actual_size = sizeof(int); break;
2639 	    }
2640 	    VALGRIND_MAKE_MEM_UNDEFINED(STDVEC_DATAPTR(s), actual_size);
2641 #endif
2642 	    s->sxpinfo = UnmarkedNodeTemplate.sxpinfo;
2643 	    SETSCALAR(s, 1);
2644 	    SET_NODE_CLASS(s, node_class);
2645 	    R_SmallVallocSize += alloc_size;
2646 	    /* Note that we do not include the header size into VallocSize,
2647 	       but it is counted into memory usage via R_NodesInUse. */
2648 	    ATTRIB(s) = R_NilValue;
2649 	    SET_TYPEOF(s, type);
2650 	    SET_STDVEC_LENGTH(s, (R_len_t) length); // is 1
2651 	    SET_STDVEC_TRUELENGTH(s, 0);
2652 	    INIT_REFCNT(s);
2653 	    return(s);
2654 	}
2655     }
2656 
2657     if (length > R_XLEN_T_MAX)
2658 	error(_("vector is too large")); /**** put length into message */
2659     else if (length < 0 )
2660 	error(_("negative length vectors are not allowed"));
2661     /* number of vector cells to allocate */
2662     switch (type) {
2663     case NILSXP:
2664 	return R_NilValue;
2665     case RAWSXP:
2666 	size = BYTE2VEC(length);
2667 #if VALGRIND_LEVEL > 0
2668 	actual_size = length;
2669 #endif
2670 	break;
2671     case CHARSXP:
2672 	error("use of allocVector(CHARSXP ...) is defunct\n");
2673     case intCHARSXP:
2674 	type = CHARSXP;
2675 	size = BYTE2VEC(length + 1);
2676 #if VALGRIND_LEVEL > 0
2677 	actual_size = length + 1;
2678 #endif
2679 	break;
2680     case LGLSXP:
2681     case INTSXP:
2682 	if (length <= 0)
2683 	    size = 0;
2684 	else {
2685 	    if (length > R_SIZE_T_MAX / sizeof(int))
2686 		error(_("cannot allocate vector of length %d"), length);
2687 	    size = INT2VEC(length);
2688 #if VALGRIND_LEVEL > 0
2689 	    actual_size = length*sizeof(int);
2690 #endif
2691 	}
2692 	break;
2693     case REALSXP:
2694 	if (length <= 0)
2695 	    size = 0;
2696 	else {
2697 	    if (length > R_SIZE_T_MAX / sizeof(double))
2698 		error(_("cannot allocate vector of length %d"), length);
2699 	    size = FLOAT2VEC(length);
2700 #if VALGRIND_LEVEL > 0
2701 	    actual_size = length * sizeof(double);
2702 #endif
2703 	}
2704 	break;
2705     case CPLXSXP:
2706 	if (length <= 0)
2707 	    size = 0;
2708 	else {
2709 	    if (length > R_SIZE_T_MAX / sizeof(Rcomplex))
2710 		error(_("cannot allocate vector of length %d"), length);
2711 	    size = COMPLEX2VEC(length);
2712 #if VALGRIND_LEVEL > 0
2713 	    actual_size = length * sizeof(Rcomplex);
2714 #endif
2715 	}
2716 	break;
2717     case STRSXP:
2718     case EXPRSXP:
2719     case VECSXP:
2720 	if (length <= 0)
2721 	    size = 0;
2722 	else {
2723 	    if (length > R_SIZE_T_MAX / sizeof(SEXP))
2724 		error(_("cannot allocate vector of length %d"), length);
2725 	    size = PTR2VEC(length);
2726 #if VALGRIND_LEVEL > 0
2727 	    actual_size = length * sizeof(SEXP);
2728 #endif
2729 	}
2730 	break;
2731     case LANGSXP:
2732 	if(length == 0) return R_NilValue;
2733 #ifdef LONG_VECTOR_SUPPORT
2734 	if (length > R_SHORT_LEN_MAX) error("invalid length for pairlist");
2735 #endif
2736 	s = allocList((int) length);
2737 	SET_TYPEOF(s, LANGSXP);
2738 	return s;
2739     case LISTSXP:
2740 #ifdef LONG_VECTOR_SUPPORT
2741 	if (length > R_SHORT_LEN_MAX) error("invalid length for pairlist");
2742 #endif
2743 	return allocList((int) length);
2744     default:
2745 	error(_("invalid type/length (%s/%d) in vector allocation"),
2746 	      type2char(type), length);
2747     }
2748 
2749     if (allocator) {
2750 	node_class = CUSTOM_NODE_CLASS;
2751 	alloc_size = size;
2752     } else {
2753 	if (size <= NodeClassSize[1]) {
2754 	    node_class = 1;
2755 	    alloc_size = NodeClassSize[1];
2756 	}
2757 	else {
2758 	    node_class = LARGE_NODE_CLASS;
2759 	    alloc_size = size;
2760 	    for (int i = 2; i < NUM_SMALL_NODE_CLASSES; i++) {
2761 		if (size <= NodeClassSize[i]) {
2762 		    node_class = i;
2763 		    alloc_size = NodeClassSize[i];
2764 		    break;
2765 		}
2766 	    }
2767 	}
2768     }
2769 
2770     /* save current R_VSize to roll back adjustment if malloc fails */
2771     old_R_VSize = R_VSize;
2772 
2773     /* we need to do the gc here so allocSExp doesn't! */
2774     if (FORCE_GC || NO_FREE_NODES() || VHEAP_FREE() < alloc_size) {
2775 	R_gc_internal(alloc_size);
2776 	if (NO_FREE_NODES())
2777 	    mem_err_cons();
2778 	if (VHEAP_FREE() < alloc_size)
2779 	    mem_err_heap(size);
2780     }
2781 
2782     if (size > 0) {
2783 	if (node_class < NUM_SMALL_NODE_CLASSES) {
2784 	    CLASS_GET_FREE_NODE(node_class, s);
2785 #if VALGRIND_LEVEL > 1
2786 	    VALGRIND_MAKE_MEM_UNDEFINED(STDVEC_DATAPTR(s), actual_size);
2787 #endif
2788 	    s->sxpinfo = UnmarkedNodeTemplate.sxpinfo;
2789 	    INIT_REFCNT(s);
2790 	    SET_NODE_CLASS(s, node_class);
2791 	    R_SmallVallocSize += alloc_size;
2792 	    SET_STDVEC_LENGTH(s, (R_len_t) length);
2793 	}
2794 	else {
2795 	    Rboolean success = FALSE;
2796 	    R_size_t hdrsize = sizeof(SEXPREC_ALIGN);
2797 	    void *mem = NULL; /* initialize to suppress warning */
2798 	    if (size < (R_SIZE_T_MAX / sizeof(VECREC)) - hdrsize) { /*** not sure this test is quite right -- why subtract the header? LT */
2799 		/* I think subtracting the header is fine, "size" (*VSize)
2800 		   variables do not count the header, but the header is
2801 		   included into memory usage via NodesInUse, instead.
2802 		   We want the whole object including the header to be
2803 		   indexable by size_t. - TK */
2804 		mem = allocator ?
2805 		    custom_node_alloc(allocator, hdrsize + size * sizeof(VECREC)) :
2806 		    malloc(hdrsize + size * sizeof(VECREC));
2807 		if (mem == NULL) {
2808 		    /* If we are near the address space limit, we
2809 		       might be short of address space.  So return
2810 		       all unused objects to malloc and try again. */
2811 		    R_gc_no_finalizers(alloc_size);
2812 		    mem = allocator ?
2813 			custom_node_alloc(allocator, hdrsize + size * sizeof(VECREC)) :
2814 			malloc(hdrsize + size * sizeof(VECREC));
2815 		}
2816 		if (mem != NULL) {
2817 		    s = mem;
2818 		    SET_STDVEC_LENGTH(s, length);
2819 		    success = TRUE;
2820 		}
2821 		else s = NULL;
2822 #ifdef R_MEMORY_PROFILING
2823 		R_ReportAllocation(hdrsize + size * sizeof(VECREC));
2824 #endif
2825 	    } else s = NULL; /* suppress warning */
2826 	    if (! success) {
2827 		double dsize = (double)size * sizeof(VECREC)/1024.0;
2828 		/* reset the vector heap limit */
2829 		R_VSize = old_R_VSize;
2830 		if(dsize > 1024.0*1024.0)
2831 		    errorcall(R_NilValue,
2832 			      _("cannot allocate vector of size %0.1f Gb"),
2833 			      dsize/1024.0/1024.0);
2834 		if(dsize > 1024.0)
2835 		    errorcall(R_NilValue,
2836 			      _("cannot allocate vector of size %0.1f Mb"),
2837 			      dsize/1024.0);
2838 		else
2839 		    errorcall(R_NilValue,
2840 			      _("cannot allocate vector of size %0.f Kb"),
2841 			      dsize);
2842 	    }
2843 	    s->sxpinfo = UnmarkedNodeTemplate.sxpinfo;
2844 	    INIT_REFCNT(s);
2845 	    SET_NODE_CLASS(s, node_class);
2846 	    if (!allocator) R_LargeVallocSize += size;
2847 	    R_GenHeap[node_class].AllocCount++;
2848 	    R_NodesInUse++;
2849 	    SNAP_NODE(s, R_GenHeap[node_class].New);
2850 	}
2851 	ATTRIB(s) = R_NilValue;
2852 	SET_TYPEOF(s, type);
2853     }
2854     else {
2855 	GC_PROT(s = allocSExpNonCons(type));
2856 	SET_STDVEC_LENGTH(s, (R_len_t) length);
2857     }
2858     SETALTREP(s, 0);
2859     SET_STDVEC_TRUELENGTH(s, 0);
2860     INIT_REFCNT(s);
2861 
2862     /* The following prevents disaster in the case */
2863     /* that an uninitialised string vector is marked */
2864     /* Direct assignment is OK since the node was just allocated and */
2865     /* so is at least as new as R_NilValue and R_BlankString */
2866     if (type == EXPRSXP || type == VECSXP) {
2867 	SEXP *data = STRING_PTR(s);
2868 #if VALGRIND_LEVEL > 1
2869 	VALGRIND_MAKE_MEM_DEFINED(STRING_PTR(s), actual_size);
2870 #endif
2871 	for (R_xlen_t i = 0; i < length; i++)
2872 	    data[i] = R_NilValue;
2873     }
2874     else if(type == STRSXP) {
2875 	SEXP *data = STRING_PTR(s);
2876 #if VALGRIND_LEVEL > 1
2877 	VALGRIND_MAKE_MEM_DEFINED(STRING_PTR(s), actual_size);
2878 #endif
2879 	for (R_xlen_t i = 0; i < length; i++)
2880 	    data[i] = R_BlankString;
2881     }
2882     else if (type == CHARSXP || type == intCHARSXP) {
2883 #if VALGRIND_LEVEL > 0
2884 	VALGRIND_MAKE_MEM_UNDEFINED(CHAR(s), actual_size);
2885 #endif
2886 	CHAR_RW(s)[length] = 0;
2887     }
2888 #if VALGRIND_LEVEL > 0
2889     else if (type == REALSXP)
2890 	VALGRIND_MAKE_MEM_UNDEFINED(REAL(s), actual_size);
2891     else if (type == INTSXP)
2892 	VALGRIND_MAKE_MEM_UNDEFINED(INTEGER(s), actual_size);
2893     else if (type == LGLSXP)
2894 	VALGRIND_MAKE_MEM_UNDEFINED(LOGICAL(s), actual_size);
2895     else if (type == CPLXSXP)
2896 	VALGRIND_MAKE_MEM_UNDEFINED(COMPLEX(s), actual_size);
2897     else if (type == RAWSXP)
2898 	VALGRIND_MAKE_MEM_UNDEFINED(RAW(s), actual_size);
2899 #endif
2900     return s;
2901 }
2902 
2903 /* For future hiding of allocVector(CHARSXP) */
2904 SEXP attribute_hidden allocCharsxp(R_len_t len)
2905 {
2906     return allocVector(intCHARSXP, len);
2907 }
2908 
2909 SEXP allocList(int n)
2910 {
2911     int i;
2912     SEXP result;
2913     result = R_NilValue;
2914     for (i = 0; i < n; i++)
2915 	result = CONS(R_NilValue, result);
2916     return result;
2917 }
2918 
2919 SEXP allocS4Object(void)
2920 {
2921    SEXP s;
2922    GC_PROT(s = allocSExpNonCons(S4SXP));
2923    SET_S4_OBJECT(s);
2924    return s;
2925 }
2926 
2927 static SEXP allocFormalsList(int nargs, ...)
2928 {
2929     SEXP res = R_NilValue;
2930     SEXP n;
2931     int i;
2932     va_list syms;
2933     va_start(syms, nargs);
2934 
2935     for(i = 0; i < nargs; i++) {
2936 	res = CONS(R_NilValue, res);
2937     }
2938     R_PreserveObject(res);
2939 
2940     n = res;
2941     for(i = 0; i < nargs; i++) {
2942 	SET_TAG(n, (SEXP) va_arg(syms, SEXP));
2943 	MARK_NOT_MUTABLE(n);
2944 	n = CDR(n);
2945     }
2946     va_end(syms);
2947 
2948     return res;
2949 }
2950 
2951 
2952 SEXP allocFormalsList2(SEXP sym1, SEXP sym2)
2953 {
2954     return allocFormalsList(2, sym1, sym2);
2955 }
2956 
2957 SEXP allocFormalsList3(SEXP sym1, SEXP sym2, SEXP sym3)
2958 {
2959     return allocFormalsList(3, sym1, sym2, sym3);
2960 }
2961 
2962 SEXP allocFormalsList4(SEXP sym1, SEXP sym2, SEXP sym3, SEXP sym4)
2963 {
2964     return allocFormalsList(4, sym1, sym2, sym3, sym4);
2965 }
2966 
2967 SEXP allocFormalsList5(SEXP sym1, SEXP sym2, SEXP sym3, SEXP sym4, SEXP sym5)
2968 {
2969     return allocFormalsList(5, sym1, sym2, sym3, sym4, sym5);
2970 }
2971 
2972 SEXP allocFormalsList6(SEXP sym1, SEXP sym2, SEXP sym3, SEXP sym4,
2973 		       SEXP sym5, SEXP sym6)
2974 {
2975     return allocFormalsList(6, sym1, sym2, sym3, sym4, sym5, sym6);
2976 }
2977 
2978 /* "gc" a mark-sweep or in-place generational garbage collector */
2979 
2980 void R_gc(void)
2981 {
2982     num_old_gens_to_collect = NUM_OLD_GENERATIONS;
2983     R_gc_internal(0);
2984 #ifndef IMMEDIATE_FINALIZERS
2985     R_RunPendingFinalizers();
2986 #endif
2987 }
2988 
2989 void R_gc_lite(void)
2990 {
2991     R_gc_internal(0);
2992 #ifndef IMMEDIATE_FINALIZERS
2993     R_RunPendingFinalizers();
2994 #endif
2995 }
2996 
2997 static void R_gc_no_finalizers(R_size_t size_needed)
2998 {
2999     num_old_gens_to_collect = NUM_OLD_GENERATIONS;
3000     R_gc_internal(size_needed);
3001 }
3002 
3003 static double gctimes[5], gcstarttimes[5];
3004 static Rboolean gctime_enabled = FALSE;
3005 
3006 /* this is primitive */
3007 SEXP attribute_hidden do_gctime(SEXP call, SEXP op, SEXP args, SEXP env)
3008 {
3009     SEXP ans;
3010 
3011     if (args == R_NilValue)
3012 	gctime_enabled = TRUE;
3013     else {
3014 	check1arg(args, call, "on");
3015 	gctime_enabled = asLogical(CAR(args));
3016     }
3017     ans = allocVector(REALSXP, 5);
3018     REAL(ans)[0] = gctimes[0];
3019     REAL(ans)[1] = gctimes[1];
3020     REAL(ans)[2] = gctimes[2];
3021     REAL(ans)[3] = gctimes[3];
3022     REAL(ans)[4] = gctimes[4];
3023     return ans;
3024 }
3025 
3026 static void gc_start_timing(void)
3027 {
3028     if (gctime_enabled)
3029 	R_getProcTime(gcstarttimes);
3030 }
3031 
3032 static void gc_end_timing(void)
3033 {
3034     if (gctime_enabled) {
3035 	double times[5], delta;
3036 	R_getProcTime(times);
3037 
3038 	/* add delta to compensate for timer resolution */
3039 #if 0
3040 	/* this seems to over-compensate too */
3041 	delta = R_getClockIncrement();
3042 #else
3043 	delta = 0;
3044 #endif
3045 
3046 	gctimes[0] += times[0] - gcstarttimes[0] + delta;
3047 	gctimes[1] += times[1] - gcstarttimes[1] + delta;
3048 	gctimes[2] += times[2] - gcstarttimes[2];
3049 	gctimes[3] += times[3] - gcstarttimes[3];
3050 	gctimes[4] += times[4] - gcstarttimes[4];
3051     }
3052 }
3053 
3054 #define R_MAX(a,b) ((a) < (b) ? (b) : (a))
3055 
3056 #ifdef THREADCHECK
3057 # if !defined(Win32) && defined(HAVE_PTHREAD)
3058 #   include <pthread.h>
3059 void attribute_hidden R_check_thread(const char *s)
3060 {
3061     static Rboolean main_thread_inited = FALSE;
3062     static pthread_t main_thread;
3063     if (! main_thread_inited) {
3064         main_thread = pthread_self();
3065         main_thread_inited = TRUE;
3066     }
3067     if (! pthread_equal(main_thread, pthread_self())) {
3068         char buf[1024];
3069 	size_t bsize = sizeof buf;
3070 	memset(buf, 0, bsize);
3071         snprintf(buf, bsize - 1, "Wrong thread calling '%s'", s);
3072         R_Suicide(buf);
3073     }
3074 }
3075 # else
3076 /* This could be implemented for Windows using their threading API */
3077 void attribute_hidden R_check_thread(const char *s) {}
3078 # endif
3079 #endif
3080 
3081 static void R_gc_internal(R_size_t size_needed)
3082 {
3083     R_CHECK_THREAD;
3084     if (!R_GCEnabled || R_in_gc) {
3085       if (R_in_gc)
3086         gc_error("*** recursive gc invocation\n");
3087       if (NO_FREE_NODES())
3088 	R_NSize = R_NodesInUse + 1;
3089 
3090       if (num_old_gens_to_collect < NUM_OLD_GENERATIONS &&
3091 	  VHEAP_FREE() < size_needed + R_MinFreeFrac * R_VSize)
3092 	num_old_gens_to_collect++;
3093 
3094       if (size_needed > VHEAP_FREE()) {
3095 	  R_size_t expand = size_needed - VHEAP_FREE();
3096 	  if (R_VSize + expand > R_MaxVSize)
3097 	      mem_err_heap(size_needed);
3098 	  R_VSize += expand;
3099       }
3100 
3101       gc_pending = TRUE;
3102       return;
3103     }
3104     gc_pending = FALSE;
3105 
3106     R_size_t onsize = R_NSize /* can change during collection */;
3107     double ncells, vcells, vfrac, nfrac;
3108     SEXPTYPE first_bad_sexp_type = 0;
3109 #ifdef PROTECTCHECK
3110     SEXPTYPE first_bad_sexp_type_old_type = 0;
3111 #endif
3112     SEXP first_bad_sexp_type_sexp = NULL;
3113     int first_bad_sexp_type_line = 0;
3114     int gens_collected = 0;
3115 
3116 #ifdef IMMEDIATE_FINALIZERS
3117     Rboolean first = TRUE;
3118  again:
3119 #endif
3120 
3121     gc_count++;
3122 
3123     R_N_maxused = R_MAX(R_N_maxused, R_NodesInUse);
3124     R_V_maxused = R_MAX(R_V_maxused, R_VSize - VHEAP_FREE());
3125 
3126     BEGIN_SUSPEND_INTERRUPTS {
3127 	R_in_gc = TRUE;
3128 	gc_start_timing();
3129 	gens_collected = RunGenCollect(size_needed);
3130 	gc_end_timing();
3131 	R_in_gc = FALSE;
3132     } END_SUSPEND_INTERRUPTS;
3133 
3134     if (R_check_constants > 2 ||
3135 	    (R_check_constants > 1 && gens_collected == NUM_OLD_GENERATIONS))
3136 	R_checkConstants(TRUE);
3137 
3138     if (gc_reporting) {
3139 	REprintf("Garbage collection %d = %d", gc_count, gen_gc_counts[0]);
3140 	for (int i = 0; i < NUM_OLD_GENERATIONS; i++)
3141 	    REprintf("+%d", gen_gc_counts[i + 1]);
3142 	REprintf(" (level %d) ... ", gens_collected);
3143 	DEBUG_GC_SUMMARY(gens_collected == NUM_OLD_GENERATIONS);
3144     }
3145 
3146     if (bad_sexp_type_seen != 0 && first_bad_sexp_type == 0) {
3147 	first_bad_sexp_type = bad_sexp_type_seen;
3148 #ifdef PROTECTCHECK
3149 	first_bad_sexp_type_old_type = bad_sexp_type_old_type;
3150 #endif
3151 	first_bad_sexp_type_sexp = bad_sexp_type_sexp;
3152 	first_bad_sexp_type_line = bad_sexp_type_line;
3153     }
3154 
3155     if (gc_reporting) {
3156 	ncells = onsize - R_Collected;
3157 	nfrac = (100.0 * ncells) / R_NSize;
3158 	/* We try to make this consistent with the results returned by gc */
3159 	ncells = 0.1*ceil(10*ncells * sizeof(SEXPREC)/Mega);
3160 	REprintf("\n%.1f Mbytes of cons cells used (%d%%)\n",
3161 		 ncells, (int) (nfrac + 0.5));
3162 	vcells = R_VSize - VHEAP_FREE();
3163 	vfrac = (100.0 * vcells) / R_VSize;
3164 	vcells = 0.1*ceil(10*vcells * vsfac/Mega);
3165 	REprintf("%.1f Mbytes of vectors used (%d%%)\n",
3166 		 vcells, (int) (vfrac + 0.5));
3167     }
3168 
3169 #ifdef IMMEDIATE_FINALIZERS
3170     if (first) {
3171 	first = FALSE;
3172 	/* Run any eligible finalizers.  The return result of
3173 	   RunFinalizers is TRUE if any finalizers are actually run.
3174 	   There is a small chance that running finalizers here may
3175 	   chew up enough memory to make another immediate collection
3176 	   necessary.  If so, we jump back to the beginning and run
3177 	   the collection, but on this second pass we do not run
3178 	   finalizers. */
3179 	if (RunFinalizers() &&
3180 	    (NO_FREE_NODES() || size_needed > VHEAP_FREE()))
3181 	    goto again;
3182     }
3183 #endif
3184 
3185     if (first_bad_sexp_type != 0) {
3186 	char msg[256];
3187 #ifdef PROTECTCHECK
3188 	if (first_bad_sexp_type == FREESXP)
3189 	    snprintf(msg, 256,
3190 	          "GC encountered a node (%p) with type FREESXP (was %s)"
3191 		  " at memory.c:%d",
3192 		  (void *) first_bad_sexp_type_sexp,
3193 		  sexptype2char(first_bad_sexp_type_old_type),
3194 		  first_bad_sexp_type_line);
3195 	else
3196 	    snprintf(msg, 256,
3197 		     "GC encountered a node (%p) with an unknown SEXP type: %d"
3198 		     " at memory.c:%d",
3199 		     (void *) first_bad_sexp_type_sexp,
3200 		     first_bad_sexp_type,
3201 		     first_bad_sexp_type_line);
3202 #else
3203 	snprintf(msg, 256,
3204 		 "GC encountered a node (%p) with an unknown SEXP type: %d"
3205 		 " at memory.c:%d",
3206 		 (void *)first_bad_sexp_type_sexp,
3207 		 first_bad_sexp_type,
3208 		 first_bad_sexp_type_line);
3209 #endif
3210 	gc_error(msg);
3211     }
3212 
3213     /* sanity check on logical scalar values */
3214     if (R_TrueValue != NULL && LOGICAL(R_TrueValue)[0] != TRUE) {
3215 	LOGICAL(R_TrueValue)[0] = TRUE;
3216 	gc_error("internal TRUE value has been modified");
3217     }
3218     if (R_FalseValue != NULL && LOGICAL(R_FalseValue)[0] != FALSE) {
3219 	LOGICAL(R_FalseValue)[0] = FALSE;
3220 	gc_error("internal FALSE value has been modified");
3221     }
3222     if (R_LogicalNAValue != NULL &&
3223 	LOGICAL(R_LogicalNAValue)[0] != NA_LOGICAL) {
3224 	LOGICAL(R_LogicalNAValue)[0] = NA_LOGICAL;
3225 	gc_error("internal logical NA value has been modified");
3226     }
3227 }
3228 
3229 
3230 SEXP attribute_hidden do_memoryprofile(SEXP call, SEXP op, SEXP args, SEXP env)
3231 {
3232     SEXP ans, nms;
3233     int i, tmp;
3234 
3235     checkArity(op, args);
3236     PROTECT(ans = allocVector(INTSXP, 24));
3237     PROTECT(nms = allocVector(STRSXP, 24));
3238     for (i = 0; i < 24; i++) {
3239 	INTEGER(ans)[i] = 0;
3240 	SET_STRING_ELT(nms, i, type2str(i > LGLSXP? i+2 : i));
3241     }
3242     setAttrib(ans, R_NamesSymbol, nms);
3243 
3244     BEGIN_SUSPEND_INTERRUPTS {
3245       int gen;
3246 
3247       /* run a full GC to make sure that all stuff in use is in Old space */
3248       R_gc();
3249       for (gen = 0; gen < NUM_OLD_GENERATIONS; gen++) {
3250 	for (i = 0; i < NUM_NODE_CLASSES; i++) {
3251 	  SEXP s;
3252 	  for (s = NEXT_NODE(R_GenHeap[i].Old[gen]);
3253 	       s != R_GenHeap[i].Old[gen];
3254 	       s = NEXT_NODE(s)) {
3255 	      tmp = TYPEOF(s);
3256 	      if(tmp > LGLSXP) tmp -= 2;
3257 	      INTEGER(ans)[tmp]++;
3258 	  }
3259 	}
3260       }
3261     } END_SUSPEND_INTERRUPTS;
3262     UNPROTECT(2);
3263     return ans;
3264 }
3265 
3266 /* "protect" push a single argument onto R_PPStack */
3267 
3268 /* In handling a stack overflow we have to be careful not to use
3269    PROTECT. error("protect(): stack overflow") would call deparse1,
3270    which uses PROTECT and segfaults.*/
3271 
3272 /* However, the traceback creation in the normal error handler also
3273    does a PROTECT, as does the jumping code, at least if there are
3274    cleanup expressions to handle on the way out.  So for the moment
3275    we'll allocate a slightly larger PP stack and only enable the added
3276    red zone during handling of a stack overflow error.  LT */
3277 
3278 static void reset_pp_stack(void *data)
3279 {
3280     int *poldpps = data;
3281     R_PPStackSize =  *poldpps;
3282 }
3283 
3284 void NORET R_signal_protect_error(void)
3285 {
3286     RCNTXT cntxt;
3287     int oldpps = R_PPStackSize;
3288 
3289     begincontext(&cntxt, CTXT_CCODE, R_NilValue, R_BaseEnv, R_BaseEnv,
3290 		 R_NilValue, R_NilValue);
3291     cntxt.cend = &reset_pp_stack;
3292     cntxt.cenddata = &oldpps;
3293 
3294     if (R_PPStackSize < R_RealPPStackSize)
3295 	R_PPStackSize = R_RealPPStackSize;
3296     errorcall(R_NilValue, _("protect(): protection stack overflow"));
3297 
3298     endcontext(&cntxt); /* not reached */
3299 }
3300 
3301 void NORET R_signal_unprotect_error(void)
3302 {
3303     error(ngettext("unprotect(): only %d protected item",
3304 		   "unprotect(): only %d protected items", R_PPStackTop),
3305 	  R_PPStackTop);
3306 }
3307 
3308 #ifndef INLINE_PROTECT
3309 SEXP protect(SEXP s)
3310 {
3311     R_CHECK_THREAD;
3312     if (R_PPStackTop >= R_PPStackSize)
3313 	R_signal_protect_error();
3314     R_PPStack[R_PPStackTop++] = CHK(s);
3315     return s;
3316 }
3317 
3318 
3319 /* "unprotect" pop argument list from top of R_PPStack */
3320 
3321 void unprotect(int l)
3322 {
3323     R_CHECK_THREAD;
3324     if (R_PPStackTop >=  l)
3325 	R_PPStackTop -= l;
3326     else R_signal_unprotect_error();
3327 }
3328 #endif
3329 
3330 /* "unprotect_ptr" remove pointer from somewhere in R_PPStack */
3331 
3332 void unprotect_ptr(SEXP s)
3333 {
3334     R_CHECK_THREAD;
3335     int i = R_PPStackTop;
3336 
3337     /* go look for  s  in  R_PPStack */
3338     /* (should be among the top few items) */
3339     do {
3340 	if (i == 0)
3341 	    error(_("unprotect_ptr: pointer not found"));
3342     } while ( R_PPStack[--i] != s );
3343 
3344     /* OK, got it, and  i  is indexing its location */
3345     /* Now drop stack above it, if any */
3346 
3347     while (++i < R_PPStackTop) R_PPStack[i - 1] = R_PPStack[i];
3348 
3349     R_PPStackTop--;
3350 }
3351 
3352 /* Debugging function:  is s protected? */
3353 
3354 int Rf_isProtected(SEXP s)
3355 {
3356     R_CHECK_THREAD;
3357     int i = R_PPStackTop;
3358 
3359     /* go look for  s  in  R_PPStack */
3360     do {
3361 	if (i == 0)
3362 	    return(i);
3363     } while ( R_PPStack[--i] != s );
3364 
3365     /* OK, got it, and  i  is indexing its location */
3366     return(i);
3367 }
3368 
3369 
3370 #ifndef INLINE_PROTECT
3371 void R_ProtectWithIndex(SEXP s, PROTECT_INDEX *pi)
3372 {
3373     protect(s);
3374     *pi = R_PPStackTop - 1;
3375 }
3376 #endif
3377 
3378 void NORET R_signal_reprotect_error(PROTECT_INDEX i)
3379 {
3380     error(ngettext("R_Reprotect: only %d protected item, can't reprotect index %d",
3381 		   "R_Reprotect: only %d protected items, can't reprotect index %d",
3382 		   R_PPStackTop),
3383 	  R_PPStackTop, i);
3384 }
3385 
3386 #ifndef INLINE_PROTECT
3387 void R_Reprotect(SEXP s, PROTECT_INDEX i)
3388 {
3389     R_CHECK_THREAD;
3390     if (i >= R_PPStackTop || i < 0)
3391 	R_signal_reprotect_error(i);
3392     R_PPStack[i] = s;
3393 }
3394 #endif
3395 
3396 #ifdef UNUSED
3397 /* remove all objects from the protection stack from index i upwards
3398    and return them in a vector. The order in the vector is from new
3399    to old. */
3400 SEXP R_CollectFromIndex(PROTECT_INDEX i)
3401 {
3402     R_CHECK_THREAD;
3403     SEXP res;
3404     int top = R_PPStackTop, j = 0;
3405     if (i > top) i = top;
3406     res = protect(allocVector(VECSXP, top - i));
3407     while (i < top)
3408 	SET_VECTOR_ELT(res, j++, R_PPStack[--top]);
3409     R_PPStackTop = top; /* this includes the protect we used above */
3410     return res;
3411 }
3412 #endif
3413 
3414 /* "initStack" initialize the pointer protection stack */
3415 attribute_hidden
3416 void initStack(void)
3417 {
3418     R_PPStackTop = 0;
3419 }
3420 
3421 
3422 /* S-like wrappers for calloc, realloc and free that check for error
3423    conditions */
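
/* Illustrative sketch (assumption, not from this file): package code
   normally reaches these wrappers through the Calloc/Realloc/Free macros
   in R_ext/RS.h rather than calling them directly.

       size_t n = 100;
       double *work = Calloc(n, double);      // R_chk_calloc underneath
       work = Realloc(work, 2 * n, double);   // R_chk_realloc underneath
       Free(work);                            // R_chk_free; sets work to NULL
*/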
3424 
3425 void *R_chk_calloc(size_t nelem, size_t elsize)
3426 {
3427     void *p;
3428 #ifndef HAVE_WORKING_CALLOC
3429     if(nelem == 0)
3430 	return(NULL);
3431 #endif
3432     p = calloc(nelem, elsize);
3433     if(!p) /* problem here is that we don't have a format for size_t. */
3434 	error(_("'Calloc' could not allocate memory (%.0f of %u bytes)"),
3435 	      (double) nelem, elsize);
3436     return(p);
3437 }
3438 
3439 void *R_chk_realloc(void *ptr, size_t size)
3440 {
3441     void *p;
3442     /* Protect against broken realloc */
3443     if(ptr) p = realloc(ptr, size); else p = malloc(size);
3444     if(!p)
3445 	error(_("'Realloc' could not re-allocate memory (%.0f bytes)"),
3446 	      (double) size);
3447     return(p);
3448 }
3449 
3450 void R_chk_free(void *ptr)
3451 {
3452     /* S-PLUS warns here, but there seems no reason to do so */
3453     /* if(!ptr) warning("attempt to free NULL pointer by Free"); */
3454     if(ptr) free(ptr); /* ANSI C says free has no effect on NULL, but
3455 			  better to be safe here */
3456 }
3457 
3458 /* This code keeps a list of objects which are not assigned to variables
3459    but which are required to persist across garbage collections.  The
3460    objects are registered with R_PreserveObject and deregistered with
3461    R_ReleaseObject. */
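
/* Illustrative sketch (assumption): keeping a SEXP alive across garbage
   collections from package C code; my_cache and set_cache are hypothetical.

       static SEXP my_cache = NULL;

       void set_cache(SEXP val)
       {
           if (my_cache != NULL) R_ReleaseObject(my_cache);
           my_cache = val;
           R_PreserveObject(my_cache);
       }
*/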
3462 
3463 static SEXP DeleteFromList(SEXP object, SEXP list)
3464 {
3465     if (CAR(list) == object)
3466 	return CDR(list);
3467     else {
3468 	SEXP last = list;
3469 	for (SEXP head = CDR(list); head != R_NilValue; head = CDR(head)) {
3470 	    if (CAR(head) == object) {
3471 		SETCDR(last, CDR(head));
3472 		return list;
3473 	    }
3474 	    else last = head;
3475 	}
3476 	return list;
3477     }
3478 }
3479 
3480 #define ALLOW_PRECIOUS_HASH
3481 #ifdef ALLOW_PRECIOUS_HASH
3482 /* This allows using a fixed-size hash table. This makes deleting much
3483    more efficient for applications that don't follow the "sparing use"
3484    advice in R-exts.texi. Using the hash table is enabled by starting
3485    R with the environment variable R_HASH_PRECIOUS set.
3486 
3487    Pointer hashing as used here isn't entirely portable (we do it in
3488    at least one other place, in serialize.c) but it could be made so
3489    by computing a unique value based on the allocation page and
3490    position in the page. */
3491 
3492 #define PHASH_SIZE 1069
3493 #define PTRHASH(obj) (((R_size_t) (obj)) >> 3)
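
/* Note (added commentary): PTRHASH drops the three low-order bits, which
   are normally zero for allocator-aligned nodes; callers then reduce the
   result modulo the prime PHASH_SIZE to pick a bin. */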
3494 
3495 static int use_precious_hash = FALSE;
3496 static int precious_inited = FALSE;
3497 
3498 void R_PreserveObject(SEXP object)
3499 {
3500     R_CHECK_THREAD;
3501     if (! precious_inited) {
3502 	precious_inited = TRUE;
3503 	if (getenv("R_HASH_PRECIOUS"))
3504 	    use_precious_hash = TRUE;
3505     }
3506     if (use_precious_hash) {
3507 	if (R_PreciousList == R_NilValue)
3508 	    R_PreciousList = allocVector(VECSXP, PHASH_SIZE);
3509 	int bin = PTRHASH(object) % PHASH_SIZE;
3510 	SET_VECTOR_ELT(R_PreciousList, bin,
3511 		       CONS(object, VECTOR_ELT(R_PreciousList, bin)));
3512     }
3513     else
3514 	R_PreciousList = CONS(object, R_PreciousList);
3515 }
3516 
3517 void R_ReleaseObject(SEXP object)
3518 {
3519     R_CHECK_THREAD;
3520     if (! precious_inited)
3521 	return; /* can't be anything to delete yet */
3522     if (use_precious_hash) {
3523 	int bin = PTRHASH(object) % PHASH_SIZE;
3524 	SET_VECTOR_ELT(R_PreciousList, bin,
3525 		       DeleteFromList(object,
3526 				      VECTOR_ELT(R_PreciousList, bin)));
3527     }
3528     else
3529 	R_PreciousList =  DeleteFromList(object, R_PreciousList);
3530 }
3531 #else
3532 void R_PreserveObject(SEXP object)
3533 {
3534     R_CHECK_THREAD;
3535     R_PreciousList = CONS(object, R_PreciousList);
3536 }
3537 
3538 void R_ReleaseObject(SEXP object)
3539 {
3540     R_CHECK_THREAD;
3541     R_PreciousList =  DeleteFromList(object, R_PreciousList);
3542 }
3543 #endif
3544 
3545 
3546 /* This code is similar to R_PreserveObject/R_ReleaseObject, but objects are
3547    kept in a provided multi-set (which needs to be itself protected).
3548    When protected via PROTECT, the multi-set is automatically unprotected
3549    during long jump, and thus all its members are eventually reclaimed.
3550    These functions were introduced for parsers generated by bison, because
3551    one cannot instruct bison to use PROTECT/UNPROTECT when working with
3552    the stack of semantic values. */
3553 
3554 /* Multi-set is defined by a triple (store, npreserved, initialSize)
3555      npreserved is the number of elements in the store (counting each instance
3556        of the same value)
3557      store is a VECSXP or R_NilValue
3558        when VECSXP, preserved values are stored at the beginning, filled up by
3559        R_NilValue
3560      initialSize is the size for the VECSXP to be allocated if preserving values
3561        while store is R_NilValue
3562 
3563     The representation is CONS(store, npreserved) with TAG()==initialSize
3564 */
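
/* Illustrative sketch (assumption): typical use from a parser driver.

       SEXP mset = PROTECT(R_NewPreciousMSet(100));
       SEXP val = mkString("semantic value");
       R_PreserveInMSet(val, mset);    // kept alive while mset is protected
       R_ReleaseFromMSet(val, mset);   // drop one instance again
       UNPROTECT(1);                   // members collectable once mset is unreachable
*/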
3565 
3566 /* Create new multi-set for protecting objects. initialSize may be zero
3567    (a hardcoded default is then used). */
3568 SEXP R_NewPreciousMSet(int initialSize)
3569 {
3570     SEXP npreserved, mset, isize;
3571 
3572     /* npreserved is modified in place */
3573     npreserved = allocVector(INTSXP, 1);
3574     SET_INTEGER_ELT(npreserved, 0, 0);
3575     PROTECT(mset = CONS(R_NilValue, npreserved));
3576     /* isize is not modified in place */
3577     if (initialSize < 0)
3578 	error("'initialSize' must be non-negative");
3579     isize = ScalarInteger(initialSize);
3580     SET_TAG(mset, isize);
3581     UNPROTECT(1); /* mset */
3582     return mset;
3583 }
3584 
3585 static void checkMSet(SEXP mset)
3586 {
3587     SEXP store = CAR(mset);
3588     SEXP npreserved = CDR(mset);
3589     SEXP isize = TAG(mset);
3590     if (/*MAYBE_REFERENCED(mset) ||*/
3591 	((store != R_NilValue) &&
3592 	 (TYPEOF(store) != VECSXP /*|| MAYBE_REFERENCED(store)*/)) ||
3593 	(TYPEOF(npreserved) != INTSXP || XLENGTH(npreserved) != 1 /*||
3594 	 MAYBE_REFERENCED(npreserved)*/) ||
3595 	(TYPEOF(isize) != INTSXP || XLENGTH(isize) != 1))
3596 
3597 	error("Invalid mset");
3598 }
3599 
3600 /* Add object to multi-set. The object will be protected as long as the
3601    multi-set is protected. */
3602 void R_PreserveInMSet(SEXP x, SEXP mset)
3603 {
3604     if (x == R_NilValue || isSymbol(x))
3605 	return; /* no need to preserve */
3606     PROTECT(x);
3607     checkMSet(mset);
3608     SEXP store = CAR(mset);
3609     int *n = INTEGER(CDR(mset));
3610     if (store == R_NilValue) {
3611 	R_xlen_t newsize = INTEGER_ELT(TAG(mset), 0);
3612 	if (newsize == 0)
3613 	    newsize = 4; /* default minimum size */
3614 	store = allocVector(VECSXP, newsize);
3615 	SETCAR(mset, store);
3616     }
3617     R_xlen_t size = XLENGTH(store);
3618     if (*n == size) {
3619 	R_xlen_t newsize = 2 * size;
3620 	if (newsize >= INT_MAX || newsize < size)
3621 	    error("Multi-set overflow");
3622 	SEXP newstore = PROTECT(allocVector(VECSXP, newsize));
3623 	for(R_xlen_t i = 0; i < size; i++)
3624 	    SET_VECTOR_ELT(newstore, i, VECTOR_ELT(store, i));
3625 	SETCAR(mset, newstore);
3626 	UNPROTECT(1); /* newstore */
3627 	store = newstore;
3628     }
3629     UNPROTECT(1); /* x */
3630     SET_VECTOR_ELT(store, (*n)++, x);
3631 }
3632 
3633 /* Remove (one instance of) the object from the multi-set. If there is another
3634    instance of the object in the multi-set, it will still be protected. If there
3635    is no instance of the object, the function does nothing. */
3636 void R_ReleaseFromMSet(SEXP x, SEXP mset)
3637 {
3638     if (x == R_NilValue || isSymbol(x))
3639 	return; /* not preserved */
3640     checkMSet(mset);
3641     SEXP store = CAR(mset);
3642     if (store == R_NilValue)
3643 	return; /* not preserved */
3644     int *n = INTEGER(CDR(mset));
3645     for(R_xlen_t i = (*n) - 1; i >= 0; i--) {
3646 	if (VECTOR_ELT(store, i) == x) {
3647 	    for(;i < (*n) - 1; i++)
3648 		SET_VECTOR_ELT(store, i, VECTOR_ELT(store, i + 1));
3649 	    SET_VECTOR_ELT(store, i, R_NilValue);
3650 	    (*n)--;
3651 	    return;
3652 	}
3653     }
3654     /* not preserved */
3655 }
3656 
3657 /* Release all objects from the multi-set, but the multi-set can be used for
3658    preserving more objects. */
3659 void R_ReleaseMSet(SEXP mset, int keepSize)
3660 {
3661     checkMSet(mset);
3662     SEXP store = CAR(mset);
3663     if (store == R_NilValue)
3664 	return; /* already empty */
3665     int *n = INTEGER(CDR(mset));
3666     if (XLENGTH(store) <= keepSize) {
3667 	/* just free the entries */
3668 	for(R_xlen_t i = 0; i < *n; i++)
3669 	    SET_VECTOR_ELT(store, i, R_NilValue);
3670     } else
3671 	SETCAR(mset, R_NilValue);
3672     *n = 0;
3673 }
3674 
3675 /* External Pointer Objects */
3676 SEXP R_MakeExternalPtr(void *p, SEXP tag, SEXP prot)
3677 {
3678     SEXP s = allocSExp(EXTPTRSXP);
3679     EXTPTR_PTR(s) = p;
3680     EXTPTR_PROT(s) = CHK(prot); if (prot) INCREMENT_REFCNT(prot);
3681     EXTPTR_TAG(s) = CHK(tag); if (tag) INCREMENT_REFCNT(tag);
3682     return s;
3683 }
3684 
3685 void *R_ExternalPtrAddr(SEXP s)
3686 {
3687     return EXTPTR_PTR(CHK(s));
3688 }
3689 
3690 SEXP R_ExternalPtrTag(SEXP s)
3691 {
3692     return CHK(EXTPTR_TAG(CHK(s)));
3693 }
3694 
3695 SEXP R_ExternalPtrProtected(SEXP s)
3696 {
3697     return CHK(EXTPTR_PROT(CHK(s)));
3698 }
3699 
3700 void R_ClearExternalPtr(SEXP s)
3701 {
3702     EXTPTR_PTR(s) = NULL;
3703 }
3704 
3705 void R_SetExternalPtrAddr(SEXP s, void *p)
3706 {
3707     EXTPTR_PTR(s) = p;
3708 }
3709 
3710 void R_SetExternalPtrTag(SEXP s, SEXP tag)
3711 {
3712     FIX_REFCNT(s, EXTPTR_TAG(s), tag);
3713     CHECK_OLD_TO_NEW(s, tag);
3714     EXTPTR_TAG(s) = tag;
3715 }
3716 
3717 void R_SetExternalPtrProtected(SEXP s, SEXP p)
3718 {
3719     FIX_REFCNT(s, EXTPTR_PROT(s), p);
3720     CHECK_OLD_TO_NEW(s, p);
3721     EXTPTR_PROT(s) = p;
3722 }
3723 
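/* Illustrative sketch (assumption): wrapping a C pointer for use from R;
   the payload is hypothetical, and real code would usually also register
   a finalizer (see R_RegisterCFinalizerEx).

       void *handle = malloc(100);                  // hypothetical payload
       SEXP tag = PROTECT(install("my_handle"));
       SEXP ext = PROTECT(R_MakeExternalPtr(handle, tag, R_NilValue));
       void *addr = R_ExternalPtrAddr(ext);         // retrieve the address
       R_ClearExternalPtr(ext);                     // address becomes NULL
       UNPROTECT(2);
*/
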
3724 /*
3725    Added to API in R 3.4.0.
3726    Work around casting issues: works where it is needed.
3727  */
3728 typedef union {void *p; DL_FUNC fn;} fn_ptr;
3729 
3730 SEXP R_MakeExternalPtrFn(DL_FUNC p, SEXP tag, SEXP prot)
3731 {
3732     fn_ptr tmp;
3733     SEXP s = allocSExp(EXTPTRSXP);
3734     tmp.fn = p;
3735     EXTPTR_PTR(s) = tmp.p;
3736     EXTPTR_PROT(s) = CHK(prot); if (prot) INCREMENT_REFCNT(prot);
3737     EXTPTR_TAG(s) = CHK(tag); if (tag) INCREMENT_REFCNT(tag);
3738     return s;
3739 }
3740 
3741 DL_FUNC R_ExternalPtrAddrFn(SEXP s)
3742 {
3743     fn_ptr tmp;
3744     tmp.p =  EXTPTR_PTR(CHK(s));
3745     return tmp.fn;
3746 }
3747 
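/* Illustrative sketch (assumption): round-tripping a native routine
   address through an external pointer; the choice of R_gc as the routine
   is arbitrary.

       DL_FUNC f = (DL_FUNC) &R_gc;
       SEXP ext = PROTECT(R_MakeExternalPtrFn(f, R_NilValue, R_NilValue));
       DL_FUNC back = R_ExternalPtrAddrFn(ext);     // same address back
       UNPROTECT(1);
*/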
3748 
3749 
3750 /* The following functions are replacements for the accessor macros.
3751    They are used by code that does not have direct access to the
3752    internal representation of objects.  The replacement functions
3753    implement the write barrier. */
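
/* Note (added commentary): the definitions below parenthesize the function
   name, as in SEXP (ATTRIB)(SEXP x), so that the like-named macros visible
   with USE_RINTERNALS are not expanded at the definition site and a real,
   linkable function is emitted; the macro is still used inside the body. */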
3754 
3755 /* General Cons Cell Attributes */
3756 SEXP (ATTRIB)(SEXP x) { return CHK(ATTRIB(CHK(x))); }
3757 int (OBJECT)(SEXP x) { return OBJECT(CHK(x)); }
3758 int (MARK)(SEXP x) { return MARK(CHK(x)); }
3759 int (TYPEOF)(SEXP x) { return TYPEOF(CHK(x)); }
3760 int (NAMED)(SEXP x) { return NAMED(CHK(x)); }
3761 int (RTRACE)(SEXP x) { return RTRACE(CHK(x)); }
3762 int (LEVELS)(SEXP x) { return LEVELS(CHK(x)); }
3763 int (REFCNT)(SEXP x) { return REFCNT(CHK(x)); }
3764 int (TRACKREFS)(SEXP x) { return TRACKREFS(CHK(x)); }
3765 int (ALTREP)(SEXP x) { return ALTREP(CHK(x)); }
3766 int (IS_SCALAR)(SEXP x, int type) { return IS_SCALAR(CHK(x), type); }
3767 void (DECREMENT_REFCNT)(SEXP x) { DECREMENT_REFCNT(CHK(x)); }
3768 void (INCREMENT_REFCNT)(SEXP x) { INCREMENT_REFCNT(CHK(x)); }
3769 void (DISABLE_REFCNT)(SEXP x)  { DISABLE_REFCNT(CHK(x)); }
3770 void (ENABLE_REFCNT)(SEXP x) { ENABLE_REFCNT(CHK(x)); }
3771 void (MARK_NOT_MUTABLE)(SEXP x) { MARK_NOT_MUTABLE(CHK(x)); }
3772 int (ASSIGNMENT_PENDING)(SEXP x) { return ASSIGNMENT_PENDING(CHK(x)); }
3773 void (SET_ASSIGNMENT_PENDING)(SEXP x, int v)
3774 {
3775     SET_ASSIGNMENT_PENDING(CHK(x), v);
3776 }
3777 int (IS_ASSIGNMENT_CALL)(SEXP x) { return IS_ASSIGNMENT_CALL(CHK(x)); }
3778 void (MARK_ASSIGNMENT_CALL)(SEXP x) { MARK_ASSIGNMENT_CALL(CHK(x)); }
3779 
3780 void (SET_ATTRIB)(SEXP x, SEXP v) {
3781     if(TYPEOF(v) != LISTSXP && TYPEOF(v) != NILSXP)
3782 	error("value of 'SET_ATTRIB' must be a pairlist or NULL, not a '%s'",
3783 	      type2char(TYPEOF(v)));
3784     FIX_REFCNT(x, ATTRIB(x), v);
3785     CHECK_OLD_TO_NEW(x, v);
3786     ATTRIB(x) = v;
3787 }
3788 void (SET_OBJECT)(SEXP x, int v) { SET_OBJECT(CHK(x), v); }
3789 void (SET_TYPEOF)(SEXP x, int v) { SET_TYPEOF(CHK(x), v); }
3790 void (SET_NAMED)(SEXP x, int v)
3791 {
3792 #ifndef SWITCH_TO_REFCNT
3793     SET_NAMED(CHK(x), v);
3794 #endif
3795 }
3796 void (SET_RTRACE)(SEXP x, int v) { SET_RTRACE(CHK(x), v); }
3797 int (SETLEVELS)(SEXP x, int v) { return SETLEVELS(CHK(x), v); }
3798 void DUPLICATE_ATTRIB(SEXP to, SEXP from) {
3799     SET_ATTRIB(CHK(to), duplicate(CHK(ATTRIB(CHK(from)))));
3800     SET_OBJECT(CHK(to), OBJECT(from));
3801     IS_S4_OBJECT(from) ?  SET_S4_OBJECT(to) : UNSET_S4_OBJECT(to);
3802 }
3803 void SHALLOW_DUPLICATE_ATTRIB(SEXP to, SEXP from) {
3804     SET_ATTRIB(CHK(to), shallow_duplicate(CHK(ATTRIB(CHK(from)))));
3805     SET_OBJECT(CHK(to), OBJECT(from));
3806     IS_S4_OBJECT(from) ?  SET_S4_OBJECT(to) : UNSET_S4_OBJECT(to);
3807 }
3808 
3809 void (ENSURE_NAMEDMAX)(SEXP x) { ENSURE_NAMEDMAX(CHK(x)); }
3810 void (ENSURE_NAMED)(SEXP x) { ENSURE_NAMED(CHK(x)); }
3811 void (SETTER_CLEAR_NAMED)(SEXP x) { SETTER_CLEAR_NAMED(CHK(x)); }
3812 void (RAISE_NAMED)(SEXP x, int n) { RAISE_NAMED(CHK(x), n); }
3813 
3814 /* S4 object testing */
3815 int (IS_S4_OBJECT)(SEXP x){ return IS_S4_OBJECT(CHK(x)); }
3816 void (SET_S4_OBJECT)(SEXP x){ SET_S4_OBJECT(CHK(x)); }
3817 void (UNSET_S4_OBJECT)(SEXP x){ UNSET_S4_OBJECT(CHK(x)); }
3818 
3819 /* JIT optimization support */
3820 int (NOJIT)(SEXP x) { return NOJIT(CHK(x)); }
3821 int (MAYBEJIT)(SEXP x) { return MAYBEJIT(CHK(x)); }
3822 void (SET_NOJIT)(SEXP x) { SET_NOJIT(CHK(x)); }
3823 void (SET_MAYBEJIT)(SEXP x) { SET_MAYBEJIT(CHK(x)); }
3824 void (UNSET_MAYBEJIT)(SEXP x) { UNSET_MAYBEJIT(CHK(x)); }
3825 
3826 /* Growable vector support */
3827 int (IS_GROWABLE)(SEXP x) { return IS_GROWABLE(CHK(x)); }
3828 void (SET_GROWABLE_BIT)(SEXP x) { SET_GROWABLE_BIT(CHK(x)); }
3829 
3830 static int nvec[32] = {
3831     1,1,1,1,1,1,1,1,
3832     1,0,0,1,1,0,0,0,
3833     0,1,1,0,0,1,1,0,
3834     0,1,1,1,1,1,1,1
3835 };
3836 
3837 static R_INLINE SEXP CHK2(SEXP x)
3838 {
3839     x = CHK(x);
3840     if(nvec[TYPEOF(x)])
3841 	error("LENGTH or similar applied to %s object", type2char(TYPEOF(x)));
3842     return x;
3843 }
3844 
3845 /* Vector Accessors */
3846 int (LENGTH)(SEXP x) { return x == R_NilValue ? 0 : LENGTH(CHK2(x)); }
3847 R_xlen_t (XLENGTH)(SEXP x) { return XLENGTH(CHK2(x)); }
3848 R_xlen_t (TRUELENGTH)(SEXP x) { return TRUELENGTH(CHK2(x)); }
3849 
3850 void (SETLENGTH)(SEXP x, R_xlen_t v)
3851 {
3852     if (ALTREP(x))
3853 	error("SETLENGTH() cannot be applied to an ALTVEC object.");
3854     if (! isVector(x))
3855 	error(_("SETLENGTH() can only be applied to a standard vector, "
3856 		"not a '%s'"), type2char(TYPEOF(x)));
3857     SET_STDVEC_LENGTH(CHK2(x), v);
3858 }
3859 
3860 void (SET_TRUELENGTH)(SEXP x, R_xlen_t v) { SET_TRUELENGTH(CHK2(x), v); }
3861 int  (IS_LONG_VEC)(SEXP x) { return IS_LONG_VEC(CHK2(x)); }
3862 #ifdef TESTING_WRITE_BARRIER
3863 R_xlen_t (STDVEC_LENGTH)(SEXP x) { return STDVEC_LENGTH(CHK2(x)); }
3864 R_xlen_t (STDVEC_TRUELENGTH)(SEXP x) { return STDVEC_TRUELENGTH(CHK2(x)); }
3865 void (SETALTREP)(SEXP x, int v) { SETALTREP(x, v); }
3866 #endif
3867 
3868 /* temporary, to ease transition away from remapping */
3869 R_xlen_t Rf_XLENGTH(SEXP x) { return XLENGTH(x); }
3870 
3871 const char *(R_CHAR)(SEXP x) {
3872     if(TYPEOF(x) != CHARSXP) // Han-Tak proposes to prepend  'x && '
3873 	error("%s() can only be applied to a '%s', not a '%s'",
3874 	      "CHAR", "CHARSXP", type2char(TYPEOF(x)));
3875     return (const char *) CHAR(CHK(x));
3876 }
3877 
3878 SEXP (STRING_ELT)(SEXP x, R_xlen_t i) {
3879     if(TYPEOF(x) != STRSXP)
3880 	error("%s() can only be applied to a '%s', not a '%s'",
3881 	      "STRING_ELT", "character vector", type2char(TYPEOF(x)));
3882     if (ALTREP(x))
3883 	return CHK(ALTSTRING_ELT(CHK(x), i));
3884     else {
3885 	SEXP *ps = STDVEC_DATAPTR(CHK(x));
3886 	return CHK(ps[i]);
3887     }
3888 }
3889 
3890 SEXP (VECTOR_ELT)(SEXP x, R_xlen_t i) {
3891     /* We need to allow vector-like types here */
3892     if(TYPEOF(x) != VECSXP &&
3893        TYPEOF(x) != EXPRSXP &&
3894        TYPEOF(x) != WEAKREFSXP)
3895 	error("%s() can only be applied to a '%s', not a '%s'",
3896 	      "VECTOR_ELT", "list", type2char(TYPEOF(x)));
3897     return CHK(VECTOR_ELT(CHK(x), i));
3898 }
3899 
3900 #ifdef CATCH_ZERO_LENGTH_ACCESS
3901 /* Attempts to read or write elements of a zero length vector will
3902    result in a segfault, rather than reading or writing random memory.
3903    Returning NULL would be more natural, but Matrix seems to assume
3904    that even zero-length vectors have non-NULL data pointers, so
3905    return (void *) 1 instead. Zero-length CHARSXP objects still have a
3906    trailing zero byte so they are not handled. */
3907 # define CHKZLN(x) do {					   \
3908 	CHK(x);						   \
3909 	if (STDVEC_LENGTH(x) == 0 && TYPEOF(x) != CHARSXP) \
3910 	    return (void *) 1;				   \
3911     } while (0)
3912 #else
3913 # define CHKZLN(x) do { } while (0)
3914 #endif
3915 
3916 void *(STDVEC_DATAPTR)(SEXP x)
3917 {
3918     if (ALTREP(x))
3919 	error("cannot get STDVEC_DATAPTR from ALTREP object");
3920     if (! isVector(x) && TYPEOF(x) != WEAKREFSXP)
3921 	error("STDVEC_DATAPTR can only be applied to a vector, not a '%s'",
3922 	      type2char(TYPEOF(x)));
3923     CHKZLN(x);
3924     return STDVEC_DATAPTR(x);
3925 }
3926 
3927 int *(LOGICAL)(SEXP x) {
3928     if(TYPEOF(x) != LGLSXP)
3929 	error("%s() can only be applied to a '%s', not a '%s'",
3930 	      "LOGICAL",  "logical", type2char(TYPEOF(x)));
3931     CHKZLN(x);
3932     return LOGICAL(x);
3933 }
3934 
3935 const int *(LOGICAL_RO)(SEXP x) {
3936     if(TYPEOF(x) != LGLSXP)
3937 	error("%s() can only be applied to a '%s', not a '%s'",
3938 	      "LOGICAL",  "logical", type2char(TYPEOF(x)));
3939     CHKZLN(x);
3940     return LOGICAL_RO(x);
3941 }
3942 
3943 /* Maybe this should exclude logicals, but it is widely used */
3944 int *(INTEGER)(SEXP x) {
3945     if(TYPEOF(x) != INTSXP && TYPEOF(x) != LGLSXP)
3946 	error("%s() can only be applied to a '%s', not a '%s'",
3947 	      "INTEGER", "integer", type2char(TYPEOF(x)));
3948     CHKZLN(x);
3949     return INTEGER(x);
3950 }
3951 
3952 const int *(INTEGER_RO)(SEXP x) {
3953     if(TYPEOF(x) != INTSXP && TYPEOF(x) != LGLSXP)
3954 	error("%s() can only be applied to a '%s', not a '%s'",
3955 	      "INTEGER", "integer", type2char(TYPEOF(x)));
3956     CHKZLN(x);
3957     return INTEGER_RO(x);
3958 }
3959 
3960 Rbyte *(RAW)(SEXP x) {
3961     if(TYPEOF(x) != RAWSXP)
3962 	error("%s() can only be applied to a '%s', not a '%s'",
3963 	      "RAW", "raw", type2char(TYPEOF(x)));
3964     CHKZLN(x);
3965     return RAW(x);
3966 }
3967 
3968 const Rbyte *(RAW_RO)(SEXP x) {
3969     if(TYPEOF(x) != RAWSXP)
3970 	error("%s() can only be applied to a '%s', not a '%s'",
3971 	      "RAW", "raw", type2char(TYPEOF(x)));
3972     CHKZLN(x);
3973     return RAW(x);
3974 }
3975 
3976 double *(REAL)(SEXP x) {
3977     if(TYPEOF(x) != REALSXP)
3978 	error("%s() can only be applied to a '%s', not a '%s'",
3979 	      "REAL", "numeric", type2char(TYPEOF(x)));
3980     CHKZLN(x);
3981     return REAL(x);
3982 }
3983 
3984 const double *(REAL_RO)(SEXP x) {
3985     if(TYPEOF(x) != REALSXP)
3986 	error("%s() can only be applied to a '%s', not a '%s'",
3987 	      "REAL", "numeric", type2char(TYPEOF(x)));
3988     CHKZLN(x);
3989     return REAL_RO(x);
3990 }
3991 
3992 Rcomplex *(COMPLEX)(SEXP x) {
3993     if(TYPEOF(x) != CPLXSXP)
3994 	error("%s() can only be applied to a '%s', not a '%s'",
3995 	      "COMPLEX", "complex", type2char(TYPEOF(x)));
3996     CHKZLN(x);
3997     return COMPLEX(x);
3998 }
3999 
4000 const Rcomplex *(COMPLEX_RO)(SEXP x) {
4001     if(TYPEOF(x) != CPLXSXP)
4002 	error("%s() can only be applied to a '%s', not a '%s'",
4003 	      "COMPLEX", "complex", type2char(TYPEOF(x)));
4004     CHKZLN(x);
4005     return COMPLEX_RO(x);
4006 }
4007 
4008 SEXP *(STRING_PTR)(SEXP x) {
4009     if(TYPEOF(x) != STRSXP)
4010 	error("%s() can only be applied to a '%s', not a '%s'",
4011 	      "STRING_PTR", "character", type2char(TYPEOF(x)));
4012     CHKZLN(x);
4013     return STRING_PTR(x);
4014 }
4015 
4016 const SEXP *(STRING_PTR_RO)(SEXP x) {
4017     if(TYPEOF(x) != STRSXP)
4018 	error("%s() can only be applied to a '%s', not a '%s'",
4019 	      "STRING_PTR_RO", "character", type2char(TYPEOF(x)));
4020     CHKZLN(x);
4021     return STRING_PTR_RO(x);
4022 }
4023 
4024 SEXP * NORET (VECTOR_PTR)(SEXP x)
4025 {
4026   error(_("not safe to return vector pointer"));
4027 }
4028 
4029 void (SET_STRING_ELT)(SEXP x, R_xlen_t i, SEXP v) {
4030     if(TYPEOF(CHK(x)) != STRSXP)
4031 	error("%s() can only be applied to a '%s', not a '%s'",
4032 	      "SET_STRING_ELT", "character vector", type2char(TYPEOF(x)));
4033     if(TYPEOF(CHK(v)) != CHARSXP)
4034        error("Value of SET_STRING_ELT() must be a 'CHARSXP' not a '%s'",
4035 	     type2char(TYPEOF(v)));
4036     if (i < 0 || i >= XLENGTH(x))
4037 	error(_("attempt to set index %lld/%lld in SET_STRING_ELT"),
4038 	      (long long)i, (long long)XLENGTH(x));
4039     CHECK_OLD_TO_NEW(x, v);
4040     if (ALTREP(x))
4041 	ALTSTRING_SET_ELT(x, i, v);
4042     else {
4043 	SEXP *ps = STDVEC_DATAPTR(x);
4044 	FIX_REFCNT(x, ps[i], v);
4045 	ps[i] = v;
4046     }
4047 }
4048 
4049 SEXP (SET_VECTOR_ELT)(SEXP x, R_xlen_t i, SEXP v) {
4050     /*  we need to allow vector-like types here */
4051     if(TYPEOF(x) != VECSXP &&
4052        TYPEOF(x) != EXPRSXP &&
4053        TYPEOF(x) != WEAKREFSXP) {
4054 	error("%s() can only be applied to a '%s', not a '%s'",
4055 	      "SET_VECTOR_ELT", "list", type2char(TYPEOF(x)));
4056     }
4057     if (i < 0 || i >= XLENGTH(x))
4058 	error(_("attempt to set index %lld/%lld in SET_VECTOR_ELT"),
4059 	      (long long)i, (long long)XLENGTH(x));
4060     FIX_REFCNT(x, VECTOR_ELT(x, i), v);
4061     CHECK_OLD_TO_NEW(x, v);
4062     return VECTOR_ELT(x, i) = v;
4063 }
4064 
4065 /* check for a CONS-like object */
4066 #ifdef TESTING_WRITE_BARRIER
4067 static R_INLINE SEXP CHKCONS(SEXP e)
4068 {
4069     if (ALTREP(e))
4070 	return CHK(e);
4071     switch (TYPEOF(e)) {
4072     case LISTSXP:
4073     case LANGSXP:
4074     case NILSXP:
4075     case DOTSXP:
4076     case CLOSXP:    /**** use separate accessors? */
4077     case BCODESXP:  /**** use separate accessors? */
4078     case ENVSXP:    /**** use separate accessors? */
4079     case PROMSXP:   /**** use separate accessors? */
4080     case EXTPTRSXP: /**** use separate accessors? */
4081 	return CHK(e);
4082     default:
4083 	error("CAR/CDR/TAG or similar applied to %s object",
4084 	      type2char(TYPEOF(e)));
4085     }
4086 }
4087 #else
4088 #define CHKCONS(e) CHK(e)
4089 #endif
4090 
4091 attribute_hidden
4092 int (BNDCELL_TAG)(SEXP cell) { return BNDCELL_TAG(cell); }
4093 attribute_hidden
4094 void (SET_BNDCELL_TAG)(SEXP cell, int val) { SET_BNDCELL_TAG(cell, val); }
4095 attribute_hidden
4096 double (BNDCELL_DVAL)(SEXP cell) { return BNDCELL_DVAL(cell); }
4097 attribute_hidden
4098 int (BNDCELL_IVAL)(SEXP cell) { return BNDCELL_IVAL(cell); }
4099 attribute_hidden
4100 int (BNDCELL_LVAL)(SEXP cell) { return BNDCELL_LVAL(cell); }
4101 attribute_hidden
4102 void (SET_BNDCELL_DVAL)(SEXP cell, double v) { SET_BNDCELL_DVAL(cell, v); }
4103 attribute_hidden
4104 void (SET_BNDCELL_IVAL)(SEXP cell, int v) { SET_BNDCELL_IVAL(cell, v); }
4105 attribute_hidden
4106 void (SET_BNDCELL_LVAL)(SEXP cell, int v) { SET_BNDCELL_LVAL(cell, v); }
4107 attribute_hidden
4108 void (INIT_BNDCELL)(SEXP cell, int type) { INIT_BNDCELL(cell, type); }
4109 
4110 #define CLEAR_BNDCELL_TAG(cell) do {		\
4111 	if (BNDCELL_TAG(cell)) {		\
4112 	    CAR0(cell) = R_NilValue;		\
4113 	    SET_BNDCELL_TAG(cell, 0);		\
4114 	}					\
4115     } while (0)
4116 
4117 attribute_hidden
4118 void SET_BNDCELL(SEXP cell, SEXP val)
4119 {
4120     CLEAR_BNDCELL_TAG(cell);
4121     SETCAR(cell, val);
4122 }
4123 
4124 attribute_hidden void R_expand_binding_value(SEXP b)
4125 {
4126 #if BOXED_BINDING_CELLS
4127     SET_BNDCELL_TAG(b, 0);
4128 #else
4129     int typetag = BNDCELL_TAG(b);
4130     if (typetag) {
4131 	union {
4132 	    SEXP sxpval;
4133 	    double dval;
4134 	    int ival;
4135 	} vv;
4136 	SEXP val;
4137 	vv.sxpval = CAR0(b);
4138 	switch (typetag) {
4139 	case REALSXP:
4140 	    PROTECT(b);
4141 	    val = ScalarReal(vv.dval);
4142 	    SET_BNDCELL(b, val);
4143 	    INCREMENT_NAMED(val);
4144 	    UNPROTECT(1);
4145 	    break;
4146 	case INTSXP:
4147 	    PROTECT(b);
4148 	    val = ScalarInteger(vv.ival);
4149 	    SET_BNDCELL(b, val);
4150 	    INCREMENT_NAMED(val);
4151 	    UNPROTECT(1);
4152 	    break;
4153 	case LGLSXP:
4154 	    PROTECT(b);
4155 	    val = ScalarLogical(vv.ival);
4156 	    SET_BNDCELL(b, val);
4157 	    INCREMENT_NAMED(val);
4158 	    UNPROTECT(1);
4159 	    break;
4160 	}
4161     }
4162 #endif
4163 }
4164 
4165 void attribute_hidden R_args_enable_refcnt(SEXP args)
4166 {
4167 #ifdef SWITCH_TO_REFCNT
4168     /* args is escaping into user C code and might get captured, so
4169        make sure it is reference counting. Should be able to get rid
4170        of this function if we reduce use of CONS_NR. */
4171     for (SEXP a = args; a != R_NilValue; a = CDR(a))
4172 	if (! TRACKREFS(a)) {
4173 	    ENABLE_REFCNT(a);
4174 	    INCREMENT_REFCNT(CAR(a));
4175 	    INCREMENT_REFCNT(CDR(a));
4176 #ifdef TESTING_WRITE_BARRIER
4177 	    /* this should not see non-tracking arguments */
4178 	    if (! TRACKREFS(CAR(a)))
4179 		error("argument not tracking references");
4180 #endif
4181 	}
4182 #endif
4183 }
4184 
4185 /* List Accessors */
4186 SEXP (TAG)(SEXP e) { return CHK(TAG(CHKCONS(e))); }
4187 SEXP (CAR0)(SEXP e) { return CHK(CAR0(CHKCONS(e))); }
4188 SEXP (CDR)(SEXP e) { return CHK(CDR(CHKCONS(e))); }
4189 SEXP (CAAR)(SEXP e) { return CHK(CAAR(CHKCONS(e))); }
4190 SEXP (CDAR)(SEXP e) { return CHK(CDAR(CHKCONS(e))); }
4191 SEXP (CADR)(SEXP e) { return CHK(CADR(CHKCONS(e))); }
4192 SEXP (CDDR)(SEXP e) { return CHK(CDDR(CHKCONS(e))); }
4193 SEXP (CDDDR)(SEXP e) { return CHK(CDDDR(CHKCONS(e))); }
4194 SEXP (CADDR)(SEXP e) { return CHK(CADDR(CHKCONS(e))); }
4195 SEXP (CADDDR)(SEXP e) { return CHK(CADDDR(CHKCONS(e))); }
4196 SEXP (CAD4R)(SEXP e) { return CHK(CAD4R(CHKCONS(e))); }
4197 int (MISSING)(SEXP x) { return MISSING(CHKCONS(x)); }
4198 
4199 void (SET_TAG)(SEXP x, SEXP v)
4200 {
4201     if (CHKCONS(x) == NULL || x == R_NilValue)
4202 	error(_("bad value"));
4203     FIX_REFCNT(x, TAG(x), v);
4204     CHECK_OLD_TO_NEW(x, v);
4205     TAG(x) = v;
4206 }
4207 
4208 SEXP (SETCAR)(SEXP x, SEXP y)
4209 {
4210     if (CHKCONS(x) == NULL || x == R_NilValue)
4211 	error(_("bad value"));
4212     CLEAR_BNDCELL_TAG(x);
4213     if (y == CAR(x))
4214 	return y;
4215     FIX_BINDING_REFCNT(x, CAR(x), y);
4216     CHECK_OLD_TO_NEW(x, y);
4217     CAR0(x) = y;
4218     return y;
4219 }
4220 
4221 SEXP (SETCDR)(SEXP x, SEXP y)
4222 {
4223     if (CHKCONS(x) == NULL || x == R_NilValue)
4224 	error(_("bad value"));
4225     FIX_REFCNT(x, CDR(x), y);
4226 #ifdef TESTING_WRITE_BARRIER
4227     /* this should not add a non-tracking CDR to a tracking cell */
4228     if (TRACKREFS(x) && y && ! TRACKREFS(y))
4229 	error("inserting non-tracking CDR in tracking cell");
4230 #endif
4231     CHECK_OLD_TO_NEW(x, y);
4232     CDR(x) = y;
4233     return y;
4234 }
4235 
4236 SEXP (SETCADR)(SEXP x, SEXP y)
4237 {
4238     SEXP cell;
4239     if (CHKCONS(x) == NULL || x == R_NilValue ||
4240 	CHKCONS(CDR(x)) == NULL || CDR(x) == R_NilValue)
4241 	error(_("bad value"));
4242     cell = CDR(x);
4243     CLEAR_BNDCELL_TAG(cell);
4244     FIX_REFCNT(cell, CAR(cell), y);
4245     CHECK_OLD_TO_NEW(cell, y);
4246     CAR0(cell) = y;
4247     return y;
4248 }
4249 
4250 SEXP (SETCADDR)(SEXP x, SEXP y)
4251 {
4252     SEXP cell;
4253     if (CHKCONS(x) == NULL || x == R_NilValue ||
4254 	CHKCONS(CDR(x)) == NULL || CDR(x) == R_NilValue ||
4255 	CHKCONS(CDDR(x)) == NULL || CDDR(x) == R_NilValue)
4256 	error(_("bad value"));
4257     cell = CDDR(x);
4258     CLEAR_BNDCELL_TAG(cell);
4259     FIX_REFCNT(cell, CAR(cell), y);
4260     CHECK_OLD_TO_NEW(cell, y);
4261     CAR0(cell) = y;
4262     return y;
4263 }
4264 
4265 SEXP (SETCADDDR)(SEXP x, SEXP y)
4266 {
4267     SEXP cell;
4268     if (CHKCONS(x) == NULL || x == R_NilValue ||
4269 	CHKCONS(CDR(x)) == NULL || CDR(x) == R_NilValue ||
4270 	CHKCONS(CDDR(x)) == NULL || CDDR(x) == R_NilValue ||
4271 	CHKCONS(CDDDR(x)) == NULL || CDDDR(x) == R_NilValue)
4272 	error(_("bad value"));
4273     cell = CDDDR(x);
4274     CLEAR_BNDCELL_TAG(cell);
4275     FIX_REFCNT(cell, CAR(cell), y);
4276     CHECK_OLD_TO_NEW(cell, y);
4277     CAR0(cell) = y;
4278     return y;
4279 }
4280 
4281 #define CD4R(x) CDR(CDR(CDR(CDR(x))))
4282 
4283 SEXP (SETCAD4R)(SEXP x, SEXP y)
4284 {
4285     SEXP cell;
4286     if (CHKCONS(x) == NULL || x == R_NilValue ||
4287 	CHKCONS(CDR(x)) == NULL || CDR(x) == R_NilValue ||
4288 	CHKCONS(CDDR(x)) == NULL || CDDR(x) == R_NilValue ||
4289 	CHKCONS(CDDDR(x)) == NULL || CDDDR(x) == R_NilValue ||
4290 	CHKCONS(CD4R(x)) == NULL || CD4R(x) == R_NilValue)
4291 	error(_("bad value"));
4292     cell = CD4R(x);
4293     CLEAR_BNDCELL_TAG(cell);
4294     FIX_REFCNT(cell, CAR(cell), y);
4295     CHECK_OLD_TO_NEW(cell, y);
4296     CAR0(cell) = y;
4297     return y;
4298 }
4299 
4300 void *(EXTPTR_PTR)(SEXP x) { return EXTPTR_PTR(CHK(x)); }
4301 
4302 void (SET_MISSING)(SEXP x, int v) { SET_MISSING(CHKCONS(x), v); }
4303 
4304 /* Closure Accessors */
4305 SEXP (FORMALS)(SEXP x) { return CHK(FORMALS(CHK(x))); }
4306 SEXP (BODY)(SEXP x) { return CHK(BODY(CHK(x))); }
4307 SEXP (CLOENV)(SEXP x) { return CHK(CLOENV(CHK(x))); }
4308 int (RDEBUG)(SEXP x) { return RDEBUG(CHK(x)); }
4309 int (RSTEP)(SEXP x) { return RSTEP(CHK(x)); }
4310 
4311 void (SET_FORMALS)(SEXP x, SEXP v) { FIX_REFCNT(x, FORMALS(x), v); CHECK_OLD_TO_NEW(x, v); FORMALS(x) = v; }
4312 void (SET_BODY)(SEXP x, SEXP v) { FIX_REFCNT(x, BODY(x), v); CHECK_OLD_TO_NEW(x, v); BODY(x) = v; }
4313 void (SET_CLOENV)(SEXP x, SEXP v) { FIX_REFCNT(x, CLOENV(x), v); CHECK_OLD_TO_NEW(x, v); CLOENV(x) = v; }
4314 void (SET_RDEBUG)(SEXP x, int v) { SET_RDEBUG(CHK(x), v); }
4315 void (SET_RSTEP)(SEXP x, int v) { SET_RSTEP(CHK(x), v); }
4316 
4317 /* These are only needed with the write barrier on */
4318 #ifdef TESTING_WRITE_BARRIER
4319 /* Primitive Accessors */
4320 /* not hidden since needed in some base packages */
4321 int (PRIMOFFSET)(SEXP x) { return PRIMOFFSET(CHK(x)); }
4322 attribute_hidden
4323 void (SET_PRIMOFFSET)(SEXP x, int v) { SET_PRIMOFFSET(CHK(x), v); }
4324 #endif
4325 
4326 /* Symbol Accessors */
4327 SEXP (PRINTNAME)(SEXP x) { return CHK(PRINTNAME(CHK(x))); }
4328 SEXP (SYMVALUE)(SEXP x) { return CHK(SYMVALUE(CHK(x))); }
4329 SEXP (INTERNAL)(SEXP x) { return CHK(INTERNAL(CHK(x))); }
4330 int (DDVAL)(SEXP x) { return DDVAL(CHK(x)); }
4331 
4332 void (SET_PRINTNAME)(SEXP x, SEXP v) { FIX_REFCNT(x, PRINTNAME(x), v); CHECK_OLD_TO_NEW(x, v); PRINTNAME(x) = v; }
4333 
4334 void (SET_SYMVALUE)(SEXP x, SEXP v)
4335 {
4336     if (SYMVALUE(x) == v)
4337 	return;
4338     FIX_BINDING_REFCNT(x, SYMVALUE(x), v);
4339     CHECK_OLD_TO_NEW(x, v);
4340     SYMVALUE(x) = v;
4341 }
4342 
4343 void (SET_INTERNAL)(SEXP x, SEXP v) { FIX_REFCNT(x, INTERNAL(x), v); CHECK_OLD_TO_NEW(x, v); INTERNAL(x) = v; }
4344 void (SET_DDVAL)(SEXP x, int v) { SET_DDVAL(CHK(x), v); }
4345 
4346 /* Environment Accessors */
4347 SEXP (FRAME)(SEXP x) { return CHK(FRAME(CHK(x))); }
4348 SEXP (ENCLOS)(SEXP x) { return CHK(ENCLOS(CHK(x))); }
4349 SEXP (HASHTAB)(SEXP x) { return CHK(HASHTAB(CHK(x))); }
4350 int (ENVFLAGS)(SEXP x) { return ENVFLAGS(CHK(x)); }
4351 
4352 void (SET_FRAME)(SEXP x, SEXP v) { FIX_REFCNT(x, FRAME(x), v); CHECK_OLD_TO_NEW(x, v); FRAME(x) = v; }
4353 void (SET_ENCLOS)(SEXP x, SEXP v) { FIX_REFCNT(x, ENCLOS(x), v); CHECK_OLD_TO_NEW(x, v); ENCLOS(x) = v; }
4354 void (SET_HASHTAB)(SEXP x, SEXP v) { FIX_REFCNT(x, HASHTAB(x), v); CHECK_OLD_TO_NEW(x, v); HASHTAB(x) = v; }
4355 void (SET_ENVFLAGS)(SEXP x, int v) { SET_ENVFLAGS(x, v); }
4356 
4357 /* Promise Accessors */
4358 SEXP (PRCODE)(SEXP x) { return CHK(PRCODE(CHK(x))); }
4359 SEXP (PRENV)(SEXP x) { return CHK(PRENV(CHK(x))); }
4360 SEXP (PRVALUE)(SEXP x) { return CHK(PRVALUE(CHK(x))); }
4361 int (PRSEEN)(SEXP x) { return PRSEEN(CHK(x)); }
4362 
4363 void (SET_PRENV)(SEXP x, SEXP v){ FIX_REFCNT(x, PRENV(x), v); CHECK_OLD_TO_NEW(x, v); PRENV(x) = v; }
4364 void (SET_PRVALUE)(SEXP x, SEXP v) { FIX_REFCNT(x, PRVALUE(x), v); CHECK_OLD_TO_NEW(x, v); PRVALUE(x) = v; }
4365 void (SET_PRCODE)(SEXP x, SEXP v) { FIX_REFCNT(x, PRCODE(x), v); CHECK_OLD_TO_NEW(x, v); PRCODE(x) = v; }
4366 void (SET_PRSEEN)(SEXP x, int v) { SET_PRSEEN(CHK(x), v); }
4367 
4368 /* Hashing Accessors */
4369 #ifdef TESTING_WRITE_BARRIER
4370 attribute_hidden
4371 int (HASHASH)(SEXP x) { return HASHASH(CHK(x)); }
4372 attribute_hidden
4373 int (HASHVALUE)(SEXP x) { return HASHVALUE(CHK(x)); }
4374 
4375 attribute_hidden
4376 void (SET_HASHASH)(SEXP x, int v) { SET_HASHASH(CHK(x), v); }
4377 attribute_hidden
4378 void (SET_HASHVALUE)(SEXP x, int v) { SET_HASHVALUE(CHK(x), v); }
4379 #endif
4380 
4381 attribute_hidden
4382 SEXP (SET_CXTAIL)(SEXP x, SEXP v) {
4383 #ifdef USE_TYPE_CHECKING
4384     if(TYPEOF(v) != CHARSXP && TYPEOF(v) != NILSXP)
4385 	error("value of 'SET_CXTAIL' must be a char or NULL, not a '%s'",
4386 	      type2char(TYPEOF(v)));
4387 #endif
4388     /*CHECK_OLD_TO_NEW(x, v); *//* not needed since not properly traced */
4389     ATTRIB(x) = v;
4390     return x;
4391 }
4392 
4393 /* Test functions */
4394 Rboolean Rf_isNull(SEXP s) { return isNull(CHK(s)); }
4395 Rboolean Rf_isSymbol(SEXP s) { return isSymbol(CHK(s)); }
4396 Rboolean Rf_isLogical(SEXP s) { return isLogical(CHK(s)); }
4397 Rboolean Rf_isReal(SEXP s) { return isReal(CHK(s)); }
4398 Rboolean Rf_isComplex(SEXP s) { return isComplex(CHK(s)); }
4399 Rboolean Rf_isExpression(SEXP s) { return isExpression(CHK(s)); }
4400 Rboolean Rf_isEnvironment(SEXP s) { return isEnvironment(CHK(s)); }
4401 Rboolean Rf_isString(SEXP s) { return isString(CHK(s)); }
4402 Rboolean Rf_isObject(SEXP s) { return isObject(CHK(s)); }
4403 
4404 /* Bindings accessors */
4405 Rboolean attribute_hidden
4406 (IS_ACTIVE_BINDING)(SEXP b) {return IS_ACTIVE_BINDING(CHK(b));}
4407 Rboolean attribute_hidden
4408 (BINDING_IS_LOCKED)(SEXP b) {return BINDING_IS_LOCKED(CHK(b));}
4409 void attribute_hidden
4410 (SET_ACTIVE_BINDING_BIT)(SEXP b) {SET_ACTIVE_BINDING_BIT(CHK(b));}
4411 void attribute_hidden (LOCK_BINDING)(SEXP b) {LOCK_BINDING(CHK(b));}
4412 void attribute_hidden (UNLOCK_BINDING)(SEXP b) {UNLOCK_BINDING(CHK(b));}
4413 
4414 attribute_hidden
4415 void (SET_BASE_SYM_CACHED)(SEXP b) { SET_BASE_SYM_CACHED(CHK(b)); }
4416 attribute_hidden
4417 void (UNSET_BASE_SYM_CACHED)(SEXP b) { UNSET_BASE_SYM_CACHED(CHK(b)); }
4418 attribute_hidden
4419 Rboolean (BASE_SYM_CACHED)(SEXP b) { return BASE_SYM_CACHED(CHK(b)); }
4420 
4421 attribute_hidden
4422 void (SET_SPECIAL_SYMBOL)(SEXP b) { SET_SPECIAL_SYMBOL(CHK(b)); }
4423 attribute_hidden
4424 void (UNSET_SPECIAL_SYMBOL)(SEXP b) { UNSET_SPECIAL_SYMBOL(CHK(b)); }
4425 attribute_hidden
4426 Rboolean (IS_SPECIAL_SYMBOL)(SEXP b) { return IS_SPECIAL_SYMBOL(CHK(b)); }
4427 attribute_hidden
4428 void (SET_NO_SPECIAL_SYMBOLS)(SEXP b) { SET_NO_SPECIAL_SYMBOLS(CHK(b)); }
4429 attribute_hidden
4430 void (UNSET_NO_SPECIAL_SYMBOLS)(SEXP b) { UNSET_NO_SPECIAL_SYMBOLS(CHK(b)); }
4431 attribute_hidden
4432 Rboolean (NO_SPECIAL_SYMBOLS)(SEXP b) { return NO_SPECIAL_SYMBOLS(CHK(b)); }
4433 
4434 /* R_FunTab accessors, only needed when write barrier is on */
4435 /* Not hidden to allow experimentation without rebuilding R - LT */
4436 /* attribute_hidden */
4437 int (PRIMVAL)(SEXP x) { return PRIMVAL(CHK(x)); }
4438 /* attribute_hidden */
4439 CCODE (PRIMFUN)(SEXP x) { return PRIMFUN(CHK(x)); }
4440 /* attribute_hidden */
4441 void (SET_PRIMFUN)(SEXP x, CCODE f) { PRIMFUN(CHK(x)) = f; }
4442 
4443 /* for use when testing the write barrier */
4444 int  attribute_hidden (IS_BYTES)(SEXP x) { return IS_BYTES(CHK(x)); }
4445 int  attribute_hidden (IS_LATIN1)(SEXP x) { return IS_LATIN1(CHK(x)); }
4446 /* Next two are used in package utils */
4447 int  (IS_ASCII)(SEXP x) { return IS_ASCII(CHK(x)); }
4448 int  (IS_UTF8)(SEXP x) { return IS_UTF8(CHK(x)); }
4449 void attribute_hidden (SET_BYTES)(SEXP x) { SET_BYTES(CHK(x)); }
4450 void attribute_hidden (SET_LATIN1)(SEXP x) { SET_LATIN1(CHK(x)); }
4451 void attribute_hidden (SET_UTF8)(SEXP x) { SET_UTF8(CHK(x)); }
4452 void attribute_hidden (SET_ASCII)(SEXP x) { SET_ASCII(CHK(x)); }
4453 int  (ENC_KNOWN)(SEXP x) { return ENC_KNOWN(CHK(x)); }
4454 void attribute_hidden (SET_CACHED)(SEXP x) { SET_CACHED(CHK(x)); }
4455 int  (IS_CACHED)(SEXP x) { return IS_CACHED(CHK(x)); }
4456 
4457 /*******************************************/
4458 /* Non-sampling memory use profiler
4459    reports all large vector heap
4460    allocations and all calls to GetNewPage */
4461 /*******************************************/
4462 
4463 #ifndef R_MEMORY_PROFILING
4464 
4465 SEXP NORET do_Rprofmem(SEXP args)
4466 {
4467     error(_("memory profiling is not available on this system"));
4468 }
4469 
4470 #else
4471 static int R_IsMemReporting;  /* Rboolean more appropriate? */
4472 static FILE *R_MemReportingOutfile;
4473 static R_size_t R_MemReportingThreshold;
4474 
4475 static void R_OutputStackTrace(FILE *file)
4476 {
4477     RCNTXT *cptr;
4478 
4479     for (cptr = R_GlobalContext; cptr; cptr = cptr->nextcontext) {
4480 	if ((cptr->callflag & (CTXT_FUNCTION | CTXT_BUILTIN))
4481 	    && TYPEOF(cptr->call) == LANGSXP) {
4482 	    SEXP fun = CAR(cptr->call);
4483 	    fprintf(file, "\"%s\" ",
4484 		    TYPEOF(fun) == SYMSXP ? CHAR(PRINTNAME(fun)) :
4485 		    "<Anonymous>");
4486 	}
4487     }
4488 }
4489 
4490 static void R_ReportAllocation(R_size_t size)
4491 {
4492     if (R_IsMemReporting) {
4493 	if(size > R_MemReportingThreshold) {
4494 	    fprintf(R_MemReportingOutfile, "%lu :", (unsigned long) size);
4495 	    R_OutputStackTrace(R_MemReportingOutfile);
4496 	    fprintf(R_MemReportingOutfile, "\n");
4497 	}
4498     }
4499     return;
4500 }
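
/* For illustration only (the call names and size are made up): a line
   written by R_ReportAllocation has the form

       1000048 :"matrix" "apply"

   i.e. the allocation size in bytes, a colon, then the call stack from
   R_OutputStackTrace, innermost call first. */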
4501 
4502 static void R_ReportNewPage(void)
4503 {
4504     if (R_IsMemReporting) {
4505 	fprintf(R_MemReportingOutfile, "new page:");
4506 	R_OutputStackTrace(R_MemReportingOutfile);
4507 	fprintf(R_MemReportingOutfile, "\n");
4508     }
4509     return;
4510 }
4511 
4512 static void R_EndMemReporting()
4513 {
4514     if(R_MemReportingOutfile != NULL) {
4515 	/* doesn't fclose always flush? */
4516 	fflush(R_MemReportingOutfile);
4517 	fclose(R_MemReportingOutfile);
4518 	R_MemReportingOutfile=NULL;
4519     }
4520     R_IsMemReporting = 0;
4521     return;
4522 }
4523 
4524 static void R_InitMemReporting(SEXP filename, int append,
4525 			       R_size_t threshold)
4526 {
4527     if(R_MemReportingOutfile != NULL) R_EndMemReporting();
4528     R_MemReportingOutfile = RC_fopen(filename, append ? "a" : "w", TRUE);
4529     if (R_MemReportingOutfile == NULL)
4530 	error(_("Rprofmem: cannot open output file '%s'"), filename);
4531     R_MemReportingThreshold = threshold;
4532     R_IsMemReporting = 1;
4533     return;
4534 }
4535 
4536 SEXP do_Rprofmem(SEXP args)
4537 {
4538     SEXP filename;
4539     R_size_t threshold;
4540     int append_mode;
4541 
4542     if (!isString(CAR(args)) || (LENGTH(CAR(args))) != 1)
4543 	error(_("invalid '%s' argument"), "filename");
4544     append_mode = asLogical(CADR(args));
4545     filename = STRING_ELT(CAR(args), 0);
4546     threshold = (R_size_t) REAL(CADDR(args))[0];
4547     if (strlen(CHAR(filename)))
4548 	R_InitMemReporting(filename, append_mode, threshold);
4549     else
4550 	R_EndMemReporting();
4551     return R_NilValue;
4552 }
4553 
4554 #endif /* R_MEMORY_PROFILING */
4555 
4556 /* RBufferUtils, moved from deparse.c */
4557 
4558 #include "RBufferUtils.h"
4559 
4560 void *R_AllocStringBuffer(size_t blen, R_StringBuffer *buf)
4561 {
4562     size_t blen1, bsize = buf->defaultSize;
4563 
4564     /* for backwards compatibility, this used to free the buffer */
4565     if(blen == (size_t)-1)
4566 	error("R_AllocStringBuffer( (size_t)-1 ) is no longer allowed");
4567 
4568     if(blen * sizeof(char) < buf->bufsize) return buf->data;
4569     blen1 = blen = (blen + 1) * sizeof(char);
4570     blen = (blen / bsize) * bsize;
4571     if(blen < blen1) blen += bsize;
4572 
4573     if(buf->data == NULL) {
4574 	buf->data = (char *) malloc(blen);
4575 	if(buf->data)
4576 	    buf->data[0] = '\0';
4577     } else
4578 	buf->data = (char *) realloc(buf->data, blen);
4579     buf->bufsize = blen;
4580     if(!buf->data) {
4581 	buf->bufsize = 0;
4582 	/* don't translate internal error message */
4583 	error("could not allocate memory (%u Mb) in C function 'R_AllocStringBuffer'",
4584 	      (unsigned int) blen/1024/1024);
4585     }
4586     return buf->data;
4587 }
4588 
4589 void
4590 R_FreeStringBuffer(R_StringBuffer *buf)
4591 {
4592     if (buf->data != NULL) {
4593 	free(buf->data);
4594 	buf->bufsize = 0;
4595 	buf->data = NULL;
4596     }
4597 }
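
/* Illustrative sketch (assumption, mirroring uses elsewhere in the
   sources; MAXELTSIZE is taken to come from Defn.h):

       static R_StringBuffer cbuff = {NULL, 0, MAXELTSIZE};

       char *p = R_AllocStringBuffer(100, &cbuff);  // >= 101 bytes available
       snprintf(p, 101, "%d items", 42);
       R_FreeStringBuffer(&cbuff);
*/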
4598 
4599 void attribute_hidden
4600 R_FreeStringBufferL(R_StringBuffer *buf)
4601 {
4602     if (buf->bufsize > buf->defaultSize) {
4603 	free(buf->data);
4604 	buf->bufsize = 0;
4605 	buf->data = NULL;
4606     }
4607 }
4608 
4609 /* ======== This needs direct access to gp field for efficiency ======== */
4610 
4611 /* this has NA_STRING = NA_STRING */
4612 attribute_hidden
4613 int Seql(SEXP a, SEXP b)
4614 {
4615     /* The only case where pointer comparisons do not suffice is where
4616       we have two strings in different encodings (which must be
4617       non-ASCII strings). Note that one of the strings could be marked
4618       as unknown. */
4619     if (a == b) return 1;
4620     /* Leave this to compiler to optimize */
4621     if (IS_CACHED(a) && IS_CACHED(b) && ENC_KNOWN(a) == ENC_KNOWN(b))
4622 	return 0;
4623     else {
4624 	SEXP vmax = R_VStack;
4625 	int result = !strcmp(translateCharUTF8(a), translateCharUTF8(b));
4626 	R_VStack = vmax; /* discard any memory used by translateCharUTF8 */
4627 	return result;
4628     }
4629 }
4630 
4631 
4632 #ifdef LONG_VECTOR_SUPPORT
4633 R_len_t NORET R_BadLongVector(SEXP x, const char *file, int line)
4634 {
4635     error(_("long vectors not supported yet: %s:%d"), file, line);
4636 }
4637 #endif
4638