/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

/* JS Garbage Collector. */

#ifndef jsgc_h
#define jsgc_h

#include "mozilla/Atomics.h"
#include "mozilla/DebugOnly.h"
#include "mozilla/EnumeratedArray.h"
#include "mozilla/MemoryReporting.h"
#include "mozilla/TypeTraits.h"

#include "jslock.h"

#include "js/GCAPI.h"
#include "js/SliceBudget.h"
#include "js/Vector.h"

#include "vm/NativeObject.h"

namespace js {

// Number of CPUs available to the process (declaration only; defined elsewhere).
unsigned GetCPUCount();

// Distinguishes work running on the main thread from work running on a
// background (GC helper) thread.
enum ThreadType
{
    MainThread,
    BackgroundThread
};

namespace gcstats {
struct Statistics;
} // namespace gcstats

class Nursery;

namespace gc {

struct FinalizePhase;

// Phase of an incremental GC, listed in the order the phases occur.
// NO_INCREMENTAL means no incremental collection is currently in progress.
enum State {
    NO_INCREMENTAL,
    MARK_ROOTS,
    MARK,
    SWEEP,
    COMPACT
};

// Expand the given macro D for each valid GC reference type.
#define FOR_EACH_GC_POINTER_TYPE(D) \
    D(AccessorShape*) \
    D(BaseShape*) \
    D(UnownedBaseShape*) \
    D(jit::JitCode*) \
    D(NativeObject*) \
    D(ArrayObject*) \
    D(ArgumentsObject*) \
    D(ArrayBufferObject*) \
    D(ArrayBufferObjectMaybeShared*) \
    D(ArrayBufferViewObject*) \
    D(DebugScopeObject*) \
    D(GlobalObject*) \
    D(JSObject*) \
    D(JSFunction*) \
    D(ModuleObject*) \
    D(ModuleEnvironmentObject*) \
    D(ModuleNamespaceObject*) \
    D(NestedScopeObject*) \
    D(PlainObject*) \
    D(SavedFrame*) \
    D(ScopeObject*) \
    D(ScriptSourceObject*) \
    D(SharedArrayBufferObject*) \
    D(ImportEntryObject*) \
    D(ExportEntryObject*) \
    D(JSScript*) \
    D(LazyScript*) \
    D(Shape*) \
    D(JSAtom*) \
    D(JSString*) \
    D(JSFlatString*) \
    D(JSLinearString*) \
    D(PropertyName*) \
    D(JS::Symbol*) \
    D(js::ObjectGroup*) \
    D(Value) \
    D(jsid) \
    D(TaggedProto)

/*
 * Map from C++ type to alloc kind. JSObject does not have a 1:1 mapping, so
 * must use Arena::thingSize.
 */
template <typename T> struct MapTypeToFinalizeKind {};
template <> struct MapTypeToFinalizeKind<JSScript>          { static const AllocKind kind = AllocKind::SCRIPT; };
template <> struct MapTypeToFinalizeKind<LazyScript>        { static const AllocKind kind = AllocKind::LAZY_SCRIPT; };
template <> struct MapTypeToFinalizeKind<Shape>             { static const AllocKind kind = AllocKind::SHAPE; };
template <> struct MapTypeToFinalizeKind<AccessorShape>     { static const AllocKind kind = AllocKind::ACCESSOR_SHAPE; };
template <> struct MapTypeToFinalizeKind<BaseShape>         { static const AllocKind kind = AllocKind::BASE_SHAPE; };
template <> struct MapTypeToFinalizeKind<ObjectGroup>       { static const AllocKind kind = AllocKind::OBJECT_GROUP; };
template <> struct MapTypeToFinalizeKind<JSFatInlineString> { static const AllocKind kind = AllocKind::FAT_INLINE_STRING; };
template <> struct MapTypeToFinalizeKind<JSString>          { static const AllocKind kind = AllocKind::STRING; };
template <> struct MapTypeToFinalizeKind<JSExternalString>  { static const AllocKind kind = AllocKind::EXTERNAL_STRING; };
template <> struct MapTypeToFinalizeKind<js::FatInlineAtom> { static const AllocKind kind = AllocKind::FAT_INLINE_ATOM; };
template <> struct MapTypeToFinalizeKind<js::NormalAtom>    { static const AllocKind kind = AllocKind::ATOM; };
template <> struct MapTypeToFinalizeKind<JS::Symbol>        { static const AllocKind kind = AllocKind::SYMBOL; };
template <> struct MapTypeToFinalizeKind<jit::JitCode>      { static const AllocKind kind = AllocKind::JITCODE; };

// Map from C++ type to whether that type participates in cycle collection,
// expanded from the |addToCCKind| column of the trace-kind table.
template <typename T> struct ParticipatesInCC {};
#define EXPAND_PARTICIPATES_IN_CC(_, type, addToCCKind) \
    template <> struct ParticipatesInCC<type> { static const bool value = addToCCKind; };
JS_FOR_EACH_TRACEKIND(EXPAND_PARTICIPATES_IN_CC)
#undef EXPAND_PARTICIPATES_IN_CC

// True if things of the given kind may be allocated in the nursery. Per the
// table below, only functions and the background-finalized object kinds are
// nursery allocable.
static inline bool
IsNurseryAllocable(AllocKind kind)
{
    MOZ_ASSERT(IsValidAllocKind(kind));
    static const bool map[] = {
        true,      /* AllocKind::FUNCTION */
        true,      /* AllocKind::FUNCTION_EXTENDED */
        false,     /* AllocKind::OBJECT0 */
        true,      /* AllocKind::OBJECT0_BACKGROUND */
        false,     /* AllocKind::OBJECT2 */
        true,      /* AllocKind::OBJECT2_BACKGROUND */
        false,     /* AllocKind::OBJECT4 */
        true,      /* AllocKind::OBJECT4_BACKGROUND */
        false,     /* AllocKind::OBJECT8 */
        true,      /* AllocKind::OBJECT8_BACKGROUND */
        false,     /* AllocKind::OBJECT12 */
        true,      /* AllocKind::OBJECT12_BACKGROUND */
        false,     /* AllocKind::OBJECT16 */
        true,      /* AllocKind::OBJECT16_BACKGROUND */
        false,     /* AllocKind::SCRIPT */
        false,     /* AllocKind::LAZY_SCRIPT */
        false,     /* AllocKind::SHAPE */
        false,     /* AllocKind::ACCESSOR_SHAPE */
        false,     /* AllocKind::BASE_SHAPE */
        false,     /* AllocKind::OBJECT_GROUP */
        false,     /* AllocKind::FAT_INLINE_STRING */
        false,     /* AllocKind::STRING */
        false,     /* AllocKind::EXTERNAL_STRING */
        false,     /* AllocKind::FAT_INLINE_ATOM */
        false,     /* AllocKind::ATOM */
        false,     /* AllocKind::SYMBOL */
        false,     /* AllocKind::JITCODE */
    };
    // The table must have exactly one entry per alloc kind.
    JS_STATIC_ASSERT(JS_ARRAY_LENGTH(map) == size_t(AllocKind::LIMIT));
    return map[size_t(kind)];
}

// True if arenas of the given kind are finalized off the main thread (see
// CanBeFinalizedInBackground below for how a kind becomes a background kind).
static inline bool
IsBackgroundFinalized(AllocKind kind)
{
    MOZ_ASSERT(IsValidAllocKind(kind));
    static const bool map[] = {
        true,      /* AllocKind::FUNCTION */
        true,      /* AllocKind::FUNCTION_EXTENDED */
        false,     /* AllocKind::OBJECT0 */
        true,      /* AllocKind::OBJECT0_BACKGROUND */
        false,     /* AllocKind::OBJECT2 */
        true,      /* AllocKind::OBJECT2_BACKGROUND */
        false,     /* AllocKind::OBJECT4 */
        true,      /* AllocKind::OBJECT4_BACKGROUND */
        false,     /* AllocKind::OBJECT8 */
        true,      /* AllocKind::OBJECT8_BACKGROUND */
        false,     /* AllocKind::OBJECT12 */
        true,      /* AllocKind::OBJECT12_BACKGROUND */
        false,     /* AllocKind::OBJECT16 */
        true,      /* AllocKind::OBJECT16_BACKGROUND */
        false,     /* AllocKind::SCRIPT */
        false,     /* AllocKind::LAZY_SCRIPT */
        true,      /* AllocKind::SHAPE */
        true,      /* AllocKind::ACCESSOR_SHAPE */
        true,      /* AllocKind::BASE_SHAPE */
        true,      /* AllocKind::OBJECT_GROUP */
        true,      /* AllocKind::FAT_INLINE_STRING */
        true,      /* AllocKind::STRING */
        false,     /* AllocKind::EXTERNAL_STRING */
        true,      /* AllocKind::FAT_INLINE_ATOM */
        true,      /* AllocKind::ATOM */
        true,      /* AllocKind::SYMBOL */
        false,     /* AllocKind::JITCODE */
    };
    // The table must have exactly one entry per alloc kind.
    JS_STATIC_ASSERT(JS_ARRAY_LENGTH(map) == size_t(AllocKind::LIMIT));
    return map[size_t(kind)];
}

static inline bool
CanBeFinalizedInBackground(AllocKind kind, const Class* clasp)
{
    MOZ_ASSERT(IsObjectAllocKind(kind));
    /* If the class has no finalizer or a finalizer that is safe to call on
     * a different thread, we change the alloc kind. For example,
     * AllocKind::OBJECT0 calls the finalizer on the main thread,
     * AllocKind::OBJECT0_BACKGROUND calls the finalizer on the gcHelperThread.
     * IsBackgroundFinalized is called to prevent recursively incrementing
     * the alloc kind; kind may already be a background finalize kind.
     */
    return (!IsBackgroundFinalized(kind) &&
            (!clasp->finalize || (clasp->flags & JSCLASS_BACKGROUND_FINALIZE)));
}

/* Capacity for slotsToThingKind */
const size_t SLOTS_TO_THING_KIND_LIMIT = 17;

extern const AllocKind slotsToThingKind[];

/* Get the best kind to use when making an object with the given slot count. */
static inline AllocKind
GetGCObjectKind(size_t numSlots)
{
    // Slot counts at or beyond the table limit all use the largest kind.
    if (numSlots >= SLOTS_TO_THING_KIND_LIMIT)
        return AllocKind::OBJECT16;
    return slotsToThingKind[numSlots];
}

/* As for GetGCObjectKind, but for dense array allocation.
 */
static inline AllocKind
GetGCArrayKind(size_t numElements)
{
    /*
     * Dense arrays can use their fixed slots to hold their elements array
     * (less two Values worth of ObjectElements header), but if more than the
     * maximum number of fixed slots is needed then the fixed slots will be
     * unused.
     */
    JS_STATIC_ASSERT(ObjectElements::VALUES_PER_HEADER == 2);
    if (numElements > NativeObject::MAX_DENSE_ELEMENTS_COUNT ||
        numElements + ObjectElements::VALUES_PER_HEADER >= SLOTS_TO_THING_KIND_LIMIT)
    {
        // Elements won't fit inline; use the smallest ordinary object kind.
        return AllocKind::OBJECT2;
    }
    return slotsToThingKind[numElements + ObjectElements::VALUES_PER_HEADER];
}

// As for GetGCObjectKind, but for a caller that has already fixed the slot
// count; the count must be within the slotsToThingKind table's range.
static inline AllocKind
GetGCObjectFixedSlotsKind(size_t numFixedSlots)
{
    MOZ_ASSERT(numFixedSlots < SLOTS_TO_THING_KIND_LIMIT);
    return slotsToThingKind[numFixedSlots];
}

// Get the best kind to use when allocating an object that needs a specific
// number of bytes.
static inline AllocKind
GetGCObjectKindForBytes(size_t nbytes)
{
    MOZ_ASSERT(nbytes <= JSObject::MAX_BYTE_SIZE);

    if (nbytes <= sizeof(NativeObject))
        return AllocKind::OBJECT0;
    nbytes -= sizeof(NativeObject);

    // Round the excess up to a whole number of Value-sized slots.
    size_t dataSlots = AlignBytes(nbytes, sizeof(Value)) / sizeof(Value);
    MOZ_ASSERT(nbytes <= dataSlots * sizeof(Value));
    return GetGCObjectKind(dataSlots);
}

// Map a foreground-finalized object kind to its background-finalized
// counterpart. Relies on the *_BACKGROUND kind immediately following the
// corresponding foreground kind in the AllocKind enumeration.
static inline AllocKind
GetBackgroundAllocKind(AllocKind kind)
{
    MOZ_ASSERT(!IsBackgroundFinalized(kind));
    MOZ_ASSERT(IsObjectAllocKind(kind));
    return AllocKind(size_t(kind) + 1);
}

/* Get the number of fixed slots and initial capacity associated with a kind. */
static inline size_t
GetGCKindSlots(AllocKind thingKind)
{
    /* Using a switch in hopes that thingKind will usually be a compile-time constant. */
    switch (thingKind) {
      case AllocKind::FUNCTION:
      case AllocKind::OBJECT0:
      case AllocKind::OBJECT0_BACKGROUND:
        return 0;
      case AllocKind::FUNCTION_EXTENDED:
      case AllocKind::OBJECT2:
      case AllocKind::OBJECT2_BACKGROUND:
        return 2;
      case AllocKind::OBJECT4:
      case AllocKind::OBJECT4_BACKGROUND:
        return 4;
      case AllocKind::OBJECT8:
      case AllocKind::OBJECT8_BACKGROUND:
        return 8;
      case AllocKind::OBJECT12:
      case AllocKind::OBJECT12_BACKGROUND:
        return 12;
      case AllocKind::OBJECT16:
      case AllocKind::OBJECT16_BACKGROUND:
        return 16;
      default:
        MOZ_CRASH("Bad object alloc kind");
    }
}

// As above, but adjusted for the given class: private data occupies the last
// fixed slot, and functions keep their extra fields out of the slot space.
static inline size_t
GetGCKindSlots(AllocKind thingKind, const Class* clasp)
{
    size_t nslots = GetGCKindSlots(thingKind);

    /* An object's private data uses the space taken by its last fixed slot. */
    if (clasp->flags & JSCLASS_HAS_PRIVATE) {
        MOZ_ASSERT(nslots > 0);
        nslots--;
    }

    /*
     * Functions have a larger alloc kind than AllocKind::OBJECT to reserve
     * space for the extra fields in JSFunction, but have no fixed slots.
     */
    if (clasp == FunctionClassPtr)
        nslots = 0;

    return nslots;
}

// Total GC-thing size (object header plus fixed slots) for the given kind.
static inline size_t
GetGCKindBytes(AllocKind thingKind)
{
    return sizeof(JSObject_Slots0) + GetGCKindSlots(thingKind) * sizeof(Value);
}

// Class to assist in triggering background chunk allocation. This cannot be done
// while holding the GC or worker thread state lock due to lock ordering issues.
// As a result, the triggering is delayed using this class until neither of the
// above locks is held.
class AutoMaybeStartBackgroundAllocation;

/*
 * A single segment of a SortedArenaList. Each segment has a head and a tail,
 * which track the start and end of a segment for O(1) append and concatenation.
 */
struct SortedArenaListSegment
{
    ArenaHeader* head;
    ArenaHeader** tailp;

    // Reset to the canonical empty form: no head, tail pointing at the head.
    void clear() {
        head = nullptr;
        tailp = &head;
    }

    bool isEmpty() const {
        return tailp == &head;
    }

    // Appends |aheader| to this segment.
    void append(ArenaHeader* aheader) {
        MOZ_ASSERT(aheader);
        // All arenas in one segment must hold things of the same kind.
        MOZ_ASSERT_IF(head, head->getAllocKind() == aheader->getAllocKind());
        *tailp = aheader;
        tailp = &aheader->next;
    }

    // Points the tail of this segment at |aheader|, which may be null. Note
    // that this does not change the tail itself, but merely which arena
    // follows it. This essentially turns the tail into a cursor (see also the
    // description of ArenaList), but from the perspective of a SortedArenaList
    // this makes no difference.
    void linkTo(ArenaHeader* aheader) {
        *tailp = aheader;
    }
};

/*
 * Arena lists have a head and a cursor. The cursor conceptually lies on arena
 * boundaries, i.e. before the first arena, between two arenas, or after the
 * last arena.
 *
 * Normally the arena following the cursor is the first arena in the list with
 * some free things and all arenas before the cursor are fully allocated. (And
 * if the cursor is at the end of the list, then all the arenas are full.)
 *
 * However, the arena currently being allocated from is considered full while
 * its list of free spans is moved into the freeList. Therefore, during GC or
 * cell enumeration, when an unallocated freeList is moved back to the arena,
 * we can see an arena with some free cells before the cursor.
 *
 * Arenas following the cursor should not be full.
 */
class ArenaList {
    // The cursor is implemented via an indirect pointer, |cursorp_|, to allow
    // for efficient list insertion at the cursor point and other list
    // manipulations.
    //
    // - If the list is empty: |head| is null, |cursorp_| points to |head|, and
    //   therefore |*cursorp_| is null.
    //
    // - If the list is not empty: |head| is non-null, and...
    //
    //   - If the cursor is at the start of the list: |cursorp_| points to
    //     |head|, and therefore |*cursorp_| points to the first arena.
    //
    //   - If cursor is at the end of the list: |cursorp_| points to the |next|
    //     field of the last arena, and therefore |*cursorp_| is null.
    //
    //   - If the cursor is at neither the start nor the end of the list:
    //     |cursorp_| points to the |next| field of the arena preceding the
    //     cursor, and therefore |*cursorp_| points to the arena following the
    //     cursor.
    //
    // |cursorp_| is never null.
    //
    ArenaHeader* head_;
    ArenaHeader** cursorp_;

    // Shared implementation for copy construction and assignment. When the
    // source's cursor is at its head, ours must point at our own |head_|.
    void copy(const ArenaList& other) {
        other.check();
        head_ = other.head_;
        cursorp_ = other.isCursorAtHead() ? &head_ : other.cursorp_;
        check();
    }

  public:
    ArenaList() {
        clear();
    }

    ArenaList(const ArenaList& other) {
        copy(other);
    }

    ArenaList& operator=(const ArenaList& other) {
        copy(other);
        return *this;
    }

    // Adopt a single segment; an empty segment yields an empty list.
    explicit ArenaList(const SortedArenaListSegment& segment) {
        head_ = segment.head;
        cursorp_ = segment.isEmpty() ? &head_ : segment.tailp;
        check();
    }

    // This does checking just of |head_| and |cursorp_|.
    void check() const {
#ifdef DEBUG
        // If the list is empty, it must have this form.
        MOZ_ASSERT_IF(!head_, cursorp_ == &head_);

        // If there's an arena following the cursor, it must not be full.
        ArenaHeader* cursor = *cursorp_;
        MOZ_ASSERT_IF(cursor, cursor->hasFreeThings());
#endif
    }

    void clear() {
        head_ = nullptr;
        cursorp_ = &head_;
        check();
    }

    // Return a copy of this list and leave this list empty.
    ArenaList copyAndClear() {
        ArenaList result = *this;
        clear();
        return result;
    }

    bool isEmpty() const {
        check();
        return !head_;
    }

    // This returns nullptr if the list is empty.
    ArenaHeader* head() const {
        check();
        return head_;
    }

    bool isCursorAtHead() const {
        check();
        return cursorp_ == &head_;
    }

    bool isCursorAtEnd() const {
        check();
        return !*cursorp_;
    }

    // This can return nullptr.
    ArenaHeader* arenaAfterCursor() const {
        check();
        return *cursorp_;
    }

    // This returns the arena after the cursor and moves the cursor past it.
    ArenaHeader* takeNextArena() {
        check();
        ArenaHeader* aheader = *cursorp_;
        if (!aheader)
            return nullptr;
        cursorp_ = &aheader->next;
        check();
        return aheader;
    }

    // This does two things.
    // - Inserts |a| at the cursor.
    // - Leaves the cursor sitting just before |a|, if |a| is not full, or just
    //   after |a|, if |a| is full.
    //
    void insertAtCursor(ArenaHeader* a) {
        check();
        a->next = *cursorp_;
        *cursorp_ = a;
        // At this point, the cursor is sitting before |a|. Move it after |a|
        // if necessary.
        if (!a->hasFreeThings())
            cursorp_ = &a->next;
        check();
    }

    // This inserts |other|, which must be full, at the cursor of |this|.
    ArenaList& insertListWithCursorAtEnd(const ArenaList& other) {
        check();
        other.check();
        MOZ_ASSERT(other.isCursorAtEnd());
        if (other.isCursorAtHead())
            return *this;
        // Insert the full arenas of |other| after those of |this|.
        *other.cursorp_ = *cursorp_;
        *cursorp_ = other.head_;
        cursorp_ = other.cursorp_;
        check();
        return *this;
    }

    ArenaHeader* removeRemainingArenas(ArenaHeader** arenap);
    ArenaHeader** pickArenasToRelocate(size_t& arenaTotalOut, size_t& relocTotalOut);
    ArenaHeader* relocateArenas(ArenaHeader* toRelocate, ArenaHeader* relocated,
                                SliceBudget& sliceBudget, gcstats::Statistics& stats);
};

/*
 * A class that holds arenas in sorted order by appending arenas to specific
 * segments. Each segment has a head and a tail, which can be linked up to
 * other segments to create a contiguous ArenaList.
 */
class SortedArenaList
{
  public:
    // The minimum size, in bytes, of a GC thing.
    static const size_t MinThingSize = 16;

    static_assert(ArenaSize <= 4096, "When increasing the Arena size, please consider how"\
                                     " this will affect the size of a SortedArenaList.");

    static_assert(MinThingSize >= 16, "When decreasing the minimum thing size, please consider"\
                                      " how this will affect the size of a SortedArenaList.");

  private:
    // The maximum number of GC things that an arena can hold.
    static const size_t MaxThingsPerArena = (ArenaSize - sizeof(ArenaHeader)) / MinThingSize;

    size_t thingsPerArena_;
    // Segment |n| holds the arenas that have exactly |n| free things, so the
    // segment at |thingsPerArena_| holds the completely empty arenas.
    SortedArenaListSegment segments[MaxThingsPerArena + 1];

    // Convenience functions to get the nth head and tail.
    ArenaHeader* headAt(size_t n) { return segments[n].head; }
    ArenaHeader** tailAt(size_t n) { return segments[n].tailp; }

  public:
    explicit SortedArenaList(size_t thingsPerArena = MaxThingsPerArena) {
        reset(thingsPerArena);
    }

    void setThingsPerArena(size_t thingsPerArena) {
        MOZ_ASSERT(thingsPerArena && thingsPerArena <= MaxThingsPerArena);
        thingsPerArena_ = thingsPerArena;
    }

    // Resets the first |thingsPerArena| segments of this list for further use.
    void reset(size_t thingsPerArena = MaxThingsPerArena) {
        setThingsPerArena(thingsPerArena);
        // Initialize the segments.
        for (size_t i = 0; i <= thingsPerArena; ++i)
            segments[i].clear();
    }

    // Inserts a header, which has room for |nfree| more things, in its segment.
    void insertAt(ArenaHeader* aheader, size_t nfree) {
        MOZ_ASSERT(nfree <= thingsPerArena_);
        segments[nfree].append(aheader);
    }

    // Remove all empty arenas, inserting them as a linked list.
    void extractEmpty(ArenaHeader** empty) {
        SortedArenaListSegment& segment = segments[thingsPerArena_];
        if (segment.head) {
            // Prepend this segment's arenas to the |*empty| list.
            *segment.tailp = *empty;
            *empty = segment.head;
            segment.clear();
        }
    }

    // Links up the tail of each non-empty segment to the head of the next
    // non-empty segment, creating a contiguous list that is returned as an
    // ArenaList. This is not a destructive operation: neither the head nor tail
    // of any segment is modified. However, note that the ArenaHeaders in the
    // resulting ArenaList should be treated as read-only unless the
    // SortedArenaList is no longer needed: inserting or removing arenas would
    // invalidate the SortedArenaList.
    ArenaList toArenaList() {
        // Link the non-empty segment tails up to the non-empty segment heads.
        size_t tailIndex = 0;
        for (size_t headIndex = 1; headIndex <= thingsPerArena_; ++headIndex) {
            if (headAt(headIndex)) {
                segments[tailIndex].linkTo(headAt(headIndex));
                tailIndex = headIndex;
            }
        }
        // Point the tail of the final non-empty segment at null. Note that if
        // the list is empty, this will just set segments[0].head to null.
        segments[tailIndex].linkTo(nullptr);
        // Create an ArenaList with head and cursor set to the head and tail of
        // the first segment (if that segment is empty, only the head is used).
        return ArenaList(segments[0]);
    }
};

class ArenaLists
{
    JSRuntime* runtime_;

    /*
     * For each arena kind its free list is represented as the first span with
     * free things. Initially all the spans are initialized as empty. After we
     * find a new arena with available things we move its first free span into
     * the list and set the arena as fully allocated. This way we do not need
     * to update the arena header after the initial allocation. When starting
     * the GC we only move the head of the list of spans back to the arena
     * only for the arena that was not fully allocated.
     */
    AllAllocKindArray<FreeList> freeLists;

    AllAllocKindArray<ArenaList> arenaLists;

    enum BackgroundFinalizeStateEnum { BFS_DONE, BFS_RUN };

    typedef mozilla::Atomic<BackgroundFinalizeStateEnum, mozilla::ReleaseAcquire>
        BackgroundFinalizeState;

    /* The current background finalization state, accessed atomically. */
    AllAllocKindArray<BackgroundFinalizeState> backgroundFinalizeState;

    /* For each arena kind, a list of arenas remaining to be swept. */
    AllAllocKindArray<ArenaHeader*> arenaListsToSweep;

    /* During incremental sweeping, a list of the arenas already swept. */
    AllocKind incrementalSweptArenaKind;
    ArenaList incrementalSweptArenas;

    // Arena lists which have yet to be swept, but need additional foreground
    // processing before they are swept.
    ArenaHeader* gcShapeArenasToUpdate;
    ArenaHeader* gcAccessorShapeArenasToUpdate;
    ArenaHeader* gcScriptArenasToUpdate;
    ArenaHeader* gcObjectGroupArenasToUpdate;

    // While sweeping type information, these lists save the arenas for the
    // objects which have already been finalized in the foreground (which must
    // happen at the beginning of the GC), so that type sweeping can determine
    // which of the object pointers are marked.
    ObjectAllocKindArray<ArenaList> savedObjectArenas;
    ArenaHeader* savedEmptyObjectArenas;

  public:
    explicit ArenaLists(JSRuntime* rt) : runtime_(rt) {
        for (auto i : AllAllocKinds())
            freeLists[i].initAsEmpty();
        for (auto i : AllAllocKinds())
            backgroundFinalizeState[i] = BFS_DONE;
        for (auto i : AllAllocKinds())
            arenaListsToSweep[i] = nullptr;
        // AllocKind::LIMIT marks "no incremental sweep in progress".
        incrementalSweptArenaKind = AllocKind::LIMIT;
        gcShapeArenasToUpdate = nullptr;
        gcAccessorShapeArenasToUpdate = nullptr;
        gcScriptArenasToUpdate = nullptr;
        gcObjectGroupArenasToUpdate = nullptr;
        savedEmptyObjectArenas = nullptr;
    }

    ~ArenaLists();

    // Byte offset of a kind's free list within ArenaLists; used by code that
    // addresses the free list directly (e.g. from JIT-generated code —
    // presumably; confirm against callers).
    static uintptr_t getFreeListOffset(AllocKind thingKind) {
        uintptr_t offset = offsetof(ArenaLists, freeLists);
        return offset + size_t(thingKind) * sizeof(FreeList);
    }

    const FreeList* getFreeList(AllocKind thingKind) const {
        return &freeLists[thingKind];
    }

    ArenaHeader* getFirstArena(AllocKind thingKind) const {
        return arenaLists[thingKind].head();
    }

    ArenaHeader* getFirstArenaToSweep(AllocKind thingKind) const {
        return arenaListsToSweep[thingKind];
    }

    // Returns the swept-arena list only when |thingKind| is the kind currently
    // being incrementally swept; null otherwise.
    ArenaHeader* getFirstSweptArena(AllocKind thingKind) const {
        if (thingKind != incrementalSweptArenaKind)
            return nullptr;
        return incrementalSweptArenas.head();
    }

    ArenaHeader* getArenaAfterCursor(AllocKind thingKind) const {
        return arenaLists[thingKind].arenaAfterCursor();
    }

    bool arenaListsAreEmpty() const {
        for (auto i : AllAllocKinds()) {
            /*
             * The arena cannot be empty if the background finalization is not yet
             * done.
             */
            if (backgroundFinalizeState[i] != BFS_DONE)
                return false;
            if (!arenaLists[i].isEmpty())
                return false;
        }
        return true;
    }

    void unmarkAll() {
        for (auto i : AllAllocKinds()) {
            /* The background finalization must have stopped at this point. */
            MOZ_ASSERT(backgroundFinalizeState[i] == BFS_DONE);
            for (ArenaHeader* aheader = arenaLists[i].head(); aheader; aheader = aheader->next)
                aheader->unmarkAll();
        }
    }

    bool doneBackgroundFinalize(AllocKind kind) const {
        return backgroundFinalizeState[kind] == BFS_DONE;
    }

    bool needBackgroundFinalizeWait(AllocKind kind) const {
        return backgroundFinalizeState[kind] != BFS_DONE;
    }

    /*
     * Return the free list back to the arena so the GC finalization will not
     * run the finalizers over uninitialized bytes from free things.
     */
    void purge() {
        for (auto i : AllAllocKinds())
            purge(i);
    }

    void purge(AllocKind i) {
        FreeList* freeList = &freeLists[i];
        if (!freeList->isEmpty()) {
            ArenaHeader* aheader = freeList->arenaHeader();
            aheader->setFirstFreeSpan(freeList->getHead());
            freeList->initAsEmpty();
        }
    }

    inline void prepareForIncrementalGC(JSRuntime* rt);

    /*
     * Temporarily copy the free list heads to the arenas so the code can see
     * the proper value in ArenaHeader::freeList when accessing the latter
     * outside the GC.
     */
    void copyFreeListsToArenas() {
        for (auto i : AllAllocKinds())
            copyFreeListToArena(i);
    }

    void copyFreeListToArena(AllocKind thingKind) {
        FreeList* freeList = &freeLists[thingKind];
        if (!freeList->isEmpty()) {
            ArenaHeader* aheader = freeList->arenaHeader();
            // The arena must currently look fully used (see class comment).
            MOZ_ASSERT(!aheader->hasFreeThings());
            aheader->setFirstFreeSpan(freeList->getHead());
        }
    }

    /*
     * Clear the free lists in arenas that were temporarily set there using
     * copyToArenas.
     */
    void clearFreeListsInArenas() {
        for (auto i : AllAllocKinds())
            clearFreeListInArena(i);
    }

    void clearFreeListInArena(AllocKind kind) {
        FreeList* freeList = &freeLists[kind];
        if (!freeList->isEmpty()) {
            ArenaHeader* aheader = freeList->arenaHeader();
            MOZ_ASSERT(freeList->isSameNonEmptySpan(aheader->getFirstFreeSpan()));
            aheader->setAsFullyUsed();
        }
    }

    /*
     * Check that the free list is either empty or was synchronized with the
     * arena using copyToArena().
     */
    bool isSynchronizedFreeList(AllocKind kind) {
        FreeList* freeList = &freeLists[kind];
        if (freeList->isEmpty())
            return true;
        ArenaHeader* aheader = freeList->arenaHeader();
        if (aheader->hasFreeThings()) {
            /*
             * If the arena has a free list, it must be the same as one in
             * lists.
             */
            MOZ_ASSERT(freeList->isSameNonEmptySpan(aheader->getFirstFreeSpan()));
            return true;
        }
        return false;
    }

    /* Check if |aheader|'s arena is in use. */
    bool arenaIsInUse(ArenaHeader* aheader, AllocKind kind) const {
        MOZ_ASSERT(aheader);
        const FreeList& freeList = freeLists[kind];
        if (freeList.isEmpty())
            return false;
        // The arena in use is the one backing the current free list.
        return aheader == freeList.arenaHeader();
    }

    MOZ_ALWAYS_INLINE TenuredCell* allocateFromFreeList(AllocKind thingKind, size_t thingSize) {
        return freeLists[thingKind].allocate(thingSize);
    }

    /*
     * Moves all arenas from |fromArenaLists| into |this|.
     */
    void adoptArenas(JSRuntime* runtime, ArenaLists* fromArenaLists);

    /* True if the ArenaHeader in question is found in this ArenaLists */
    bool containsArena(JSRuntime* runtime, ArenaHeader* arenaHeader);

    void checkEmptyFreeLists() {
#ifdef DEBUG
        for (auto i : AllAllocKinds())
            checkEmptyFreeList(i);
#endif
    }

    void checkEmptyFreeList(AllocKind kind) {
        MOZ_ASSERT(freeLists[kind].isEmpty());
    }

    bool relocateArenas(Zone* zone, ArenaHeader*& relocatedListOut, JS::gcreason::Reason reason,
                        SliceBudget& sliceBudget, gcstats::Statistics& stats);

    void queueForegroundObjectsForSweep(FreeOp* fop);
    void queueForegroundThingsForSweep(FreeOp* fop);

    void mergeForegroundSweptObjectArenas();

    bool foregroundFinalize(FreeOp* fop, AllocKind thingKind, SliceBudget& sliceBudget,
                            SortedArenaList& sweepList);
    static void backgroundFinalize(FreeOp* fop, ArenaHeader* listHead, ArenaHeader** empty);

    // When finalizing arenas, whether to keep empty arenas on the list or
    // release them immediately.
    enum KeepArenasEnum {
        RELEASE_ARENAS,
        KEEP_ARENAS
    };

  private:
    inline void finalizeNow(FreeOp* fop, const FinalizePhase& phase);
    inline void queueForForegroundSweep(FreeOp* fop, const FinalizePhase& phase);
    inline void queueForBackgroundSweep(FreeOp* fop, const FinalizePhase& phase);

    inline void finalizeNow(FreeOp* fop, AllocKind thingKind,
                            KeepArenasEnum keepArenas, ArenaHeader** empty = nullptr);
    inline void forceFinalizeNow(FreeOp* fop, AllocKind thingKind,
                                 KeepArenasEnum keepArenas, ArenaHeader** empty = nullptr);
    inline void queueForForegroundSweep(FreeOp* fop, AllocKind thingKind);
    inline void queueForBackgroundSweep(FreeOp* fop, AllocKind thingKind);
    inline void mergeSweptArenas(AllocKind thingKind);

    TenuredCell* allocateFromArena(JS::Zone* zone, AllocKind thingKind,
                                   AutoMaybeStartBackgroundAllocation& maybeStartBGAlloc);

    enum ArenaAllocMode { HasFreeThings = true, IsEmpty = false };
    template <ArenaAllocMode hasFreeThings>
    TenuredCell* allocateFromArenaInner(JS::Zone* zone, ArenaHeader* aheader, AllocKind kind);

    inline void normalizeBackgroundFinalizeState(AllocKind thingKind);

    friend class GCRuntime;
    friend class js::Nursery;
    friend class js::TenuringTracer;
};

/* The number of GC cycles an empty chunk can survive before being released. */
const size_t MAX_EMPTY_CHUNK_AGE = 4;

} /* namespace gc */

extern bool
InitGC(JSRuntime* rt, uint32_t maxbytes);

extern void
FinishGC(JSRuntime* rt);

class InterpreterFrame;

extern void
MarkCompartmentActive(js::InterpreterFrame* fp);

extern void
TraceRuntime(JSTracer* trc);

extern void
ReleaseAllJITCode(FreeOp* op);

extern void
PrepareForDebugGC(JSRuntime* rt);

/* Functions for managing cross compartment gray pointers.
 */

extern void
DelayCrossCompartmentGrayMarking(JSObject* src);

extern void
NotifyGCNukeWrapper(JSObject* o);

extern unsigned
NotifyGCPreSwap(JSObject* a, JSObject* b);

extern void
NotifyGCPostSwap(JSObject* a, JSObject* b, unsigned preResult);

/*
 * Helper state for use when JS helper threads sweep and allocate GC thing kinds
 * that can be swept and allocated off the main thread.
 *
 * In non-threadsafe builds, all actual sweeping and allocation is performed
 * on the main thread, but GCHelperState encapsulates this from clients as
 * much as possible.
 */
class GCHelperState
{
    enum State {
        IDLE,
        SWEEPING
    };

    // Associated runtime.
    JSRuntime* const rt;

    // Condvar for notifying the main thread when work has finished. This is
    // associated with the runtime's GC lock --- the worker thread state
    // condvars can't be used here due to lock ordering issues.
    PRCondVar* done;

    // Activity for the helper to do, protected by the GC lock.
    State state_;

    // Thread which work is being performed on, or null.
    PRThread* thread;

    void startBackgroundThread(State newState);
    void waitForBackgroundThread();

    State state();
    void setState(State state);

    // Whether a shrinking sweep was requested; see startBackgroundShrink()
    // and shouldShrink().
    bool shrinkFlag;

    friend class js::gc::ArenaLists;

    // Free every element in [array, end) and then the array itself.
    static void freeElementsAndArray(void** array, void** end) {
        MOZ_ASSERT(array <= end);
        for (void** p = array; p != end; ++p)
            js_free(*p);
        js_free(array);
    }

    void doSweep(AutoLockGC& lock);

  public:
    explicit GCHelperState(JSRuntime* rt)
      : rt(rt),
        done(nullptr),
        state_(IDLE),
        thread(nullptr),
        shrinkFlag(false)
    { }

    bool init();
    void finish();

    void work();

    void maybeStartBackgroundSweep(const AutoLockGC& lock);
    void startBackgroundShrink(const AutoLockGC& lock);

    /* Must be called without the GC lock taken. */
    void waitBackgroundSweepEnd();

    bool onBackgroundThread();

    /*
     * Outside the GC lock may give true answer when in fact the sweeping has
     * been done.
     */
    bool isBackgroundSweeping() const {
        return state_ == SWEEPING;
    }

    bool shouldShrink() const {
        MOZ_ASSERT(isBackgroundSweeping());
        return shrinkFlag;
    }
};

// A generic task used to dispatch work to the helper thread system.
// Users should derive from GCParallelTask, add what data they need, and
// override |run|.
class GCParallelTask
{
    // The state of the parallel computation.
    enum TaskState {
        NotStarted,
        Dispatched,
        Finished,
    } state;

    // Amount of time this task took to execute.
    uint64_t duration_;

  protected:
    // A flag to signal a request for early completion of the off-thread task.
1042 mozilla::Atomic<bool> cancel_; 1043 1044 virtual void run() = 0; 1045 1046 public: 1047 GCParallelTask() : state(NotStarted), duration_(0) {} 1048 1049 // Derived classes must override this to ensure that join() gets called 1050 // before members get destructed. 1051 virtual ~GCParallelTask(); 1052 1053 // Time spent in the most recent invocation of this task. 1054 int64_t duration() const { return duration_; } 1055 1056 // The simple interface to a parallel task works exactly like pthreads. 1057 bool start(); 1058 void join(); 1059 1060 // If multiple tasks are to be started or joined at once, it is more 1061 // efficient to take the helper thread lock once and use these methods. 1062 bool startWithLockHeld(); 1063 void joinWithLockHeld(); 1064 1065 // Instead of dispatching to a helper, run the task on the main thread. 1066 void runFromMainThread(JSRuntime* rt); 1067 1068 // Dispatch a cancelation request. 1069 enum CancelMode { CancelNoWait, CancelAndWait}; 1070 void cancel(CancelMode mode = CancelNoWait) { 1071 cancel_ = true; 1072 if (mode == CancelAndWait) 1073 join(); 1074 } 1075 1076 // Check if a task is actively running. 1077 bool isRunning() const; 1078 1079 // This should be friended to HelperThread, but cannot be because it 1080 // would introduce several circular dependencies. 
1081 public: 1082 virtual void runFromHelperThread(); 1083 }; 1084 1085 typedef void (*IterateChunkCallback)(JSRuntime* rt, void* data, gc::Chunk* chunk); 1086 typedef void (*IterateZoneCallback)(JSRuntime* rt, void* data, JS::Zone* zone); 1087 typedef void (*IterateArenaCallback)(JSRuntime* rt, void* data, gc::Arena* arena, 1088 JS::TraceKind traceKind, size_t thingSize); 1089 typedef void (*IterateCellCallback)(JSRuntime* rt, void* data, void* thing, 1090 JS::TraceKind traceKind, size_t thingSize); 1091 1092 /* 1093 * This function calls |zoneCallback| on every zone, |compartmentCallback| on 1094 * every compartment, |arenaCallback| on every in-use arena, and |cellCallback| 1095 * on every in-use cell in the GC heap. 1096 */ 1097 extern void 1098 IterateZonesCompartmentsArenasCells(JSRuntime* rt, void* data, 1099 IterateZoneCallback zoneCallback, 1100 JSIterateCompartmentCallback compartmentCallback, 1101 IterateArenaCallback arenaCallback, 1102 IterateCellCallback cellCallback); 1103 1104 /* 1105 * This function is like IterateZonesCompartmentsArenasCells, but does it for a 1106 * single zone. 1107 */ 1108 extern void 1109 IterateZoneCompartmentsArenasCells(JSRuntime* rt, Zone* zone, void* data, 1110 IterateZoneCallback zoneCallback, 1111 JSIterateCompartmentCallback compartmentCallback, 1112 IterateArenaCallback arenaCallback, 1113 IterateCellCallback cellCallback); 1114 1115 /* 1116 * Invoke chunkCallback on every in-use chunk. 1117 */ 1118 extern void 1119 IterateChunks(JSRuntime* rt, void* data, IterateChunkCallback chunkCallback); 1120 1121 typedef void (*IterateScriptCallback)(JSRuntime* rt, void* data, JSScript* script); 1122 1123 /* 1124 * Invoke scriptCallback on every in-use script for 1125 * the given compartment or for all compartments if it is null. 
 */
extern void
IterateScripts(JSRuntime* rt, JSCompartment* compartment,
               void* data, IterateScriptCallback scriptCallback);

extern void
FinalizeStringRT(JSRuntime* rt, JSString* str);

// Create a new compartment in |zone| with the given principals and options.
JSCompartment*
NewCompartment(JSContext* cx, JS::Zone* zone, JSPrincipals* principals,
               const JS::CompartmentOptions& options);

namespace gc {

/*
 * Merge all contents of source into target. This can only be used if source is
 * the only compartment in its zone.
 */
void
MergeCompartments(JSCompartment* source, JSCompartment* target);

/*
 * This structure overlays a Cell in the Nursery and re-purposes its memory
 * for managing the Nursery collection process.
 */
class RelocationOverlay
{
    /* The low bit is set so this should never equal a normal pointer. */
    static const uintptr_t Relocated = uintptr_t(0xbad0bad1);

    // Arrange the fields of the RelocationOverlay so that JSObject's group
    // pointer is not overwritten during compacting.

    /* A list entry to track all relocated things. */
    RelocationOverlay* next_;

    /* Set to Relocated when moved. */
    uintptr_t magic_;

    /* The location |this| was moved to. */
    Cell* newLocation_;

  public:
    // Reinterpret a cell's storage as its relocation overlay.
    static RelocationOverlay* fromCell(Cell* cell) {
        return reinterpret_cast<RelocationOverlay*>(cell);
    }

    bool isForwarded() const {
        return magic_ == Relocated;
    }

    // Where the cell was moved to. Only valid after forwardTo() has run.
    Cell* forwardingAddress() const {
        MOZ_ASSERT(isForwarded());
        return newLocation_;
    }

    // Record that this cell has been moved to |cell| and stamp the magic
    // value so isForwarded() becomes true.
    void forwardTo(Cell* cell) {
        MOZ_ASSERT(!isForwarded());
        // The overlay's list link must alias JSObject::group_ so that the
        // group pointer survives compacting (see field-order comment above).
        static_assert(offsetof(JSObject, group_) == offsetof(RelocationOverlay, next_),
                      "next pointer and group should be at same location, "
                      "so that group is not overwritten during compacting");
        newLocation_ = cell;
        magic_ = Relocated;
    }

    // Mutable/immutable access to the relocated-things list link.
    RelocationOverlay*& nextRef() {
        MOZ_ASSERT(isForwarded());
        return next_;
    }

    RelocationOverlay* next() const {
        MOZ_ASSERT(isForwarded());
        return next_;
    }

    static bool isCellForwarded(Cell* cell) {
        return fromCell(cell)->isForwarded();
    }
};

/* Functions for checking and updating things that might be moved by compacting GC.
 */

// Compile-time predicate: can a thing of type T ever be relocated? Per
// |value| below, only JSObject-derived things are candidates.
template <typename T>
struct MightBeForwarded
{
    static_assert(mozilla::IsBaseOf<Cell, T>::value,
                  "T must derive from Cell");
    static_assert(!mozilla::IsSame<Cell, T>::value && !mozilla::IsSame<TenuredCell, T>::value,
                  "T must not be Cell or TenuredCell");

    static const bool value = mozilla::IsBaseOf<JSObject, T>::value;
};

// Return whether |t| has been moved; for types that can never be forwarded
// this statically folds to false (with a sanity assertion).
template <typename T>
inline bool
IsForwarded(T* t)
{
    RelocationOverlay* overlay = RelocationOverlay::fromCell(t);
    if (!MightBeForwarded<T>::value) {
        MOZ_ASSERT(!overlay->isForwarded());
        return false;
    }

    return overlay->isForwarded();
}

struct IsForwardedFunctor : public BoolDefaultAdaptor<Value, false> {
    template <typename T> bool operator()(T* t) { return IsForwarded(t); }
};

inline bool
IsForwarded(const JS::Value& value)
{
    return DispatchTyped(IsForwardedFunctor(), value);
}

// Return the post-move location of |t|; |t| must already be forwarded.
template <typename T>
inline T*
Forwarded(T* t)
{
    RelocationOverlay* overlay = RelocationOverlay::fromCell(t);
    MOZ_ASSERT(overlay->isForwarded());
    return reinterpret_cast<T*>(overlay->forwardingAddress());
}

struct ForwardedFunctor : public IdentityDefaultAdaptor<Value> {
    template <typename T> inline Value operator()(T* t) {
        return js::gc::RewrapTaggedPointer<Value, T*>::wrap(Forwarded(t));
    }
};

inline Value
Forwarded(const JS::Value& value)
{
    return DispatchTyped(ForwardedFunctor(), value);
}

// Return the new location of |t| if it has been moved, otherwise |t| itself.
template <typename T>
inline T
MaybeForwarded(T t)
{
    return IsForwarded(t) ? Forwarded(t) : t;
}

#ifdef JSGC_HASH_TABLE_CHECKS

// Post-moving-GC sanity check: |t| must be tenured and not forwarded.
template <typename T>
inline void
CheckGCThingAfterMovingGC(T* t)
{
    if (t) {
        MOZ_RELEASE_ASSERT(!IsInsideNursery(t));
        MOZ_RELEASE_ASSERT(!RelocationOverlay::isCellForwarded(t));
    }
}

template <typename T>
inline void
CheckGCThingAfterMovingGC(const ReadBarriered<T*>& t)
{
    CheckGCThingAfterMovingGC(t.unbarrieredGet());
}

struct CheckValueAfterMovingGCFunctor : public VoidDefaultAdaptor<Value> {
    template <typename T> void operator()(T* t) { CheckGCThingAfterMovingGC(t); }
};

inline void
CheckValueAfterMovingGC(const JS::Value& value)
{
    DispatchTyped(CheckValueAfterMovingGCFunctor(), value);
}

#endif // JSGC_HASH_TABLE_CHECKS

// GC zeal modes (see ZealModeHelpText for descriptions).
// NOTE(review): values 11 and 12 are skipped here — presumably retired modes;
// confirm against jsgc.cpp before reusing them.
const int ZealPokeValue = 1;
const int ZealAllocValue = 2;
const int ZealFrameGCValue = 3;
const int ZealVerifierPreValue = 4;
const int ZealFrameVerifierPreValue = 5;
const int ZealStackRootingValue = 6;
const int ZealGenerationalGCValue = 7;
const int ZealIncrementalRootsThenFinish = 8;
const int ZealIncrementalMarkAllThenFinish = 9;
const int ZealIncrementalMultipleSlices = 10;
const int ZealCheckHashTablesOnMinorGC = 13;
const int ZealCompactValue = 14;
const int ZealLimit = 14;

enum VerifierType {
    PreBarrierVerifier
};

#ifdef JS_GC_ZEAL

extern const char* ZealModeHelpText;

/* Check that write barriers have been used correctly. See jsgc.cpp.
 */
void
VerifyBarriers(JSRuntime* rt, VerifierType type);

void
MaybeVerifyBarriers(JSContext* cx, bool always = false);

#else

// No-op stubs when GC zeal is compiled out.
static inline void
VerifyBarriers(JSRuntime* rt, VerifierType type)
{
}

static inline void
MaybeVerifyBarriers(JSContext* cx, bool always = false)
{
}

#endif

/*
 * Instances of this class set the |JSRuntime::suppressGC| flag for the duration
 * that they are live. Use of this class is highly discouraged. Please carefully
 * read the comment in vm/Runtime.h above |suppressGC| and take all appropriate
 * precautions before instantiating this class.
 */
class MOZ_RAII AutoSuppressGC
{
    int32_t& suppressGC_;

  public:
    explicit AutoSuppressGC(ExclusiveContext* cx);
    explicit AutoSuppressGC(JSCompartment* comp);
    explicit AutoSuppressGC(JSRuntime* rt);

    // Drop the suppression count; presumably the constructors (defined
    // elsewhere) increment it — see vm/Runtime.h.
    ~AutoSuppressGC()
    {
        suppressGC_--;
    }
};

// A singly linked list of zones.
class ZoneList
{
    // Sentinel pointer value; presumably terminates the list — verify against
    // the definition in jsgc.cpp.
    static Zone * const End;

    Zone* head;
    Zone* tail;

  public:
    ZoneList();
    ~ZoneList();

    bool isEmpty() const;
    Zone* front() const;

    void append(Zone* zone);
    // Move all entries of |other| onto this list, leaving |other| empty.
    void transferFrom(ZoneList& other);
    void removeFront();
    void clear();

  private:
    explicit ZoneList(Zone* singleZone);
    void check() const;

    // Not copyable: the list links live in the zones themselves.
    ZoneList(const ZoneList& other) = delete;
    ZoneList& operator=(const ZoneList& other) = delete;
};

JSObject*
NewMemoryStatisticsObject(JSContext* cx);

} /* namespace gc */

#ifdef DEBUG
/* Use this to avoid assertions when manipulating the wrapper map. */
class MOZ_RAII AutoDisableProxyCheck
{
    gc::GCRuntime& gc;

  public:
    explicit AutoDisableProxyCheck(JSRuntime* rt);
    ~AutoDisableProxyCheck();
};
#else
// In release builds the checks don't exist, so this is a no-op.
struct MOZ_RAII AutoDisableProxyCheck
{
    explicit AutoDisableProxyCheck(JSRuntime* rt) {}
};
#endif

// RAII guard that disables compacting GC for its lifetime.
struct MOZ_RAII AutoDisableCompactingGC
{
    explicit AutoDisableCompactingGC(JSRuntime* rt);
    ~AutoDisableCompactingGC();

  private:
    gc::GCRuntime& gc;
};

void
PurgeJITCaches(JS::Zone* zone);

// This is the same as IsInsideNursery, but not inlined.
bool
UninlinedIsInsideNursery(const gc::Cell* cell);

} /* namespace js */

#endif /* jsgc_h */