1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 * vim: set ts=8 sts=2 et sw=2 tw=80:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6
7 #include "gc/AtomMarking-inl.h"
8
9 #include <type_traits>
10
11 #include "gc/PublicIterators.h"
12 #include "vm/Realm.h"
13
14 #include "gc/GC-inl.h"
15 #include "gc/Heap-inl.h"
16
17 namespace js {
18 namespace gc {
19
20 // [SMDOC] GC Atom Marking
21 //
22 // Things in the atoms zone (which includes atomized strings and other things,
23 // all of which we will refer to as 'atoms' here) may be pointed to freely by
24 // things in other zones. To avoid the need to perform garbage collections of
25 // the entire runtime to collect atoms, we compute a separate atom mark bitmap
26 // for each zone that is always an overapproximation of the atoms that zone is
27 // using. When an atom is not in the mark bitmap for any zone, it can be
28 // destroyed.
29 //
30 // To minimize interference with the rest of the GC, atom marking and sweeping
31 // is done by manipulating the mark bitmaps in the chunks used for the atoms.
32 // When the atoms zone is being collected, the mark bitmaps for the chunk(s)
33 // used by the atoms are updated normally during marking. After marking
34 // finishes, the chunk mark bitmaps are translated to a more efficient atom mark
35 // bitmap (see below) that is stored on the zones which the GC collected
36 // (computeBitmapFromChunkMarkBits). Before sweeping begins, the chunk mark
37 // bitmaps are updated with any atoms that might be referenced by zones which
38 // weren't collected (markAtomsUsedByUncollectedZones). The GC sweeping will
39 // then release all atoms which are not marked by any zone.
40 //
41 // The representation of atom mark bitmaps is as follows:
42 //
43 // Each arena in the atoms zone has an atomBitmapStart() value indicating the
44 // word index into the bitmap of the first thing in the arena. Each arena uses
45 // ArenaBitmapWords of data to store its bitmap, which uses the same
46 // representation as chunk mark bitmaps: one bit is allocated per Cell, with
47 // bits for space between things being unused when things are larger than a
48 // single Cell.
49
registerArena(Arena * arena,const AutoLockGC & lock)50 void AtomMarkingRuntime::registerArena(Arena* arena, const AutoLockGC& lock) {
51 MOZ_ASSERT(arena->getThingSize() != 0);
52 MOZ_ASSERT(arena->getThingSize() % CellAlignBytes == 0);
53 MOZ_ASSERT(arena->zone->isAtomsZone());
54
55 // We need to find a range of bits from the atoms bitmap for this arena.
56
57 // Look for a free range of bits compatible with this arena.
58 if (freeArenaIndexes.ref().length()) {
59 arena->atomBitmapStart() = freeArenaIndexes.ref().popCopy();
60 return;
61 }
62
63 // Allocate a range of bits from the end for this arena.
64 arena->atomBitmapStart() = allocatedWords;
65 allocatedWords += ArenaBitmapWords;
66 }
67
// Return |arena|'s word range in the atoms mark bitmap to the free list so a
// later registerArena() call can reuse it.
void AtomMarkingRuntime::unregisterArena(Arena* arena, const AutoLockGC& lock) {
  MOZ_ASSERT(arena->zone->isAtomsZone());

  // Leak these atom bits if we run out of memory.
  (void)freeArenaIndexes.ref().emplaceBack(arena->atomBitmapStart());
}
74
computeBitmapFromChunkMarkBits(JSRuntime * runtime,DenseBitmap & bitmap)75 bool AtomMarkingRuntime::computeBitmapFromChunkMarkBits(JSRuntime* runtime,
76 DenseBitmap& bitmap) {
77 MOZ_ASSERT(CurrentThreadIsPerformingGC());
78 MOZ_ASSERT(!runtime->hasHelperThreadZones());
79
80 if (!bitmap.ensureSpace(allocatedWords)) {
81 return false;
82 }
83
84 Zone* atomsZone = runtime->unsafeAtomsZone();
85 for (auto thingKind : AllAllocKinds()) {
86 for (ArenaIter aiter(atomsZone, thingKind); !aiter.done(); aiter.next()) {
87 Arena* arena = aiter.get();
88 MarkBitmapWord* chunkWords = arena->chunk()->markBits.arenaBits(arena);
89 bitmap.copyBitsFrom(arena->atomBitmapStart(), ArenaBitmapWords,
90 chunkWords);
91 }
92 }
93
94 return true;
95 }
96
// Narrow |zone|'s atom mark bitmap using |bitmap|, the dense bitmap computed
// from the chunk mark bits after marking finished
// (computeBitmapFromChunkMarkBits).
void AtomMarkingRuntime::refineZoneBitmapForCollectedZone(
    Zone* zone, const DenseBitmap& bitmap) {
  MOZ_ASSERT(zone->isCollectingFromAnyThread());

  // Nothing to refine for the atoms zone itself.
  if (zone->isAtomsZone()) {
    return;
  }

  // Take the bitwise and between the two mark bitmaps to get the best new
  // overapproximation we can. |bitmap| might include bits that are not in
  // the zone's mark bitmap, if additional zones were collected by the GC.
  zone->markedAtoms().bitwiseAndWith(bitmap);
}
110
111 // Set any bits in the chunk mark bitmaps for atoms which are marked in bitmap.
112 template <typename Bitmap>
BitwiseOrIntoChunkMarkBits(JSRuntime * runtime,Bitmap & bitmap)113 static void BitwiseOrIntoChunkMarkBits(JSRuntime* runtime, Bitmap& bitmap) {
114 // Make sure that by copying the mark bits for one arena in word sizes we
115 // do not affect the mark bits for other arenas.
116 static_assert(ArenaBitmapBits == ArenaBitmapWords * JS_BITS_PER_WORD,
117 "ArenaBitmapWords must evenly divide ArenaBitmapBits");
118
119 Zone* atomsZone = runtime->unsafeAtomsZone();
120 for (auto thingKind : AllAllocKinds()) {
121 for (ArenaIter aiter(atomsZone, thingKind); !aiter.done(); aiter.next()) {
122 Arena* arena = aiter.get();
123 MarkBitmapWord* chunkWords = arena->chunk()->markBits.arenaBits(arena);
124 bitmap.bitwiseOrRangeInto(arena->atomBitmapStart(), ArenaBitmapWords,
125 chunkWords);
126 }
127 }
128 }
129
markAtomsUsedByUncollectedZones(JSRuntime * runtime)130 void AtomMarkingRuntime::markAtomsUsedByUncollectedZones(JSRuntime* runtime) {
131 MOZ_ASSERT(CurrentThreadIsPerformingGC());
132 MOZ_ASSERT(!runtime->hasHelperThreadZones());
133
134 // Try to compute a simple union of the zone atom bitmaps before updating
135 // the chunk mark bitmaps. If this allocation fails then fall back to
136 // updating the chunk mark bitmaps separately for each zone.
137 DenseBitmap markedUnion;
138 if (markedUnion.ensureSpace(allocatedWords)) {
139 for (ZonesIter zone(runtime, SkipAtoms); !zone.done(); zone.next()) {
140 // We only need to update the chunk mark bits for zones which were
141 // not collected in the current GC. Atoms which are referenced by
142 // collected zones have already been marked.
143 if (!zone->isCollectingFromAnyThread()) {
144 zone->markedAtoms().bitwiseOrInto(markedUnion);
145 }
146 }
147 BitwiseOrIntoChunkMarkBits(runtime, markedUnion);
148 } else {
149 for (ZonesIter zone(runtime, SkipAtoms); !zone.done(); zone.next()) {
150 if (!zone->isCollectingFromAnyThread()) {
151 BitwiseOrIntoChunkMarkBits(runtime, zone->markedAtoms());
152 }
153 }
154 }
155 }
156
157 template <typename T>
markAtom(JSContext * cx,T * thing)158 void AtomMarkingRuntime::markAtom(JSContext* cx, T* thing) {
159 return inlinedMarkAtom(cx, thing);
160 }
161
162 template void AtomMarkingRuntime::markAtom(JSContext* cx, JSAtom* thing);
163 template void AtomMarkingRuntime::markAtom(JSContext* cx, JS::Symbol* thing);
164
markId(JSContext * cx,jsid id)165 void AtomMarkingRuntime::markId(JSContext* cx, jsid id) {
166 if (id.isAtom()) {
167 markAtom(cx, id.toAtom());
168 return;
169 }
170 if (id.isSymbol()) {
171 markAtom(cx, id.toSymbol());
172 return;
173 }
174 MOZ_ASSERT(!id.isGCThing());
175 }
176
markAtomValue(JSContext * cx,const Value & value)177 void AtomMarkingRuntime::markAtomValue(JSContext* cx, const Value& value) {
178 if (value.isString()) {
179 if (value.toString()->isAtom()) {
180 markAtom(cx, &value.toString()->asAtom());
181 }
182 return;
183 }
184 if (value.isSymbol()) {
185 markAtom(cx, value.toSymbol());
186 return;
187 }
188 MOZ_ASSERT_IF(value.isGCThing(), value.isObject() ||
189 value.isPrivateGCThing() ||
190 value.isBigInt());
191 }
192
// Merge |source|'s atom mark bitmap into |target|'s, so that |target| keeps
// alive every atom that |source| was keeping alive.
void AtomMarkingRuntime::adoptMarkedAtoms(Zone* target, Zone* source) {
  MOZ_ASSERT(CurrentThreadCanAccessZone(source));
  MOZ_ASSERT(CurrentThreadCanAccessZone(target));
  target->markedAtoms().bitwiseOrWith(source->markedAtoms());
}
198
199 #ifdef DEBUG
// Debug check: is |thing| (an atom or symbol) considered live with respect to
// |zone|? Returns true for things that are unconditionally kept alive
// (permanent or pinned) and otherwise consults the zone's atom mark bitmap.
template <typename T>
bool AtomMarkingRuntime::atomIsMarked(Zone* zone, T* thing) {
  static_assert(std::is_same_v<T, JSAtom> || std::is_same_v<T, JS::Symbol>,
                "Should only be called with JSAtom* or JS::Symbol* argument");

  MOZ_ASSERT(thing);
  MOZ_ASSERT(!IsInsideNursery(thing));
  MOZ_ASSERT(thing->zoneFromAnyThread()->isAtomsZone());

  // Before the permanent atoms are populated, treat everything as marked.
  if (!zone->runtimeFromAnyThread()->permanentAtomsPopulated()) {
    return true;
  }

  // Permanent, shareable atoms/symbols are always treated as marked.
  if (thing->isPermanentAndMayBeShared()) {
    return true;
  }

  // Pinned atoms are kept alive independently of per-zone mark bits.
  // (Only atoms can be pinned, hence the compile-time branch.)
  if constexpr (std::is_same_v<T, JSAtom>) {
    JSRuntime* rt = zone->runtimeFromAnyThread();
    if (rt->atoms().atomIsPinned(rt, thing)) {
      return true;
    }
  }

  // Otherwise consult |zone|'s atom mark bitmap.
  size_t bit = GetAtomBit(&thing->asTenured());
  return zone->markedAtoms().getBit(bit);
}

template bool AtomMarkingRuntime::atomIsMarked(Zone* zone, JSAtom* thing);
template bool AtomMarkingRuntime::atomIsMarked(Zone* zone, JS::Symbol* thing);
230
231 template <>
atomIsMarked(Zone * zone,TenuredCell * thing)232 bool AtomMarkingRuntime::atomIsMarked(Zone* zone, TenuredCell* thing) {
233 if (!thing) {
234 return true;
235 }
236
237 if (thing->is<JSString>()) {
238 JSString* str = thing->as<JSString>();
239 if (!str->isAtom()) {
240 return true;
241 }
242 return atomIsMarked(zone, &str->asAtom());
243 }
244
245 if (thing->is<JS::Symbol>()) {
246 return atomIsMarked(zone, thing->as<JS::Symbol>());
247 }
248
249 return true;
250 }
251
idIsMarked(Zone * zone,jsid id)252 bool AtomMarkingRuntime::idIsMarked(Zone* zone, jsid id) {
253 if (id.isAtom()) {
254 return atomIsMarked(zone, id.toAtom());
255 }
256
257 if (id.isSymbol()) {
258 return atomIsMarked(zone, id.toSymbol());
259 }
260
261 MOZ_ASSERT(!id.isGCThing());
262 return true;
263 }
264
valueIsMarked(Zone * zone,const Value & value)265 bool AtomMarkingRuntime::valueIsMarked(Zone* zone, const Value& value) {
266 if (value.isString()) {
267 if (value.toString()->isAtom()) {
268 return atomIsMarked(zone, &value.toString()->asAtom());
269 }
270 return true;
271 }
272
273 if (value.isSymbol()) {
274 return atomIsMarked(zone, value.toSymbol());
275 }
276
277 MOZ_ASSERT_IF(value.isGCThing(), value.isObject() ||
278 value.isPrivateGCThing() ||
279 value.isBigInt());
280 return true;
281 }
282
283 #endif // DEBUG
284
285 } // namespace gc
286
287 #ifdef DEBUG
288
AtomIsMarked(Zone * zone,JSAtom * atom)289 bool AtomIsMarked(Zone* zone, JSAtom* atom) {
290 return zone->runtimeFromAnyThread()->gc.atomMarking.atomIsMarked(zone, atom);
291 }
292
AtomIsMarked(Zone * zone,jsid id)293 bool AtomIsMarked(Zone* zone, jsid id) {
294 return zone->runtimeFromAnyThread()->gc.atomMarking.idIsMarked(zone, id);
295 }
296
AtomIsMarked(Zone * zone,const Value & value)297 bool AtomIsMarked(Zone* zone, const Value& value) {
298 return zone->runtimeFromAnyThread()->gc.atomMarking.valueIsMarked(zone,
299 value);
300 }
301
302 #endif // DEBUG
303
304 } // namespace js
305