1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 * vim: set ts=8 sts=2 et sw=2 tw=80:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6
7 /*
8 * JS atom table.
9 */
10
11 #include "vm/JSAtom-inl.h"
12
13 #include "mozilla/ArrayUtils.h"
14 #include "mozilla/EndianUtils.h"
15 #include "mozilla/HashFunctions.h" // mozilla::HashStringKnownLength
16 #include "mozilla/RangedPtr.h"
17
18 #include <iterator>
19 #include <string.h>
20
21 #include "jstypes.h"
22
23 #include "gc/GC.h"
24 #include "gc/Marking.h"
25 #include "gc/MaybeRooted.h"
26 #include "js/CharacterEncoding.h"
27 #include "js/friend/ErrorMessages.h" // js::GetErrorMessage, JSMSG_*
28 #include "js/Symbol.h"
29 #include "util/Text.h"
30 #include "vm/JSContext.h"
31 #include "vm/SymbolType.h"
32 #include "vm/WellKnownAtom.h" // js_*_str
33 #include "vm/Xdr.h"
34
35 #include "gc/AtomMarking-inl.h"
36 #include "vm/JSContext-inl.h"
37 #include "vm/JSObject-inl.h"
38 #include "vm/Realm-inl.h"
39 #include "vm/StringType-inl.h"
40
41 using namespace js;
42
43 using mozilla::Maybe;
44 using mozilla::Nothing;
45 using mozilla::RangedPtr;
46
47 template <typename CharT>
48 extern void InflateUTF8CharsToBufferAndTerminate(const JS::UTF8Chars src,
49 CharT* dst, size_t dstLen,
50 JS::SmallestEncoding encoding);
51
52 template <typename CharT>
53 extern bool UTF8EqualsChars(const JS::UTF8Chars utf8, const CharT* chars);
54
55 extern bool GetUTF8AtomizationData(JSContext* cx, const JS::UTF8Chars utf8,
56 size_t* outlen,
57 JS::SmallestEncoding* encoding,
58 HashNumber* hashNum);
59
60 struct js::AtomHasher::Lookup {
61 union {
62 const JS::Latin1Char* latin1Chars;
63 const char16_t* twoByteChars;
64 LittleEndianChars littleEndianChars;
65 const char* utf8Bytes;
66 };
67 enum { TwoByteChar, LittleEndianTwoByte, Latin1, UTF8 } type;
68 size_t length;
69 size_t byteLength;
70 const JSAtom* atom; /* Optional. */
71 JS::AutoCheckCannotGC nogc;
72
73 HashNumber hash;
74
Lookupjs::AtomHasher::Lookup75 MOZ_ALWAYS_INLINE Lookup(const char* utf8Bytes, size_t byteLen, size_t length,
76 HashNumber hash)
77 : utf8Bytes(utf8Bytes),
78 type(UTF8),
79 length(length),
80 byteLength(byteLen),
81 atom(nullptr),
82 hash(hash) {}
83
Lookupjs::AtomHasher::Lookup84 MOZ_ALWAYS_INLINE Lookup(const char16_t* chars, size_t length)
85 : twoByteChars(chars),
86 type(TwoByteChar),
87 length(length),
88 atom(nullptr),
89 hash(mozilla::HashString(chars, length)) {}
90
Lookupjs::AtomHasher::Lookup91 MOZ_ALWAYS_INLINE Lookup(const JS::Latin1Char* chars, size_t length)
92 : latin1Chars(chars),
93 type(Latin1),
94 length(length),
95 atom(nullptr),
96 hash(mozilla::HashString(chars, length)) {}
97
Lookupjs::AtomHasher::Lookup98 MOZ_ALWAYS_INLINE Lookup(HashNumber hash, const char16_t* chars,
99 size_t length)
100 : twoByteChars(chars),
101 type(TwoByteChar),
102 length(length),
103 atom(nullptr),
104 hash(hash) {
105 MOZ_ASSERT(hash == mozilla::HashString(chars, length));
106 }
107
Lookupjs::AtomHasher::Lookup108 MOZ_ALWAYS_INLINE Lookup(HashNumber hash, const JS::Latin1Char* chars,
109 size_t length)
110 : latin1Chars(chars),
111 type(Latin1),
112 length(length),
113 atom(nullptr),
114 hash(hash) {
115 MOZ_ASSERT(hash == mozilla::HashString(chars, length));
116 }
117
Lookupjs::AtomHasher::Lookup118 inline explicit Lookup(const JSAtom* atom)
119 : type(atom->hasLatin1Chars() ? Latin1 : TwoByteChar),
120 length(atom->length()),
121 atom(atom),
122 hash(atom->hash()) {
123 if (type == Latin1) {
124 latin1Chars = atom->latin1Chars(nogc);
125 MOZ_ASSERT(mozilla::HashString(latin1Chars, length) == hash);
126 } else {
127 MOZ_ASSERT(type == TwoByteChar);
128 twoByteChars = atom->twoByteChars(nogc);
129 MOZ_ASSERT(mozilla::HashString(twoByteChars, length) == hash);
130 }
131 }
132
Lookupjs::AtomHasher::Lookup133 MOZ_ALWAYS_INLINE Lookup(LittleEndianChars chars, size_t length)
134 : littleEndianChars(chars),
135 type(LittleEndianTwoByte),
136 length(length),
137 atom(nullptr),
138 hash(mozilla::HashStringKnownLength(chars, length)) {}
139 };
140
hash(const Lookup & l)141 inline HashNumber js::AtomHasher::hash(const Lookup& l) { return l.hash; }
142
match(const AtomStateEntry & entry,const Lookup & lookup)143 MOZ_ALWAYS_INLINE bool js::AtomHasher::match(const AtomStateEntry& entry,
144 const Lookup& lookup) {
145 JSAtom* key = entry.asPtrUnbarriered();
146 if (lookup.atom) {
147 return lookup.atom == key;
148 }
149 if (key->length() != lookup.length || key->hash() != lookup.hash) {
150 return false;
151 }
152
153 auto EqualsLittleEndianChars = [&lookup](auto keyChars) {
154 for (size_t i = 0, len = lookup.length; i < len; i++) {
155 if (keyChars[i] != lookup.littleEndianChars[i]) {
156 return false;
157 }
158 }
159 return true;
160 };
161
162 if (key->hasLatin1Chars()) {
163 const Latin1Char* keyChars = key->latin1Chars(lookup.nogc);
164 switch (lookup.type) {
165 case Lookup::Latin1:
166 return mozilla::ArrayEqual(keyChars, lookup.latin1Chars, lookup.length);
167 case Lookup::TwoByteChar:
168 return EqualChars(keyChars, lookup.twoByteChars, lookup.length);
169 case Lookup::LittleEndianTwoByte:
170 return EqualsLittleEndianChars(keyChars);
171 case Lookup::UTF8: {
172 JS::UTF8Chars utf8(lookup.utf8Bytes, lookup.byteLength);
173 return UTF8EqualsChars(utf8, keyChars);
174 }
175 }
176 }
177
178 const char16_t* keyChars = key->twoByteChars(lookup.nogc);
179 switch (lookup.type) {
180 case Lookup::Latin1:
181 return EqualChars(lookup.latin1Chars, keyChars, lookup.length);
182 case Lookup::TwoByteChar:
183 return mozilla::ArrayEqual(keyChars, lookup.twoByteChars, lookup.length);
184 case Lookup::LittleEndianTwoByte:
185 return EqualsLittleEndianChars(keyChars);
186 case Lookup::UTF8: {
187 JS::UTF8Chars utf8(lookup.utf8Bytes, lookup.byteLength);
188 return UTF8EqualsChars(utf8, keyChars);
189 }
190 }
191
192 MOZ_ASSERT_UNREACHABLE("AtomHasher::match unknown type");
193 return false;
194 }
195
asPtr(JSContext * cx) const196 inline JSAtom* js::AtomStateEntry::asPtr(JSContext* cx) const {
197 JSAtom* atom = asPtrUnbarriered();
198 if (!cx->isHelperThreadContext()) {
199 gc::ReadBarrier(atom);
200 }
201 return atom;
202 }
203
AtomToPrintableString(JSContext * cx,JSAtom * atom)204 UniqueChars js::AtomToPrintableString(JSContext* cx, JSAtom* atom) {
205 return QuoteString(cx, atom);
206 }
207
208 // Use a low initial capacity for the permanent atoms table to avoid penalizing
209 // runtimes that create a small number of atoms.
210 static const uint32_t JS_PERMANENT_ATOM_SIZE = 64;
211
readonlyThreadsafeLookup(const AtomSet::Lookup & l) const212 MOZ_ALWAYS_INLINE AtomSet::Ptr js::FrozenAtomSet::readonlyThreadsafeLookup(
213 const AtomSet::Lookup& l) const {
214 return mSet->readonlyThreadsafeLookup(l);
215 }
216
initializeAtoms(JSContext * cx)217 bool JSRuntime::initializeAtoms(JSContext* cx) {
218 MOZ_ASSERT(!atoms_);
219 MOZ_ASSERT(!permanentAtomsDuringInit_);
220 MOZ_ASSERT(!permanentAtoms_);
221
222 if (parentRuntime) {
223 permanentAtoms_ = parentRuntime->permanentAtoms_;
224
225 staticStrings = parentRuntime->staticStrings;
226 commonNames = parentRuntime->commonNames;
227 emptyString = parentRuntime->emptyString;
228 wellKnownSymbols = parentRuntime->wellKnownSymbols;
229
230 atoms_ = js_new<AtomsTable>();
231 if (!atoms_) {
232 return false;
233 }
234
235 return atoms_->init();
236 }
237
238 permanentAtomsDuringInit_ = js_new<AtomSet>(JS_PERMANENT_ATOM_SIZE);
239 if (!permanentAtomsDuringInit_) {
240 return false;
241 }
242
243 staticStrings = js_new<StaticStrings>();
244 if (!staticStrings || !staticStrings->init(cx)) {
245 return false;
246 }
247
248 // The bare symbol names are already part of the well-known set, but their
249 // descriptions are not, so enumerate them here and add them to the initial
250 // permanent atoms set below.
251 static const WellKnownAtomInfo symbolDescInfo[] = {
252 #define COMMON_NAME_INFO(NAME) \
253 {uint32_t(sizeof("Symbol." #NAME) - 1), \
254 mozilla::HashStringKnownLength("Symbol." #NAME, \
255 sizeof("Symbol." #NAME) - 1), \
256 "Symbol." #NAME},
257 JS_FOR_EACH_WELL_KNOWN_SYMBOL(COMMON_NAME_INFO)
258 #undef COMMON_NAME_INFO
259 };
260
261 commonNames = js_new<JSAtomState>();
262 if (!commonNames) {
263 return false;
264 }
265
266 ImmutablePropertyNamePtr* names =
267 reinterpret_cast<ImmutablePropertyNamePtr*>(commonNames.ref());
268 for (size_t i = 0; i < uint32_t(WellKnownAtomId::Limit); i++) {
269 const auto& info = wellKnownAtomInfos[i];
270 JSAtom* atom = Atomize(cx, info.hash, info.content, info.length, PinAtom);
271 if (!atom) {
272 return false;
273 }
274 names->init(atom->asPropertyName());
275 names++;
276 }
277
278 for (const auto& info : symbolDescInfo) {
279 JSAtom* atom = Atomize(cx, info.hash, info.content, info.length, PinAtom);
280 if (!atom) {
281 return false;
282 }
283 names->init(atom->asPropertyName());
284 names++;
285 }
286 MOZ_ASSERT(uintptr_t(names) == uintptr_t(commonNames + 1));
287
288 emptyString = commonNames->empty;
289
290 // Create the well-known symbols.
291 auto wks = js_new<WellKnownSymbols>();
292 if (!wks) {
293 return false;
294 }
295
296 // Prevent GC until we have fully initialized the well known symbols table.
297 // Faster than zeroing the array and null checking during every GC.
298 gc::AutoSuppressGC nogc(cx);
299
300 ImmutablePropertyNamePtr* descriptions =
301 commonNames->wellKnownSymbolDescriptions();
302 ImmutableSymbolPtr* symbols = reinterpret_cast<ImmutableSymbolPtr*>(wks);
303 for (size_t i = 0; i < JS::WellKnownSymbolLimit; i++) {
304 HandlePropertyName description = descriptions[i];
305 JS::Symbol* symbol = JS::Symbol::new_(cx, JS::SymbolCode(i), description);
306 if (!symbol) {
307 ReportOutOfMemory(cx);
308 return false;
309 }
310 symbols[i].init(symbol);
311 }
312
313 wellKnownSymbols = wks;
314 return true;
315 }
316
finishAtoms()317 void JSRuntime::finishAtoms() {
318 js_delete(atoms_.ref());
319
320 if (!parentRuntime) {
321 js_delete(permanentAtomsDuringInit_.ref());
322 js_delete(permanentAtoms_.ref());
323 js_delete(staticStrings.ref());
324 js_delete(commonNames.ref());
325 js_delete(wellKnownSymbols.ref());
326 }
327
328 atoms_ = nullptr;
329 permanentAtomsDuringInit_ = nullptr;
330 permanentAtoms_ = nullptr;
331 staticStrings = nullptr;
332 commonNames = nullptr;
333 wellKnownSymbols = nullptr;
334 emptyString = nullptr;
335 }
336
337 class AtomsTable::AutoLock {
338 Mutex* lock = nullptr;
339
340 public:
AutoLock(JSRuntime * rt,Mutex & aLock)341 MOZ_ALWAYS_INLINE explicit AutoLock(JSRuntime* rt, Mutex& aLock) {
342 if (rt->hasHelperThreadZones()) {
343 lock = &aLock;
344 lock->lock();
345 }
346 }
347
~AutoLock()348 MOZ_ALWAYS_INLINE ~AutoLock() {
349 if (lock) {
350 lock->unlock();
351 }
352 }
353 };
354
Partition(uint32_t index)355 AtomsTable::Partition::Partition(uint32_t index)
356 : lock(
357 MutexId{mutexid::AtomsTable.name, mutexid::AtomsTable.order + index}),
358 atoms(InitialTableSize),
359 atomsAddedWhileSweeping(nullptr) {}
360
~Partition()361 AtomsTable::Partition::~Partition() { MOZ_ASSERT(!atomsAddedWhileSweeping); }
362
~AtomsTable()363 AtomsTable::~AtomsTable() {
364 for (size_t i = 0; i < PartitionCount; i++) {
365 js_delete(partitions[i]);
366 }
367 }
368
init()369 bool AtomsTable::init() {
370 for (size_t i = 0; i < PartitionCount; i++) {
371 partitions[i] = js_new<Partition>(i);
372 if (!partitions[i]) {
373 return false;
374 }
375 }
376 return true;
377 }
378
lockAll()379 void AtomsTable::lockAll() {
380 MOZ_ASSERT(!allPartitionsLocked);
381
382 for (size_t i = 0; i < PartitionCount; i++) {
383 partitions[i]->lock.lock();
384 }
385
386 #ifdef DEBUG
387 allPartitionsLocked = true;
388 #endif
389 }
390
unlockAll()391 void AtomsTable::unlockAll() {
392 MOZ_ASSERT(allPartitionsLocked);
393
394 for (size_t i = 0; i < PartitionCount; i++) {
395 partitions[PartitionCount - i - 1]->lock.unlock();
396 }
397
398 #ifdef DEBUG
399 allPartitionsLocked = false;
400 #endif
401 }
402
403 MOZ_ALWAYS_INLINE size_t
getPartitionIndex(const AtomHasher::Lookup & lookup)404 AtomsTable::getPartitionIndex(const AtomHasher::Lookup& lookup) {
405 size_t index = lookup.hash >> (32 - PartitionShift);
406 MOZ_ASSERT(index < PartitionCount);
407 return index;
408 }
409
tracePinnedAtomsInSet(JSTracer * trc,AtomSet & atoms)410 inline void AtomsTable::tracePinnedAtomsInSet(JSTracer* trc, AtomSet& atoms) {
411 for (auto r = atoms.all(); !r.empty(); r.popFront()) {
412 const AtomStateEntry& entry = r.front();
413 MOZ_DIAGNOSTIC_ASSERT(entry.asPtrUnbarriered());
414 if (entry.isPinned()) {
415 JSAtom* atom = entry.asPtrUnbarriered();
416 TraceRoot(trc, &atom, "interned_atom");
417 MOZ_ASSERT(entry.asPtrUnbarriered() == atom);
418 }
419 }
420 }
421
tracePinnedAtoms(JSTracer * trc,const AutoAccessAtomsZone & access)422 void AtomsTable::tracePinnedAtoms(JSTracer* trc,
423 const AutoAccessAtomsZone& access) {
424 for (size_t i = 0; i < PartitionCount; i++) {
425 Partition& part = *partitions[i];
426 tracePinnedAtomsInSet(trc, part.atoms);
427 if (part.atomsAddedWhileSweeping) {
428 tracePinnedAtomsInSet(trc, *part.atomsAddedWhileSweeping);
429 }
430 }
431 }
432
TraceAtoms(JSTracer * trc,const AutoAccessAtomsZone & access)433 void js::TraceAtoms(JSTracer* trc, const AutoAccessAtomsZone& access) {
434 JSRuntime* rt = trc->runtime();
435 if (rt->permanentAtomsPopulated()) {
436 rt->atoms().tracePinnedAtoms(trc, access);
437 }
438 }
439
TracePermanentAtoms(JSTracer * trc,AtomSet::Range atoms)440 static void TracePermanentAtoms(JSTracer* trc, AtomSet::Range atoms) {
441 for (; !atoms.empty(); atoms.popFront()) {
442 const AtomStateEntry& entry = atoms.front();
443 JSAtom* atom = entry.asPtrUnbarriered();
444 MOZ_ASSERT(atom->isPermanentAtom());
445 TraceProcessGlobalRoot(trc, atom, "permanent atom");
446 }
447 }
448
tracePermanentAtoms(JSTracer * trc)449 void JSRuntime::tracePermanentAtoms(JSTracer* trc) {
450 // Permanent atoms only need to be traced in the runtime which owns them.
451 if (parentRuntime) {
452 return;
453 }
454
455 // Static strings are not included in the permanent atoms table.
456 if (staticStrings) {
457 staticStrings->trace(trc);
458 }
459
460 if (permanentAtomsDuringInit_) {
461 TracePermanentAtoms(trc, permanentAtomsDuringInit_->all());
462 }
463
464 if (permanentAtoms_) {
465 TracePermanentAtoms(trc, permanentAtoms_->all());
466 }
467 }
468
TraceWellKnownSymbols(JSTracer * trc)469 void js::TraceWellKnownSymbols(JSTracer* trc) {
470 JSRuntime* rt = trc->runtime();
471
472 if (rt->parentRuntime) {
473 return;
474 }
475
476 if (WellKnownSymbols* wks = rt->wellKnownSymbols) {
477 for (size_t i = 0; i < JS::WellKnownSymbolLimit; i++) {
478 TraceProcessGlobalRoot(trc, wks->get(i).get(), "well_known_symbol");
479 }
480 }
481 }
482
traceWeak(JSTracer * trc)483 void AtomsTable::traceWeak(JSTracer* trc) {
484 JSRuntime* rt = trc->runtime();
485 for (size_t i = 0; i < PartitionCount; i++) {
486 AutoLock lock(rt, partitions[i]->lock);
487 AtomSet& atoms = partitions[i]->atoms;
488 for (AtomSet::Enum e(atoms); !e.empty(); e.popFront()) {
489 JSAtom* atom = e.front().asPtrUnbarriered();
490 MOZ_DIAGNOSTIC_ASSERT(atom);
491 if (!TraceManuallyBarrieredWeakEdge(trc, &atom,
492 "AtomsTable::partitions::atoms")) {
493 e.removeFront();
494 } else {
495 MOZ_ASSERT(atom == e.front().asPtrUnbarriered());
496 }
497 }
498 }
499 }
500
SweepIterator(AtomsTable & atoms)501 AtomsTable::SweepIterator::SweepIterator(AtomsTable& atoms)
502 : atoms(atoms), partitionIndex(0) {
503 startSweepingPartition();
504 settle();
505 }
506
startSweepingPartition()507 inline void AtomsTable::SweepIterator::startSweepingPartition() {
508 MOZ_ASSERT(atoms.partitions[partitionIndex]->atomsAddedWhileSweeping);
509 atomsIter.emplace(atoms.partitions[partitionIndex]->atoms);
510 }
511
finishSweepingPartition()512 inline void AtomsTable::SweepIterator::finishSweepingPartition() {
513 atomsIter.reset();
514 atoms.mergeAtomsAddedWhileSweeping(*atoms.partitions[partitionIndex]);
515 }
516
settle()517 inline void AtomsTable::SweepIterator::settle() {
518 MOZ_ASSERT(!empty());
519
520 while (atomsIter->empty()) {
521 finishSweepingPartition();
522 partitionIndex++;
523 if (empty()) {
524 return;
525 }
526 startSweepingPartition();
527 }
528 }
529
empty() const530 inline bool AtomsTable::SweepIterator::empty() const {
531 return partitionIndex == PartitionCount;
532 }
533
front() const534 inline AtomStateEntry AtomsTable::SweepIterator::front() const {
535 MOZ_ASSERT(!empty());
536 return atomsIter->front();
537 }
538
removeFront()539 inline void AtomsTable::SweepIterator::removeFront() {
540 MOZ_ASSERT(!empty());
541 return atomsIter->removeFront();
542 }
543
popFront()544 inline void AtomsTable::SweepIterator::popFront() {
545 MOZ_ASSERT(!empty());
546 atomsIter->popFront();
547 settle();
548 }
549
startIncrementalSweep()550 bool AtomsTable::startIncrementalSweep() {
551 MOZ_ASSERT(JS::RuntimeHeapIsCollecting());
552
553 bool ok = true;
554 for (size_t i = 0; i < PartitionCount; i++) {
555 auto& part = *partitions[i];
556
557 auto newAtoms = js_new<AtomSet>();
558 if (!newAtoms) {
559 ok = false;
560 break;
561 }
562
563 MOZ_ASSERT(!part.atomsAddedWhileSweeping);
564 part.atomsAddedWhileSweeping = newAtoms;
565 }
566
567 if (!ok) {
568 for (size_t i = 0; i < PartitionCount; i++) {
569 auto& part = *partitions[i];
570 js_delete(part.atomsAddedWhileSweeping);
571 part.atomsAddedWhileSweeping = nullptr;
572 }
573 }
574
575 return ok;
576 }
577
mergeAtomsAddedWhileSweeping(Partition & part)578 void AtomsTable::mergeAtomsAddedWhileSweeping(Partition& part) {
579 // Add atoms that were added to the secondary table while we were sweeping
580 // the main table.
581
582 AutoEnterOOMUnsafeRegion oomUnsafe;
583
584 auto newAtoms = part.atomsAddedWhileSweeping;
585 part.atomsAddedWhileSweeping = nullptr;
586
587 for (auto r = newAtoms->all(); !r.empty(); r.popFront()) {
588 if (!part.atoms.putNew(AtomHasher::Lookup(r.front().asPtrUnbarriered()),
589 r.front())) {
590 oomUnsafe.crash("Adding atom from secondary table after sweep");
591 }
592 }
593
594 js_delete(newAtoms);
595 }
596
sweepIncrementally(SweepIterator & atomsToSweep,SliceBudget & budget)597 bool AtomsTable::sweepIncrementally(SweepIterator& atomsToSweep,
598 SliceBudget& budget) {
599 // Sweep the table incrementally until we run out of work or budget.
600 while (!atomsToSweep.empty()) {
601 budget.step();
602 if (budget.isOverBudget()) {
603 return false;
604 }
605
606 AtomStateEntry entry = atomsToSweep.front();
607 JSAtom* atom = entry.asPtrUnbarriered();
608 MOZ_DIAGNOSTIC_ASSERT(atom);
609 if (IsAboutToBeFinalizedUnbarriered(&atom)) {
610 MOZ_ASSERT(!entry.isPinned());
611 atomsToSweep.removeFront();
612 } else {
613 MOZ_ASSERT(atom == entry.asPtrUnbarriered());
614 }
615 atomsToSweep.popFront();
616 }
617
618 for (size_t i = 0; i < PartitionCount; i++) {
619 MOZ_ASSERT(!partitions[i]->atomsAddedWhileSweeping);
620 }
621
622 return true;
623 }
624
sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const625 size_t AtomsTable::sizeOfIncludingThis(
626 mozilla::MallocSizeOf mallocSizeOf) const {
627 size_t size = sizeof(AtomsTable);
628 for (size_t i = 0; i < PartitionCount; i++) {
629 size += sizeof(Partition);
630 size += partitions[i]->atoms.shallowSizeOfExcludingThis(mallocSizeOf);
631 }
632 return size;
633 }
634
initMainAtomsTables(JSContext * cx)635 bool JSRuntime::initMainAtomsTables(JSContext* cx) {
636 MOZ_ASSERT(!parentRuntime);
637 MOZ_ASSERT(!permanentAtomsPopulated());
638
639 // The permanent atoms table has now been populated.
640 permanentAtoms_ =
641 js_new<FrozenAtomSet>(permanentAtomsDuringInit_); // Takes ownership.
642 permanentAtomsDuringInit_ = nullptr;
643
644 // Initialize the main atoms table.
645 MOZ_ASSERT(!atoms_);
646 atoms_ = js_new<AtomsTable>();
647 return atoms_ && atoms_->init();
648 }
649
650 template <typename Chars>
651 static MOZ_ALWAYS_INLINE JSAtom* AtomizeAndCopyCharsFromLookup(
652 JSContext* cx, Chars chars, size_t length, const AtomHasher::Lookup& lookup,
653 PinningBehavior pin, const Maybe<uint32_t>& indexValue);
654
655 template <typename CharT, typename = std::enable_if_t<!std::is_const_v<CharT>>>
AtomizeAndCopyCharsFromLookup(JSContext * cx,CharT * chars,size_t length,const AtomHasher::Lookup & lookup,PinningBehavior pin,const Maybe<uint32_t> & indexValue)656 static MOZ_ALWAYS_INLINE JSAtom* AtomizeAndCopyCharsFromLookup(
657 JSContext* cx, CharT* chars, size_t length,
658 const AtomHasher::Lookup& lookup, PinningBehavior pin,
659 const Maybe<uint32_t>& indexValue) {
660 return AtomizeAndCopyCharsFromLookup(cx, const_cast<const CharT*>(chars),
661 length, lookup, pin, indexValue);
662 }
663
664 template <typename Chars>
665 static MOZ_NEVER_INLINE JSAtom* PermanentlyAtomizeAndCopyChars(
666 JSContext* cx, Maybe<AtomSet::AddPtr>& zonePtr, Chars chars, size_t length,
667 const Maybe<uint32_t>& indexValue, const AtomHasher::Lookup& lookup);
668
669 template <typename CharT, typename = std::enable_if_t<!std::is_const_v<CharT>>>
PermanentlyAtomizeAndCopyChars(JSContext * cx,Maybe<AtomSet::AddPtr> & zonePtr,CharT * chars,size_t length,const Maybe<uint32_t> & indexValue,const AtomHasher::Lookup & lookup)670 static JSAtom* PermanentlyAtomizeAndCopyChars(
671 JSContext* cx, Maybe<AtomSet::AddPtr>& zonePtr, CharT* chars, size_t length,
672 const Maybe<uint32_t>& indexValue, const AtomHasher::Lookup& lookup) {
673 return PermanentlyAtomizeAndCopyChars(
674 cx, zonePtr, const_cast<const CharT*>(chars), length, indexValue, lookup);
675 }
676
677 template <typename Chars>
AtomizeAndCopyCharsFromLookup(JSContext * cx,Chars chars,size_t length,const AtomHasher::Lookup & lookup,PinningBehavior pin,const Maybe<uint32_t> & indexValue)678 static MOZ_ALWAYS_INLINE JSAtom* AtomizeAndCopyCharsFromLookup(
679 JSContext* cx, Chars chars, size_t length, const AtomHasher::Lookup& lookup,
680 PinningBehavior pin, const Maybe<uint32_t>& indexValue) {
681 // Try the per-Zone cache first. If we find the atom there we can avoid the
682 // atoms lock, the markAtom call, and the multiple HashSet lookups below.
683 // We don't use the per-Zone cache if we want a pinned atom: handling that
684 // is more complicated and pinning atoms is relatively uncommon.
685 Zone* zone = cx->zone();
686 Maybe<AtomSet::AddPtr> zonePtr;
687 if (MOZ_LIKELY(zone && pin == DoNotPinAtom)) {
688 zonePtr.emplace(zone->atomCache().lookupForAdd(lookup));
689 if (zonePtr.ref()) {
690 // The cache is purged on GC so if we're in the middle of an
691 // incremental GC we should have barriered the atom when we put
692 // it in the cache.
693 JSAtom* atom = zonePtr.ref()->asPtrUnbarriered();
694 MOZ_ASSERT(AtomIsMarked(zone, atom));
695 return atom;
696 }
697 }
698
699 // This function can be called during initialization, while the permanent
700 // atoms table is being created. In this case all atoms created are added to
701 // the permanent atoms table.
702 if (!cx->permanentAtomsPopulated()) {
703 return PermanentlyAtomizeAndCopyChars(cx, zonePtr, chars, length,
704 indexValue, lookup);
705 }
706
707 AtomSet::Ptr pp = cx->permanentAtoms().readonlyThreadsafeLookup(lookup);
708 if (pp) {
709 JSAtom* atom = pp->asPtr(cx);
710 if (zonePtr && MOZ_UNLIKELY(!zone->atomCache().add(
711 *zonePtr, AtomStateEntry(atom, false)))) {
712 ReportOutOfMemory(cx);
713 return nullptr;
714 }
715
716 return atom;
717 }
718
719 // Validate the length before taking an atoms partition lock, as throwing an
720 // exception here may reenter this code.
721 if (MOZ_UNLIKELY(!JSString::validateLength(cx, length))) {
722 return nullptr;
723 }
724
725 JSAtom* atom = cx->atoms().atomizeAndCopyChars(cx, chars, length, pin,
726 indexValue, lookup);
727 if (!atom) {
728 return nullptr;
729 }
730
731 if (MOZ_UNLIKELY(!cx->atomMarking().inlinedMarkAtomFallible(cx, atom))) {
732 ReportOutOfMemory(cx);
733 return nullptr;
734 }
735
736 if (zonePtr && MOZ_UNLIKELY(!zone->atomCache().add(
737 *zonePtr, AtomStateEntry(atom, false)))) {
738 ReportOutOfMemory(cx);
739 return nullptr;
740 }
741
742 return atom;
743 }
744
745 template <typename Chars>
746 static MOZ_ALWAYS_INLINE JSAtom* AllocateNewAtom(
747 JSContext* cx, Chars chars, size_t length,
748 const Maybe<uint32_t>& indexValue, const AtomHasher::Lookup& lookup);
749
750 template <typename CharT, typename = std::enable_if_t<!std::is_const_v<CharT>>>
AllocateNewAtom(JSContext * cx,CharT * chars,size_t length,const Maybe<uint32_t> & indexValue,const AtomHasher::Lookup & lookup)751 static MOZ_ALWAYS_INLINE JSAtom* AllocateNewAtom(
752 JSContext* cx, CharT* chars, size_t length,
753 const Maybe<uint32_t>& indexValue, const AtomHasher::Lookup& lookup) {
754 return AllocateNewAtom(cx, const_cast<const CharT*>(chars), length,
755 indexValue, lookup);
756 }
757
758 template <typename Chars>
atomizeAndCopyChars(JSContext * cx,Chars chars,size_t length,PinningBehavior pin,const Maybe<uint32_t> & indexValue,const AtomHasher::Lookup & lookup)759 MOZ_ALWAYS_INLINE JSAtom* AtomsTable::atomizeAndCopyChars(
760 JSContext* cx, Chars chars, size_t length, PinningBehavior pin,
761 const Maybe<uint32_t>& indexValue, const AtomHasher::Lookup& lookup) {
762 Partition& part = *partitions[getPartitionIndex(lookup)];
763 AutoLock lock(cx->runtime(), part.lock);
764
765 AtomSet& atoms = part.atoms;
766 AtomSet* atomsAddedWhileSweeping = part.atomsAddedWhileSweeping;
767 AtomSet::AddPtr p;
768
769 if (!atomsAddedWhileSweeping) {
770 p = atoms.lookupForAdd(lookup);
771 } else {
772 // We're currently sweeping the main atoms table and all new atoms will
773 // be added to a secondary table. Check this first.
774 p = atomsAddedWhileSweeping->lookupForAdd(lookup);
775
776 // If that fails check the main table but check if any atom found there
777 // is dead.
778 if (!p) {
779 if (AtomSet::AddPtr p2 = atoms.lookupForAdd(lookup)) {
780 JSAtom* atom = p2->asPtrUnbarriered();
781 if (!IsAboutToBeFinalizedUnbarriered(&atom)) {
782 p = p2;
783 }
784 }
785 }
786 }
787
788 if (p) {
789 JSAtom* atom = p->asPtr(cx);
790 if (pin && !p->isPinned()) {
791 p->setPinned(true);
792 }
793 return atom;
794 }
795
796 JSAtom* atom = AllocateNewAtom(cx, chars, length, indexValue, lookup);
797 if (!atom) {
798 return nullptr;
799 }
800
801 // We have held the lock since looking up p, and the operations we've done
802 // since then can't GC; therefore the atoms table has not been modified and
803 // p is still valid.
804 AtomSet* addSet =
805 part.atomsAddedWhileSweeping ? part.atomsAddedWhileSweeping : &atoms;
806 if (MOZ_UNLIKELY(!addSet->add(p, AtomStateEntry(atom, bool(pin))))) {
807 ReportOutOfMemory(cx); /* SystemAllocPolicy does not report OOM. */
808 return nullptr;
809 }
810
811 return atom;
812 }
813
814 /* |chars| must not point into an inline or short string. */
815 template <typename CharT>
AtomizeAndCopyChars(JSContext * cx,const CharT * chars,size_t length,PinningBehavior pin,const Maybe<uint32_t> & indexValue)816 static MOZ_ALWAYS_INLINE JSAtom* AtomizeAndCopyChars(
817 JSContext* cx, const CharT* chars, size_t length, PinningBehavior pin,
818 const Maybe<uint32_t>& indexValue) {
819 if (JSAtom* s = cx->staticStrings().lookup(chars, length)) {
820 return s;
821 }
822
823 AtomHasher::Lookup lookup(chars, length);
824 return AtomizeAndCopyCharsFromLookup(cx, chars, length, lookup, pin,
825 indexValue);
826 }
827
828 template <typename Chars>
PermanentlyAtomizeAndCopyChars(JSContext * cx,Maybe<AtomSet::AddPtr> & zonePtr,Chars chars,size_t length,const Maybe<uint32_t> & indexValue,const AtomHasher::Lookup & lookup)829 static MOZ_NEVER_INLINE JSAtom* PermanentlyAtomizeAndCopyChars(
830 JSContext* cx, Maybe<AtomSet::AddPtr>& zonePtr, Chars chars, size_t length,
831 const Maybe<uint32_t>& indexValue, const AtomHasher::Lookup& lookup) {
832 MOZ_ASSERT(!cx->permanentAtomsPopulated());
833 MOZ_ASSERT(CurrentThreadCanAccessRuntime(cx->runtime()));
834
835 JSRuntime* rt = cx->runtime();
836 AtomSet& atoms = *rt->permanentAtomsDuringInit();
837 AtomSet::AddPtr p = atoms.lookupForAdd(lookup);
838 if (p) {
839 return p->asPtr(cx);
840 }
841
842 JSAtom* atom = AllocateNewAtom(cx, chars, length, indexValue, lookup);
843 if (!atom) {
844 return nullptr;
845 }
846
847 atom->morphIntoPermanentAtom();
848
849 // We are single threaded at this point, and the operations we've done since
850 // then can't GC; therefore the atoms table has not been modified and p is
851 // still valid.
852 if (!atoms.add(p, AtomStateEntry(atom, true))) {
853 ReportOutOfMemory(cx); /* SystemAllocPolicy does not report OOM. */
854 return nullptr;
855 }
856
857 if (zonePtr && MOZ_UNLIKELY(!cx->zone()->atomCache().add(
858 *zonePtr, AtomStateEntry(atom, false)))) {
859 ReportOutOfMemory(cx);
860 return nullptr;
861 }
862
863 return atom;
864 }
865
866 struct AtomizeUTF8CharsWrapper {
867 JS::UTF8Chars utf8;
868 JS::SmallestEncoding encoding;
869
AtomizeUTF8CharsWrapperAtomizeUTF8CharsWrapper870 AtomizeUTF8CharsWrapper(const JS::UTF8Chars& chars,
871 JS::SmallestEncoding minEncode)
872 : utf8(chars), encoding(minEncode) {}
873 };
874
875 // MakeLinearStringForAtomization has 4 variants.
876 // This is used by Latin1Char and char16_t.
877 template <typename CharT>
MakeLinearStringForAtomization(JSContext * cx,const CharT * chars,size_t length)878 static MOZ_ALWAYS_INLINE JSLinearString* MakeLinearStringForAtomization(
879 JSContext* cx, const CharT* chars, size_t length) {
880 return NewStringCopyN<NoGC>(cx, chars, length, gc::TenuredHeap);
881 }
882
883 // MakeLinearStringForAtomization has one further variant -- a non-template
884 // overload accepting LittleEndianChars.
MakeLinearStringForAtomization(JSContext * cx,LittleEndianChars chars,size_t length)885 static MOZ_ALWAYS_INLINE JSLinearString* MakeLinearStringForAtomization(
886 JSContext* cx, LittleEndianChars chars, size_t length) {
887 return NewStringFromLittleEndianNoGC(cx, chars, length, gc::TenuredHeap);
888 }
889
890 template <typename CharT>
MakeUTF8AtomHelper(JSContext * cx,const AtomizeUTF8CharsWrapper * chars,size_t length)891 static MOZ_ALWAYS_INLINE JSLinearString* MakeUTF8AtomHelper(
892 JSContext* cx, const AtomizeUTF8CharsWrapper* chars, size_t length) {
893 if (JSInlineString::lengthFits<CharT>(length)) {
894 CharT* storage;
895 JSInlineString* str =
896 AllocateInlineString<NoGC>(cx, length, &storage, gc::TenuredHeap);
897 if (!str) {
898 return nullptr;
899 }
900
901 InflateUTF8CharsToBufferAndTerminate(chars->utf8, storage, length,
902 chars->encoding);
903 return str;
904 }
905
906 // MakeAtomUTF8Helper is called from deep in the Atomization path, which
907 // expects functions to fail gracefully with nullptr on OOM, without throwing.
908 //
909 // Flat strings are null-terminated. Leave room with length + 1
910 UniquePtr<CharT[], JS::FreePolicy> newStr(
911 js_pod_arena_malloc<CharT>(js::StringBufferArena, length + 1));
912 if (!newStr) {
913 return nullptr;
914 }
915
916 InflateUTF8CharsToBufferAndTerminate(chars->utf8, newStr.get(), length,
917 chars->encoding);
918
919 return JSLinearString::new_<NoGC>(cx, std::move(newStr), length,
920 gc::TenuredHeap);
921 }
922
923 // Another 2 variants of MakeLinearStringForAtomization.
MakeLinearStringForAtomization(JSContext * cx,const AtomizeUTF8CharsWrapper * chars,size_t length)924 static MOZ_ALWAYS_INLINE JSLinearString* MakeLinearStringForAtomization(
925 JSContext* cx, const AtomizeUTF8CharsWrapper* chars, size_t length) {
926 if (length == 0) {
927 return cx->emptyString();
928 }
929
930 if (chars->encoding == JS::SmallestEncoding::UTF16) {
931 return MakeUTF8AtomHelper<char16_t>(cx, chars, length);
932 }
933 return MakeUTF8AtomHelper<JS::Latin1Char>(cx, chars, length);
934 }
935
// Allocate a fresh atom for characters that were not found in the atoms
// table. |chars| is a raw character pointer, LittleEndianChars, or an
// AtomizeUTF8CharsWrapper; |lookup| carries the precomputed hash.
template <typename Chars>
static MOZ_ALWAYS_INLINE JSAtom* AllocateNewAtom(
    JSContext* cx, Chars chars, size_t length,
    const Maybe<uint32_t>& indexValue, const AtomHasher::Lookup& lookup) {
  // All allocation below must happen in the atoms zone.
  AutoAllocInAtomsZone ac(cx);

  JSLinearString* linear = MakeLinearStringForAtomization(cx, chars, length);
  if (!linear) {
    // Grudgingly forgo last-ditch GC. The alternative would be to release
    // the lock, manually GC here, and retry from the top.
    ReportOutOfMemory(cx);
    return nullptr;
  }

  // Convert the freshly created linear string in place into an atom,
  // stashing the lookup hash in it.
  JSAtom* atom = linear->morphAtomizedStringIntoAtom(lookup.hash);
  MOZ_ASSERT(atom->hash() == lookup.hash);

  if (indexValue) {
    atom->setIsIndex(*indexValue);
  } else {
    // We need to call isIndexSlow directly to avoid the flag check in isIndex,
    // because we still have to initialize that flag.
    uint32_t index;
    if (atom->isIndexSlow(&index)) {
      atom->setIsIndex(index);
    }
  }

  return atom;
}
966
// Atomize an arbitrary JSString. Strings that are already atoms are
// returned as-is (pinning them on request); otherwise the string is
// linearized and its characters are copied into the atoms table.
JSAtom* js::AtomizeString(JSContext* cx, JSString* str,
                          js::PinningBehavior pin /* = js::DoNotPinAtom */) {
  if (str->isAtom()) {
    JSAtom& atom = str->asAtom();
    /* N.B. static atoms are effectively always interned. */
    if (pin == PinAtom && !atom.isPermanentAtom()) {
      cx->runtime()->atoms().maybePinExistingAtom(cx, &atom);
    }

    return &atom;
  }

  JSLinearString* linear = str->ensureLinear(cx);
  if (!linear) {
    return nullptr;
  }

  // Fast path: the main thread keeps a string -> atom cache that avoids the
  // atoms-table lookup (and its lock) for recently atomized strings.
  if (cx->isMainThreadContext() && pin == DoNotPinAtom) {
    if (JSAtom* atom = cx->caches().stringToAtomCache.lookup(linear)) {
      return atom;
    }
  }

  // Propagate a known index value so the new atom's index flag can be set
  // without reparsing the characters.
  Maybe<uint32_t> indexValue;
  if (str->hasIndexValue()) {
    indexValue.emplace(str->getIndexValue());
  }

  JS::AutoCheckCannotGC nogc;
  JSAtom* atom = linear->hasLatin1Chars()
                     ? AtomizeAndCopyChars(cx, linear->latin1Chars(nogc),
                                           linear->length(), pin, indexValue)
                     : AtomizeAndCopyChars(cx, linear->twoByteChars(nogc),
                                           linear->length(), pin, indexValue);
  if (!atom) {
    return nullptr;
  }

  // Remember the mapping for subsequent atomizations of the same string.
  if (cx->isMainThreadContext() && pin == DoNotPinAtom) {
    cx->caches().stringToAtomCache.maybePut(linear, atom);
  }

  return atom;
}
1011
AtomIsPinned(JSContext * cx,JSAtom * atom)1012 bool js::AtomIsPinned(JSContext* cx, JSAtom* atom) {
1013 JSRuntime* rt = cx->runtime();
1014 return rt->atoms().atomIsPinned(rt, atom);
1015 }
1016
// Return whether |atom| is pinned. Permanent atoms are implicitly pinned;
// any other atom is looked up in its hash partition under that partition's
// lock and must be present.
bool AtomsTable::atomIsPinned(JSRuntime* rt, JSAtom* atom) {
  MOZ_ASSERT(atom);

  if (atom->isPermanentAtom()) {
    return true;
  }

  AtomHasher::Lookup lookup(atom);

  AtomsTable::Partition& part = *partitions[getPartitionIndex(lookup)];
  AtomsTable::AutoLock lock(rt, part.lock);
  AtomSet::Ptr p = part.atoms.lookup(lookup);
  if (!p && part.atomsAddedWhileSweeping) {
    // While the table is being swept, newly added atoms live in a side set.
    p = part.atomsAddedWhileSweeping->lookup(lookup);
  }

  MOZ_ASSERT(p);  // Non-permanent atoms must exist in atoms table.
  MOZ_ASSERT(p->asPtrUnbarriered() == atom);

  return p->isPinned();
}
1038
// Mark an existing, non-permanent atom as pinned. Mirrors the lookup
// protocol of atomIsPinned: find the atom's partition entry under the
// partition lock (consulting the sweeping side set if needed) and set its
// pinned bit.
void AtomsTable::maybePinExistingAtom(JSContext* cx, JSAtom* atom) {
  MOZ_ASSERT(atom);
  MOZ_ASSERT(!atom->isPermanentAtom());

  AtomHasher::Lookup lookup(atom);

  AtomsTable::Partition& part = *partitions[getPartitionIndex(lookup)];
  AtomsTable::AutoLock lock(cx->runtime(), part.lock);
  AtomSet::Ptr p = part.atoms.lookup(lookup);
  if (!p && part.atomsAddedWhileSweeping) {
    // While the table is being swept, newly added atoms live in a side set.
    p = part.atomsAddedWhileSweeping->lookup(lookup);
  }

  MOZ_ASSERT(p);  // Non-permanent atoms must exist in atoms table.
  MOZ_ASSERT(p->asPtrUnbarriered() == atom);

  p->setPinned(true);
}
1057
Atomize(JSContext * cx,const char * bytes,size_t length,PinningBehavior pin,const Maybe<uint32_t> & indexValue)1058 JSAtom* js::Atomize(JSContext* cx, const char* bytes, size_t length,
1059 PinningBehavior pin, const Maybe<uint32_t>& indexValue) {
1060 const Latin1Char* chars = reinterpret_cast<const Latin1Char*>(bytes);
1061 return AtomizeAndCopyChars(cx, chars, length, pin, indexValue);
1062 }
1063
Atomize(JSContext * cx,HashNumber hash,const char * bytes,size_t length,PinningBehavior pin)1064 JSAtom* js::Atomize(JSContext* cx, HashNumber hash, const char* bytes,
1065 size_t length, PinningBehavior pin) {
1066 const Latin1Char* chars = reinterpret_cast<const Latin1Char*>(bytes);
1067 if (JSAtom* s = cx->staticStrings().lookup(chars, length)) {
1068 return s;
1069 }
1070
1071 AtomHasher::Lookup lookup(hash, chars, length);
1072 return AtomizeAndCopyCharsFromLookup(cx, chars, length, lookup, pin,
1073 Nothing());
1074 }
1075
// Atomize an array of characters (Latin-1 or char16_t), copying them into
// the atoms table if not already present.
template <typename CharT>
JSAtom* js::AtomizeChars(JSContext* cx, const CharT* chars, size_t length,
                         PinningBehavior pin) {
  return AtomizeAndCopyChars(cx, chars, length, pin, Nothing());
}

// Explicit instantiations for the two supported character types.
template JSAtom* js::AtomizeChars(JSContext* cx, const Latin1Char* chars,
                                  size_t length, PinningBehavior pin);

template JSAtom* js::AtomizeChars(JSContext* cx, const char16_t* chars,
                                  size_t length, PinningBehavior pin);
1087
/* |chars| must not point into an inline or short string. */
// Hash-precomputed variant: the caller supplies |hash| so the characters
// need not be rehashed here. Always atomizes with DoNotPinAtom.
template <typename CharT>
JSAtom* js::AtomizeChars(JSContext* cx, HashNumber hash, const CharT* chars,
                         size_t length) {
  // Static strings are not in the atoms table; map them explicitly.
  if (JSAtom* s = cx->staticStrings().lookup(chars, length)) {
    return s;
  }

  AtomHasher::Lookup lookup(hash, chars, length);
  return AtomizeAndCopyCharsFromLookup(
      cx, chars, length, lookup, PinningBehavior::DoNotPinAtom, Nothing());
}

// Explicit instantiations for the two supported character types.
template JSAtom* js::AtomizeChars(JSContext* cx, HashNumber hash,
                                  const Latin1Char* chars, size_t length);

template JSAtom* js::AtomizeChars(JSContext* cx, HashNumber hash,
                                  const char16_t* chars, size_t length);
1106
// Atomize UTF-8 encoded bytes. The bytes are validated and measured first
// (GetUTF8AtomizationData), and the resulting atom uses the smallest
// encoding that can represent the decoded characters.
JSAtom* js::AtomizeUTF8Chars(JSContext* cx, const char* utf8Chars,
                             size_t utf8ByteLength) {
  {
    // Permanent atoms, |JSRuntime::atoms_|, and static strings are disjoint
    // sets. |AtomizeAndCopyCharsFromLookup| only consults the first two sets,
    // so we must map any static strings ourselves. See bug 1575947.
    StaticStrings& statics = cx->staticStrings();

    // Handle all pure-ASCII UTF-8 static strings.
    if (JSAtom* s = statics.lookup(utf8Chars, utf8ByteLength)) {
      return s;
    }

    // The only non-ASCII static strings are the single-code point strings
    // U+0080 through U+00FF, encoded as
    //
    //   0b1100'00xx 0b10xx'xxxx
    //
    // where the encoded code point is the concatenation of the 'x' bits -- and
    // where the highest 'x' bit is necessarily 1 (because U+0080 through U+00FF
    // all contain an 0x80 bit).
    if (utf8ByteLength == 2) {
      auto first = static_cast<uint8_t>(utf8Chars[0]);
      if ((first & 0b1111'1110) == 0b1100'0010) {
        auto second = static_cast<uint8_t>(utf8Chars[1]);
        if (mozilla::IsTrailingUnit(mozilla::Utf8Unit(second))) {
          // Reassemble the code point from the two units' payload bits.
          uint8_t unit =
              static_cast<uint8_t>(first << 6) | (second & 0b0011'1111);

          MOZ_ASSERT(StaticStrings::hasUnit(unit));
          return statics.getUnit(unit);
        }
      }

      // Fallthrough code handles the cases where the two units aren't a Latin-1
      // code point or are invalid.
    }
  }

  // Validate the UTF-8 and compute the decoded length, smallest encoding,
  // and hash in one pass.
  size_t length;
  HashNumber hash;
  JS::SmallestEncoding forCopy;
  JS::UTF8Chars utf8(utf8Chars, utf8ByteLength);
  if (!GetUTF8AtomizationData(cx, utf8, &length, &forCopy, &hash)) {
    return nullptr;
  }

  AtomizeUTF8CharsWrapper chars(utf8, forCopy);
  AtomHasher::Lookup lookup(utf8Chars, utf8ByteLength, length, hash);
  return AtomizeAndCopyCharsFromLookup(cx, &chars, length, lookup, DoNotPinAtom,
                                       Nothing());
}
1159
IndexToIdSlow(JSContext * cx,uint32_t index,MutableHandleId idp)1160 bool js::IndexToIdSlow(JSContext* cx, uint32_t index, MutableHandleId idp) {
1161 MOZ_ASSERT(index > JSID_INT_MAX);
1162
1163 char16_t buf[UINT32_CHAR_BUFFER_LENGTH];
1164 RangedPtr<char16_t> end(std::end(buf), buf, std::end(buf));
1165 RangedPtr<char16_t> start = BackfillIndexInCharBuffer(index, end);
1166
1167 JSAtom* atom = AtomizeChars(cx, start.get(), end - start);
1168 if (!atom) {
1169 return false;
1170 }
1171
1172 idp.set(JS::PropertyKey::fromNonIntAtom(atom));
1173 return true;
1174 }
1175
1176 template <AllowGC allowGC>
ToAtomSlow(JSContext * cx,typename MaybeRooted<Value,allowGC>::HandleType arg)1177 static JSAtom* ToAtomSlow(
1178 JSContext* cx, typename MaybeRooted<Value, allowGC>::HandleType arg) {
1179 MOZ_ASSERT(!arg.isString());
1180
1181 Value v = arg;
1182 if (!v.isPrimitive()) {
1183 MOZ_ASSERT(!cx->isHelperThreadContext());
1184 if (!allowGC) {
1185 return nullptr;
1186 }
1187 RootedValue v2(cx, v);
1188 if (!ToPrimitive(cx, JSTYPE_STRING, &v2)) {
1189 return nullptr;
1190 }
1191 v = v2;
1192 }
1193
1194 if (v.isString()) {
1195 JSAtom* atom = AtomizeString(cx, v.toString());
1196 if (!allowGC && !atom) {
1197 cx->recoverFromOutOfMemory();
1198 }
1199 return atom;
1200 }
1201 if (v.isInt32()) {
1202 JSAtom* atom = Int32ToAtom(cx, v.toInt32());
1203 if (!allowGC && !atom) {
1204 cx->recoverFromOutOfMemory();
1205 }
1206 return atom;
1207 }
1208 if (v.isDouble()) {
1209 JSAtom* atom = NumberToAtom(cx, v.toDouble());
1210 if (!allowGC && !atom) {
1211 cx->recoverFromOutOfMemory();
1212 }
1213 return atom;
1214 }
1215 if (v.isBoolean()) {
1216 return v.toBoolean() ? cx->names().true_ : cx->names().false_;
1217 }
1218 if (v.isNull()) {
1219 return cx->names().null;
1220 }
1221 if (v.isSymbol()) {
1222 MOZ_ASSERT(!cx->isHelperThreadContext());
1223 if (allowGC) {
1224 JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr,
1225 JSMSG_SYMBOL_TO_STRING);
1226 }
1227 return nullptr;
1228 }
1229 if (v.isBigInt()) {
1230 RootedBigInt i(cx, v.toBigInt());
1231 JSAtom* atom = BigIntToAtom<allowGC>(cx, i);
1232 if (!allowGC && !atom) {
1233 cx->recoverFromOutOfMemory();
1234 }
1235 return atom;
1236 }
1237 MOZ_ASSERT(v.isUndefined());
1238 return cx->names().undefined;
1239 }
1240
// Convert any value to an atom. Strings that are already atoms are handled
// inline; everything else goes through ToAtomSlow.
template <AllowGC allowGC>
JSAtom* js::ToAtom(JSContext* cx,
                   typename MaybeRooted<Value, allowGC>::HandleType v) {
  if (!v.isString()) {
    return ToAtomSlow<allowGC>(cx, v);
  }

  JSString* str = v.toString();
  if (str->isAtom()) {
    return &str->asAtom();
  }

  JSAtom* atom = AtomizeString(cx, str);
  if (!atom && !allowGC) {
    // In NoGC mode, clear the OOM state before returning the failure to
    // the caller.
    MOZ_ASSERT_IF(!cx->isHelperThreadContext(), cx->isThrowingOutOfMemory());
    cx->recoverFromOutOfMemory();
  }
  return atom;
}

// Explicit instantiations for the two GC-permission modes.
template JSAtom* js::ToAtom<CanGC>(JSContext* cx, HandleValue v);

template JSAtom* js::ToAtom<NoGC>(JSContext* cx, const Value& v);
1264
AtomizeLittleEndianTwoByteChars(JSContext * cx,const uint8_t * leTwoByte,size_t length)1265 static JSAtom* AtomizeLittleEndianTwoByteChars(JSContext* cx,
1266 const uint8_t* leTwoByte,
1267 size_t length) {
1268 LittleEndianChars chars(leTwoByte);
1269
1270 if (JSAtom* s = cx->staticStrings().lookup(chars, length)) {
1271 return s;
1272 }
1273
1274 AtomHasher::Lookup lookup(chars, length);
1275 return AtomizeAndCopyCharsFromLookup(cx, chars, length, lookup, DoNotPinAtom,
1276 Nothing());
1277 }
1278
1279 template <XDRMode mode>
XDRAtomOrNull(XDRState<mode> * xdr,MutableHandleAtom atomp)1280 XDRResult js::XDRAtomOrNull(XDRState<mode>* xdr, MutableHandleAtom atomp) {
1281 uint8_t isNull = false;
1282 if (mode == XDR_ENCODE) {
1283 if (!atomp) {
1284 isNull = true;
1285 }
1286 }
1287
1288 MOZ_TRY(xdr->codeUint8(&isNull));
1289
1290 if (!isNull) {
1291 MOZ_TRY(XDRAtom(xdr, atomp));
1292 } else if (mode == XDR_DECODE) {
1293 atomp.set(nullptr);
1294 }
1295
1296 return Ok();
1297 }
1298
1299 template XDRResult js::XDRAtomOrNull(XDRState<XDR_DECODE>* xdr,
1300 MutableHandleAtom atomp);
1301
1302 template XDRResult js::XDRAtomOrNull(XDRState<XDR_ENCODE>* xdr,
1303 MutableHandleAtom atomp);
1304
// (De)serialize an atom. Wire format: a uint32 holding
// (length << 1) | isLatin1, followed by the raw character data -- Latin-1
// bytes, or char16_t units stored little-endian.
template <XDRMode mode>
XDRResult js::XDRAtom(XDRState<mode>* xdr, MutableHandleAtom atomp) {
  bool latin1 = false;
  uint32_t length = 0;
  uint32_t lengthAndEncoding = 0;

  if (mode == XDR_ENCODE) {
    JS::AutoCheckCannotGC nogc;
    // The length must leave the low bit free for the encoding flag.
    static_assert(JSString::MAX_LENGTH <= INT32_MAX,
                  "String length must fit in 31 bits");
    latin1 = atomp->hasLatin1Chars();
    length = atomp->length();
    lengthAndEncoding = (length << 1) | uint32_t(latin1);
    MOZ_TRY(xdr->codeUint32(&lengthAndEncoding));
    if (latin1) {
      return xdr->codeChars(
          const_cast<JS::Latin1Char*>(atomp->latin1Chars(nogc)), length);
    }
    return xdr->codeChars(const_cast<char16_t*>(atomp->twoByteChars(nogc)),
                          length);
  }

  MOZ_ASSERT(mode == XDR_DECODE);
  /* Avoid JSString allocation for already existing atoms. See bug 321985. */
  JSContext* cx = xdr->cx();
  JSAtom* atom = nullptr;
  MOZ_TRY(xdr->codeUint32(&lengthAndEncoding));
  length = lengthAndEncoding >> 1;
  latin1 = lengthAndEncoding & 0x1;

  if (latin1) {
    // Latin-1 data can be atomized directly from the transcode buffer.
    const Latin1Char* chars = nullptr;
    if (length) {
      const uint8_t* ptr;
      size_t nbyte = length * sizeof(Latin1Char);
      MOZ_TRY(xdr->readData(&ptr, nbyte));
      chars = reinterpret_cast<const Latin1Char*>(ptr);
    }
    atom = AtomizeChars(cx, chars, length);
  } else {
    // Two-byte data is stored little-endian in the buffer; atomize it
    // without converting to native-endian storage first.
    const uint8_t* twoByteCharsLE = nullptr;
    if (length) {
      size_t nbyte = length * sizeof(char16_t);
      MOZ_TRY(xdr->readData(&twoByteCharsLE, nbyte));
    }
    atom = AtomizeLittleEndianTwoByteChars(cx, twoByteCharsLE, length);
  }

  if (!atom) {
    return xdr->fail(JS::TranscodeResult::Throw);
  }
  atomp.set(atom);
  return Ok();
}

template XDRResult js::XDRAtom(XDRState<XDR_DECODE>* xdr,
                               MutableHandleAtom atomp);

template XDRResult js::XDRAtom(XDRState<XDR_ENCODE>* xdr,
                               MutableHandleAtom atomp);
1365
ClassName(JSProtoKey key,JSContext * cx)1366 Handle<PropertyName*> js::ClassName(JSProtoKey key, JSContext* cx) {
1367 return ClassName(key, cx->names());
1368 }
1369
AutoLockAllAtoms(JSRuntime * rt)1370 js::AutoLockAllAtoms::AutoLockAllAtoms(JSRuntime* rt) : runtime(rt) {
1371 MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime));
1372 if (runtime->hasHelperThreadZones()) {
1373 runtime->atoms().lockAll();
1374 }
1375 }
1376
~AutoLockAllAtoms()1377 js::AutoLockAllAtoms::~AutoLockAllAtoms() {
1378 MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime));
1379 if (runtime->hasHelperThreadZones()) {
1380 runtime->atoms().unlockAll();
1381 }
1382 }
1383