tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

AtomMarking.cpp (18680B)


      1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
      2 * vim: set ts=8 sts=2 et sw=2 tw=80:
      3 * This Source Code Form is subject to the terms of the Mozilla Public
      4 * License, v. 2.0. If a copy of the MPL was not distributed with this
      5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      6 
      7 #include "gc/AtomMarking-inl.h"
      8 
      9 #include <type_traits>
     10 
     11 #include "gc/GCLock.h"
     12 #include "gc/PublicIterators.h"
     13 
     14 #include "gc/GC-inl.h"
     15 #include "gc/Heap-inl.h"
     16 #include "gc/PrivateIterators-inl.h"
     17 
     18 namespace js {
     19 namespace gc {
     20 
     21 // [SMDOC] GC Atom Marking
     22 //
     23 // Things in the atoms zone (which includes atomized strings, symbols, and other
     24 // things, all of which we will refer to as 'atoms' here) may be pointed to
     25 // freely by things in other zones. To avoid the need to perform garbage
     26 // collections of the entire runtime to collect atoms, we compute a separate
     27 // atom mark bitmap for each zone that is always an overapproximation of the
     28 // atoms that zone is using. When an atom is not in the mark bitmap for any
     29 // zone, it can be destroyed.
     30 //
     31 // (These bitmaps can only be calculated exactly if we collect a single zone
     32 // since they are based on the marking state at the end of a GC which may have
     33 // marked multiple zones.)
     34 //
     35 // To minimize interference with the rest of the GC, atom marking and sweeping
     36 // is done by manipulating the mark bitmaps in the chunks used for the atoms.
     37 // When the atoms zone is being collected, the mark bitmaps for the chunk(s)
     38 // used by the atoms are updated normally during marking.
     39 //
     40 // After marking has finished and before sweeping begins, two things happen:
     41 //
     42 //  1) The atom marking bitmaps for collected zones are updated to remove atoms
     43 //     that GC marking has found are not referenced by any collected zone (see
     44 //     refineZoneBitmapsForCollectedZones). This improves our approximation.
     45 //
     46 //  2) The chunk mark bitmaps are updated with any atoms that might be
     47 //     referenced by zones which weren't collected (see
     48 //     markAtomsUsedByUncollectedZones).
     49 //
     50 // GC sweeping will then release all atoms which are not marked by any zone.
     51 //
     52 // The representation of atom mark bitmaps is as follows:
     53 //
     54 // Each arena in the atoms zone has an atomBitmapStart() value indicating the
     55 // word index into the bitmap of the first thing in the arena. Each arena uses
     56 // ArenaBitmapWords of data to store its bitmap, which uses the same
     57 // representation as chunk mark bitmaps: at least two bits per cell (see
     58 // CellBytesPerMarkBit and MarkBitsPerCell).
     59 
     60 size_t AtomMarkingRuntime::allocateIndex(GCRuntime* gc) {
     61  // We need to find a range of bits from the atoms bitmap for this arena.
     62 
     63  // Try to merge background swept free indexes if necessary.
     64  if (freeArenaIndexes.ref().empty()) {
     65    mergePendingFreeArenaIndexes(gc);
     66  }
     67 
     68  // Look for a free range of bits compatible with this arena.
     69  if (!freeArenaIndexes.ref().empty()) {
     70    return freeArenaIndexes.ref().popCopy();
     71  }
     72 
     73  // Allocate a range of bits from the end for this arena.
     74  size_t index = allocatedWords;
     75  allocatedWords += ArenaBitmapWords;
     76  return index;
     77 }
     78 
     79 void AtomMarkingRuntime::freeIndex(size_t index, const AutoLockGC& lock) {
     80  MOZ_ASSERT((index % ArenaBitmapWords) == 0);
     81  MOZ_ASSERT(index < allocatedWords);
     82 
     83  bool wasEmpty = pendingFreeArenaIndexes.ref().empty();
     84  MOZ_ASSERT_IF(wasEmpty, !hasPendingFreeArenaIndexes);
     85 
     86  if (!pendingFreeArenaIndexes.ref().append(index)) {
     87    // Leak these atom bits if we run out of memory.
     88    return;
     89  }
     90 
     91  if (wasEmpty) {
     92    hasPendingFreeArenaIndexes = true;
     93  }
     94 }
     95 
// Move indexes accumulated in pendingFreeArenaIndexes (under the GC lock, see
// freeIndex) into freeArenaIndexes so allocateIndex can reuse them.
// Main-thread only.
void AtomMarkingRuntime::mergePendingFreeArenaIndexes(GCRuntime* gc) {
  MOZ_ASSERT(CurrentThreadCanAccessRuntime(gc->rt));
  // Cheap unlocked early-out: the flag mirrors whether the pending list is
  // non-empty and is maintained in freeIndex and below.
  if (!hasPendingFreeArenaIndexes) {
    return;
  }

  AutoLockGC lock(gc);
  MOZ_ASSERT(!pendingFreeArenaIndexes.ref().empty());

  hasPendingFreeArenaIndexes = false;

  // If the destination is empty, take the whole pending vector in O(1).
  if (freeArenaIndexes.ref().empty()) {
    std::swap(freeArenaIndexes.ref(), pendingFreeArenaIndexes.ref());
    return;
  }

  // Leak these atom bits if we run out of memory.
  (void)freeArenaIndexes.ref().appendAll(pendingFreeArenaIndexes.ref());
  pendingFreeArenaIndexes.ref().clear();
}
    116 
    117 // Return whether more than one zone is being collected.
    118 static bool MultipleNonAtomZonesAreBeingCollected(GCRuntime* gc) {
    119  size_t count = 0;
    120  for (GCZonesIter zone(gc); !zone.done(); zone.next()) {
    121    if (!zone->isAtomsZone()) {
    122      count++;
    123      if (count == 2) {
    124        return true;
    125      }
    126    }
    127  }
    128 
    129  return false;
    130 }
    131 
// After marking, shrink each collected zone's atom bitmap to the atoms the GC
// actually found referenced (step 1 of the post-marking fixup described in
// the SMDOC comment above).
void AtomMarkingRuntime::refineZoneBitmapsForCollectedZones(GCRuntime* gc) {
  // If there is more than one zone to update, it's more efficient to copy the
  // chunk mark bits from each arena into a single dense bitmap and then use
  // that to refine the atom marking bitmap for each zone.
  DenseBitmap marked;
  if (MultipleNonAtomZonesAreBeingCollected(gc) &&
      computeBitmapFromChunkMarkBits(gc, marked)) {
    for (GCZonesIter zone(gc); !zone.done(); zone.next()) {
      if (!zone->isAtomsZone()) {
        refineZoneBitmapForCollectedZone(zone, marked);
      }
    }
    return;
  }

  // If there's only one zone (or on OOM), refine the mark bits for each arena
  // with the zones' atom marking bitmaps directly.
  for (GCZonesIter zone(gc); !zone.done(); zone.next()) {
    if (!zone->isAtomsZone()) {
      for (auto thingKind : AllAllocKinds()) {
        // Iterate every arena in the atoms zone for this alloc kind.
        for (ArenaIterInGC aiter(gc->atomsZone(), thingKind); !aiter.done();
             aiter.next()) {
          refineZoneBitmapForCollectedZone(zone, aiter);
        }
      }
    }
  }
}
    160 
    161 // Refining atom marking bitmaps:
    162 //
    163 // The atom marking bitmap for a zone records an overapproximation of the mark
    164 // color for each atom referenced by that zone. After collection we refine this
    165 // based on the actual final mark state. The final mark state is the maximum of
    166 // the mark colours of all references for each atom. Therefore we refine the
    167 // bitmap by setting it to the minimum of itself and the actual mark state for
    168 // each atom.
    169 //
    170 // To find the minimum we use bitwise AND. For trace kinds that can only be
    171 // marked black this works on its own. For kinds that can be marked gray we must
    172 // preprocess the mark bitmap so that both mark bits are set for black cells.
    173 
// Masks of bit positions in a mark bitmap word that can be ColorBit::BlackBit
// or GrayOrBlackBit, which alternate throughout the word.
    176 #if JS_BITS_PER_WORD == 32
    177 static constexpr uintptr_t BlackBitMask = 0x55555555;
    178 #else
    179 static constexpr uintptr_t BlackBitMask = 0x5555555555555555;
    180 #endif
    181 static constexpr uintptr_t GrayOrBlackBitMask = ~BlackBitMask;
    182 
// For each word of |arena|'s range in |bitmap|, set the gray-or-black bit of
// every cell whose black bit is set. Afterwards black cells have both bits
// set, so a bitwise AND of two bitmaps yields the minimum mark color.
static void PropagateBlackBitsToGrayOrBlackBits(DenseBitmap& bitmap,
                                                Arena* arena) {
  // This only works if the gray bit and black bits are in the same word,
  // which is true for symbols.
  MOZ_ASSERT(
      TraceKindCanBeMarkedGray(MapAllocToTraceKind(arena->getAllocKind())));
  MOZ_ASSERT((arena->getThingSize() / CellBytesPerMarkBit) % 2 == 0);

  // Shift copies each BlackBit position onto the adjacent GrayOrBlackBit.
  bitmap.forEachWord(
      arena->atomBitmapStart(), ArenaBitmapWords,
      [](uintptr_t& word) { word |= (word & BlackBitMask) << 1; });
}
    195 
    196 static void PropagateBlackBitsToGrayOrBlackBits(
    197    uintptr_t (&words)[ArenaBitmapWords]) {
    198  for (size_t i = 0; i < ArenaBitmapWords; i++) {
    199    words[i] |= (words[i] & BlackBitMask) << 1;
    200  }
    201 }
    202 
// For each word of |arena|'s range in |bitmap|, set the black bit of every
// cell whose gray-or-black bit is set, i.e. promote gray entries to black.
// Used by unmarkAllGrayReferences below.
static void PropagateGrayOrBlackBitsToBlackBits(SparseBitmap& bitmap,
                                                Arena* arena) {
  // This only works if the gray bit and black bits are in the same word,
  // which is true for symbols.
  MOZ_ASSERT(
      TraceKindCanBeMarkedGray(MapAllocToTraceKind(arena->getAllocKind())));
  MOZ_ASSERT((arena->getThingSize() / CellBytesPerMarkBit) % 2 == 0);

  // Shift copies each GrayOrBlackBit position onto the adjacent BlackBit.
  bitmap.forEachWord(
      arena->atomBitmapStart(), ArenaBitmapWords,
      [](uintptr_t& word) { word |= (word & GrayOrBlackBitMask) >> 1; });
}
    215 
    216 #ifdef DEBUG
    217 static bool ArenaContainsGrayCells(Arena* arena) {
    218  for (ArenaCellIter cell(arena); !cell.done(); cell.next()) {
    219    if (cell->isMarkedGray()) {
    220      return true;
    221    }
    222  }
    223  return false;
    224 }
    225 #endif
    226 
// Copy the chunk mark bits of every arena in the atoms zone into |bitmap|,
// a dense bitmap indexed by each arena's atomBitmapStart(). For kinds that
// can be marked gray, black cells are normalized to have both bits set so
// callers can AND bitmaps to take the color minimum. Returns false on OOM.
bool AtomMarkingRuntime::computeBitmapFromChunkMarkBits(GCRuntime* gc,
                                                        DenseBitmap& bitmap) {
  MOZ_ASSERT(CurrentThreadIsPerformingGC());

  if (!bitmap.ensureSpace(allocatedWords)) {
    return false;
  }

  Zone* atomsZone = gc->atomsZone();
  for (auto thingKind : AllAllocKinds()) {
    for (ArenaIterInGC aiter(atomsZone, thingKind); !aiter.done();
         aiter.next()) {
      Arena* arena = aiter.get();
      AtomicBitmapWord* chunkWords = arena->chunk()->markBits.arenaBits(arena);
      bitmap.copyBitsFrom(arena->atomBitmapStart(), ArenaBitmapWords,
                          chunkWords);

      if (thingKind == AllocKind::JITCODE) {
        // The atoms zone can contain JitCode used for compiled self-hosted JS,
        // however these cells are never marked gray so we can skip this step.
        MOZ_ASSERT(!ArenaContainsGrayCells(arena));
      } else if (TraceKindCanBeMarkedGray(MapAllocToTraceKind(thingKind))) {
        // Ensure both mark bits are set for black cells so we can compute the
        // minimum of each mark color by bitwise AND.
        PropagateBlackBitsToGrayOrBlackBits(bitmap, arena);
      }
    }
  }

  return true;
}
    258 
// Refine |zone|'s atom bitmap using |bitmap|, the dense copy of the final
// chunk mark state built by computeBitmapFromChunkMarkBits.
void AtomMarkingRuntime::refineZoneBitmapForCollectedZone(
    Zone* zone, const DenseBitmap& bitmap) {
  MOZ_ASSERT(zone->isCollectingFromAnyThread());
  MOZ_ASSERT(!zone->isAtomsZone());

  // Take the bitwise and between the two mark bitmaps to get the best new
  // overapproximation we can. |bitmap| might include bits that are not in
  // the zone's mark bitmap, if additional zones were collected by the GC.
  zone->markedAtoms().bitwiseAndWith(bitmap);
}
    269 
// Refine |zone|'s atom bitmap directly against one atoms-zone arena's chunk
// mark bits — the single-zone / OOM fallback path of
// refineZoneBitmapsForCollectedZones.
void AtomMarkingRuntime::refineZoneBitmapForCollectedZone(Zone* zone,
                                                          Arena* arena) {
  MOZ_ASSERT(zone->isCollectingFromAnyThread());
  MOZ_ASSERT(!zone->isAtomsZone());

  AtomicBitmapWord* chunkWords = arena->chunk()->markBits.arenaBits(arena);

  AllocKind kind = arena->getAllocKind();
  if (kind == AllocKind::JITCODE) {
    // The atoms zone can contain JitCode used for compiled self-hosted JS,
    // however these cells are never marked gray so we can skip this step.
    MOZ_ASSERT(!ArenaContainsGrayCells(arena));
  } else if (TraceKindCanBeMarkedGray(MapAllocToTraceKind(kind))) {
    // Work on a local copy so the chunk mark bits themselves are unchanged;
    // set both bits for black cells so the AND below takes the color minimum.
    uintptr_t words[ArenaBitmapWords];
    memcpy(words, chunkWords, sizeof(words));
    PropagateBlackBitsToGrayOrBlackBits(words);
    zone->markedAtoms().bitwiseAndRangeWith(arena->atomBitmapStart(),
                                            ArenaBitmapWords, words);
    return;
  }

  // Kinds that are only ever black can be ANDed with the chunk bits directly.
  zone->markedAtoms().bitwiseAndRangeWith(arena->atomBitmapStart(),
                                          ArenaBitmapWords, chunkWords);
}
    294 
// Set any bits in the chunk mark bitmaps for atoms which are marked in bitmap.
// |Bitmap| may be any type providing bitwiseOrRangeInto; callers pass both
// SparseBitmap (a zone's markedAtoms) and DenseBitmap (the merged union).
template <typename Bitmap>
static void BitwiseOrIntoChunkMarkBits(Zone* atomsZone, Bitmap& bitmap) {
  // Make sure that by copying the mark bits for one arena in word sizes we
  // do not affect the mark bits for other arenas.
  static_assert(ArenaBitmapBits == ArenaBitmapWords * JS_BITS_PER_WORD,
                "ArenaBitmapWords must evenly divide ArenaBitmapBits");

  for (auto thingKind : AllAllocKinds()) {
    for (ArenaIterInGC aiter(atomsZone, thingKind); !aiter.done();
         aiter.next()) {
      Arena* arena = aiter.get();
      AtomicBitmapWord* chunkWords = arena->chunk()->markBits.arenaBits(arena);
      bitmap.bitwiseOrRangeInto(arena->atomBitmapStart(), ArenaBitmapWords,
                                chunkWords);
    }
  }
}
    313 
// Compute the union of the atom bitmaps of all uncollected zones, for later
// application to the chunk mark bits by markAtomsUsedByUncollectedZones
// (step 2 of the post-marking fixup). Returns null on OOM, in which case the
// atoms have already been marked in the chunk bitmaps directly and no further
// call is needed.
UniquePtr<DenseBitmap> AtomMarkingRuntime::getOrMarkAtomsUsedByUncollectedZones(
    GCRuntime* gc) {
  MOZ_ASSERT(CurrentThreadIsPerformingGC());

  UniquePtr<DenseBitmap> markedUnion = MakeUnique<DenseBitmap>();
  if (!markedUnion || !markedUnion->ensureSpace(allocatedWords)) {
    // On failure, mark the atoms immediately.
    for (ZonesIter zone(gc, SkipAtoms); !zone.done(); zone.next()) {
      if (!zone->isCollecting()) {
        BitwiseOrIntoChunkMarkBits(gc->atomsZone(), zone->markedAtoms());
      }
    }
    return nullptr;
  }

  // Accumulate every uncollected zone's bitmap into the dense union.
  for (ZonesIter zone(gc, SkipAtoms); !zone.done(); zone.next()) {
    if (!zone->isCollecting()) {
      zone->markedAtoms().bitwiseOrInto(*markedUnion);
    }
  }

  return markedUnion;
}
    337 
// Apply the union bitmap produced by getOrMarkAtomsUsedByUncollectedZones to
// the chunk mark bits, keeping atoms referenced by uncollected zones alive.
// |markedUnion| must be non-null (on OOM that function returns null and has
// already marked the atoms directly).
void AtomMarkingRuntime::markAtomsUsedByUncollectedZones(
    GCRuntime* gc, UniquePtr<DenseBitmap> markedUnion) {
  BitwiseOrIntoChunkMarkBits(gc->atomsZone(), *markedUnion);
}
    342 
// Ensure no zone's atom bitmap records a gray reference by setting the black
// bit wherever the gray-or-black bit is set. Only symbol arenas are
// processed: symbols are the only kind getAtomMarkColor ever reports as gray.
void AtomMarkingRuntime::unmarkAllGrayReferences(GCRuntime* gc) {
  for (ZonesIter sourceZone(gc, SkipAtoms); !sourceZone.done();
       sourceZone.next()) {
    MOZ_ASSERT(!sourceZone->isAtomsZone());
    auto& bitmap = sourceZone->markedAtoms();
    for (ArenaIter arena(gc->atomsZone(), AllocKind::SYMBOL); !arena.done();
         arena.next()) {
      PropagateGrayOrBlackBitsToBlackBits(bitmap, arena);
    }
#ifdef DEBUG
    // Verify that no symbol is still considered gray by this zone.
    for (auto cell = gc->atomsZone()->cellIter<JS::Symbol>(); !cell.done();
         cell.next()) {
      MOZ_ASSERT(getAtomMarkColor(sourceZone, cell.get()) != CellColor::Gray);
    }
#endif
  }
}
    360 
// Record |thing| as used by |cx|'s zone (the bitmap update itself happens in
// inlinedMarkAtom, defined in AtomMarking-inl.h).
template <typename T>
void AtomMarkingRuntime::markAtom(JSContext* cx, T* thing) {
  // Trigger a read barrier on the atom, in case there is an incremental
  // GC in progress. This is necessary if the atom is being marked
  // because a reference to it was obtained from another zone which is
  // not being collected by the incremental GC.
  ReadBarrier(thing);

  return inlinedMarkAtom(cx->zone(), thing);
}

// Explicit instantiations for the two markable atom kinds.
template void AtomMarkingRuntime::markAtom(JSContext* cx, JSAtom* thing);
template void AtomMarkingRuntime::markAtom(JSContext* cx, JS::Symbol* thing);
    374 
    375 void AtomMarkingRuntime::markId(JSContext* cx, jsid id) {
    376  if (id.isAtom()) {
    377    markAtom(cx, id.toAtom());
    378    return;
    379  }
    380  if (id.isSymbol()) {
    381    markAtom(cx, id.toSymbol());
    382    return;
    383  }
    384  MOZ_ASSERT(!id.isGCThing());
    385 }
    386 
    387 void AtomMarkingRuntime::markAtomValue(JSContext* cx, const Value& value) {
    388  if (value.isString()) {
    389    if (value.toString()->isAtom()) {
    390      markAtom(cx, &value.toString()->asAtom());
    391    }
    392    return;
    393  }
    394  if (value.isSymbol()) {
    395    markAtom(cx, value.toSymbol());
    396    return;
    397  }
    398  MOZ_ASSERT_IF(value.isGCThing(), value.isObject() ||
    399                                       value.isPrivateGCThing() ||
    400                                       value.isBigInt());
    401 }
    402 
// Return the color under which |zone| holds |thing| in its atom bitmap.
// Permanent or pinned atoms — and everything before the permanent atoms are
// populated — are reported black. Only symbols can be reported gray.
template <typename T>
CellColor AtomMarkingRuntime::getAtomMarkColor(Zone* zone, T* thing) {
  static_assert(std::is_same_v<T, JSAtom> || std::is_same_v<T, JS::Symbol>,
                "Should only be called with JSAtom* or JS::Symbol* argument");

  MOZ_ASSERT(thing);
  MOZ_ASSERT(!IsInsideNursery(thing));
  MOZ_ASSERT(thing->zoneFromAnyThread()->isAtomsZone());

  if (!zone->runtimeFromAnyThread()->permanentAtomsPopulated()) {
    return CellColor::Black;
  }

  if (thing->isPermanentAndMayBeShared()) {
    return CellColor::Black;
  }

  if constexpr (std::is_same_v<T, JSAtom>) {
    if (thing->isPinned()) {
      return CellColor::Black;
    }
  }

  // Locate this thing's pair of color bits in the zone's bitmap.
  size_t bit = getAtomBit(&thing->asTenured());

  size_t blackBit = bit + size_t(ColorBit::BlackBit);
  size_t grayOrBlackBit = bit + size_t(ColorBit::GrayOrBlackBit);

  SparseBitmap& bitmap = zone->markedAtoms();

  // Atoms never use the gray bit; for symbols, black implies both bits set
  // (see PropagateBlackBitsToGrayOrBlackBits).
  MOZ_ASSERT_IF((std::is_same_v<T, JSAtom>),
                !bitmap.readonlyThreadsafeGetBit(grayOrBlackBit));
  MOZ_ASSERT_IF((std::is_same_v<T, JS::Symbol>) &&
                    bitmap.readonlyThreadsafeGetBit(blackBit),
                bitmap.readonlyThreadsafeGetBit(grayOrBlackBit));

  if (bitmap.readonlyThreadsafeGetBit(blackBit)) {
    return CellColor::Black;
  }

  if constexpr (std::is_same_v<T, JS::Symbol>) {
    if (bitmap.readonlyThreadsafeGetBit(grayOrBlackBit)) {
      return CellColor::Gray;
    }
  }

  return CellColor::White;
}

// Explicit instantiations for the two markable atom kinds.
template CellColor AtomMarkingRuntime::getAtomMarkColor(Zone* zone,
                                                        JSAtom* thing);
template CellColor AtomMarkingRuntime::getAtomMarkColor(Zone* zone,
                                                        JS::Symbol* thing);
    456 
    457 CellColor AtomMarkingRuntime::getAtomMarkColorForIndex(Zone* zone,
    458                                                       size_t bitIndex) {
    459  MOZ_ASSERT(zone->runtimeFromAnyThread()->permanentAtomsPopulated());
    460 
    461  size_t blackBit = bitIndex + size_t(ColorBit::BlackBit);
    462  size_t grayOrBlackBit = bitIndex + size_t(ColorBit::GrayOrBlackBit);
    463 
    464  SparseBitmap& bitmap = zone->markedAtoms();
    465  bool blackBitSet = bitmap.readonlyThreadsafeGetBit(blackBit);
    466  bool grayOrBlackBitSet = bitmap.readonlyThreadsafeGetBit(grayOrBlackBit);
    467 
    468  if (blackBitSet) {
    469    return CellColor::Black;
    470  }
    471 
    472  if (grayOrBlackBitSet) {
    473    return CellColor::Gray;
    474  }
    475 
    476  return CellColor::White;
    477 }
    478 
    479 #ifdef DEBUG
    480 
    481 template <>
    482 CellColor AtomMarkingRuntime::getAtomMarkColor(Zone* zone, TenuredCell* thing) {
    483  MOZ_ASSERT(thing);
    484  MOZ_ASSERT(thing->zoneFromAnyThread()->isAtomsZone());
    485 
    486  if (thing->is<JSString>()) {
    487    JSString* str = thing->as<JSString>();
    488    return getAtomMarkColor(zone, &str->asAtom());
    489  }
    490 
    491  if (thing->is<JS::Symbol>()) {
    492    return getAtomMarkColor(zone, thing->as<JS::Symbol>());
    493  }
    494 
    495  MOZ_CRASH("Unexpected atom kind");
    496 }
    497 
    498 bool AtomMarkingRuntime::idIsMarked(Zone* zone, jsid id) {
    499  if (id.isAtom()) {
    500    return atomIsMarked(zone, id.toAtom());
    501  }
    502 
    503  if (id.isSymbol()) {
    504    return atomIsMarked(zone, id.toSymbol());
    505  }
    506 
    507  MOZ_ASSERT(!id.isGCThing());
    508  return true;
    509 }
    510 
    511 bool AtomMarkingRuntime::valueIsMarked(Zone* zone, const Value& value) {
    512  if (value.isString()) {
    513    if (value.toString()->isAtom()) {
    514      return atomIsMarked(zone, &value.toString()->asAtom());
    515    }
    516    return true;
    517  }
    518 
    519  if (value.isSymbol()) {
    520    return atomIsMarked(zone, value.toSymbol());
    521  }
    522 
    523  MOZ_ASSERT_IF(value.isGCThing(), value.isObject() ||
    524                                       value.isPrivateGCThing() ||
    525                                       value.isBigInt());
    526  return true;
    527 }
    528 
    529 #endif  // DEBUG
    530 
    531 }  // namespace gc
    532 
    533 #ifdef DEBUG
    534 
// Debug forwarder: whether |zone|'s atom bitmap marks |atom| as in use.
bool AtomIsMarked(Zone* zone, JSAtom* atom) {
  return zone->runtimeFromAnyThread()->gc.atomMarking.atomIsMarked(zone, atom);
}
    538 
// Debug forwarder: whether the atom/symbol held by |id| is marked for |zone|.
bool AtomIsMarked(Zone* zone, jsid id) {
  return zone->runtimeFromAnyThread()->gc.atomMarking.idIsMarked(zone, id);
}
    542 
// Debug forwarder: whether the atom/symbol held by |value| is marked for
// |zone|.
bool AtomIsMarked(Zone* zone, const Value& value) {
  return zone->runtimeFromAnyThread()->gc.atomMarking.valueIsMarked(zone,
                                                                    value);
}
    547 
    548 #endif  // DEBUG
    549 
    550 }  // namespace js