StoreBuffer.cpp (12521B)
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "gc/StoreBuffer-inl.h"

#include "mozilla/Assertions.h"

#include "gc/Statistics.h"
#include "vm/MutexIDs.h"
#include "vm/Runtime.h"

using namespace js;
using namespace js::gc;

// Shared sentinel cell set. Arenas with no buffered nursery-pointing cells
// have their bufferedCells() pointer aimed here instead of at a real,
// per-arena ArenaCellSet allocation.
ArenaCellSet ArenaCellSet::Empty;

// Construct a cell set for |arena|. In DEBUG builds the current minor GC
// count is recorded so stale sets surviving a nursery collection can be
// detected later.
ArenaCellSet::ArenaCellSet(Arena* arena)
    : arena(arena)
#ifdef DEBUG
      ,
      minorGCNumberAtCreation(
          arena->zone()->runtimeFromMainThread()->gc.minorGCCount())
#endif
{
  MOZ_ASSERT(arena);
  // A freshly constructed set must not claim any cells are buffered.
  MOZ_ASSERT(bits.isAllClear());
}

// Move construction: steal the hash set and cached last entry, then reset
// the source to its empty state so it can be safely destroyed or reused.
template <typename T>
StoreBuffer::MonoTypeBuffer<T>::MonoTypeBuffer(MonoTypeBuffer&& other)
    : stores_(std::move(other.stores_)),
      maxEntries_(other.maxEntries_),
      last_(std::move(other.last_)) {
  other.clear();
}

// Move assignment via destroy-and-placement-new, reusing the move
// constructor above; self-assignment is a no-op.
template <typename T>
StoreBuffer::MonoTypeBuffer<T>& StoreBuffer::MonoTypeBuffer<T>::operator=(
    MonoTypeBuffer&& other) {
  if (&other != this) {
    this->~MonoTypeBuffer();
    new (this) MonoTypeBuffer(std::move(other));
  }
  return *this;
}

// Set the soft capacity (in entries) used by the overflow heuristic.
template <typename T>
void StoreBuffer::MonoTypeBuffer<T>::setSize(size_t entryCount) {
  MOZ_ASSERT(entryCount != 0);
  maxEntries_ = entryCount;
}

// Empty means both the single-entry cache and the backing set hold nothing.
template <typename T>
bool StoreBuffer::MonoTypeBuffer<T>::isEmpty() const {
  return last_ == T() && stores_.empty();
}

// Drop all buffered edges, including the cached last entry.
template <typename T>
void StoreBuffer::MonoTypeBuffer<T>::clear() {
  last_ = T();
  stores_.clear();
}

// Malloc-reported size of the backing hash set (entries only, not |this|).
template <typename T>
size_t StoreBuffer::MonoTypeBuffer<T>::sizeOfExcludingThis(
    mozilla::MallocSizeOf mallocSizeOf) {
  return stores_.shallowSizeOfExcludingThis(mallocSizeOf);
}
StoreBuffer::WholeCellBuffer::WholeCellBuffer(WholeCellBuffer&& other) 73 : storage_(std::move(other.storage_)), 74 maxSize_(other.maxSize_), 75 sweepHead_(other.sweepHead_), 76 last_(other.last_) { 77 other.sweepHead_ = nullptr; 78 other.last_ = nullptr; 79 } 80 StoreBuffer::WholeCellBuffer& StoreBuffer::WholeCellBuffer::operator=( 81 WholeCellBuffer&& other) { 82 if (&other != this) { 83 this->~WholeCellBuffer(); 84 new (this) WholeCellBuffer(std::move(other)); 85 } 86 return *this; 87 } 88 89 bool StoreBuffer::WholeCellBuffer::init() { 90 MOZ_ASSERT(!sweepHead_); 91 92 if (!storage_) { 93 storage_ = MakeUnique<LifoAlloc>(LifoAllocBlockSize, js::MallocArena); 94 if (!storage_) { 95 return false; 96 } 97 } 98 99 // This prevents LifoAlloc::Enum from crashing with a release 100 // assertion if we ever allocate one entry larger than 101 // LifoAllocBlockSize. 102 storage_->disableOversize(); 103 104 clear(); 105 return true; 106 } 107 108 void StoreBuffer::WholeCellBuffer::setSize(size_t entryCount) { 109 MOZ_ASSERT(entryCount); 110 maxSize_ = entryCount * sizeof(ArenaCellSet); 111 } 112 113 bool StoreBuffer::WholeCellBuffer::isEmpty() const { 114 return !storage_ || storage_->isEmpty(); 115 } 116 117 void StoreBuffer::WholeCellBuffer::clear() { 118 for (LifoAlloc::Enum e(*storage_); !e.empty();) { 119 ArenaCellSet* cellSet = e.read<ArenaCellSet>(); 120 cellSet->arena->bufferedCells() = &ArenaCellSet::Empty; 121 } 122 sweepHead_ = nullptr; 123 124 if (storage_) { 125 storage_->used() ? 
storage_->releaseAll() : storage_->freeAll(); 126 } 127 128 last_ = nullptr; 129 } 130 131 ArenaCellSet* StoreBuffer::WholeCellBuffer::allocateCellSet(Arena* arena) { 132 MOZ_ASSERT(arena->bufferedCells() == &ArenaCellSet::Empty); 133 134 Zone* zone = arena->zone(); 135 JSRuntime* rt = zone->runtimeFromMainThread(); 136 if (!rt->gc.nursery().isEnabled()) { 137 return nullptr; 138 } 139 140 AutoEnterOOMUnsafeRegion oomUnsafe; 141 auto* cells = storage_->new_<ArenaCellSet>(arena); 142 if (!cells) { 143 oomUnsafe.crash("Failed to allocate ArenaCellSet"); 144 } 145 146 arena->bufferedCells() = cells; 147 148 if (isAboutToOverflow()) { 149 rt->gc.storeBuffer().setAboutToOverflow( 150 JS::GCReason::FULL_WHOLE_CELL_BUFFER); 151 } 152 153 return cells; 154 } 155 156 size_t StoreBuffer::WholeCellBuffer::sizeOfExcludingThis( 157 mozilla::MallocSizeOf mallocSizeOf) { 158 return storage_ ? storage_->sizeOfIncludingThis(mallocSizeOf) : 0; 159 } 160 161 StoreBuffer::GenericBuffer::GenericBuffer(GenericBuffer&& other) 162 : storage_(std::move(other.storage_)), maxSize_(other.maxSize_) {} 163 StoreBuffer::GenericBuffer& StoreBuffer::GenericBuffer::operator=( 164 GenericBuffer&& other) { 165 if (&other != this) { 166 this->~GenericBuffer(); 167 new (this) GenericBuffer(std::move(other)); 168 } 169 return *this; 170 } 171 172 bool StoreBuffer::GenericBuffer::isEmpty() const { 173 return !storage_ || storage_->isEmpty(); 174 } 175 176 void StoreBuffer::GenericBuffer::clear() { 177 if (storage_) { 178 storage_->used() ? storage_->releaseAll() : storage_->freeAll(); 179 } 180 } 181 182 size_t StoreBuffer::GenericBuffer::sizeOfExcludingThis( 183 mozilla::MallocSizeOf mallocSizeOf) { 184 return storage_ ? 
storage_->sizeOfIncludingThis(mallocSizeOf) : 0; 185 } 186 187 bool StoreBuffer::GenericBuffer::init() { 188 if (!storage_) { 189 storage_ = MakeUnique<LifoAlloc>(LifoAllocBlockSize, js::MallocArena); 190 if (!storage_) { 191 return false; 192 } 193 } 194 195 clear(); 196 return true; 197 } 198 199 void StoreBuffer::GenericBuffer::setSize(size_t entryCount) { 200 MOZ_ASSERT(entryCount != 0); 201 maxSize_ = entryCount * (sizeof(BufferableRef) + sizeof(void*)); 202 } 203 204 void StoreBuffer::GenericBuffer::trace(JSTracer* trc, StoreBuffer* owner) { 205 mozilla::ReentrancyGuard g(*owner); 206 MOZ_ASSERT(owner->isEnabled()); 207 if (!storage_) { 208 return; 209 } 210 211 for (LifoAlloc::Enum e(*storage_); !e.empty();) { 212 unsigned size = *e.read<unsigned>(); 213 BufferableRef* edge = e.read<BufferableRef>(size); 214 edge->trace(trc); 215 } 216 } 217 218 StoreBuffer::StoreBuffer(JSRuntime* rt) 219 : runtime_(rt), 220 nursery_(rt->gc.nursery()), 221 entryCount_(rt->gc.tunables.storeBufferEntries()), 222 entryScaling_(rt->gc.tunables.storeBufferScaling()), 223 aboutToOverflow_(false), 224 enabled_(false), 225 mayHavePointersToDeadCells_(false) 226 #ifdef DEBUG 227 , 228 mEntered(false) 229 #endif 230 { 231 MOZ_ASSERT(entryCount_ != 0); 232 } 233 234 StoreBuffer::StoreBuffer(StoreBuffer&& other) 235 : bufferVal(std::move(other.bufferVal)), 236 bufStrCell(std::move(other.bufStrCell)), 237 bufBigIntCell(std::move(other.bufBigIntCell)), 238 bufGetterSetterCell(std::move(other.bufGetterSetterCell)), 239 bufObjCell(std::move(other.bufObjCell)), 240 bufferSlot(std::move(other.bufferSlot)), 241 bufferWasmAnyRef(std::move(other.bufferWasmAnyRef)), 242 bufferWholeCell(std::move(other.bufferWholeCell)), 243 bufferGeneric(std::move(other.bufferGeneric)), 244 runtime_(other.runtime_), 245 nursery_(other.nursery_), 246 entryCount_(other.entryCount_), 247 entryScaling_(other.entryScaling_), 248 aboutToOverflow_(other.aboutToOverflow_), 249 enabled_(other.enabled_), 250 
mayHavePointersToDeadCells_(other.mayHavePointersToDeadCells_) 251 #ifdef DEBUG 252 , 253 mEntered(other.mEntered) 254 #endif 255 { 256 MOZ_ASSERT(entryCount_ != 0); 257 MOZ_ASSERT(enabled_); 258 MOZ_ASSERT(!mEntered); 259 other.disable(); 260 } 261 262 StoreBuffer& StoreBuffer::operator=(StoreBuffer&& other) { 263 if (&other != this) { 264 this->~StoreBuffer(); 265 new (this) StoreBuffer(std::move(other)); 266 } 267 return *this; 268 } 269 270 #ifdef DEBUG 271 void StoreBuffer::checkAccess() const { 272 // The GC runs tasks that may access the storebuffer in parallel and so must 273 // take a lock. The mutator may only access the storebuffer from the main 274 // thread. 275 if (runtime_->heapState() != JS::HeapState::Idle && 276 runtime_->heapState() != JS::HeapState::MinorCollecting) { 277 MOZ_ASSERT(!CurrentThreadIsGCMarking()); 278 runtime_->gc.assertCurrentThreadHasLockedStoreBuffer(); 279 } else { 280 MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime_)); 281 } 282 } 283 #endif 284 285 void StoreBuffer::checkEmpty() const { MOZ_ASSERT(isEmpty()); } 286 287 bool StoreBuffer::isEmpty() const { 288 return bufferVal.isEmpty() && bufStrCell.isEmpty() && 289 bufBigIntCell.isEmpty() && bufGetterSetterCell.isEmpty() && 290 bufObjCell.isEmpty() && bufferSlot.isEmpty() && 291 bufferWasmAnyRef.isEmpty() && bufferWholeCell.isEmpty() && 292 bufferGeneric.isEmpty(); 293 } 294 295 bool StoreBuffer::enable() { 296 if (enabled_) { 297 return true; 298 } 299 300 checkEmpty(); 301 if (!bufferWholeCell.init() || !bufferGeneric.init()) { 302 return false; 303 } 304 305 updateSize(); 306 307 enabled_ = true; 308 return true; 309 } 310 311 void StoreBuffer::updateSize() { 312 // The entry counts for the individual buffers are scaled based on the initial 313 // entryCount parameter passed to the constructor. 314 MOZ_ASSERT(entryCount_ != 0); 315 MOZ_ASSERT(entryScaling_ >= 0.0); 316 317 // Scale the entry count linearly based on the size of the nursery. 
The entry 318 // count parameter specifies the result at a nursery size of 16MB. 319 const double nurseryBaseSize = 16 * 1024 * 1024; 320 double nurserySizeRatio = double(nursery_.capacity()) / nurseryBaseSize; 321 double count = 322 ((nurserySizeRatio - 1.0) * entryScaling_ + 1.0) * double(entryCount_); 323 MOZ_ASSERT(count > 0.0); 324 size_t defaultEntryCount = std::max(size_t(count), size_t(1)); 325 326 size_t slotsEntryCount = std::max(defaultEntryCount / 2, size_t(1)); 327 size_t wholeCellEntryCount = std::max(defaultEntryCount / 10, size_t(1)); 328 size_t genericEntryCount = std::max(defaultEntryCount / 4, size_t(1)); 329 330 bufferVal.setSize(defaultEntryCount); 331 bufStrCell.setSize(defaultEntryCount); 332 bufBigIntCell.setSize(defaultEntryCount); 333 bufGetterSetterCell.setSize(defaultEntryCount); 334 bufObjCell.setSize(defaultEntryCount); 335 bufferSlot.setSize(slotsEntryCount); 336 bufferWasmAnyRef.setSize(defaultEntryCount); 337 bufferWholeCell.setSize(wholeCellEntryCount); 338 bufferGeneric.setSize(genericEntryCount); 339 } 340 341 void StoreBuffer::disable() { 342 checkEmpty(); 343 344 if (!enabled_) { 345 return; 346 } 347 348 aboutToOverflow_ = false; 349 350 enabled_ = false; 351 } 352 353 void StoreBuffer::clear() { 354 if (!enabled_) { 355 return; 356 } 357 358 aboutToOverflow_ = false; 359 mayHavePointersToDeadCells_ = false; 360 361 bufferVal.clear(); 362 bufStrCell.clear(); 363 bufBigIntCell.clear(); 364 bufGetterSetterCell.clear(); 365 bufObjCell.clear(); 366 bufferSlot.clear(); 367 bufferWasmAnyRef.clear(); 368 bufferWholeCell.clear(); 369 bufferGeneric.clear(); 370 } 371 372 void StoreBuffer::setAboutToOverflow(JS::GCReason reason) { 373 if (!aboutToOverflow_) { 374 aboutToOverflow_ = true; 375 runtime_->gc.stats().count(gcstats::COUNT_STOREBUFFER_OVERFLOW); 376 } 377 nursery_.requestMinorGC(reason); 378 } 379 380 void StoreBuffer::traceValues(TenuringTracer& mover) { 381 bufferVal.trace(mover, this); 382 } 383 void 
StoreBuffer::traceCells(TenuringTracer& mover) { 384 bufStrCell.trace(mover, this); 385 bufBigIntCell.trace(mover, this); 386 bufGetterSetterCell.trace(mover, this); 387 bufObjCell.trace(mover, this); 388 } 389 void StoreBuffer::traceSlots(TenuringTracer& mover) { 390 bufferSlot.trace(mover, this); 391 } 392 void StoreBuffer::traceWasmAnyRefs(TenuringTracer& mover) { 393 bufferWasmAnyRef.trace(mover, this); 394 } 395 void StoreBuffer::traceWholeCells(TenuringTracer& mover) { 396 bufferWholeCell.trace(mover, this); 397 } 398 void StoreBuffer::traceGenericEntries(JSTracer* trc) { 399 bufferGeneric.trace(trc, this); 400 } 401 402 void StoreBuffer::addSizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf, 403 JS::GCSizes* sizes) { 404 sizes->storeBufferVals += bufferVal.sizeOfExcludingThis(mallocSizeOf); 405 sizes->storeBufferCells += 406 bufStrCell.sizeOfExcludingThis(mallocSizeOf) + 407 bufBigIntCell.sizeOfExcludingThis(mallocSizeOf) + 408 bufGetterSetterCell.sizeOfExcludingThis(mallocSizeOf) + 409 bufObjCell.sizeOfExcludingThis(mallocSizeOf); 410 sizes->storeBufferSlots += bufferSlot.sizeOfExcludingThis(mallocSizeOf); 411 sizes->storeBufferWasmAnyRefs += 412 bufferWasmAnyRef.sizeOfExcludingThis(mallocSizeOf); 413 sizes->storeBufferWholeCells += 414 bufferWholeCell.sizeOfExcludingThis(mallocSizeOf); 415 sizes->storeBufferGenerics += bufferGeneric.sizeOfExcludingThis(mallocSizeOf); 416 } 417 418 void gc::CellHeaderPostWriteBarrier(JSObject** ptr, JSObject* prev, 419 JSObject* next) { 420 InternalBarrierMethods<JSObject*>::postBarrier(ptr, prev, next); 421 } 422 423 template struct StoreBuffer::MonoTypeBuffer<StoreBuffer::ValueEdge>; 424 template struct StoreBuffer::MonoTypeBuffer<StoreBuffer::SlotsEdge>; 425 template struct StoreBuffer::MonoTypeBuffer<StoreBuffer::WasmAnyRefEdge>; 426 427 void js::gc::PostWriteBarrierCell(Cell* cell, Cell* prev, Cell* next) { 428 if (!next || !cell->isTenured()) { 429 return; 430 } 431 432 StoreBuffer* buffer = next->storeBuffer(); 
433 if (!buffer || (prev && prev->storeBuffer())) { 434 return; 435 } 436 437 buffer->putWholeCell(cell); 438 }