NameCollections.h (13431B)
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef frontend_NameCollections_h
#define frontend_NameCollections_h

#include "mozilla/Assertions.h"  // MOZ_ASSERT
#include "mozilla/Attributes.h"  // MOZ_IMPLICIT

#include <stddef.h>     // size_t
#include <stdint.h>     // uint32_t, uint64_t
#include <type_traits>  // std::{true_type, false_type, is_trivial_v, is_trivially_copyable_v, is_trivially_destructible_v}
#include <utility>      // std::forward

#include "ds/InlineTable.h"  // InlineMap, DefaultKeyPolicy
#include "frontend/NameAnalysisTypes.h"  // DeclaredNameInfo, NameLocation
#include "frontend/ParserAtom.h"  // TaggedParserAtomIndex, TrivialTaggedParserAtomIndex
#include "frontend/TaggedParserAtomIndexHasher.h"  // TrivialTaggedParserAtomIndexHasher
#include "js/AllocPolicy.h"  // SystemAllocPolicy, ReportOutOfMemory
#include "js/Utility.h"      // js_new, js_delete
#include "js/Vector.h"       // Vector

namespace js::frontend {

class FunctionBox;

// A pool of recyclable containers for use in the frontend. The Parser and
// BytecodeEmitter create many maps for name analysis that are short-lived
// (i.e., for the duration of parsing or emitting a lexical scope). Making
// them recyclable cuts down significantly on allocator churn.
34 template <typename RepresentativeCollection, typename ConcreteCollectionPool> 35 class CollectionPool { 36 using RecyclableCollections = Vector<void*, 32, SystemAllocPolicy>; 37 38 RecyclableCollections all_; 39 RecyclableCollections recyclable_; 40 41 static RepresentativeCollection* asRepresentative(void* p) { 42 return reinterpret_cast<RepresentativeCollection*>(p); 43 } 44 45 RepresentativeCollection* allocate() { 46 size_t newAllLength = all_.length() + 1; 47 if (!all_.reserve(newAllLength) || !recyclable_.reserve(newAllLength)) { 48 return nullptr; 49 } 50 51 RepresentativeCollection* collection = js_new<RepresentativeCollection>(); 52 if (collection) { 53 all_.infallibleAppend(collection); 54 } 55 return collection; 56 } 57 58 public: 59 ~CollectionPool() { purgeAll(); } 60 61 void purgeAll() { 62 void** end = all_.end(); 63 for (void** it = all_.begin(); it != end; ++it) { 64 js_delete(asRepresentative(*it)); 65 } 66 67 all_.clearAndFree(); 68 recyclable_.clearAndFree(); 69 } 70 71 // Fallibly aquire one of the supported collection types from the pool. 72 template <typename Collection> 73 Collection* acquire(FrontendContext* fc) { 74 ConcreteCollectionPool::template assertInvariants<Collection>(); 75 76 RepresentativeCollection* collection; 77 if (recyclable_.empty()) { 78 collection = allocate(); 79 if (!collection) { 80 ReportOutOfMemory(fc); 81 } 82 } else { 83 collection = asRepresentative(recyclable_.popCopy()); 84 collection->clear(); 85 } 86 return reinterpret_cast<Collection*>(collection); 87 } 88 89 // Release a collection back to the pool. 90 template <typename Collection> 91 void release(Collection** collection) { 92 ConcreteCollectionPool::template assertInvariants<Collection>(); 93 MOZ_ASSERT(*collection); 94 95 #ifdef DEBUG 96 bool ok = false; 97 // Make sure the collection is in |all_| but not already in |recyclable_|. 
98 for (void** it = all_.begin(); it != all_.end(); ++it) { 99 if (*it == *collection) { 100 ok = true; 101 break; 102 } 103 } 104 MOZ_ASSERT(ok); 105 for (void** it = recyclable_.begin(); it != recyclable_.end(); ++it) { 106 MOZ_ASSERT(*it != *collection); 107 } 108 #endif 109 110 MOZ_ASSERT(recyclable_.length() < all_.length()); 111 // Reserved in allocateFresh. 112 recyclable_.infallibleAppend(*collection); 113 *collection = nullptr; 114 } 115 }; 116 117 template <typename Wrapped> 118 struct RecyclableAtomMapValueWrapper { 119 using WrappedType = Wrapped; 120 121 union { 122 Wrapped wrapped; 123 uint64_t dummy; 124 }; 125 126 static void assertInvariant() { 127 static_assert(sizeof(Wrapped) <= sizeof(uint64_t), 128 "Can only recycle atom maps with values smaller than uint64"); 129 } 130 131 RecyclableAtomMapValueWrapper() : dummy(0) { assertInvariant(); } 132 133 MOZ_IMPLICIT RecyclableAtomMapValueWrapper(Wrapped w) : wrapped(w) { 134 assertInvariant(); 135 } 136 137 MOZ_IMPLICIT operator Wrapped&() { return wrapped; } 138 139 MOZ_IMPLICIT operator Wrapped&() const { return wrapped; } 140 141 Wrapped* operator->() { return &wrapped; } 142 143 const Wrapped* operator->() const { return &wrapped; } 144 }; 145 146 template <typename MapValue> 147 using RecyclableNameMapBase = 148 InlineMap<TrivialTaggedParserAtomIndex, 149 RecyclableAtomMapValueWrapper<MapValue>, 24, 150 TrivialTaggedParserAtomIndexHasher, SystemAllocPolicy>; 151 152 // Define wrapper methods to accept TaggedParserAtomIndex. 153 template <typename MapValue> 154 class RecyclableNameMap : public RecyclableNameMapBase<MapValue> { 155 using Base = RecyclableNameMapBase<MapValue>; 156 157 public: 158 template <typename... Args> 159 [[nodiscard]] MOZ_ALWAYS_INLINE bool add(typename Base::AddPtr& p, 160 const TaggedParserAtomIndex& key, 161 Args&&... 
args) { 162 return Base::add(p, TrivialTaggedParserAtomIndex::from(key), 163 std::forward<Args>(args)...); 164 } 165 166 MOZ_ALWAYS_INLINE 167 typename Base::Ptr lookup(const TaggedParserAtomIndex& l) { 168 return Base::lookup(TrivialTaggedParserAtomIndex::from(l)); 169 } 170 171 MOZ_ALWAYS_INLINE 172 typename Base::AddPtr lookupForAdd(const TaggedParserAtomIndex& l) { 173 return Base::lookupForAdd(TrivialTaggedParserAtomIndex::from(l)); 174 } 175 }; 176 177 using DeclaredNameMap = RecyclableNameMap<DeclaredNameInfo>; 178 using NameLocationMap = RecyclableNameMap<NameLocation>; 179 // Cannot use GCThingIndex here because it's not trivial type. 180 using AtomIndexMap = RecyclableNameMap<uint32_t>; 181 182 template <typename RepresentativeTable> 183 class InlineTablePool 184 : public CollectionPool<RepresentativeTable, 185 InlineTablePool<RepresentativeTable>> { 186 template <typename> 187 struct IsRecyclableAtomMapValueWrapper : std::false_type {}; 188 189 template <typename T> 190 struct IsRecyclableAtomMapValueWrapper<RecyclableAtomMapValueWrapper<T>> 191 : std::true_type {}; 192 193 public: 194 template <typename Table> 195 static void assertInvariants() { 196 static_assert( 197 Table::SizeOfInlineEntries == RepresentativeTable::SizeOfInlineEntries, 198 "Only tables with the same size for inline entries are usable in the " 199 "pool."); 200 201 using EntryType = typename Table::Table::Entry; 202 using KeyType = typename EntryType::KeyType; 203 using ValueType = typename EntryType::ValueType; 204 205 static_assert(IsRecyclableAtomMapValueWrapper<ValueType>::value, 206 "Please adjust the static assertions below if you need to " 207 "support other types than RecyclableAtomMapValueWrapper"); 208 209 using WrappedType = typename ValueType::WrappedType; 210 211 // We can't directly check |std::is_trivial<EntryType>|, because neither 212 // mozilla::HashMapEntry nor IsRecyclableAtomMapValueWrapper are trivially 213 // default constructible. 
Instead we check that the key and the unwrapped 214 // value are trivial and additionally ensure that the entry itself is 215 // trivially copyable and destructible. 216 217 static_assert(std::is_trivial_v<KeyType>, 218 "Only tables with trivial keys are usable in the pool."); 219 static_assert(std::is_trivial_v<WrappedType>, 220 "Only tables with trivial values are usable in the pool."); 221 222 static_assert( 223 std::is_trivially_copyable_v<EntryType>, 224 "Only tables with trivially copyable entries are usable in the pool."); 225 static_assert(std::is_trivially_destructible_v<EntryType>, 226 "Only tables with trivially destructible entries are usable " 227 "in the pool."); 228 } 229 }; 230 231 template <typename RepresentativeVector> 232 class VectorPool : public CollectionPool<RepresentativeVector, 233 VectorPool<RepresentativeVector>> { 234 public: 235 template <typename Vector> 236 static void assertInvariants() { 237 static_assert( 238 Vector::sMaxInlineStorage == RepresentativeVector::sMaxInlineStorage, 239 "Only vectors with the same size for inline entries are usable in the " 240 "pool."); 241 242 using ElementType = typename Vector::ElementType; 243 244 static_assert(std::is_trivial_v<ElementType>, 245 "Only vectors of trivial values are usable in the pool."); 246 static_assert(std::is_trivially_destructible_v<ElementType>, 247 "Only vectors of trivially destructible values are usable in " 248 "the pool."); 249 250 static_assert( 251 sizeof(ElementType) == 252 sizeof(typename RepresentativeVector::ElementType), 253 "Only vectors with same-sized elements are usable in the pool."); 254 } 255 }; 256 257 using AtomVector = Vector<TrivialTaggedParserAtomIndex, 24, SystemAllocPolicy>; 258 259 using FunctionBoxVector = Vector<FunctionBox*, 24, SystemAllocPolicy>; 260 261 class NameCollectionPool { 262 InlineTablePool<AtomIndexMap> mapPool_; 263 VectorPool<AtomVector> atomVectorPool_; 264 VectorPool<FunctionBoxVector> functionBoxVectorPool_; 265 uint32_t 
activeCompilations_; 266 267 public: 268 NameCollectionPool() : activeCompilations_(0) {} 269 270 bool hasActiveCompilation() const { return activeCompilations_ != 0; } 271 272 void addActiveCompilation() { activeCompilations_++; } 273 274 void removeActiveCompilation() { 275 MOZ_ASSERT(hasActiveCompilation()); 276 activeCompilations_--; 277 } 278 279 template <typename Map> 280 Map* acquireMap(FrontendContext* fc) { 281 MOZ_ASSERT(hasActiveCompilation()); 282 return mapPool_.acquire<Map>(fc); 283 } 284 285 template <typename Map> 286 void releaseMap(Map** map) { 287 MOZ_ASSERT(hasActiveCompilation()); 288 MOZ_ASSERT(map); 289 if (*map) { 290 mapPool_.release(map); 291 } 292 } 293 294 template <typename Vector> 295 inline Vector* acquireVector(FrontendContext* fc); 296 297 template <typename Vector> 298 inline void releaseVector(Vector** vec); 299 300 void purge() { 301 if (!hasActiveCompilation()) { 302 mapPool_.purgeAll(); 303 atomVectorPool_.purgeAll(); 304 functionBoxVectorPool_.purgeAll(); 305 } 306 } 307 }; 308 309 template <> 310 inline AtomVector* NameCollectionPool::acquireVector<AtomVector>( 311 FrontendContext* fc) { 312 MOZ_ASSERT(hasActiveCompilation()); 313 return atomVectorPool_.acquire<AtomVector>(fc); 314 } 315 316 template <> 317 inline void NameCollectionPool::releaseVector<AtomVector>(AtomVector** vec) { 318 MOZ_ASSERT(hasActiveCompilation()); 319 MOZ_ASSERT(vec); 320 if (*vec) { 321 atomVectorPool_.release(vec); 322 } 323 } 324 325 template <> 326 inline FunctionBoxVector* NameCollectionPool::acquireVector<FunctionBoxVector>( 327 FrontendContext* fc) { 328 MOZ_ASSERT(hasActiveCompilation()); 329 return functionBoxVectorPool_.acquire<FunctionBoxVector>(fc); 330 } 331 332 template <> 333 inline void NameCollectionPool::releaseVector<FunctionBoxVector>( 334 FunctionBoxVector** vec) { 335 MOZ_ASSERT(hasActiveCompilation()); 336 MOZ_ASSERT(vec); 337 if (*vec) { 338 functionBoxVectorPool_.release(vec); 339 } 340 } 341 342 template <typename T, 
template <typename> typename Impl> 343 class PooledCollectionPtr { 344 NameCollectionPool& pool_; 345 T* collection_ = nullptr; 346 347 protected: 348 ~PooledCollectionPtr() { Impl<T>::releaseCollection(pool_, &collection_); } 349 350 T& collection() { 351 MOZ_ASSERT(collection_); 352 return *collection_; 353 } 354 355 const T& collection() const { 356 MOZ_ASSERT(collection_); 357 return *collection_; 358 } 359 360 public: 361 explicit PooledCollectionPtr(NameCollectionPool& pool) : pool_(pool) {} 362 363 bool acquire(FrontendContext* fc) { 364 MOZ_ASSERT(!collection_); 365 collection_ = Impl<T>::acquireCollection(fc, pool_); 366 return !!collection_; 367 } 368 369 explicit operator bool() const { return !!collection_; } 370 371 T* operator->() { return &collection(); } 372 373 const T* operator->() const { return &collection(); } 374 375 T& operator*() { return collection(); } 376 377 const T& operator*() const { return collection(); } 378 }; 379 380 template <typename Map> 381 class PooledMapPtr : public PooledCollectionPtr<Map, PooledMapPtr> { 382 friend class PooledCollectionPtr<Map, PooledMapPtr>; 383 384 static Map* acquireCollection(FrontendContext* fc, NameCollectionPool& pool) { 385 return pool.acquireMap<Map>(fc); 386 } 387 388 static void releaseCollection(NameCollectionPool& pool, Map** ptr) { 389 pool.releaseMap(ptr); 390 } 391 392 using Base = PooledCollectionPtr<Map, PooledMapPtr>; 393 394 public: 395 using Base::Base; 396 397 ~PooledMapPtr() = default; 398 }; 399 400 template <typename Vector> 401 class PooledVectorPtr : public PooledCollectionPtr<Vector, PooledVectorPtr> { 402 friend class PooledCollectionPtr<Vector, PooledVectorPtr>; 403 404 static Vector* acquireCollection(FrontendContext* fc, 405 NameCollectionPool& pool) { 406 return pool.acquireVector<Vector>(fc); 407 } 408 409 static void releaseCollection(NameCollectionPool& pool, Vector** ptr) { 410 pool.releaseVector(ptr); 411 } 412 413 using Base = PooledCollectionPtr<Vector, 
PooledVectorPtr>; 414 using Base::collection; 415 416 public: 417 using Base::Base; 418 419 ~PooledVectorPtr() = default; 420 421 typename Vector::ElementType& operator[](size_t index) { 422 return collection()[index]; 423 } 424 425 const typename Vector::ElementType& operator[](size_t index) const { 426 return collection()[index]; 427 } 428 }; 429 430 } // namespace js::frontend 431 432 #endif // frontend_NameCollections_h