tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

frame_allocator.rs (16184B)


      1 /* This Source Code Form is subject to the terms of the Mozilla Public
      2 * License, v. 2.0. If a copy of the MPL was not distributed with this
      3 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      4 
//! A custom allocator for memory allocations that have the lifetime of a frame.
//!
//! See also `internal_types::FrameVec`.
//!
      9 
     10 use allocator_api2::alloc::{Allocator, AllocError, Layout, Global};
     11 
     12 use std::{cell::UnsafeCell, ptr::NonNull, sync::{atomic::{AtomicI32, Ordering}, Arc}};
     13 
     14 use crate::{bump_allocator::{BumpAllocator, ChunkPool, Stats}, internal_types::{FrameId, FrameVec}};
     15 
/// A memory allocator for allocations that have the same lifetime as a built frame.
///
/// A custom allocator is used because:
/// - The frame is created on a thread and dropped on another thread, which causes
///   lock contention in jemalloc.
/// - Since all allocations have a very similar lifetime, we can implement much faster
///   allocation and deallocation with a specialized allocator than can be achieved
///   with a general purpose allocator.
///
/// If the allocator is created using `FrameAllocator::fallback()`, it is not
/// attached to a `FrameMemory` and simply falls back to the global allocator. This
/// should only be used to handle deserialization (for wrench replays) and tests.
///
/// # Safety
///
/// None of the safety restrictions below apply if the allocator is created using
/// `FrameAllocator::fallback`.
///
/// `FrameAllocator` can move between threads if and only if it does so along with
/// the `FrameMemory` it is associated to (if any). The opposite is also true: it
/// is safe to move `FrameMemory` between threads if and only if all live frame
/// allocators associated to it move along with it.
///
/// `FrameAllocator` must be dropped before the `FrameMemory` it is associated to.
///
/// In other words, `FrameAllocator` should only be used for containers that are
/// in the `Frame` data structure and not stored elsewhere. The `Frame` holds on
/// to its `FrameMemory`, allowing it all to be sent from the frame builder thread
/// to the renderer thread together.
///
/// Another way to think of it is that the frame is a large self-referential data
/// structure, holding on to its memory and a large number of containers that
/// point into the memory.
pub struct FrameAllocator {
    // If this pointer is null, fall back to the global allocator.
    inner: *mut FrameInnerAllocator,

    // Debug-only copy of the owning frame's id, compared against the inner
    // allocator's id in `check_frame_id` to catch an allocator being used
    // with the wrong frame's memory.
    #[cfg(debug_assertions)]
    frame_id: Option<FrameId>,
}
     56 
     57 impl FrameAllocator {
     58    /// Creates a `FrameAllocator` that defaults to the global allocator.
     59    ///
     60    /// Should only be used for testing purposes or desrialization in wrench replays.
     61    pub fn fallback() -> Self {
     62        FrameAllocator {
     63            inner: std::ptr::null_mut(),
     64            #[cfg(debug_assertions)]
     65            frame_id: None,
     66        }
     67    }
     68 
     69    /// Shorthand for creating a FrameVec.
     70    #[inline]
     71    pub fn new_vec<T>(self) -> FrameVec<T> {
     72        FrameVec::new_in(self)
     73    }
     74 
     75    /// Shorthand for creating a FrameVec.
     76    #[inline]
     77    pub fn new_vec_with_capacity<T>(self, cap: usize) -> FrameVec<T> {
     78        FrameVec::with_capacity_in(cap, self)
     79    }
     80 
     81    #[inline]
     82    fn allocate_impl(mem: *mut FrameInnerAllocator, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
     83        unsafe {
     84            (*mem).live_alloc_count.fetch_add(1, Ordering::Relaxed);
     85            (*mem).bump.allocate_item(layout)
     86        }
     87    }
     88 
     89    #[inline]
     90    unsafe fn deallocate_impl(mem: *mut FrameInnerAllocator, ptr: NonNull<u8>, layout: Layout) {
     91        (*mem).live_alloc_count.fetch_sub(1, Ordering::Relaxed);
     92        (*mem).bump.deallocate_item(ptr, layout)
     93    }
     94 
     95    #[inline]
     96    unsafe fn grow_impl(mem: *mut FrameInnerAllocator, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
     97        (*mem).bump.grow_item(ptr, old_layout, new_layout)
     98    }
     99 
    100    #[inline]
    101    unsafe fn shrink_impl(mem: *mut FrameInnerAllocator, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
    102        (*mem).bump.shrink_item(ptr, old_layout, new_layout)
    103    }
    104 
    105    #[cold]
    106    #[inline(never)]
    107    fn allocate_fallback(layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
    108        Global.allocate(layout)
    109    }
    110 
    111    #[cold]
    112    #[inline(never)]
    113    fn deallocate_fallback(ptr: NonNull<u8>, layout: Layout) {
    114        unsafe { Global.deallocate(ptr, layout) }
    115    }
    116 
    117    #[cold]
    118    #[inline(never)]
    119    fn grow_fallback(ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
    120        unsafe { Global.grow(ptr, old_layout, new_layout) }
    121    }
    122 
    123    #[cfg(not(debug_assertions))]
    124    fn check_frame_id(&self) {}
    125 
    126    #[cfg(debug_assertions)]
    127    fn check_frame_id(&self) {
    128        if self.inner.is_null() {
    129            return;
    130        }
    131        unsafe {
    132            assert_eq!(self.frame_id, (*self.inner).frame_id);
    133        }
    134    }
    135 }
    136 
    137 impl Clone for FrameAllocator {
    138    fn clone(&self) -> Self {
    139        unsafe {
    140            if let Some(inner) = self.inner.as_mut() {
    141                // When cloning a `FrameAllocator`, we have to decrement the
    142                // counter of dropped references in the inner allocator to
    143                // balance the fact that an extra `FrameAllocator` will be
    144                // dropped (that hasn't been accounted in `FrameMemory`).
    145                inner.references_dropped.fetch_sub(1, Ordering::Relaxed);
    146            }
    147        }
    148 
    149        FrameAllocator {
    150            inner: self.inner,
    151            #[cfg(debug_assertions)]
    152            frame_id: self.frame_id,
    153        }
    154    }
    155 }
    156 
    157 impl Drop for FrameAllocator {
    158    fn drop(&mut self) {
    159        unsafe {
    160            if let Some(inner) = self.inner.as_mut() {
    161                inner.references_dropped.fetch_add(1, Ordering::Relaxed);
    162            }
    163        }
    164    }
    165 }
    166 
// SAFETY: per the type-level docs, a `FrameAllocator` may only move between
// threads together with the `FrameMemory` it points into; the counters it
// updates are atomic. NOTE(review): soundness rests on callers honoring that
// contract — it is checked (via counters) but not enforced by the type system.
unsafe impl Send for FrameAllocator {}
    168 
    169 unsafe impl Allocator for FrameAllocator {
    170    #[inline(never)]
    171    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
    172        if self.inner.is_null() {
    173            return FrameAllocator::allocate_fallback(layout);
    174        }
    175 
    176        self.check_frame_id();
    177 
    178        FrameAllocator::allocate_impl(self.inner, layout)
    179    }
    180 
    181    #[inline(never)]
    182    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
    183        if self.inner.is_null() {
    184            return FrameAllocator::deallocate_fallback(ptr, layout);
    185        }
    186 
    187        self.check_frame_id();
    188 
    189        FrameAllocator::deallocate_impl(self.inner, ptr, layout)
    190    }
    191 
    192    #[inline(never)]
    193    unsafe fn grow(
    194        &self,
    195        ptr: NonNull<u8>,
    196        old_layout: Layout,
    197        new_layout: Layout
    198    ) -> Result<NonNull<[u8]>, AllocError> {
    199        if self.inner.is_null() {
    200            return FrameAllocator::grow_fallback(ptr, old_layout, new_layout);
    201        }
    202 
    203        self.check_frame_id();
    204 
    205        FrameAllocator::grow_impl(self.inner, ptr, old_layout, new_layout)
    206    }
    207 
    208    #[inline(never)]
    209    unsafe fn shrink(
    210        &self,
    211        ptr: NonNull<u8>,
    212        old_layout: Layout,
    213        new_layout: Layout
    214    ) -> Result<NonNull<[u8]>, AllocError> {
    215        if self.inner.is_null() {
    216            return FrameAllocator::grow_fallback(ptr, old_layout, new_layout);
    217        }
    218 
    219        self.check_frame_id();
    220 
    221        FrameAllocator::shrink_impl(self.inner, ptr, old_layout, new_layout)
    222    }
    223 }
    224 
    225 #[cfg(feature = "capture")]
    226 impl serde::Serialize for FrameAllocator {
    227    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    228    where S: serde::Serializer
    229    {
    230        ().serialize(serializer)
    231    }
    232 }
    233 
    234 #[cfg(feature = "replay")]
    235 impl<'de> serde::Deserialize<'de> for FrameAllocator {
    236    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    237    where
    238        D: serde::Deserializer<'de>,
    239    {
    240        let _ = <() as serde::Deserialize>::deserialize(deserializer)?;
    241        Ok(FrameAllocator::fallback())
    242    }
    243 }
    244 
    245 /// The default impl is required for Deserialize to work in FrameVec.
    246 /// It's fine to fallback to the global allocator when replaying wrench
    247 /// recording but we don't want to accidentally use `FrameAllocator::default()`
    248 /// in regular webrender usage, so we only implement it when the replay
    249 /// feature is enabled.
    250 #[cfg(feature = "replay")]
    251 impl Default for FrameAllocator {
    252    fn default() -> Self {
    253        Self::fallback()
    254    }
    255 }
    256 
/// The backing storage for `FrameAllocator`
///
/// This object is meant to be stored in the built frame and must not be dropped or
/// recycled before all allocations have been deallocated and all `FrameAllocators`
/// have been dropped. In other words, drop or recycle this after dropping the rest
/// of the built frame.
pub struct FrameMemory {
    // Box would be nice but it is not adequate for this purpose because
    // it is "no-alias". So we do it the hard way and manage this pointer
    // manually.

    /// Safety: The pointed `FrameInnerAllocator` must not move or be deallocated
    /// while there are live `FrameAllocator`s pointing to it. This is ensured
    /// by respecting that the `FrameMemory` is dropped last and by the
    /// `FrameInnerAllocator` not being exposed to the outside world.
    /// It is also checked at runtime via the reference count.
    /// `None` for fallback instances, which delegate to the global allocator.
    allocator: Option<NonNull<FrameInnerAllocator>>,
    /// The number of `FrameAllocator`s created during the current frame. This is
    /// used to compare against the inner allocator's dropped references counter
    /// to check that references have all been dropped before freeing or recycling
    /// the memory.
    references_created: UnsafeCell<i32>,
}
    280 
    281 impl FrameMemory {
    282    /// Creates a fallback FrameMemory that uses the global allocator.
    283    ///
    284    /// This should only be used for testing purposes and to handle the
    285    /// deserialization of webrender recordings.
    286    #[allow(unused)]
    287    pub fn fallback() -> Self {
    288        FrameMemory {
    289            allocator: None,
    290            references_created: UnsafeCell::new(0)
    291        }
    292    }
    293 
    294    /// # Panics
    295    ///
    296    /// A `FrameMemory` must not be dropped until all of the associated
    297    /// `FrameAllocators` as well as their allocations have been dropped,
    298    /// otherwise the `FrameMemory::drop` will panic.
    299    pub fn new(pool: Arc<ChunkPool>, _frame_id: FrameId) -> Self {
    300        let layout = Layout::from_size_align(
    301            std::mem::size_of::<FrameInnerAllocator>(),
    302            std::mem::align_of::<FrameInnerAllocator>(),
    303        ).unwrap();
    304 
    305        let uninit_u8 = Global.allocate(layout).unwrap();
    306 
    307        unsafe {
    308            let allocator: NonNull<FrameInnerAllocator> = uninit_u8.cast();
    309            allocator.as_ptr().write(FrameInnerAllocator {
    310                bump: BumpAllocator::new(pool),
    311 
    312                live_alloc_count: AtomicI32::new(0),
    313                references_dropped: AtomicI32::new(0),
    314                #[cfg(debug_assertions)]
    315                frame_id: Some(_frame_id),
    316            });
    317 
    318            FrameMemory {
    319                allocator: Some(allocator),
    320                references_created: UnsafeCell::new(0),
    321            }
    322        }
    323    }
    324 
    325    /// Create a `FrameAllocator` for the current frame.
    326    pub fn allocator(&self) -> FrameAllocator {
    327        if let Some(alloc) = &self.allocator {
    328            unsafe { *self.references_created.get() += 1 };
    329 
    330            return FrameAllocator {
    331                inner: alloc.as_ptr(),
    332                #[cfg(debug_assertions)]
    333                frame_id: unsafe { alloc.as_ref().frame_id },
    334            };
    335        }
    336 
    337        FrameAllocator::fallback()
    338    }
    339 
    340    /// Shorthand for creating a FrameVec.
    341    #[inline]
    342    pub fn new_vec<T>(&self) -> FrameVec<T> {
    343        FrameVec::new_in(self.allocator())
    344    }
    345 
    346    /// Shorthand for creating a FrameVec.
    347    #[inline]
    348    pub fn new_vec_with_capacity<T>(&self, cap: usize) -> FrameVec<T> {
    349        FrameVec::with_capacity_in(cap, self.allocator())
    350    }
    351 
    352    /// Panics if there are still live allocations or `FrameAllocator`s.
    353    pub fn assert_memory_reusable(&self) {
    354        if let Some(ptr) = self.allocator {
    355            unsafe {
    356                // If this assert blows up, it means an allocation is still alive.
    357                assert_eq!(ptr.as_ref().live_alloc_count.load(Ordering::Acquire), 0);
    358                // If this assert blows up, it means one or several FrameAllocators
    359                // from the previous frame are still alive.
    360                let references_created = *self.references_created.get();
    361                assert_eq!(ptr.as_ref().references_dropped.load(Ordering::Acquire), references_created);
    362            }
    363        }
    364    }
    365 
    366    #[allow(unused)]
    367    pub fn get_stats(&self) -> Stats {
    368        unsafe {
    369            self.allocator.map(|ptr| (*ptr.as_ptr()).bump.get_stats()).unwrap_or_else(Stats::default)
    370        }
    371    }
    372 }
    373 
    374 impl Drop for FrameMemory {
    375    fn drop(&mut self) {
    376        self.assert_memory_reusable();
    377 
    378        let layout = Layout::new::<FrameInnerAllocator>();
    379 
    380        unsafe {
    381            if let Some(ptr) = &mut self.allocator {
    382                std::ptr::drop_in_place(ptr.as_ptr());
    383                Global.deallocate(ptr.cast(), layout);
    384            }
    385        }
    386    }
    387 }
    388 
// SAFETY: per the type-level docs, `FrameMemory` may only move between threads
// together with all live `FrameAllocator`s pointing into it. NOTE(review):
// `references_created` is a non-atomic `UnsafeCell<i32>`, so soundness relies
// on that single-owner contract being upheld by callers.
unsafe impl Send for FrameMemory {}
    390 
    391 #[cfg(feature = "capture")]
    392 impl serde::Serialize for FrameMemory {
    393    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    394    where S: serde::Serializer
    395    {
    396        ().serialize(serializer)
    397    }
    398 }
    399 
    400 #[cfg(feature = "replay")]
    401 impl<'de> serde::Deserialize<'de> for FrameMemory {
    402    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    403    where
    404        D: serde::Deserializer<'de>,
    405    {
    406        let _ = <() as serde::Deserialize>::deserialize(deserializer)?;
    407        Ok(FrameMemory::fallback())
    408    }
    409 }
    410 
/// Heap-allocated state shared by a `FrameMemory` and every `FrameAllocator`
/// it hands out. Deliberately not exposed outside this module.
struct FrameInnerAllocator {
    // The bump allocator all frame allocations are routed through.
    bump: BumpAllocator,

    // Strictly speaking the live allocation and reference count do not need to
    // be atomic if the allocator is used correctly (the thread that
    // allocates/deallocates is also the thread where the allocator is).
    // Since the point of keeping track of the number of live allocations is to
    // check that the allocator is indeed used correctly, we stay on the safe
    // side for now.

    live_alloc_count: AtomicI32,
    /// We count the number of references dropped here and compare it against the
    /// number of references created by the `FrameMemory` when we need to check
    /// that the memory can be safely reused or released.
    /// This looks and is very similar to a reference counting scheme (`Arc`). The
    /// main differences are that we don't want the reference count to drive the
    /// lifetime of the allocator (only to check when we require all references to
    /// have been dropped), and we do half as many atomic operations since we only
    /// count drops and not creations.
    references_dropped: AtomicI32,
    /// Debug-only id of the frame this allocator serves; compared against each
    /// `FrameAllocator`'s copy in `check_frame_id`.
    #[cfg(debug_assertions)]
    frame_id: Option<FrameId>,
}
    434 
// Exercises the whole lifecycle: allocators and containers are created on one
// thread, sent (together with their `FrameMemory`) to a worker thread, grown
// and dropped there, and the memory is then verified to be reusable.
#[test]
fn frame_memory_simple() {
    use std::sync::mpsc::channel;

    let chunk_pool = Arc::new(ChunkPool::new());
    let memory = FrameMemory::new(chunk_pool, FrameId::first());

    // Hand out several allocators and vectors of assorted capacities.
    let alloc = memory.allocator();
    let a2 = memory.allocator();
    let a3 = memory.allocator();
    let v1: FrameVec<u32> = memory.new_vec_with_capacity(10);
    let v2: FrameVec<u32> = memory.new_vec_with_capacity(256);
    let v3: FrameVec<u32> = memory.new_vec_with_capacity(1024 * 128);
    let v4: FrameVec<u32> = memory.new_vec_with_capacity(128);
    let mut v5 = alloc.clone().new_vec();
    for i in 0..256u32 {
        v5.push(i);
    }
    // Repeated clones stress the reference-counting bookkeeping.
    let v6 = v2.clone().clone().clone().clone();
    let mut frame = alloc.new_vec();
    frame.push(v1);
    frame.push(v2);
    frame.push(v3);
    frame.push(v4);
    frame.push(v5);
    frame.push(v6);
    let (tx, rx) = channel();
    tx.send(frame).unwrap();

    let handle = std::thread::spawn(move || {
        // `memory`, the remaining allocators, and the frame all move to this
        // thread together, as the safety contract requires.
        let mut frame = rx.recv().unwrap();
        frame.push(memory.new_vec_with_capacity(10));
        std::mem::drop(a3);
        std::mem::drop(a2);
        std::mem::drop(frame);

        // Every allocation and allocator is gone; this must not panic.
        memory.assert_memory_reusable();
    });

    handle.join().unwrap();
}