From 2e469e282aa39364743f181a6e8f04bbb9ac54b9 Mon Sep 17 00:00:00 2001
From: Adam Ierymenko
Date: Thu, 29 Sep 2022 09:13:12 -0400
Subject: [PATCH] Docs and cleanup, and implement clone for pooled object if
 the object implements clone.

---
 utils/src/pool.rs | 57 ++++++++++++++++++++++++++++++++++-------------
 1 file changed, 41 insertions(+), 16 deletions(-)

diff --git a/utils/src/pool.rs b/utils/src/pool.rs
index 5fb5acf5c..583db344b 100644
--- a/utils/src/pool.rs
+++ b/utils/src/pool.rs
@@ -14,14 +14,9 @@ pub trait PoolFactory<O> {
 
 /// Container for pooled objects that have been checked out of the pool.
 ///
-/// When this is dropped the object is returned to the pool or if the pool or is
-/// dropped if the pool has been dropped. There is also an into_raw() and from_raw()
-/// functionality that allows conversion to/from naked pointers to O for
-/// interoperation with C/C++ APIs. This in addition to being as slim as possible is
-/// why we implemented our own pool.
-///
-/// Note that pooled objects are not clonable. If you want to share them use Rc<>
-/// or Arc<>.
+/// Objects are automagically returned to the pool when Pooled<> is dropped if the pool still exists.
+/// If the pool itself is gone objects are freed. Two methods for conversion to/from raw pointers are
+/// available for interoperation with foreign APIs.
 #[repr(transparent)]
 pub struct Pooled<O, F: PoolFactory<O>>(NonNull<PoolEntry<O, F>>);
 
@@ -34,6 +29,7 @@ struct PoolEntry<O, F: PoolFactory<O>> {
 impl<O, F: PoolFactory<O>> Pooled<O, F> {
     /// Create a pooled object wrapper around an object but with no pool to return it to.
     /// The object will be freed when this pooled container is dropped.
+    #[inline]
     pub fn naked(o: O) -> Self {
         unsafe {
             Self(NonNull::new_unchecked(Box::into_raw(Box::new(PoolEntry::<O, F> {
@@ -44,9 +40,10 @@ impl<O, F: PoolFactory<O>> Pooled<O, F> {
     }
 
     /// Get a raw pointer to the object wrapped by this pooled object container.
-    /// The returned raw pointer MUST be restored into a Pooled instance with
-    /// from_raw() or memory will leak.
-    #[inline(always)]
+    ///
+    /// The returned pointer MUST be returned to the pooling system with from_raw() or memory
+    /// will leak.
+    #[inline]
     pub unsafe fn into_raw(self) -> *mut O {
         // Verify that the structure is not padded before 'obj'.
         assert_eq!(
@@ -60,10 +57,10 @@ impl<O, F: PoolFactory<O>> Pooled<O, F> {
     }
 
     /// Restore a raw pointer from into_raw() into a Pooled object.
-    /// The supplied pointer MUST have been obtained from a Pooled object or
-    /// undefined behavior will occur. Pointers from other sources can't be used
-    /// here. None is returned if the pointer is null.
-    #[inline(always)]
+    ///
+    /// The supplied pointer MUST have been obtained from a Pooled object. None is returned
+    /// if the pointer is null.
+    #[inline]
     pub unsafe fn from_raw(raw: *mut O) -> Option<Self> {
         if !raw.is_null() {
             Some(Self(NonNull::new_unchecked(raw.cast())))
@@ -73,6 +70,32 @@ impl<O, F: PoolFactory<O>> Pooled<O, F> {
     }
 }
 
+impl<O, F: PoolFactory<O>> Clone for Pooled<O, F>
+where
+    O: Clone,
+{
+    #[inline]
+    fn clone(&self) -> Self {
+        let internal = unsafe { &mut *self.0.as_ptr() };
+        if let Some(p) = internal.return_pool.upgrade() {
+            if let Some(o) = p.pool.lock().pop() {
+                let mut o = Self(o);
+                *o.as_mut() = self.as_ref().clone();
+                o
+            } else {
+                Pooled::<O, F>(unsafe {
+                    NonNull::new_unchecked(Box::into_raw(Box::new(PoolEntry::<O, F> {
+                        obj: self.as_ref().clone(),
+                        return_pool: Arc::downgrade(&p),
+                    })))
+                })
+            }
+        } else {
+            Self::naked(self.as_ref().clone())
+        }
+    }
+}
+
 unsafe impl<O, F: PoolFactory<O>> Send for Pooled<O, F> where O: Send {}
 unsafe impl<O, F: PoolFactory<O>> Sync for Pooled<O, F> where O: Sync {}
 
@@ -109,7 +132,7 @@ impl<O, F: PoolFactory<O>> AsMut<O> for Pooled<O, F> {
 impl<O, F: PoolFactory<O>> Drop for Pooled<O, F> {
     #[inline]
     fn drop(&mut self) {
-        let internal = unsafe { self.0.as_mut() };
+        let internal = unsafe { &mut *self.0.as_ptr() };
         if let Some(p) = internal.return_pool.upgrade() {
             p.factory.reset(&mut internal.obj);
             p.pool.lock().push(self.0);
@@ -130,6 +153,7 @@ struct PoolInner<O, F: PoolFactory<O>> {
 }
 
 impl<O, F: PoolFactory<O>> Pool<O, F> {
+    #[inline]
     pub fn new(initial_stack_capacity: usize, factory: F) -> Self {
         Self(Arc::new(PoolInner::<O, F> {
             factory,
@@ -156,6 +180,7 @@ impl<O, F: PoolFactory<O>> Pool<O, F> {
     /// If get() is called after this new objects will be allocated, and any outstanding
     /// objects will still be returned on drop unless the pool itself is dropped. This can
     /// be done to free some memory if there has been a spike in memory use.
+    #[inline]
    pub fn purge(&self) {
         for o in self.0.pool.lock().drain(..) {
             drop(unsafe { Box::from_raw(o.as_ptr()) })
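
Usage note (not part of the patch above): the sketch below illustrates how the new Clone impl is
expected to behave from the caller's side. It assumes PoolFactory's two methods are create() and
reset() (only reset() appears in this diff) and that Pool::get() hands out a Pooled<O, F>, as the
purge() doc comment implies; BufFactory and example() are hypothetical names used only here.

    use crate::pool::{Pool, PoolFactory};

    // Hypothetical factory for pooled byte buffers.
    struct BufFactory;

    impl PoolFactory<Vec<u8>> for BufFactory {
        fn create(&self) -> Vec<u8> {
            Vec::with_capacity(1024)
        }
        fn reset(&self, obj: &mut Vec<u8>) {
            obj.clear();
        }
    }

    fn example() {
        let pool: Pool<Vec<u8>, BufFactory> = Pool::new(16, BufFactory);

        let mut a = pool.get(); // check a buffer out of the pool
        a.as_mut().extend_from_slice(b"hello");

        // New in this patch: clone() yields another pooled buffer (reused from the
        // pool if one is available, freshly allocated otherwise) holding a copy of
        // the contents. Dropping either returns it to the pool after reset().
        let b = a.clone();
        assert_eq!(a.as_ref(), b.as_ref());

        drop(a);
        drop(b);
        pool.purge(); // free everything currently cached in the pool
    }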