mirror of https://github.com/zerotier/ZeroTierOne.git
synced 2025-07-26 20:22:51 +02:00

commit 7ec194a6d1: ran cargo fmt
parent: f6540e129a
2 changed files with 39 additions and 79 deletions
arc_pool.rs

@@ -1,9 +1,12 @@
-use std::sync::{Mutex, RwLock, RwLockReadGuard, atomic::{AtomicU32, Ordering, AtomicPtr}};
-use std::mem::{self, MaybeUninit, ManuallyDrop};
-use std::ptr::{self, NonNull};
 use std::marker::PhantomData;
+use std::mem::{self, ManuallyDrop, MaybeUninit};
 use std::num::NonZeroU64;
 use std::ops::Deref;
+use std::ptr::{self, NonNull};
+use std::sync::{
+    atomic::{AtomicPtr, AtomicU32, Ordering},
+    Mutex, RwLock, RwLockReadGuard,
+};
 
 const DEFAULT_L: usize = 64;
 
@@ -47,7 +50,6 @@ impl<T, const L: usize> Pool<T, L> {
         Pool(Mutex::new((ptr::null_mut(), 1, ptr::null_mut(), usize::MAX)))
     }
 
-
     #[inline(always)]
     fn create_arr() -> [MaybeUninit<Slot<T>>; L] {
         unsafe { MaybeUninit::<[MaybeUninit<Slot<T>>; L]>::uninit().assume_init() }
@@ -68,10 +70,7 @@ impl<T, const L: usize> Pool<T, L> {
             slot_ptr
         } else {
             if head_size >= L {
-                let new = Box::leak(Box::new(PoolMem {
-                    pre: head_arena,
-                    mem: Self::create_arr(),
-                }));
+                let new = Box::leak(Box::new(PoolMem { pre: head_arena, mem: Self::create_arr() }));
                 head_arena = new;
                 head_size = 0;
             }
@@ -122,7 +121,6 @@ impl<T, const L: usize> Drop for Pool<T, L> {
     }
 }
 
-
 pub trait StaticPool<T, const L: usize = DEFAULT_L> {
     /// Must return a pointer to an instance of a `Pool<T, L>` with a static lifetime. That pointer must be cast to a `*const ()` to make the borrow-checker happy.
     ///
     /// **Safety**: The returned pointer must have originally been a `&'static Pool<T, L>` reference. So it must have had a matching `T` and `L` and it must have the static lifetime.
@@ -135,17 +133,19 @@ pub trait StaticPool<T, const L: usize = DEFAULT_L> {
     ///
     /// This `PoolArc` supports the ability to generate weak, non-owning references to the allocated `T`.
    #[inline(always)]
-    fn alloc(obj: T) -> PoolArc<T, Self, L> where Self: Sized {
+    fn alloc(obj: T) -> PoolArc<T, Self, L>
+    where
+        Self: Sized,
+    {
         unsafe {
             PoolArc {
                 ptr: (*Self::get_static_pool().cast::<Pool<T, L>>()).alloc_ptr(obj),
-                _p: PhantomData
+                _p: PhantomData,
             }
         }
     }
 }
 
-
 /// A multithreading lock guard that prevents another thread from freeing the underlying `T` while it is held. It does not prevent other threads from accessing the underlying `T`.
 ///
 /// If the same thread that holds this guard attempts to free `T` before dropping the guard, it will deadlock.
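The `alloc` hunk above is the heart of the `StaticPool` contract: the `*const ()` returned by `get_static_pool()` is cast back to a `&'static Pool<T, L>` and asked for a slot pointer. A minimal sketch of a hand-written provider follows; `MyPools` and `U32_POOL` are illustrative names, the exact signature of `get_static_pool` is inferred from the `alloc` body, and `Pool::new()` is assumed to be a `const fn` (only its body appears in an earlier hunk). In the crate itself the exported `static_pool!` macro generates this boilerplate.

    struct MyPools;

    // Assumes Pool::new() is const and Pool is Sync; both are needed for a static.
    static U32_POOL: Pool<u32, DEFAULT_L> = Pool::new();

    impl StaticPool<u32, DEFAULT_L> for MyPools {
        fn get_static_pool() -> *const () {
            // Originally a &'static Pool<u32, DEFAULT_L>, as the safety note requires.
            (&U32_POOL as *const Pool<u32, DEFAULT_L>).cast()
        }
    }

    fn demo() {
        // PoolArc<u32, MyPools>; the value is freed back into U32_POOL on drop.
        let n = MyPools::alloc(7u32);
        assert_eq!(*n, 7);
    }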
@@ -158,7 +158,6 @@ impl<'a, T> Deref for PoolGuard<'a, T> {
     }
 }
 
-
 /// A rust-style RAII wrapper that drops and frees memory allocated from a pool automatically, the same as an `Arc<T>`. This will run the destructor of `T` in place within the pool before freeing it, correctly maintaining the invariants that the borrow checker and rust compiler expect of generic types.
 pub struct PoolArc<T, OriginPool: StaticPool<T, L>, const L: usize = DEFAULT_L> {
     ptr: NonNull<Slot<T>>,
@@ -179,9 +178,7 @@ impl<T, OriginPool: StaticPool<T, L>, const L: usize> PoolArc<T, OriginPool, L> {
     }
     /// Returns a number that uniquely identifies this allocated `T` within this pool. No other instance of `T` may have this uid.
     pub fn uid(&self) -> NonZeroU64 {
-        unsafe {
-            NonZeroU64::new_unchecked(self.ptr.as_ref().uid)
-        }
+        unsafe { NonZeroU64::new_unchecked(self.ptr.as_ref().uid) }
     }
 }
 unsafe impl<T, OriginPool: StaticPool<T, L>, const L: usize> Send for PoolArc<T, OriginPool, L> where T: Send {}
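To make the `uid` guarantee above concrete: two simultaneously live allocations never share a uid, even when their contents compare equal. A sketch reusing the hypothetical `MyPools` provider from the earlier note:

    let a = MyPools::alloc(42u32);
    let b = MyPools::alloc(42u32);
    assert_eq!(*a, *b); // equal contents...
    assert_ne!(a.uid(), b.uid()); // ...but distinct allocations, hence distinct uids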
@@ -191,9 +188,7 @@ impl<T, OriginPool: StaticPool<T, L>, const L: usize> Deref for PoolArc<T, OriginPool, L> {
     type Target = T;
     #[inline]
     fn deref(&self) -> &Self::Target {
-        unsafe {
-            &self.ptr.as_ref().obj.full_obj
-        }
+        unsafe { &self.ptr.as_ref().obj.full_obj }
     }
 }
 impl<T, OriginPool: StaticPool<T, L>, const L: usize> Clone for PoolArc<T, OriginPool, L> {
@@ -201,10 +196,7 @@ impl<T, OriginPool: StaticPool<T, L>, const L: usize> Clone for PoolArc<T, OriginPool, L> {
         unsafe {
             self.ptr.as_ref().ref_count.fetch_add(1, Ordering::Relaxed);
         }
-        Self {
-            ptr: self.ptr,
-            _p: PhantomData,
-        }
+        Self { ptr: self.ptr, _p: PhantomData }
     }
 }
 impl<T, OriginPool: StaticPool<T, L>, const L: usize> Drop for PoolArc<T, OriginPool, L> {
@@ -218,7 +210,6 @@ impl<T, OriginPool: StaticPool<T, L>, const L: usize> Drop for PoolArc<T, OriginPool, L> {
     }
 }
 
-
 /// A non-owning reference to a `T` allocated by a pool. This reference has the special property that the underlying `T` can be dropped from the pool while neither making this reference invalid nor leaking the memory of `T`. Instead attempts to `grab` this reference will safely return `None` if the underlying `T` has been freed by any thread.
 ///
 /// Due to their thread safety and low overhead, a `PoolWeakRef` implements clone and copy.
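A sketch of the weak-reference contract described above. The diff does not show how a `PoolWeakRef` is obtained from a `PoolArc`, so `weak_ref()` below is a hypothetical name; `grab` is the accessor the doc comment names, assumed here to return an `Option` of a guard:

    let strong = MyPools::alloc(7u32);
    let weak = strong.weak_ref(); // hypothetical; the real constructor is not in this diff
    assert!(weak.grab().is_some()); // the u32 is still alive
    drop(strong); // runs the destructor and frees the slot back to the pool
    assert!(weak.grab().is_none()); // the free is observed safely; no dangling deref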
@@ -252,15 +243,11 @@ unsafe impl<T> Sync for PoolWeakRef<T> where T: Sync {}
 
 impl<T> Clone for PoolWeakRef<T> {
     fn clone(&self) -> Self {
-        Self {
-            uid: self.uid,
-            ptr: self.ptr,
-        }
+        Self { uid: self.uid, ptr: self.ptr }
     }
 }
 impl<T> Copy for PoolWeakRef<T> {}
 
-
 pub struct PoolArcSwap<T, OriginPool: StaticPool<T, L>, const L: usize = DEFAULT_L> {
     ptr: AtomicPtr<Slot<T>>,
     reads: AtomicU32,
@@ -289,10 +276,7 @@ impl<T, OriginPool: StaticPool<T, L>, const L: usize> PoolArcSwap<T, OriginPool, L> {
             }
 
             mem::forget(arc);
-            PoolArc {
-                ptr: NonNull::new_unchecked(pre_ptr),
-                _p: self._p,
-            }
+            PoolArc { ptr: NonNull::new_unchecked(pre_ptr), _p: self._p }
         }
     }
 
@@ -302,10 +286,7 @@ impl<T, OriginPool: StaticPool<T, L>, const L: usize> PoolArcSwap<T, OriginPool, L> {
             let ptr = self.ptr.load(Ordering::Relaxed);
             (*ptr).ref_count.fetch_add(1, Ordering::Relaxed);
             self.reads.fetch_sub(1, Ordering::Release);
-            PoolArc {
-                ptr: NonNull::new_unchecked(ptr),
-                _p: self._p,
-            }
+            PoolArc { ptr: NonNull::new_unchecked(ptr), _p: self._p }
         }
     }
 }
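The two hunks above are the writer and reader halves of `PoolArcSwap`: the swap path forgets the incoming `PoolArc` and re-wraps the previous pointer as the returned `PoolArc`, while the read path bumps the slot's `ref_count` under the `reads` counter. Only method bodies are visible here, so the names `new`, `swap`, and `load` in this sketch are assumptions:

    let slot: PoolArcSwap<u32, MyPools> = PoolArcSwap::new(MyPools::alloc(1)); // assumed constructor
    let old = slot.swap(MyPools::alloc(2)); // hands back the previously stored PoolArc
    assert_eq!(*old, 1);
    assert_eq!(*slot.load(), 2); // load() takes an extra strong reference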
@@ -317,15 +298,11 @@ impl<T, OriginPool: StaticPool<T, L>, const L: usize> Drop for PoolArcSwap<T, OriginPool, L> {
     fn drop(&mut self) {
         unsafe {
             let pre = self.ptr.load(Ordering::SeqCst);
-            PoolArc {
-                _p: self._p,
-                ptr: NonNull::new_unchecked(pre),
-            };
+            PoolArc { _p: self._p, ptr: NonNull::new_unchecked(pre) };
         }
     }
 }
 
-
 pub struct PoolArcSwapRw<T, OriginPool: StaticPool<T, L>, const L: usize = DEFAULT_L> {
     ptr: RwLock<NonNull<Slot<T>>>,
     _p: PhantomData<*const OriginPool>,
@@ -333,20 +310,14 @@ pub struct PoolArcSwapRw<T, OriginPool: StaticPool<T, L>, const L: usize = DEFAULT_L> {
 
 impl<T, OriginPool: StaticPool<T, L>, const L: usize> PoolArcSwapRw<T, OriginPool, L> {
     pub fn new(arc: PoolArc<T, OriginPool, L>) -> Self {
-        let ret = Self {
-            ptr: RwLock::new(arc.ptr),
-            _p: arc._p,
-        };
+        let ret = Self { ptr: RwLock::new(arc.ptr), _p: arc._p };
         mem::forget(arc);
         ret
     }
 
     pub fn swap(&self, arc: PoolArc<T, OriginPool, L>) -> PoolArc<T, OriginPool, L> {
         let mut w = self.ptr.write().unwrap();
-        let pre = PoolArc {
-            ptr: *w,
-            _p: self._p,
-        };
+        let pre = PoolArc { ptr: *w, _p: self._p };
         *w = arc.ptr;
         mem::forget(arc);
         pre
@@ -357,10 +328,7 @@ impl<T, OriginPool: StaticPool<T, L>, const L: usize> PoolArcSwapRw<T, OriginPool, L> {
         unsafe {
             r.as_ref().ref_count.fetch_add(1, Ordering::Relaxed);
         }
-        let pre = PoolArc {
-            ptr: *r,
-            _p: self._p,
-        };
+        let pre = PoolArc { ptr: *r, _p: self._p };
         pre
     }
 }
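`PoolArcSwapRw` is the `RwLock`-based variant of the same idea: `new` and `swap` are confirmed by the hunk above, and the reader (its name is cut off by the hunk boundary; call it `load`) takes the read lock, bumps `ref_count`, and returns a fresh `PoolArc`. A usage sketch under those naming assumptions:

    let slot = PoolArcSwapRw::<u32, MyPools>::new(MyPools::alloc(10));
    let prev = slot.swap(MyPools::alloc(20)); // write-locks, then returns the old PoolArc
    assert_eq!(*prev, 10);
    assert_eq!(*slot.load(), 20); // read-locks; concurrent loads do not block each other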
@@ -368,10 +336,7 @@ impl<T, OriginPool: StaticPool<T, L>, const L: usize> Drop for PoolArcSwapRw<T, OriginPool, L> {
     #[inline]
     fn drop(&mut self) {
         let w = self.ptr.write().unwrap();
-        PoolArc {
-            ptr: *w,
-            _p: self._p,
-        };
+        PoolArc { ptr: *w, _p: self._p };
     }
 }
 unsafe impl<T, OriginPool: StaticPool<T, L>, const L: usize> Send for PoolArcSwapRw<T, OriginPool, L> where T: Send {}
@@ -424,9 +389,11 @@ pub use __static_pool__ as static_pool;
 
 #[cfg(test)]
 mod tests {
-    use std::{thread, sync::{Arc, atomic::AtomicU64}};
     use super::*;
+    use std::{
+        sync::{atomic::AtomicU64, Arc},
+        thread,
+    };
 
-
     fn rand(r: &mut u32) -> u32 {
         /* Algorithm "xor" from p. 4 of Marsaglia, "Xorshift RNGs" */
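The body of the test's `rand` is cut off at the hunk boundary. For reference, the 32-bit form of the algorithm the comment cites (Marsaglia, "Xorshift RNGs", 2003, p. 4) is:

    fn xorshift32(r: &mut u32) -> u32 {
        // Three shift/xor steps; full period 2^32 - 1 for any nonzero seed.
        *r ^= *r << 13;
        *r ^= *r >> 17;
        *r ^= *r << 5;
        *r
    }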
@@ -461,11 +428,7 @@ mod tests {
     impl Item {
         fn new(r: u32, count: &'static AtomicU64) -> Item {
             count.fetch_add(1, Ordering::Relaxed);
-            Item {
-                a: r,
-                count,
-                b: r,
-            }
+            Item { a: r, count, b: r }
         }
         fn check(&self, id: u32) {
             assert_eq!(self.a, self.b);
@@ -486,7 +449,6 @@ mod tests {
 
     #[test]
     fn usage() {
-
         let num1 = TestPools::alloc(1u32);
         let num2 = TestPools::alloc(2u32);
         let num3 = TestPools::alloc(3u32);
@@ -503,7 +465,6 @@ mod tests {
     }
     #[test]
     fn single_thread() {
-
         let mut history = Vec::new();
 
         let num1 = TestPools::alloc(1u32);
lib.rs

@@ -6,6 +6,7 @@
  * https://www.zerotier.com/
  */
 
+pub mod arc_pool;
 pub mod arrayvec;
 pub mod base64;
 pub mod blob;
@@ -29,8 +30,6 @@ pub mod reaper;
 pub mod ringbuffer;
 pub mod sync;
 pub mod varint;
-pub mod arc_pool;
-
 
 #[cfg(feature = "tokio")]
 pub use tokio;