Mirror of https://github.com/zerotier/ZeroTierOne.git

Commit 854ca07e87 (parent b8e65b667c): More reorg and cruft removal.

4 changed files with 13 additions and 50 deletions
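Most of this commit replaces Buffer's per-architecture unsafe read_obj_internal / write_obj_internal helpers with calls to zerotier_utils::memory::load_raw and memory::store_raw. As a rough sketch of what such helpers generally look like (an assumption about their shape based on how they are called in the hunks below, not the actual zerotier_utils code):

    // Hypothetical sketch of unaligned load/store helpers in the spirit of
    // zerotier_utils::memory::{load_raw, store_raw}; not the real implementation.
    use std::mem::size_of;

    /// Load a plain-old-data value from the start of a byte slice (native endian).
    #[inline(always)]
    pub fn load_raw<T: Copy>(src: &[u8]) -> T {
        assert!(src.len() >= size_of::<T>()); // callers bounds-check first
        unsafe { std::ptr::read_unaligned(src.as_ptr().cast::<T>()) }
    }

    /// Store a plain-old-data value at the start of a mutable byte slice (native endian).
    #[inline(always)]
    pub fn store_raw<T: Copy>(v: T, dest: &mut [u8]) {
        assert!(dest.len() >= size_of::<T>());
        unsafe { std::ptr::write_unaligned(dest.as_mut_ptr().cast::<T>(), v) }
    }

Centralizing the unaligned access in one place is what lets the per-architecture #[cfg] duplication in the Buffer impl disappear below.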
@@ -3,6 +3,7 @@
 use std::io::{Read, Write};
 use std::mem::{size_of, MaybeUninit};
 
+use zerotier_utils::memory;
 use zerotier_utils::pool::PoolFactory;
 use zerotier_utils::varint;
 
@@ -37,30 +38,6 @@ fn overflow_err() -> std::io::Error {
 impl<const L: usize> Buffer<L> {
     pub const CAPACITY: usize = L;
 
-    #[cfg(any(target_arch = "x86", target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64"))]
-    #[inline(always)]
-    unsafe fn read_obj_internal<T: Sized + Copy>(&self, i: usize) -> T {
-        *self.1.as_ptr().add(i).cast()
-    }
-
-    #[cfg(not(any(target_arch = "x86", target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64")))]
-    #[inline(always)]
-    unsafe fn read_obj_internal<T: Sized + Copy>(&self, i: usize) -> T {
-        std::mem::transmute_copy(&*self.1.as_ptr().add(i).cast::<T>())
-    }
-
-    #[cfg(any(target_arch = "x86", target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64"))]
-    #[inline(always)]
-    unsafe fn write_obj_internal<T: Sized + Copy>(&mut self, i: usize, o: T) {
-        *self.1.as_mut_ptr().add(i).cast::<T>() = o;
-    }
-
-    #[cfg(not(any(target_arch = "x86", target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64")))]
-    #[inline(always)]
-    unsafe fn write_obj_internal<T: Sized + Copy>(&mut self, i: usize, o: T) {
-        std::ptr::copy_nonoverlapping((&o as *const T).cast::<u8>(), self.1.as_mut_ptr().add(i), size_of::<T>())
-    }
-
     /// Create an empty zeroed buffer.
     #[inline(always)]
     pub fn new() -> Self {
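For comparison only (not part of the commit), the same bounds-checked big-endian accesses can be written entirely in safe Rust with from_be_bytes / to_be_bytes; the arch-gated code removed above existed to avoid that copy ceremony on platforms that tolerate unaligned access:

    // Safe-Rust equivalent of a bounds-checked big-endian u16 read and write;
    // illustrative only, not code from this commit.
    fn read_u16_be(buf: &[u8], at: usize) -> std::io::Result<u16> {
        buf.get(at..at + 2)
            .map(|b| u16::from_be_bytes([b[0], b[1]]))
            .ok_or_else(|| std::io::Error::new(std::io::ErrorKind::UnexpectedEof, "buffer overflow"))
    }

    fn write_u16_be(buf: &mut [u8], at: usize, v: u16) -> std::io::Result<()> {
        let dst = buf
            .get_mut(at..at + 2)
            .ok_or_else(|| std::io::Error::new(std::io::ErrorKind::UnexpectedEof, "buffer overflow"))?;
        dst.copy_from_slice(&v.to_be_bytes());
        Ok(())
    }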
@@ -302,7 +279,7 @@ impl<const L: usize> Buffer<L> {
         let end = ptr + 2;
         if end <= L {
             self.0 = end;
-            unsafe { self.write_obj_internal(ptr, i.to_be()) };
+            memory::store_raw(i.to_be(), &mut self.1[ptr..]);
             Ok(())
         } else {
             Err(overflow_err())
@@ -315,7 +292,7 @@
         let end = ptr + 4;
         if end <= L {
             self.0 = end;
-            unsafe { self.write_obj_internal(ptr, i.to_be()) };
+            memory::store_raw(i.to_be(), &mut self.1[ptr..]);
             Ok(())
         } else {
             Err(overflow_err())
@@ -328,7 +305,7 @@
         let end = ptr + 8;
         if end <= L {
             self.0 = end;
-            unsafe { self.write_obj_internal(ptr, i.to_be()) };
+            memory::store_raw(i.to_be(), &mut self.1[ptr..]);
             Ok(())
         } else {
             Err(overflow_err())
@@ -341,7 +318,7 @@
         let end = ptr + 8;
         if end <= L {
             self.0 = end;
-            unsafe { self.write_obj_internal(ptr, i.to_le()) };
+            memory::store_raw(i.to_be(), &mut self.1[ptr..]);
             Ok(())
         } else {
             Err(overflow_err())
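The write hunks above store a byte-swapped value (i.to_be()) through a native-endian raw store, which yields network byte order on any host. A quick self-contained check of that equivalence (illustration, not code from the commit):

    // On a little-endian host, 0x0102_0304u32.to_be() is 0x0403_0201, whose
    // native-endian bytes are [0x01, 0x02, 0x03, 0x04]; on a big-endian host
    // to_be() is a no-op and the bytes come out the same.
    fn main() {
        let i: u32 = 0x0102_0304;
        let mut buf = [0u8; 4];
        buf.copy_from_slice(&i.to_be().to_ne_bytes()); // same effect as a raw native store of i.to_be()
        assert_eq!(buf, [0x01, 0x02, 0x03, 0x04]);
        // Reading back mirrors the read hunks below: from_be() of a raw load.
        assert_eq!(u32::from_be(u32::from_ne_bytes(buf)), i);
    }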
@@ -398,7 +375,7 @@
         let end = ptr + 2;
         debug_assert!(end <= L);
         if end <= self.0 {
-            Ok(u16::from_be(unsafe { self.read_obj_internal(ptr) }))
+            Ok(u16::from_be(memory::load_raw(&self.1[ptr..])))
         } else {
             Err(overflow_err())
         }
@@ -409,7 +386,7 @@
         let end = ptr + 4;
         debug_assert!(end <= L);
         if end <= self.0 {
-            Ok(u32::from_be(unsafe { self.read_obj_internal(ptr) }))
+            Ok(u32::from_be(memory::load_raw(&self.1[ptr..])))
         } else {
             Err(overflow_err())
         }
@@ -420,7 +397,7 @@
         let end = ptr + 8;
         debug_assert!(end <= L);
         if end <= self.0 {
-            Ok(u64::from_be(unsafe { self.read_obj_internal(ptr) }))
+            Ok(u64::from_be(memory::load_raw(&self.1[ptr..])))
         } else {
             Err(overflow_err())
         }
@@ -499,7 +476,7 @@
         debug_assert!(end <= L);
         if end <= self.0 {
             *cursor = end;
-            Ok(u16::from_be(unsafe { self.read_obj_internal(ptr) }))
+            Ok(u16::from_be(memory::load_raw(&self.1[ptr..])))
         } else {
             Err(overflow_err())
         }
@@ -512,7 +489,7 @@
         debug_assert!(end <= L);
         if end <= self.0 {
             *cursor = end;
-            Ok(u32::from_be(unsafe { self.read_obj_internal(ptr) }))
+            Ok(u32::from_be(memory::load_raw(&self.1[ptr..])))
         } else {
             Err(overflow_err())
         }
@@ -525,20 +502,7 @@
         debug_assert!(end <= L);
         if end <= self.0 {
             *cursor = end;
-            Ok(u64::from_be(unsafe { self.read_obj_internal(ptr) }))
-        } else {
-            Err(overflow_err())
-        }
-    }
-
-    #[inline(always)]
-    pub fn read_u64_le(&self, cursor: &mut usize) -> std::io::Result<u64> {
-        let ptr = *cursor;
-        let end = ptr + 8;
-        debug_assert!(end <= L);
-        if end <= self.0 {
-            *cursor = end;
-            Ok(u64::from_le(unsafe { self.read_obj_internal(ptr) }))
+            Ok(u64::from_be(memory::load_raw(&self.1[ptr..])))
         } else {
             Err(overflow_err())
         }
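All of the cursor-based readers touched above share one pattern: compute the end offset, check it against the buffer's current length (self.0), advance the caller's cursor, then decode big-endian. A standalone sketch of that pattern over a plain byte slice (hypothetical free function, not the Buffer API itself):

    // Cursor-advancing big-endian u64 read, mirroring the shape of the
    // Buffer::read_u* methods in the hunks above; illustrative only.
    fn read_u64_be(data: &[u8], cursor: &mut usize) -> std::io::Result<u64> {
        let ptr = *cursor;
        let end = ptr + 8;
        if end <= data.len() {
            *cursor = end;
            let mut bytes = [0u8; 8];
            bytes.copy_from_slice(&data[ptr..end]);
            Ok(u64::from_be_bytes(bytes))
        } else {
            Err(std::io::Error::new(std::io::ErrorKind::UnexpectedEof, "buffer overflow"))
        }
    }
    // Usage: let mut cursor = 0; let v = read_u64_be(&packet, &mut cursor)?;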
@@ -226,8 +226,8 @@ mod tests {
 
     type TypeMap = HashMap<String, Type>;
 
+    use super::{Dictionary, BOOL_TRUTH};
     use crate::util::testutil::randstring;
-    use crate::vl1::dictionary::{Dictionary, BOOL_TRUTH};
     use std::collections::HashMap;
 
     fn make_dictionary() -> (Dictionary, TypeMap) {
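This hunk appears to reflect the dictionary module being relocated: its tests now import Dictionary and BOOL_TRUTH through use super:: rather than the old absolute crate::vl1::dictionary path, so they stay valid wherever the module lives. A minimal illustration of that pattern (stand-in type, not the real test module):

    // Illustration only: importing from the parent module via `super` keeps
    // tests path-independent when the module is moved within the crate.
    pub struct Dictionary;

    #[cfg(test)]
    mod tests {
        use super::Dictionary; // was: use crate::vl1::dictionary::Dictionary;

        #[test]
        fn constructs() {
            let _d = Dictionary;
        }
    }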
@@ -1,6 +1,7 @@
 // (c) 2020-2022 ZeroTier, Inc. -- currently propritery pending actual release and licensing. See LICENSE.md.
 
 pub mod buffer;
+pub mod dictionary;
 pub(crate) mod gate;
 pub mod marshalable;
 
@@ -1,7 +1,6 @@
 // (c) 2020-2022 ZeroTier, Inc. -- currently propritery pending actual release and licensing. See LICENSE.md.
 
 mod address;
-mod dictionary;
 mod endpoint;
 mod fragmentedpacket;
 mod identity;
@@ -19,7 +18,6 @@ pub(crate) mod protocol;
 pub mod inetaddress;
 
 pub use address::Address;
-pub use dictionary::Dictionary;
 pub use endpoint::Endpoint;
 pub use identity::*;
 pub use inetaddress::InetAddress;