Adam Ierymenko 2022-09-28 11:24:42 -04:00
parent 732547400b
commit 433c6058d2
No known key found for this signature in database
GPG key ID: C8877CF2D7A5D7F3
4 changed files with 50 additions and 34 deletions

@@ -16,3 +16,4 @@ async-trait = "^0"
 parking_lot = { version = "^0", features = [], default-features = false }
 serde = { version = "^1", features = ["derive"], default-features = false }
 serde_json = { version = "^1", features = ["std"], default-features = false }
+clap = { version = "^3", features = ["std", "suggestions"], default-features = false }

@@ -18,11 +18,11 @@ use crate::model::*;
 pub struct FileDatabase {
     base: PathBuf,
-    live: PathBuf,
+    cache: PathBuf,
 }
 
 fn network_path(base: &PathBuf, network_id: NetworkId) -> PathBuf {
-    base.join(network_id.to_string()).join("config.json")
+    base.join(network_id.to_string()).join(format!("n{}.json", network_id.to_string()))
 }
 
 fn member_path(base: &PathBuf, network_id: NetworkId, member_id: Address) -> PathBuf {
@@ -32,32 +32,30 @@ fn member_path(base: &PathBuf, network_id: NetworkId, member_id: Address) -> PathBuf {
 impl FileDatabase {
     pub async fn new<P: AsRef<Path>>(base_path: P) -> Arc<Self> {
         let base: PathBuf = base_path.as_ref().into();
-        let live: PathBuf = base_path.as_ref().join("live");
-        let _ = fs::create_dir_all(&live).await;
-        Arc::new(Self { base, live })
+        let cache: PathBuf = base_path.as_ref().join("cache");
+        let _ = fs::create_dir_all(&cache).await;
+        Arc::new(Self { base, cache })
     }
 
-    async fn merge_with_live<O: Serialize + DeserializeOwned>(&self, live_path: PathBuf, changes: O) -> O {
-        if let Ok(changes) = serde_json::to_value(&changes) {
-            if let Ok(old_raw_json) = fs::read(&live_path).await {
-                if let Ok(mut patched) = serde_json::from_slice::<serde_json::Value>(old_raw_json.as_slice()) {
-                    json_patch(&mut patched, &changes, 64);
-                    if let Ok(patched) = serde_json::from_value::<O>(patched) {
-                        if let Ok(patched_json) = serde_json::to_vec(&patched) {
-                            if let Ok(to_replace) = fs::read(&live_path).await {
-                                if to_replace.as_slice().eq(patched_json.as_slice()) {
-                                    return patched;
-                                }
-                            }
-                            let _ = fs::write(live_path, patched_json.as_slice()).await;
-                            return patched;
-                        }
-                    }
-                }
-            }
-        }
-        // TODO: report error
-        return changes;
+    /// Merge an object with its cached instance and save the result to the 'cache' path.
+    async fn merge_with_cache<O: Serialize + DeserializeOwned>(
+        &self,
+        object_path_in_cache: PathBuf,
+        changes: O,
+    ) -> Result<O, Box<dyn Error>> {
+        let changes = serde_json::to_value(&changes)?;
+        let cached_json = fs::read(&object_path_in_cache).await?;
+        let mut patched = serde_json::from_slice::<serde_json::Value>(cached_json.as_slice())?;
+        json_patch(&mut patched, &changes, 64);
+        let patched = serde_json::from_value::<O>(patched)?;
+        let patched_json = serde_json::to_vec(&patched)?;
+        if !cached_json.as_slice().eq(patched_json.as_slice()) {
+            let _ = fs::write(object_path_in_cache, patched_json.as_slice()).await;
+        }
+        return Ok(patched);
     }
 }
@@ -70,14 +68,14 @@ impl Database for FileDatabase {
         if let Ok(raw) = r {
             let r = serde_json::from_slice::<Network>(raw.as_slice());
             if let Ok(network) = r {
-                return Ok(Some(self.merge_with_live(network_path(&self.live, id), network).await));
+                return Ok(Some(self.merge_with_cache(network_path(&self.cache, id), network).await?));
             } else {
                 return Err(Box::new(r.err().unwrap()));
             }
         } else {
             let e = r.unwrap_err();
             if matches!(e.kind(), ErrorKind::NotFound) {
-                let _ = fs::remove_dir_all(self.live.join(id.to_string())).await;
+                let _ = fs::remove_dir_all(self.cache.join(id.to_string())).await;
                 return Ok(None);
             } else {
                 return Err(Box::new(e));
@@ -87,14 +85,14 @@ impl Database for FileDatabase {
     async fn save_network(&self, obj: &Network) -> Result<(), Self::Error> {
         let _ = fs::create_dir_all(self.base.join(obj.id.to_string())).await;
-        let _ = fs::create_dir_all(self.live.join(obj.id.to_string())).await;
+        let _ = fs::create_dir_all(self.cache.join(obj.id.to_string())).await;
         let base_network_path = network_path(&self.base, obj.id);
         if !fs::metadata(&base_network_path).await.is_ok() {
             fs::write(base_network_path, to_json_pretty(obj).as_bytes()).await?;
         }
-        fs::write(network_path(&self.live, obj.id), serde_json::to_vec(obj)?.as_slice()).await?;
+        fs::write(network_path(&self.cache, obj.id), serde_json::to_vec(obj)?.as_slice()).await?;
         Ok(())
     }
@@ -125,7 +123,7 @@ impl Database for FileDatabase {
             let r = serde_json::from_slice::<Member>(raw.as_slice());
             if let Ok(member) = r {
                 return Ok(Some(
-                    self.merge_with_live(member_path(&self.live, network_id, node_id), member).await,
+                    self.merge_with_cache(member_path(&self.cache, network_id, node_id), member).await?,
                 ));
             } else {
                 return Err(Box::new(r.err().unwrap()));
@@ -133,7 +131,7 @@ impl Database for FileDatabase {
         } else {
             let e = r.unwrap_err();
             if matches!(e.kind(), ErrorKind::NotFound) {
-                let _ = fs::remove_file(member_path(&self.live, network_id, node_id)).await;
+                let _ = fs::remove_file(member_path(&self.cache, network_id, node_id)).await;
                 return Ok(None);
             } else {
                 return Err(Box::new(e));
@@ -148,7 +146,7 @@ impl Database for FileDatabase {
         }
         fs::write(
-            member_path(&self.live, obj.network_id, obj.node_id),
+            member_path(&self.cache, obj.network_id, obj.node_id),
             serde_json::to_vec(obj)?.as_slice(),
         )
         .await?;
@@ -160,3 +158,12 @@ impl Database for FileDatabase {
         Ok(())
     }
 }
+
+#[cfg(test)]
+mod tests {
+    #[allow(unused_imports)]
+    use super::*;
+
+    #[test]
+    fn test_db() {}
+}
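A note on what the new merge_with_cache() path does when a network or member is loaded: the record read from the authoritative files under base/ is converted to a serde_json::Value, recursively merged over the copy previously written under cache/ by json_patch(), and the cache file is rewritten only when the merged bytes actually differ. The sketch below is illustrative only and assumes json_patch() behaves like a depth-limited recursive merge-patch; merge_json and the sample values are not part of this commit.

// Illustrative sketch only (not code from this commit): a depth-limited recursive JSON
// merge in the spirit of json_patch(&mut cached, &changes, 64). Object fields present in
// `changes` overwrite or extend the matching fields in `target`; all other values are
// replaced wholesale.
use serde_json::map::Entry;
use serde_json::{json, Value};

fn merge_json(target: &mut Value, changes: &Value, depth_limit: usize) {
    match (target, changes) {
        (Value::Object(t), Value::Object(c)) if depth_limit > 0 => {
            for (k, v) in c {
                match t.entry(k.as_str()) {
                    Entry::Occupied(mut e) => merge_json(e.get_mut(), v, depth_limit - 1),
                    Entry::Vacant(e) => {
                        e.insert(v.clone());
                    }
                }
            }
        }
        // Non-objects (and anything beyond the depth limit) are simply overwritten.
        (t, c) => *t = c.clone(),
    }
}

fn main() {
    // Pretend `cached` was last written under cache/ and `changes` was just deserialized
    // from the hand-editable file under base/.
    let mut cached = json!({ "id": "0123456789abcdef", "name": "lan", "mtu": 2800 });
    let changes = json!({ "id": "0123456789abcdef", "mtu": 1400 });
    merge_json(&mut cached, &changes, 64);
    assert_eq!(cached["mtu"], 1400); // taken from the authoritative copy
    assert_eq!(cached["name"], "lan"); // preserved from the cached copy
}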

@@ -8,6 +8,7 @@ use serde::{Deserialize, Serialize};
 use zerotier_network_hypervisor::vl1::{Address, Endpoint, Identity, InetAddress};
 use zerotier_network_hypervisor::vl2::NetworkId;
 
+/// Static string included in JSON-serializable objects to indicate their object type through the API.
 #[derive(Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub enum ObjectType {
     #[serde(rename = "network")]
@@ -83,7 +84,7 @@ pub struct Network {
 }
 
 impl Hash for Network {
-    #[inline(always)]
+    #[inline]
     fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
         self.id.hash(state)
     }
@@ -138,6 +139,14 @@ pub struct Member {
     pub objtype: ObjectType,
 }
 
+impl Hash for Member {
+    #[inline]
+    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+        self.node_id.hash(state);
+        self.network_id.hash(state);
+    }
+}
+
 /// A complete network with all member configuration information for import/export or blob storage.
 #[derive(Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct NetworkExport {
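The manual Hash impls above hash only the stable identifiers (id for Network; node_id plus network_id for Member), so hashing stays cheap and does not depend on mutable configuration fields, while the derived Eq still compares every field, which keeps the Hash/Eq contract intact because equal values still hash equally. A minimal, hypothetical illustration of the same pattern follows; MemberLike and its fields are invented for this example and are not types from this commit.

use std::collections::HashSet;
use std::hash::{Hash, Hasher};

// Hypothetical stand-in for Member: identity fields plus one mutable config field.
#[derive(Clone, PartialEq, Eq, Debug)]
struct MemberLike {
    node_id: u64,
    network_id: u64,
    name: String, // mutable configuration, deliberately excluded from the hash
}

impl Hash for MemberLike {
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Hash only the identity, mirroring the impl Hash for Member added above.
        self.node_id.hash(state);
        self.network_id.hash(state);
    }
}

fn main() {
    let mut set = HashSet::new();
    set.insert(MemberLike { node_id: 1, network_id: 99, name: "laptop".into() });
    // Same identity but different config: it hashes to the same bucket, and since the
    // derived Eq also compares `name`, it is stored as a distinct element.
    set.insert(MemberLike { node_id: 1, network_id: 99, name: "renamed".into() });
    assert_eq!(set.len(), 2);
}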

@@ -18,7 +18,6 @@ serde = { version = "^1", features = ["derive"], default-features = false }
 serde_json = { version = "^1", features = ["std"], default-features = false }
 parking_lot = { version = "^0", features = [], default-features = false }
 clap = { version = "^3", features = ["std", "suggestions"], default-features = false }
-log = "^0"
 
 [target."cfg(windows)".dependencies]
 winapi = { version = "^0", features = ["handleapi", "ws2ipdef", "ws2tcpip"] }