export DiffPatch

This commit is contained in:
Aiden McClelland
2021-05-21 13:35:48 -06:00
committed by Aiden McClelland
parent 9ff57e33d3
commit d3c1c3f9d9
5 changed files with 113 additions and 46 deletions

View File

@@ -1,7 +1,7 @@
use proc_macro2::TokenStream; use proc_macro2::TokenStream;
use quote::quote; use quote::quote;
use syn::{ use syn::{
Data, DataEnum, DataStruct, DeriveInput, Fields, Ident, Lit, LitStr, MetaNameValue, Type, Data, DataEnum, DataStruct, DeriveInput, Fields, Ident, Lit, LitStr, MetaNameValue, Path, Type,
}; };
pub fn build_model(item: &DeriveInput) -> TokenStream { pub fn build_model(item: &DeriveInput) -> TokenStream {
@@ -43,7 +43,7 @@ fn build_model_struct(
let model_vis = &base.vis; let model_vis = &base.vis;
let mut child_fn_name: Vec<Ident> = Vec::new(); let mut child_fn_name: Vec<Ident> = Vec::new();
let mut child_model: Vec<Type> = Vec::new(); let mut child_model: Vec<Type> = Vec::new();
let mut child_path: Vec<LitStr> = Vec::new(); let mut child_path: Vec<Option<LitStr>> = Vec::new();
let serde_rename_all = base let serde_rename_all = base
.attrs .attrs
.iter() .iter()
@@ -81,6 +81,15 @@ fn build_model_struct(
} else { } else {
child_model.push(syn::parse2(quote! { patch_db::Model<#ty> }).unwrap()); child_model.push(syn::parse2(quote! { patch_db::Model<#ty> }).unwrap());
} }
if field
.attrs
.iter()
.filter(|attr| attr.path.is_ident("serde"))
.filter_map(|attr| syn::parse2::<Path>(attr.tokens.clone()).ok())
.any(|path| path.is_ident("flatten"))
{
child_path.push(None);
} else {
let serde_rename = field let serde_rename = field
.attrs .attrs
.iter() .iter()
@@ -91,37 +100,47 @@ fn build_model_struct(
Lit::Str(s) => Some(s), Lit::Str(s) => Some(s),
_ => None, _ => None,
}); });
child_path.push(Some(
match (serde_rename, serde_rename_all.as_ref().map(|s| s.as_str())) { match (serde_rename, serde_rename_all.as_ref().map(|s| s.as_str())) {
(Some(a), _) => child_path.push(a), (Some(a), _) => a,
(None, Some("lowercase")) => child_path.push(LitStr::new( (None, Some("lowercase")) => LitStr::new(
&heck::CamelCase::to_camel_case(ident.to_string().as_str()).to_lowercase(), &heck::CamelCase::to_camel_case(ident.to_string().as_str())
.to_lowercase(),
ident.span(), ident.span(),
)), ),
(None, Some("UPPERCASE")) => child_path.push(LitStr::new( (None, Some("UPPERCASE")) => LitStr::new(
&heck::CamelCase::to_camel_case(ident.to_string().as_str()).to_uppercase(), &heck::CamelCase::to_camel_case(ident.to_string().as_str())
.to_uppercase(),
ident.span(), ident.span(),
)), ),
(None, Some("PascalCase")) => child_path.push(LitStr::new( (None, Some("PascalCase")) => LitStr::new(
&heck::CamelCase::to_camel_case(ident.to_string().as_str()), &heck::CamelCase::to_camel_case(ident.to_string().as_str()),
ident.span(), ident.span(),
)), ),
(None, Some("camelCase")) => child_path.push(LitStr::new( (None, Some("camelCase")) => LitStr::new(
&heck::MixedCase::to_mixed_case(ident.to_string().as_str()), &heck::MixedCase::to_mixed_case(ident.to_string().as_str()),
ident.span(), ident.span(),
)), ),
(None, Some("SCREAMING_SNAKE_CASE")) => child_path.push(LitStr::new( (None, Some("SCREAMING_SNAKE_CASE")) => LitStr::new(
&heck::ShoutySnakeCase::to_shouty_snake_case(ident.to_string().as_str()), &heck::ShoutySnakeCase::to_shouty_snake_case(
ident.to_string().as_str(),
),
ident.span(), ident.span(),
)), ),
(None, Some("kebab-case")) => child_path.push(LitStr::new( (None, Some("kebab-case")) => LitStr::new(
&heck::KebabCase::to_kebab_case(ident.to_string().as_str()), &heck::KebabCase::to_kebab_case(ident.to_string().as_str()),
ident.span(), ident.span(),
)), ),
(None, Some("SCREAMING-KEBAB-CASE")) => child_path.push(LitStr::new( (None, Some("SCREAMING-KEBAB-CASE")) => LitStr::new(
&heck::ShoutyKebabCase::to_shouty_kebab_case(ident.to_string().as_str()), &heck::ShoutyKebabCase::to_shouty_kebab_case(
ident.to_string().as_str(),
),
ident.span(), ident.span(),
)), ),
_ => child_path.push(LitStr::new(&ident.to_string(), ident.span())), _ => LitStr::new(&ident.to_string(), ident.span()),
},
));
} }
} }
} }
@@ -222,15 +241,27 @@ fn build_model_struct(
child_model.push(syn::parse2(quote! { patch_db::Model<#ty> }).unwrap()); child_model.push(syn::parse2(quote! { patch_db::Model<#ty> }).unwrap());
} }
// TODO: serde rename for tuple structs? // TODO: serde rename for tuple structs?
child_path.push(LitStr::new( // TODO: serde flatten for tuple structs?
child_path.push(Some(LitStr::new(
&format!("{}", i), &format!("{}", i),
proc_macro2::Span::call_site(), proc_macro2::Span::call_site(),
)); )));
} }
} }
} }
Fields::Unit => (), Fields::Unit => (),
} }
let child_path_expr = child_path.iter().map(|child_path| {
if let Some(child_path) = child_path {
quote! {
self.0.child(#child_path).into()
}
} else {
quote! {
self.0.into()
}
}
});
quote! { quote! {
#[derive(Debug)] #[derive(Debug)]
#model_vis struct #model_name(patch_db::Model<#base_name>); #model_vis struct #model_name(patch_db::Model<#base_name>);
@@ -248,7 +279,7 @@ fn build_model_struct(
impl #model_name { impl #model_name {
#( #(
pub fn #child_fn_name(self) -> #child_model { pub fn #child_fn_name(self) -> #child_model {
self.0.child(#child_path).into() #child_path_expr
} }
)* )*
} }

View File

@@ -23,7 +23,7 @@ pub use locker::{LockType, Locker};
pub use model::{ pub use model::{
BoxModel, HasModel, Map, MapModel, Model, ModelData, ModelDataMut, OptionModel, VecModel, BoxModel, HasModel, Map, MapModel, Model, ModelData, ModelDataMut, OptionModel, VecModel,
}; };
pub use patch::Revision; pub use patch::{Revision, DiffPatch};
pub use patch_db_macro::HasModel; pub use patch_db_macro::HasModel;
pub use store::{PatchDb, Store}; pub use store::{PatchDb, Store};
pub use transaction::Transaction; pub use transaction::Transaction;

View File

@@ -1,6 +1,7 @@
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
use futures::{future::BoxFuture, FutureExt};
use json_ptr::{JsonPointer, SegList}; use json_ptr::{JsonPointer, SegList};
use qutex_2::{QrwLock, ReadGuard, WriteGuard}; use qutex_2::{QrwLock, ReadGuard, WriteGuard};
use tokio::sync::Mutex; use tokio::sync::Mutex;
@@ -67,6 +68,15 @@ impl Locker {
pub fn new() -> Self { pub fn new() -> Self {
Locker(QrwLock::new(HashMap::new())) Locker(QrwLock::new(HashMap::new()))
} }
fn lock_root_read<'a>(guard: &'a ReadGuard<HashMap<String, Locker>>) -> BoxFuture<'a, ()> {
async move {
for (_, v) in &**guard {
let g = v.0.clone().read().await.unwrap();
Self::lock_root_read(&g).await;
}
}
.boxed()
}
pub async fn lock_read<S: AsRef<str>, V: SegList>( pub async fn lock_read<S: AsRef<str>, V: SegList>(
&self, &self,
ptr: &JsonPointer<S, V>, ptr: &JsonPointer<S, V>,
@@ -83,7 +93,9 @@ impl Locker {
}; };
lock = Some(new_lock); lock = Some(new_lock);
} }
lock.unwrap() let res = lock.unwrap();
Self::lock_root_read(&res);
res
} }
pub(crate) async fn add_read_lock<S: AsRef<str> + Clone, V: SegList + Clone>( pub(crate) async fn add_read_lock<S: AsRef<str> + Clone, V: SegList + Clone>(
&self, &self,
@@ -105,6 +117,15 @@ impl Locker {
LockerGuard::Read(self.lock_read(ptr).await.into()), LockerGuard::Read(self.lock_read(ptr).await.into()),
)); ));
} }
fn lock_root_write<'a>(guard: &'a WriteGuard<HashMap<String, Locker>>) -> BoxFuture<'a, ()> {
async move {
for (_, v) in &**guard {
let g = v.0.clone().write().await.unwrap();
Self::lock_root_write(&g).await;
}
}
.boxed()
}
pub async fn lock_write<S: AsRef<str>, V: SegList>( pub async fn lock_write<S: AsRef<str>, V: SegList>(
&self, &self,
ptr: &JsonPointer<S, V>, ptr: &JsonPointer<S, V>,
@@ -119,7 +140,9 @@ impl Locker {
}; };
lock = new_lock; lock = new_lock;
} }
lock let res = lock;
Self::lock_root_write(&res);
res
} }
pub(crate) async fn add_write_lock<S: AsRef<str> + Clone, V: SegList + Clone>( pub(crate) async fn add_write_lock<S: AsRef<str> + Clone, V: SegList + Clone>(
&self, &self,

View File

@@ -11,6 +11,7 @@ use serde_json::Value;
use crate::Error; use crate::Error;
use crate::{locker::LockType, DbHandle}; use crate::{locker::LockType, DbHandle};
#[derive(Debug)]
pub struct ModelData<T: Serialize + for<'de> Deserialize<'de>>(T); pub struct ModelData<T: Serialize + for<'de> Deserialize<'de>>(T);
impl<T: Serialize + for<'de> Deserialize<'de>> Deref for ModelData<T> { impl<T: Serialize + for<'de> Deserialize<'de>> Deref for ModelData<T> {
type Target = T; type Target = T;
@@ -18,7 +19,13 @@ impl<T: Serialize + for<'de> Deserialize<'de>> Deref for ModelData<T> {
&self.0 &self.0
} }
} }
impl<T: Serialize + for<'de> Deserialize<'de>> ModelData<T> {
pub fn to_owned(self) -> T {
self.0
}
}
#[derive(Debug)]
pub struct ModelDataMut<T: Serialize + for<'de> Deserialize<'de>> { pub struct ModelDataMut<T: Serialize + for<'de> Deserialize<'de>> {
original: Value, original: Value,
current: T, current: T,

View File

@@ -35,6 +35,12 @@ impl Transaction<&mut PatchDbHandle> {
drop(store); drop(store);
Ok(rev) Ok(rev)
} }
pub async fn abort(mut self) -> Result<DiffPatch, Error> {
let store_lock = self.parent.store();
let _store = store_lock.read().await;
self.rebase()?;
Ok(self.updates)
}
} }
impl<Parent: DbHandle + Send + Sync> Transaction<Parent> { impl<Parent: DbHandle + Send + Sync> Transaction<Parent> {
pub async fn save(mut self) -> Result<(), Error> { pub async fn save(mut self) -> Result<(), Error> {