diff --git a/Cargo.toml b/Cargo.toml index 7a29538..17daced 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,2 +1,2 @@ [workspace] -members = ["evmap", "evmap-derive", "left-right"] +members = ["evmap", "left-right"] diff --git a/azure-pipelines.yml b/azure-pipelines.yml index ae1ada8..8f35643 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -12,18 +12,6 @@ stages: minrust: 1.40.0 codecov_token: $(CODECOV_TOKEN_SECRET) dir: "evmap" - - job: derive - displayName: "Test evmap-derive" - pool: - vmImage: ubuntu-latest - steps: - - template: install-rust.yml@templates - parameters: - components: - - rust-src - - script: cargo test - displayName: cargo test - workingDirectory: "evmap-derive" - job: benchmark displayName: "Check that benchmark compiles" pool: diff --git a/evmap-derive/Cargo.toml b/evmap-derive/Cargo.toml deleted file mode 100644 index cdf4872..0000000 --- a/evmap-derive/Cargo.toml +++ /dev/null @@ -1,29 +0,0 @@ -[package] -name = "evmap-derive" -version = "0.2.0" -authors = ["Jon Gjengset "] -edition = "2018" -license = "MIT OR Apache-2.0" - -description = "Derive macro for evmap::ShallowCopy" -repository = "https://github.com/jonhoo/rust-evmap.git" - -keywords = ["derive","evmap"] -categories = [] - -[badges] -azure-devops = { project = "jonhoo/jonhoo", pipeline = "evmap", build = "8" } -codecov = { repository = "jonhoo/rust-evmap", branch = "master", service = "github" } -maintenance = { status = "passively-maintained" } - -[lib] -proc-macro = true - -[dependencies] -syn = "1.0.16" -quote = "1.0.2" -proc-macro2 = "1.0.9" - -[dev-dependencies] -evmap = { path = "../evmap", version = "11.0.0-alpha.2" } -trybuild = "1.0.24" diff --git a/evmap-derive/src/lib.rs b/evmap-derive/src/lib.rs deleted file mode 100644 index e90f293..0000000 --- a/evmap-derive/src/lib.rs +++ /dev/null @@ -1,176 +0,0 @@ -//! This crate provides procedural derive macros to simplify the usage of `evmap`. -//! -//! 
Currently, only `#[derive(ShallowCopy)]` is supported; see below. -#![warn(missing_docs, rust_2018_idioms, broken_intra_doc_links)] - -#[allow(unused_extern_crates)] -extern crate proc_macro; - -use proc_macro2::{Ident, TokenStream}; -use quote::{format_ident, quote, quote_spanned}; -use syn::spanned::Spanned; -use syn::{ - parse_macro_input, parse_quote, Data, DeriveInput, Fields, GenericParam, Generics, Index, -}; - -/// Implementation for `#[derive(ShallowCopy)]` -/// -/// evmap provides the [`ShallowCopy`](evmap::shallow_copy::ShallowCopy) trait, which allows you to -/// cheaply alias types that don't otherwise implement `Copy`. Basic implementations are provided -/// for common types such as `String` and `Vec`, but it must be implemented manually for structs -/// using these types. -/// -/// This macro attempts to simplify this task. It only works on types whose members all implement -/// `ShallowCopy`. If this is not possible, consider using -/// [`CopyValue`](evmap::shallow_copy::CopyValue), `Box`, or `Arc` instead. -/// -/// ## Usage example -/// ``` -/// # use evmap_derive::ShallowCopy; -/// #[derive(ShallowCopy)] -/// struct Thing { field: i32 } -/// -/// #[derive(ShallowCopy)] -/// struct Generic { field: T } -/// -/// #[derive(ShallowCopy)] -/// enum Things { One(Thing), Two(Generic) } -/// ``` -/// -/// ## Generated implementations -/// The generated implementation calls -/// [`shallow_copy`](evmap::shallow_copy::ShallowCopy::shallow_copy) on all the members of the -/// type, and lifts the `ManuallyDrop` wrappers to the top-level return type. -/// -/// For generic types, the derive adds `ShallowCopy` bounds to all the type parameters. -/// -/// For instance, for the following code... -/// ``` -/// # use evmap_derive::ShallowCopy; -/// #[derive(ShallowCopy)] -/// struct Generic { field: T } -/// ``` -/// ...the derive generates... 
-/// ``` -/// # use evmap::shallow_copy::ShallowCopy; -/// # use std::mem::ManuallyDrop; -/// # struct Generic { field: T } -/// impl ShallowCopy for Generic { -/// unsafe fn shallow_copy(&self) -> ManuallyDrop { -/// ManuallyDrop::new(Self { -/// field: ManuallyDrop::into_inner(ShallowCopy::shallow_copy(&self.field)) -/// }) -/// } -/// } -/// ``` -#[proc_macro_derive(ShallowCopy)] -pub fn derive_shallow_copy(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let input = parse_macro_input!(input as DeriveInput); - let name = input.ident; - let generics = add_trait_bounds(input.generics); - let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); - let copy = fields(&input.data, &name); - proc_macro::TokenStream::from(quote! { - impl #impl_generics evmap::shallow_copy::ShallowCopy for #name #ty_generics #where_clause { - unsafe fn shallow_copy(&self) -> std::mem::ManuallyDrop { - #copy - } - } - }) -} - -fn add_trait_bounds(mut generics: Generics) -> Generics { - for param in &mut generics.params { - if let GenericParam::Type(ref mut type_param) = *param { - type_param - .bounds - .push(parse_quote!(evmap::shallow_copy::ShallowCopy)); - } - } - generics -} - -fn fields(data: &Data, type_name: &Ident) -> TokenStream { - match data { - Data::Struct(data) => match &data.fields { - Fields::Named(fields) => { - let recurse = fields.named.iter().map(|f| { - let name = &f.ident; - quote_spanned! {f.span()=> - #name: std::mem::ManuallyDrop::into_inner( - evmap::shallow_copy::ShallowCopy::shallow_copy(&self.#name) - ) - } - }); - quote! { - std::mem::ManuallyDrop::new(Self { #(#recurse,)* }) - } - } - Fields::Unnamed(fields) => { - let recurse = fields.unnamed.iter().enumerate().map(|(i, f)| { - let index = Index::from(i); - quote_spanned! {f.span()=> - std::mem::ManuallyDrop::into_inner( - evmap::shallow_copy::ShallowCopy::shallow_copy(&self.#index) - ) - } - }); - quote! 
{ - std::mem::ManuallyDrop::new(#type_name(#(#recurse,)*)) - } - } - Fields::Unit => quote!(std::mem::ManuallyDrop::new(#type_name)), - }, - Data::Enum(data) => { - let recurse = data.variants.iter().map(|f| { - let (names, fields) = match &f.fields { - Fields::Named(fields) => { - let field_names = f.fields.iter().map(|field| { - let ident = field.ident.as_ref().unwrap(); - quote_spanned! { - field.span()=> #ident - } - }); - let recurse = fields.named.iter().map(|f| { - let name = f.ident.as_ref().unwrap(); - quote_spanned! {f.span()=> - #name: std::mem::ManuallyDrop::into_inner( - evmap::shallow_copy::ShallowCopy::shallow_copy(#name) - ) - } - }); - (quote! { {#(#field_names,)*} }, quote! { { #(#recurse,)* } }) - } - Fields::Unnamed(fields) => { - let field_names = f.fields.iter().enumerate().map(|(i, field)| { - let ident = format_ident!("x{}", i); - quote_spanned! { - field.span()=> #ident - } - }); - let recurse = fields.unnamed.iter().enumerate().map(|(i, f)| { - let ident = format_ident!("x{}", i); - quote_spanned! {f.span()=> - std::mem::ManuallyDrop::into_inner( - evmap::shallow_copy::ShallowCopy::shallow_copy(#ident) - ) - } - }); - (quote! { (#(#field_names,)*) }, quote! { (#(#recurse,)*) }) - } - Fields::Unit => (quote!(), quote!()), - }; - let name = &f.ident; - quote_spanned! {f.span()=> - #type_name::#name#names => std::mem::ManuallyDrop::new(#type_name::#name#fields) - } - }); - quote! 
{ - match self { - #(#recurse,)* - } - } - } - Data::Union(_) => unimplemented!("Unions are not supported"), - } -} diff --git a/evmap-derive/tests/failing/lib.rs b/evmap-derive/tests/failing/lib.rs deleted file mode 100644 index db20bf7..0000000 --- a/evmap-derive/tests/failing/lib.rs +++ /dev/null @@ -1,25 +0,0 @@ -use evmap_derive::ShallowCopy; -use evmap::shallow_copy::ShallowCopy as _; - -#[derive(ShallowCopy)] -struct Broken { - f1: i32, - f2: std::cell::Cell<()>, -} - -#[derive(ShallowCopy)] -struct AlsoBroken(i32, std::cell::Cell<()>); - -#[derive(ShallowCopy)] -struct Generic { - f1: T -} - -fn main() { - let broken = Broken { f1: 0, f2: std::cell::Cell::new(()) }; - broken.shallow_copy(); - let also_broken = AlsoBroken(0, std::cell::Cell::new(())); - broken.shallow_copy(); - let generic = Generic { f1: std::cell::Cell::new(()) }; - generic.shallow_copy(); -} diff --git a/evmap-derive/tests/failing/lib.stderr b/evmap-derive/tests/failing/lib.stderr deleted file mode 100644 index d102c5e..0000000 --- a/evmap-derive/tests/failing/lib.stderr +++ /dev/null @@ -1,49 +0,0 @@ -error[E0277]: the trait bound `Cell<()>: ShallowCopy` is not satisfied - --> $DIR/lib.rs:7:2 - | -7 | f2: std::cell::Cell<()>, - | ^^ the trait `ShallowCopy` is not implemented for `Cell<()>` - | - = note: required by `shallow_copy` - -error[E0277]: the trait bound `Cell<()>: ShallowCopy` is not satisfied - --> $DIR/lib.rs:10:10 - | -10 | #[derive(ShallowCopy)] - | ^^^^^^^^^^^ the trait `ShallowCopy` is not implemented for `Cell<()>` - | - = note: required by `shallow_copy` - = note: this error originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info) - -error[E0599]: no method named `shallow_copy` found for struct `Generic>` in the current scope - --> $DIR/lib.rs:24:10 - | -14 | struct Generic { - | ----------------- - | | - | method `shallow_copy` not found for this - | doesn't satisfy `Generic>: ShallowCopy` -... 
-24 | generic.shallow_copy(); - | ^^^^^^^^^^^^ method not found in `Generic>` - | - ::: $RUST/core/src/cell.rs - | - | pub struct Cell { - | -------------------------- doesn't satisfy `Cell<()>: ShallowCopy` - | - ::: $WORKSPACE/evmap/src/shallow_copy.rs - | - | unsafe fn shallow_copy(&self) -> ManuallyDrop; - | ------------ - | | - | the method is available for `Box>>` here - | the method is available for `Arc>>` here - | the method is available for `Rc>>` here - | - = note: the method `shallow_copy` exists but the following trait bounds were not satisfied: - `Cell<()>: ShallowCopy` - which is required by `Generic>: ShallowCopy` - = help: items from traits can only be used if the trait is implemented and in scope - = note: the following trait defines an item `shallow_copy`, perhaps you need to implement it: - candidate #1: `ShallowCopy` diff --git a/evmap-derive/tests/lib.rs b/evmap-derive/tests/lib.rs deleted file mode 100644 index ddb458a..0000000 --- a/evmap-derive/tests/lib.rs +++ /dev/null @@ -1,65 +0,0 @@ -use evmap::shallow_copy::ShallowCopy as _; -use evmap_derive::ShallowCopy; -use std::sync::Arc; - -#[derive(ShallowCopy)] -enum Message { - Quit, - ChangeColor(i32, i32, i32), - Move { x: i32, y: i32 }, - Write(String), -} - -#[derive(ShallowCopy)] -struct Shallow; - -#[derive(ShallowCopy)] -struct Tuple(i32, String, Shallow); - -#[derive(ShallowCopy)] -struct Test { - f1: i32, - f2: (i32, i32), - f3: String, - f4: Arc, - f5: Shallow, - f6: evmap::shallow_copy::CopyValue, - f7: Tuple, -} - -#[derive(ShallowCopy)] -struct Generic { - f1: T, -} - -#[test] -fn test() { - unsafe { - let message = Message::Quit; - message.shallow_copy(); - let shallow = Shallow; - shallow.shallow_copy(); - let tuple = Tuple(0, "test".to_owned(), Shallow); - tuple.shallow_copy(); - let test = Test { - f1: 0, - f2: (1, 2), - f3: "test".to_owned(), - f4: Arc::new("test".to_owned()), - f5: shallow, - f6: 3.into(), - f7: tuple, - }; - test.shallow_copy(); - let generic = Generic { - 
use std::cell::Cell;
use std::marker::PhantomData;
use std::mem::MaybeUninit;
use std::ops::Deref;

/// A `T` that is aliased across the two map copies.
///
/// You should be able to mostly ignore this type, as it can generally be treated exactly like a
/// `&T`. However, there are some minor exceptions around forwarding traits -- since `Aliased<T>`
/// is a wrapper type around `T`, it cannot automatically forward traits it does not know about to
/// `&T`. This means that if your `&T` implements, say, `Serialize` or some custom `Borrow<Q>`,
/// `Aliased<T>` will not implement that same trait. You can work around this either by
/// implementing your trait specifically for `Aliased<T>` where possible, or by manually
/// dereferencing to get the `&T` before using the trait.
#[repr(transparent)]
pub struct Aliased<T> {
    // The (possibly aliased) value. Kept behind `MaybeUninit` so that dropping an `Aliased<T>`
    // does NOT drop the `T` unless explicitly armed via `drop_copies` below -- two copies of the
    // map may hold this same `T` at the same time.
    aliased: MaybeUninit<T>,

    // We cannot implement Send just because T is Send since we're aliasing it.
    _no_auto_send: PhantomData<*const T>,
}

impl<T> Aliased<T> {
    /// Create an alias of the inner `T`.
    ///
    /// # Safety
    ///
    /// This method is safe because safe code can only get at `&T`, and cannot cause the `T` to be
    /// dropped. Nevertheless, you should be careful to only invoke this method assuming that you
    /// _do_ call [`drop_copies`] somewhere, as the `Aliased<T>` is no longer safe to use after it
    /// is truly dropped.
    pub(crate) fn alias(&self) -> Self {
        // safety:
        // We are aliasing T here, but it is okay because:
        // a) the T is behind a MaybeUninit, and so cannot be accessed safely; and
        // b) we only expose _either_ &T while aliased, or &mut T after the aliasing ends.
        Aliased {
            aliased: unsafe { std::ptr::read(&self.aliased) },
            _no_auto_send: PhantomData,
        }
    }

    /// Construct an aliased value around a `T`.
    ///
    /// Note that we do not implement `From<T>` because we do not want users to construct
    /// `Aliased<T>`s on their own. If they did, they would almost certainly end up with incorrect
    /// drop behavior.
    pub(crate) fn from(t: T) -> Self {
        Self {
            aliased: MaybeUninit::new(t),
            _no_auto_send: PhantomData,
        }
    }
}

// Aliased<T> gives &T across threads if shared or sent across thread boundaries.
// Aliased<T> gives &mut T across threads (for drop) if sent across thread boundaries.
// This implies that we are only Send if T is Send+Sync, and Sync if T is Sync.
//
// Note that these bounds are stricter than what the compiler would auto-generate for the type.
unsafe impl<T> Send for Aliased<T> where T: Send + Sync {}
unsafe impl<T> Sync for Aliased<T> where T: Sync {}

// Per-thread flag: when true, dropping an `Aliased<T>` drops the inner `T` for real.
//
// XXX: Is this a problem if people start nesting evmaps?
// I feel like the answer is yes.
thread_local! {
    static DROP_FOR_REAL: Cell<bool> = Cell::new(false);
}

/// Make _any_ dropped `Aliased<T>` actually drop their inner `T`.
///
/// Be very careful: this function will cause _all_ dropped `Aliased<T>` to drop their `T`.
///
/// When the return value is dropped, dropping `Aliased<T>` will have no effect again.
///
/// # Safety
///
/// Only set this when any following `Aliased<T>` that are dropped are no longer aliased.
pub(crate) unsafe fn drop_copies() -> impl Drop {
    // RAII guard: clears the thread-local flag again when it goes out of scope, so the
    // "really drop" window is bounded by the caller's scope even on panic.
    struct DropGuard;
    impl Drop for DropGuard {
        fn drop(&mut self) {
            DROP_FOR_REAL.with(|dfr| dfr.set(false));
        }
    }
    let guard = DropGuard;
    DROP_FOR_REAL.with(|dfr| dfr.set(true));
    guard
}

impl<T> Drop for Aliased<T> {
    fn drop(&mut self) {
        DROP_FOR_REAL.with(move |drop_for_real| {
            if drop_for_real.get() {
                // safety:
                // MaybeUninit<T> was created from a valid T.
                // That T has not been dropped (drop_copies is unsafe).
                // T is no longer aliased (drop_copies is unsafe),
                // so we are allowed to re-take ownership of the T.
                unsafe { std::ptr::drop_in_place(self.aliased.as_mut_ptr()) }
            }
        })
    }
}

impl<T> AsRef<T> for Aliased<T> {
    fn as_ref(&self) -> &T {
        // safety:
        // MaybeUninit<T> was created from a valid T.
        // That T has not been dropped (drop_copies is unsafe).
        // All we have done to T is alias it. But, since we only give out &T
        // (which should be legal anyway), we're fine.
        unsafe { &*self.aliased.as_ptr() }
    }
}

impl<T> Deref for Aliased<T> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        self.as_ref()
    }
}

use std::hash::{Hash, Hasher};
impl<T> Hash for Aliased<T>
where
    T: Hash,
{
    fn hash<H>(&self, state: &mut H)
    where
        H: Hasher,
    {
        self.as_ref().hash(state)
    }
}

use std::fmt;
impl<T> fmt::Debug for Aliased<T>
where
    T: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.as_ref().fmt(f)
    }
}

impl<T> PartialEq for Aliased<T>
where
    T: PartialEq,
{
    fn eq(&self, other: &Self) -> bool {
        self.as_ref().eq(other.as_ref())
    }

    // Deliberately forward `ne` as well, so a `T` with a specialized `ne` keeps its behavior.
    #[allow(clippy::partialeq_ne_impl)]
    fn ne(&self, other: &Self) -> bool {
        self.as_ref().ne(other.as_ref())
    }
}

impl<T> Eq for Aliased<T> where T: Eq {}

impl<T> PartialOrd for Aliased<T>
where
    T: PartialOrd,
{
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        self.as_ref().partial_cmp(other.as_ref())
    }

    // Forward the comparison shortcuts too, in case `T` overrides them more efficiently.
    fn lt(&self, other: &Self) -> bool {
        self.as_ref().lt(other.as_ref())
    }

    fn le(&self, other: &Self) -> bool {
        self.as_ref().le(other.as_ref())
    }

    fn gt(&self, other: &Self) -> bool {
        self.as_ref().gt(other.as_ref())
    }

    fn ge(&self, other: &Self) -> bool {
        self.as_ref().ge(other.as_ref())
    }
}

impl<T> Ord for Aliased<T>
where
    T: Ord,
{
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.as_ref().cmp(other.as_ref())
    }
}

use std::borrow::Borrow;
impl<T> Borrow<T> for Aliased<T> {
    fn borrow(&self) -> &T {
        self.as_ref()
    }
}
// What we _really_ want here is:
// ```
// impl<T, U> Borrow<U> for Aliased<T>
// where
//     T: Borrow<U>,
// {
//     fn borrow(&self) -> &U {
//         self.as_ref().borrow()
//     }
// }
// ```
// But unfortunately that won't work due to trait coherence.
// Instead, we manually write the nice Borrow impls from std.
// This won't help with custom Borrow impls, but gets you pretty far.
impl Borrow<str> for Aliased<String> {
    fn borrow(&self) -> &str {
        self.as_ref()
    }
}
impl Borrow<std::path::Path> for Aliased<std::path::PathBuf> {
    fn borrow(&self) -> &std::path::Path {
        self.as_ref()
    }
}
impl<T> Borrow<[T]> for Aliased<Vec<T>> {
    fn borrow(&self) -> &[T] {
        self.as_ref()
    }
}
impl<T> Borrow<T> for Aliased<Box<T>>
where
    T: ?Sized,
{
    fn borrow(&self) -> &T {
        self.as_ref()
    }
}
impl<T> Borrow<T> for Aliased<std::sync::Arc<T>>
where
    T: ?Sized,
{
    fn borrow(&self) -> &T {
        self.as_ref()
    }
}
impl<T> Borrow<T> for Aliased<std::rc::Rc<T>>
where
    T: ?Sized,
{
    fn borrow(&self) -> &T {
        self.as_ref()
    }
}
@@ -400,7 +400,7 @@ where where K: Eq + Hash + Clone, S: BuildHasher + Clone, - V: Eq + Hash + ShallowCopy, + V: Eq + Hash, M: 'static + Clone, { let inner = if let Some(cap) = self.capacity { @@ -426,7 +426,7 @@ pub fn new() -> ( ) where K: Eq + Hash + Clone, - V: Eq + Hash + ShallowCopy, + V: Eq + Hash, { Options::default().construct() } @@ -443,7 +443,7 @@ pub fn with_meta( ) where K: Eq + Hash + Clone, - V: Eq + Hash + ShallowCopy, + V: Eq + Hash, M: 'static + Clone, { Options::default().with_meta(meta).construct() @@ -459,7 +459,7 @@ pub fn with_hasher( ) -> (WriteHandle, ReadHandle) where K: Eq + Hash + Clone, - V: Eq + Hash + ShallowCopy, + V: Eq + Hash, M: 'static + Clone, S: BuildHasher + Clone, { diff --git a/evmap/src/read.rs b/evmap/src/read.rs index 8033175..5f98bd3 100644 --- a/evmap/src/read.rs +++ b/evmap/src/read.rs @@ -1,5 +1,4 @@ -use crate::inner::Inner; -use crate::values::Values; +use crate::{inner::Inner, Aliased, Values}; use left_right::ReadGuard; use std::borrow::Borrow; @@ -209,9 +208,10 @@ where pub fn contains_value(&self, key: &Q, value: &W) -> bool where K: Borrow, - V: Borrow, + Aliased: Borrow, Q: Hash + Eq, W: Hash + Eq, + V: Hash + Eq, { self.get_raw(key.borrow()) .map(|x| x.contains(value)) diff --git a/evmap/src/read/read_ref.rs b/evmap/src/read/read_ref.rs index 4c1e55c..f143fc0 100644 --- a/evmap/src/read/read_ref.rs +++ b/evmap/src/read/read_ref.rs @@ -1,4 +1,4 @@ -use crate::{inner::Inner, values::Values}; +use crate::{inner::Inner, Aliased, Values}; use left_right::ReadGuard; use std::borrow::Borrow; use std::collections::hash_map::RandomState; @@ -147,7 +147,7 @@ where pub fn contains_value(&self, key: &Q, value: &W) -> bool where K: Borrow, - V: Borrow, + Aliased: Borrow, Q: Hash + Eq, W: Hash + Eq, { diff --git a/evmap/src/shallow_copy.rs b/evmap/src/shallow_copy.rs deleted file mode 100644 index b238b57..0000000 --- a/evmap/src/shallow_copy.rs +++ /dev/null @@ -1,291 +0,0 @@ -//! Types that can be cheaply aliased. 
- -use std::mem::ManuallyDrop; -use std::ops::{Deref, DerefMut}; - -/// Types that implement this trait can be cheaply copied by (potentially) aliasing the data they -/// contain. Only the _last_ shallow copy will be dropped -- all others will be silently leaked -/// (with `mem::forget`). -/// -/// To implement this trait for your own `Copy` type, write: -/// -/// ```rust -/// # use evmap::ShallowCopy; -/// use std::mem::ManuallyDrop; -/// -/// #[derive(Copy, Clone)] -/// struct T; -/// -/// impl ShallowCopy for T { -/// unsafe fn shallow_copy(&self) -> ManuallyDrop { -/// ManuallyDrop::new(*self) -/// } -/// } -/// ``` -/// -/// If you have a non-`Copy` type, the value returned by `shallow_copy` should point to the same -/// data as the `&mut self`, and it should be safe to `mem::forget` either of the copies as long as -/// the other is dropped normally afterwards. -/// -/// For complex, non-`Copy` types, you can place the type behind a wrapper that implements -/// `ShallowCopy` such as `Box` or `Arc`. -/// Alternatively, if your type is made up of types that all implement `ShallowCopy`, consider -/// using the `evmap-derive` crate, which contains a derive macro for `ShallowCopy`. -/// See that crate's documentation for details. -pub trait ShallowCopy { - /// Perform an aliasing copy of this value. - /// - /// # Safety - /// - /// The use of this method is *only* safe if the values involved are never mutated, and only - /// one of the copies is dropped; the remaining copies must be forgotten with `mem::forget`. 
- unsafe fn shallow_copy(&self) -> ManuallyDrop; -} - -use std::sync::Arc; -impl ShallowCopy for Arc -where - T: ?Sized, -{ - unsafe fn shallow_copy(&self) -> ManuallyDrop { - ManuallyDrop::new(Arc::from_raw(&**self as *const _)) - } -} - -use std::rc::Rc; -impl ShallowCopy for Rc -where - T: ?Sized, -{ - unsafe fn shallow_copy(&self) -> ManuallyDrop { - ManuallyDrop::new(Rc::from_raw(&**self as *const _)) - } -} - -impl ShallowCopy for Box -where - T: ?Sized, -{ - unsafe fn shallow_copy(&self) -> ManuallyDrop { - ManuallyDrop::new(Box::from_raw(&**self as *const _ as *mut _)) - } -} - -impl ShallowCopy for Option -where - T: ShallowCopy, -{ - unsafe fn shallow_copy(&self) -> ManuallyDrop { - ManuallyDrop::new(if let Some(value) = self { - Some(ManuallyDrop::into_inner(value.shallow_copy())) - } else { - None - }) - } -} - -impl ShallowCopy for String { - unsafe fn shallow_copy(&self) -> ManuallyDrop { - let buf = self.as_bytes().as_ptr(); - let len = self.len(); - let cap = self.capacity(); - ManuallyDrop::new(String::from_raw_parts(buf as *mut _, len, cap)) - } -} - -impl ShallowCopy for Vec { - unsafe fn shallow_copy(&self) -> ManuallyDrop { - let ptr = self.as_ptr() as *mut _; - let len = self.len(); - let cap = self.capacity(); - ManuallyDrop::new(Vec::from_raw_parts(ptr, len, cap)) - } -} - -#[cfg(feature = "bytes")] -impl ShallowCopy for bytes::Bytes { - unsafe fn shallow_copy(&self) -> ManuallyDrop { - let len = self.len(); - let buf: &'static [u8] = std::slice::from_raw_parts(self.as_ptr(), len); - ManuallyDrop::new(bytes::Bytes::from_static(buf)) - } -} - -impl<'a, T> ShallowCopy for &'a T -where - T: ?Sized, -{ - unsafe fn shallow_copy(&self) -> ManuallyDrop { - ManuallyDrop::new(&*self) - } -} - -/// If you are willing to have your values be copied between the two views of the `evmap`, -/// wrap them in this type. -/// -/// This is effectively a way to bypass the `ShallowCopy` optimization. 
-/// Note that you do not need this wrapper for most `Copy` primitives. -#[derive(Clone, Copy, Debug, Hash, Eq, PartialEq, Ord, PartialOrd, Default)] -#[repr(transparent)] -pub struct CopyValue(T); - -impl From for CopyValue { - fn from(t: T) -> Self { - CopyValue(t) - } -} - -impl ShallowCopy for CopyValue -where - T: Copy, -{ - unsafe fn shallow_copy(&self) -> ManuallyDrop { - ManuallyDrop::new(CopyValue(self.0)) - } -} - -impl Deref for CopyValue { - type Target = T; - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -impl DerefMut for CopyValue { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} - -macro_rules! impl_shallow_copy_for_copy_primitives { - ($($t:ty)*) => ($( - impl ShallowCopy for $t { - unsafe fn shallow_copy(&self) -> ManuallyDrop { - ManuallyDrop::new(*self) - } - } - )*) -} - -impl_shallow_copy_for_copy_primitives!(() bool char usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64); - -macro_rules! tuple_impls { - ($( - $Tuple:ident { - $(($idx:tt) -> $T:ident)+ - } - )+) => { - $( - impl<$($T:ShallowCopy),+> ShallowCopy for ($($T,)+) { - unsafe fn shallow_copy(&self) -> ManuallyDrop { - ManuallyDrop::new(($(ManuallyDrop::into_inner(self.$idx.shallow_copy()),)+)) - } - } - )+ - } -} - -tuple_impls! 
{ - Tuple1 { - (0) -> A - } - Tuple2 { - (0) -> A - (1) -> B - } - Tuple3 { - (0) -> A - (1) -> B - (2) -> C - } - Tuple4 { - (0) -> A - (1) -> B - (2) -> C - (3) -> D - } - Tuple5 { - (0) -> A - (1) -> B - (2) -> C - (3) -> D - (4) -> E - } - Tuple6 { - (0) -> A - (1) -> B - (2) -> C - (3) -> D - (4) -> E - (5) -> F - } - Tuple7 { - (0) -> A - (1) -> B - (2) -> C - (3) -> D - (4) -> E - (5) -> F - (6) -> G - } - Tuple8 { - (0) -> A - (1) -> B - (2) -> C - (3) -> D - (4) -> E - (5) -> F - (6) -> G - (7) -> H - } - Tuple9 { - (0) -> A - (1) -> B - (2) -> C - (3) -> D - (4) -> E - (5) -> F - (6) -> G - (7) -> H - (8) -> I - } - Tuple10 { - (0) -> A - (1) -> B - (2) -> C - (3) -> D - (4) -> E - (5) -> F - (6) -> G - (7) -> H - (8) -> I - (9) -> J - } - Tuple11 { - (0) -> A - (1) -> B - (2) -> C - (3) -> D - (4) -> E - (5) -> F - (6) -> G - (7) -> H - (8) -> I - (9) -> J - (10) -> K - } - Tuple12 { - (0) -> A - (1) -> B - (2) -> C - (3) -> D - (4) -> E - (5) -> F - (6) -> G - (7) -> H - (8) -> I - (9) -> J - (10) -> K - (11) -> L - } -} diff --git a/evmap/src/values.rs b/evmap/src/values.rs index f24c860..b5862fc 100644 --- a/evmap/src/values.rs +++ b/evmap/src/values.rs @@ -1,3 +1,4 @@ +use crate::Aliased; use std::borrow::Borrow; use std::fmt; use std::hash::{BuildHasher, Hash}; @@ -26,8 +27,8 @@ where } enum ValuesInner { - Short(smallvec::SmallVec<[T; 1]>), - Long(hashbag::HashBag), + Short(smallvec::SmallVec<[Aliased; 1]>), + Long(hashbag::HashBag, S>), } impl Values { @@ -72,8 +73,8 @@ impl Values { /// is returned. pub fn get_one(&self) -> Option<&T> { match self.0 { - ValuesInner::Short(ref v) => v.get(0), - ValuesInner::Long(ref v) => v.iter().next(), + ValuesInner::Short(ref v) => v.get(0).map(|v| &**v), + ValuesInner::Long(ref v) => v.iter().next().map(|v| &**v), } } @@ -83,7 +84,7 @@ impl Values { /// *must* match those for the value type. 
pub fn contains(&self, value: &Q) -> bool where - T: Borrow, + Aliased: Borrow, Q: Eq + Hash, T: Eq + Hash, S: BuildHasher, @@ -111,9 +112,9 @@ impl<'a, T, S> IntoIterator for &'a Values { #[non_exhaustive] pub enum ValuesIter<'a, T, S> { #[doc(hidden)] - Short(<&'a smallvec::SmallVec<[T; 1]> as IntoIterator>::IntoIter), + Short(<&'a smallvec::SmallVec<[Aliased; 1]> as IntoIterator>::IntoIter), #[doc(hidden)] - Long(<&'a hashbag::HashBag as IntoIterator>::IntoIter), + Long(<&'a hashbag::HashBag, S> as IntoIterator>::IntoIter), } impl<'a, T, S> fmt::Debug for ValuesIter<'a, T, S> @@ -132,8 +133,8 @@ impl<'a, T, S> Iterator for ValuesIter<'a, T, S> { type Item = &'a T; fn next(&mut self) -> Option { match *self { - Self::Short(ref mut it) => it.next(), - Self::Long(ref mut it) => it.next(), + Self::Short(ref mut it) => it.next().map(|v| &**v), + Self::Long(ref mut it) => it.next().map(|v| &**v), } } @@ -230,7 +231,7 @@ where pub(crate) fn swap_remove(&mut self, value: &T) { match self.0 { ValuesInner::Short(ref mut v) => { - if let Some(i) = v.iter().position(|v| v == value) { + if let Some(i) = v.iter().position(|v| &**v == value) { v.swap_remove(i); } } @@ -269,7 +270,7 @@ where } } - pub(crate) fn push(&mut self, value: T, hasher: &S) { + pub(crate) fn push(&mut self, value: Aliased, hasher: &S) { match self.0 { ValuesInner::Short(ref mut v) => { // we may want to upgrade to a Long.. 
@@ -301,18 +302,19 @@ where } } - pub(crate) fn from_iter(iter: I, hasher: &S) -> Self - where - I: IntoIterator, - { - let iter = iter.into_iter(); - if iter.size_hint().0 > BAG_THRESHOLD { - let mut long = hashbag::HashBag::with_hasher(hasher.clone()); - long.extend(iter); - Self(ValuesInner::Long(long)) - } else { - use std::iter::FromIterator; - Self(ValuesInner::Short(smallvec::SmallVec::from_iter(iter))) + pub(crate) fn alias(other: &Self, hasher: &S) -> Self { + match &other.0 { + ValuesInner::Short(s) => { + use std::iter::FromIterator; + Self(ValuesInner::Short(smallvec::SmallVec::from_iter( + s.iter().map(|v| v.alias()), + ))) + } + ValuesInner::Long(l) => { + let mut long = hashbag::HashBag::with_hasher(hasher.clone()); + long.extend(l.set_iter().map(|(v, n)| (v.alias(), n))); + Self(ValuesInner::Long(long)) + } } } } @@ -351,13 +353,15 @@ mod tests { #[test] fn short_values() { + let _guard = unsafe { crate::aliasing::drop_copies() }; + let hasher = RandomState::default(); let mut v = Values::new(); let values = 0..BAG_THRESHOLD - 1; let len = values.clone().count(); for i in values.clone() { - v.push(i, &hasher); + v.push(Aliased::from(i), &hasher); } for i in values.clone() { @@ -382,13 +386,15 @@ mod tests { #[test] fn long_values() { + let _guard = unsafe { crate::aliasing::drop_copies() }; + let hasher = RandomState::default(); let mut v = Values::new(); let values = 0..BAG_THRESHOLD; let len = values.clone().count(); for i in values.clone() { - v.push(i, &hasher); + v.push(Aliased::from(i), &hasher); } for i in values.clone() { diff --git a/evmap/src/write.rs b/evmap/src/write.rs index 8616de9..28873c3 100644 --- a/evmap/src/write.rs +++ b/evmap/src/write.rs @@ -1,4 +1,4 @@ -use super::{Operation, Predicate, ShallowCopy}; +use super::{Aliased, Operation, Predicate}; use crate::inner::Inner; use crate::read::ReadHandle; use crate::values::Values; @@ -7,7 +7,6 @@ use left_right::Absorb; use std::collections::hash_map::RandomState; use std::fmt; use 
std::hash::{BuildHasher, Hash}; -use std::mem::ManuallyDrop; #[cfg(feature = "indexed")] use indexmap::map::Entry; @@ -51,7 +50,7 @@ pub struct WriteHandle where K: Eq + Hash + Clone, S: BuildHasher + Clone, - V: Eq + Hash + ShallowCopy, + V: Eq + Hash, M: 'static + Clone, { handle: left_right::WriteHandle, Operation>, @@ -67,7 +66,7 @@ impl fmt::Debug for WriteHandle where K: Eq + Hash + Clone + fmt::Debug, S: BuildHasher + Clone, - V: Eq + Hash + ShallowCopy + fmt::Debug, + V: Eq + Hash + fmt::Debug, M: 'static + Clone + fmt::Debug, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -82,7 +81,7 @@ impl WriteHandle where K: Eq + Hash + Clone, S: BuildHasher + Clone, - V: Eq + Hash + ShallowCopy, + V: Eq + Hash, M: 'static + Clone, { pub(crate) fn new( @@ -139,7 +138,6 @@ where } else { self.handle.append(op); } - self } @@ -148,7 +146,7 @@ where /// The updated value-bag will only be visible to readers after the next call to /// [`publish`](Self::publish). pub fn insert(&mut self, k: K, v: V) -> &mut Self { - self.add_op(Operation::Add(k, v)) + self.add_op(Operation::Add(k, Aliased::from(v))) } /// Replace the value-bag of the given key with the given value. @@ -162,7 +160,7 @@ where /// The new value will only be visible to readers after the next call to /// [`publish`](Self::publish). pub fn update(&mut self, k: K, v: V) -> &mut Self { - self.add_op(Operation::Replace(k, v)) + self.add_op(Operation::Replace(k, Aliased::from(v))) } /// Clear the value-bag of the given key, without removing it. @@ -331,22 +329,15 @@ impl Absorb> for Inner where K: Eq + Hash + Clone, S: BuildHasher + Clone, - V: Eq + Hash + ShallowCopy, + V: Eq + Hash, M: 'static + Clone, { /// Apply ops in such a way that no values are dropped, only forgotten fn absorb_first(&mut self, op: &mut Operation, other: &Self) { - // Make sure that no methods below drop values since we're only operating on the first - // shallow copy of each value. 
- // - // Safety: ManuallyDrop has the same layout as T. - let inner = unsafe { - &mut *(self as *mut Inner as *mut Inner, M, S>) - }; let hasher = other.data.hasher(); match *op { Operation::Replace(ref key, ref mut value) => { - let vs = inner.data.entry(key.clone()).or_insert_with(Values::new); + let vs = self.data.entry(key.clone()).or_insert_with(Values::new); // truncate vector vs.clear(); @@ -355,46 +346,42 @@ where // so it will switch back to inline allocation for the subsequent push. vs.shrink_to_fit(); - vs.push(unsafe { value.shallow_copy() }, hasher); + vs.push(value.alias(), hasher); } Operation::Clear(ref key) => { - inner - .data + self.data .entry(key.clone()) .or_insert_with(Values::new) .clear(); } Operation::Add(ref key, ref mut value) => { - inner - .data + self.data .entry(key.clone()) .or_insert_with(Values::new) - .push(unsafe { value.shallow_copy() }, hasher); + .push(value.alias(), hasher); } Operation::RemoveEntry(ref key) => { #[cfg(not(feature = "indexed"))] - inner.data.remove(key); + self.data.remove(key); #[cfg(feature = "indexed")] - inner.data.swap_remove(key); + self.data.swap_remove(key); } Operation::Purge => { - inner.data.clear(); + self.data.clear(); } #[cfg(feature = "eviction")] Operation::EmptyAt(ref indices) => { for &index in indices.iter().rev() { - inner.data.swap_remove_index(index); + self.data.swap_remove_index(index); } } Operation::RemoveValue(ref key, ref value) => { - if let Some(e) = inner.data.get_mut(key) { - // remove a matching value from the value set - // safety: this is fine - e.swap_remove(unsafe { &*(value as *const _ as *const ManuallyDrop) }); + if let Some(e) = self.data.get_mut(key) { + e.swap_remove(&value); } } Operation::Retain(ref key, ref mut predicate) => { - if let Some(e) = inner.data.get_mut(key) { + if let Some(e) = self.data.get_mut(key) { let mut first = true; e.retain(move |v| { let retain = predicate.eval(v, first); @@ -405,17 +392,17 @@ where } Operation::Fit(ref key) => match key { 
Some(ref key) => { - if let Some(e) = inner.data.get_mut(key) { + if let Some(e) = self.data.get_mut(key) { e.shrink_to_fit(); } } None => { - for value_set in inner.data.values_mut() { + for value_set in self.data.values_mut() { value_set.shrink_to_fit(); } } }, - Operation::Reserve(ref key, additional) => match inner.data.entry(key.clone()) { + Operation::Reserve(ref key, additional) => match self.data.entry(key.clone()) { Entry::Occupied(mut entry) => { entry.get_mut().reserve(additional, hasher); } @@ -424,10 +411,10 @@ where } }, Operation::MarkReady => { - inner.ready = true; + self.ready = true; } Operation::SetMeta(ref m) => { - inner.meta = m.clone(); + self.meta = m.clone(); } Operation::JustCloneRHandle => { // This is applying the operation to the original write handle, @@ -438,11 +425,14 @@ where /// Apply operations while allowing dropping of values fn absorb_second(&mut self, op: Operation, other: &Self) { - let inner = self; + let _guard = unsafe { crate::aliasing::drop_copies() }; + // NOTE: the dropping here applies equally to Vs in the Operation as to Vs in the map. + // So, we need to make sure _consume_ the Aliased from the oplog. + let hasher = other.data.hasher(); match op { Operation::Replace(key, value) => { - let v = inner.data.entry(key).or_insert_with(Values::new); + let v = self.data.entry(key).or_insert_with(Values::new); // we are going second, so we should drop! 
v.clear(); @@ -452,41 +442,40 @@ where v.push(value, hasher); } Operation::Clear(key) => { - inner.data.entry(key).or_insert_with(Values::new).clear(); + self.data.entry(key).or_insert_with(Values::new).clear(); } Operation::Add(key, value) => { - inner - .data + self.data .entry(key) .or_insert_with(Values::new) .push(value, hasher); } Operation::RemoveEntry(key) => { #[cfg(not(feature = "indexed"))] - inner.data.remove(&key); + self.data.remove(&key); #[cfg(feature = "indexed")] - inner.data.swap_remove(&key); + self.data.swap_remove(&key); } Operation::Purge => { - inner.data.clear(); + self.data.clear(); } #[cfg(feature = "eviction")] Operation::EmptyAt(indices) => { for &index in indices.iter().rev() { - inner.data.swap_remove_index(index); + self.data.swap_remove_index(index); } } Operation::RemoveValue(key, value) => { - if let Some(e) = inner.data.get_mut(&key) { + if let Some(e) = self.data.get_mut(&key) { // find the first entry that matches all fields e.swap_remove(&value); } } Operation::Retain(key, mut predicate) => { - if let Some(e) = inner.data.get_mut(&key) { + if let Some(e) = self.data.get_mut(&key) { let mut first = true; e.retain(move |v| { - let retain = predicate.eval(v, first); + let retain = predicate.eval(&*v, first); first = false; retain }); @@ -494,17 +483,17 @@ where } Operation::Fit(key) => match key { Some(ref key) => { - if let Some(e) = inner.data.get_mut(key) { + if let Some(e) = self.data.get_mut(key) { e.shrink_to_fit(); } } None => { - for value_set in inner.data.values_mut() { + for value_set in self.data.values_mut() { value_set.shrink_to_fit(); } } }, - Operation::Reserve(key, additional) => match inner.data.entry(key) { + Operation::Reserve(key, additional) => match self.data.entry(key) { Entry::Occupied(mut entry) => { entry.get_mut().reserve(additional, hasher); } @@ -513,10 +502,10 @@ where } }, Operation::MarkReady => { - inner.ready = true; + self.ready = true; } Operation::SetMeta(m) => { - inner.meta = m; + self.meta 
= m; } Operation::JustCloneRHandle => { // This is applying the operation to the original read handle, @@ -525,16 +514,12 @@ where // XXX: it really is too bad that we can't just .clone() the data here and save // ourselves a lot of re-hashing, re-bucketization, etc. - inner.data.extend(other.data.iter().map(|(k, vs)| { - ( - k.clone(), - Values::from_iter( - vs.iter() - .map(|v| unsafe { ManuallyDrop::into_inner((&*v).shallow_copy()) }), - other.data.hasher(), - ), - ) - })); + self.data.extend( + other + .data + .iter() + .map(|(k, vs)| (k.clone(), Values::alias(vs, other.data.hasher()))), + ); } } } @@ -542,17 +527,16 @@ where fn drop_first(self: Box) { // since the two copies are exactly equal, we need to make sure that we *don't* call the // destructors of any of the values that are in our map, as they'll all be called when the - // last read handle goes out of scope. - // - // Safety: ManuallyDrop has the same layout as T. - let inner = - unsafe { Box::from_raw(Box::into_raw(self) as *mut Inner, M, S>) }; - drop(inner); + // last read handle goes out of scope. that's easy enough since none of them will be + // dropped by default. } fn drop_second(self: Box) { // when the second copy is dropped is where we want to _actually_ drop all the values in - // the map. this happens automatically. + // the map. we do that by setting drop_copies to true. we do it with a guard though to make + // sure that if drop panics we unset the thread-local! 
+ + let _guard = unsafe { crate::aliasing::drop_copies() }; drop(self); } } @@ -561,7 +545,7 @@ impl Extend<(K, V)> for WriteHandle where K: Eq + Hash + Clone, S: BuildHasher + Clone, - V: Eq + Hash + ShallowCopy, + V: Eq + Hash, M: 'static + Clone, { fn extend>(&mut self, iter: I) { @@ -577,7 +561,7 @@ impl Deref for WriteHandle where K: Eq + Hash + Clone, S: BuildHasher + Clone, - V: Eq + Hash + ShallowCopy, + V: Eq + Hash, M: 'static + Clone, { type Target = ReadHandle; diff --git a/evmap/tests/lib.rs b/evmap/tests/lib.rs index 83e12c4..97efcfb 100644 --- a/evmap/tests/lib.rs +++ b/evmap/tests/lib.rs @@ -193,7 +193,7 @@ fn read_after_drop() { #[test] fn clone_types() { - let x = evmap::shallow_copy::CopyValue::from(b"xyz"); + let x = b"xyz"; let (mut w, r) = evmap::new(); w.insert(&*x, x); diff --git a/evmap/tests/quick.rs b/evmap/tests/quick.rs index bb2a480..c2558bb 100644 --- a/evmap/tests/quick.rs +++ b/evmap/tests/quick.rs @@ -97,7 +97,7 @@ fn do_ops( read_ref: &mut HashMap>, ) where K: Hash + Eq + Clone, - V: Clone + evmap::ShallowCopy + Eq + Hash, + V: Clone + Eq + Hash, S: BuildHasher + Clone, { for op in ops {