diff --git a/CHANGELOG.md b/CHANGELOG.md index cc47acc5..73b10d18 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,12 @@ # CHANGELOG +## 0.9.0 + +- Make file backed mmap `SkipMap` and `SkipSet` still can be reopened even last time the program was aborted. +- Remove checksum validation, users should take care of data integrity by themselves. +- Support `Clone` directly, no need to use `Arc` wrapper anymore. +- Add `OpenOptions` and `MmapOptions` to support better controls on file mmap backed `SkipMap` and `SkipSet`. + ## 0.8.6 - Add `SkipMap::min_version` and `SkipSet::min_version` to access the min version of the `SkipMap` or `SkipSet`. diff --git a/Cargo.toml b/Cargo.toml index 697f798c..3180f68d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "skl" -version = "0.8.6" +version = "0.9.0" edition = "2021" rust-version = "1.56.0" repository = "https://github.com/al8n/skl-rs" @@ -33,7 +33,7 @@ required-features = ["memmap"] [features] default = ["std"] alloc = [] -memmap = ["memmap2", "fs4", "std", "crc32fast"] +memmap = ["memmap2", "fs4", "std"] std = ["rand/default", "either/default"] [target.'cfg(loom)'.dependencies] @@ -49,7 +49,6 @@ rand = { version = "0.8", default-features = false, features = ["getrandom"] } fs4 = { version = "0.8", optional = true } memmap2 = { version = "0.9", optional = true } -crc32fast = { version = "1", optional = true } [dev-dependencies] criterion = "0.5" diff --git a/README.md b/README.md index aa7a86a1..1da00452 100644 --- a/README.md +++ b/README.md @@ -28,14 +28,14 @@ ```toml [dependencies] - skl = "0.8" + skl = "0.9" ``` - Enable memory map backend ```toml [dependencies] - skl = { version = "0.8", features = ["memmap"] } + skl = { version = "0.9", features = ["memmap"] } ``` ## Features diff --git a/examples/mmap.rs b/examples/mmap.rs index ffa063b7..40c04c66 100644 --- a/examples/mmap.rs +++ b/examples/mmap.rs @@ -11,7 +11,13 @@ pub fn new_value(i: usize) -> Vec { fn main() { const N: usize = 1000; 
- let l = Arc::new(SkipMap::mmap_mut("test.wal", 1 << 20, true).unwrap()); + let mmap_options = skl::MmapOptions::default(); + let open_options = skl::OpenOptions::default() + .create_new(Some(1 << 20)) + .read(true) + .write(true); + + let l = Arc::new(SkipMap::mmap_mut("test.wal", open_options, mmap_options).unwrap()); let wg = Arc::new(()); for i in 0..N { let w = wg.clone(); diff --git a/examples/mmap_anon.rs b/examples/mmap_anon.rs index 0a4dcb6a..f5069c0e 100644 --- a/examples/mmap_anon.rs +++ b/examples/mmap_anon.rs @@ -11,7 +11,9 @@ pub fn new_value(i: usize) -> Vec { fn main() { const N: usize = 1000; - let l = Arc::new(SkipMap::mmap_anon(1 << 20).unwrap()); + + let mmap_options = skl::MmapOptions::default().len(1 << 20); + let l = Arc::new(SkipMap::mmap_anon(mmap_options).unwrap()); let wg = Arc::new(()); for i in 0..N { let w = wg.clone(); diff --git a/integration/src/bin/test-mmap-anon.rs b/integration/src/bin/test-mmap-anon.rs index a2513bc0..9487f786 100644 --- a/integration/src/bin/test-mmap-anon.rs +++ b/integration/src/bin/test-mmap-anon.rs @@ -5,7 +5,9 @@ use std::sync::Arc; fn main() { { const N: usize = 10; - let l = Arc::new(SkipMap::mmap_anon(1 << 20).unwrap()); + + let mmap_options = MmapOptions::default().len(1 << 20); + let l = Arc::new(SkipMap::mmap_anon(mmap_options).unwrap()); for i in 0..N { let l = l.clone(); std::thread::spawn(move || { @@ -27,7 +29,9 @@ fn main() { { const N2: usize = 100; - let l = Arc::new(SkipMap::mmap_anon(120 << 20).unwrap()); + + let mmap_options = MmapOptions::default().len(120 << 20); + let l = Arc::new(SkipMap::mmap_anon(mmap_options).unwrap()); for i in 0..N2 { let l = l.clone(); std::thread::spawn(move || { diff --git a/integration/src/bin/test-mmap.rs b/integration/src/bin/test-mmap.rs index f3b401cc..de0e463c 100644 --- a/integration/src/bin/test-mmap.rs +++ b/integration/src/bin/test-mmap.rs @@ -7,7 +7,13 @@ fn main() { let p = dir.path().join("test_mmap"); { const N: usize = 10; - let l = 
Arc::new(SkipMap::mmap_mut(&p, 1 << 20, true).unwrap()); + + let open_options = OpenOptions::default() + .create_new(Some(1 << 20)) + .read(true) + .write(true); + let mmap_options = MmapOptions::default(); + let l = Arc::new(SkipMap::mmap_mut(&p, open_options, mmap_options).unwrap()); for i in 0..N { let l = l.clone(); std::thread::spawn(move || { @@ -29,7 +35,10 @@ fn main() { { const N2: usize = 10; - let l = Arc::new(SkipMap::::mmap(&p, false).unwrap()); + + let open_options = OpenOptions::default().read(true); + let mmap_options = MmapOptions::default(); + let l = Arc::new(SkipMap::::mmap(&p, open_options, mmap_options).unwrap()); assert_eq!(N2, l.len()); for i in 0..N2 { let l = l.clone(); diff --git a/src/arena.rs b/src/arena.rs index 9de6e0ce..a333bfad 100644 --- a/src/arena.rs +++ b/src/arena.rs @@ -1,48 +1,52 @@ -use crate::{ - sync::{AtomicMut, AtomicPtr, AtomicU32, AtomicU64, Ordering}, - NODE_ALIGNMENT_FACTOR, -}; +#[allow(unused_imports)] +use crate::sync::Box; +use crate::sync::{AtomicMut, AtomicPtr, AtomicU32, AtomicU64, Ordering}; + use core::{ mem, ptr::{self, NonNull}, slice, }; -#[allow(unused_imports)] -use std::boxed::Box; - -use crossbeam_utils::CachePadded; - -mod shared; -use shared::{Shared, SharedMeta}; - -#[cfg(all(feature = "memmap", not(target_family = "wasm")))] -const HEIGHT_ENCODED_SIZE: usize = mem::size_of::(); - -#[cfg(all(feature = "memmap", not(target_family = "wasm")))] -const LEN_ENCODED_SIZE: usize = mem::size_of::(); #[cfg(all(feature = "memmap", not(target_family = "wasm")))] -const MAX_VERSION_ENCODED_SIZE: usize = mem::size_of::(); +use crate::{MmapOptions, OpenOptions}; -#[cfg(all(feature = "memmap", not(target_family = "wasm")))] -const MIN_VERSION_ENCODED_SIZE: usize = mem::size_of::(); - -#[cfg(all(feature = "memmap", not(target_family = "wasm")))] -const CHECKSUM_ENCODED_SIZE: usize = mem::size_of::(); +mod shared; +use shared::Shared; /// The overhead of the memory-mapped file. 
/// /// ```text -/// +---------------+------------+--------------------+--------------------+-----------------+ -/// | 8-bit height | 32-bit len | 64-bit max version | 64-bit min version | 32-bit checksum | -/// +---------------+------------+--------------------+--------------------+-----------------+ +/// +----------------+------------+--------------------+--------------------+---------------------+ +/// | 32-bit height | 32-bit len | 64-bit max version | 64-bit min version | 64-bit allocated | +/// +----------------+------------+--------------------+--------------------+---------------------+ /// ``` -#[cfg(all(feature = "memmap", not(target_family = "wasm")))] -const MMAP_OVERHEAD: usize = HEIGHT_ENCODED_SIZE - + LEN_ENCODED_SIZE - + MAX_VERSION_ENCODED_SIZE - + MIN_VERSION_ENCODED_SIZE - + CHECKSUM_ENCODED_SIZE; +const MMAP_OVERHEAD: usize = mem::size_of::
(); + +#[derive(Debug)] +#[repr(C)] +pub(super) struct Header { + max_version: AtomicU64, + min_version: AtomicU64, + allocated: AtomicU64, + /// Current height. 1 <= height <= kMaxHeight. CAS. + height: AtomicU32, + len: AtomicU32, +} + +impl Header { + fn new(size: u64) -> Self { + Self { + height: AtomicU32::new(1), + len: AtomicU32::new(0), + max_version: AtomicU64::new(0), + min_version: AtomicU64::new(0), + // Don't store data at position 0 in order to reserve offset=0 as a kind + // of nil pointer. + allocated: AtomicU64::new(size + 1), + } + } +} /// An error indicating that the arena is full #[derive(Debug, Clone, PartialEq, Eq, Copy)] @@ -61,12 +65,8 @@ impl std::error::Error for ArenaError {} pub struct Arena { write_data_ptr: NonNull, read_data_ptr: *const u8, - n: CachePadded, - /// Current height. 1 <= height <= kMaxHeight. CAS. - pub(super) height: CachePadded, - pub(super) len: AtomicU32, - pub(super) max_version: AtomicU64, - pub(super) min_version: AtomicU64, + header_ptr: *const Header, + data_offset: u64, inner: AtomicPtr<()>, cap: usize, } @@ -76,20 +76,68 @@ impl core::fmt::Debug for Arena { let allocated = self.size(); // Safety: // The ptr is always non-null, we only deallocate it when the arena is dropped. - let data = unsafe { slice::from_raw_parts(self.read_data_ptr, allocated) }; + let data = + unsafe { slice::from_raw_parts(self.read_data_ptr, allocated - self.data_offset as usize) }; + let header = self.header(); f.debug_struct("Arena") .field("cap", &self.cap) - .field("allocated", &allocated) + .field("header", header) .field("data", &data) .finish() } } +impl Clone for Arena { + fn clone(&self) -> Self { + unsafe { + let shared: *mut Shared = self.inner.load(Ordering::Relaxed).cast(); + + let old_size = (*shared).refs.fetch_add(1, Ordering::Release); + if old_size > usize::MAX >> 1 { + abort(); + } + + // Safety: + // The ptr is always non-null, and the data is only deallocated when the + // last Arena is dropped. 
+ Self { + write_data_ptr: self.write_data_ptr, + read_data_ptr: self.read_data_ptr, + header_ptr: self.header_ptr, + data_offset: self.data_offset, + inner: AtomicPtr::new(shared as _), + cap: self.cap, + } + } + } +} + +#[inline(never)] +#[cold] +fn abort() -> ! { + #[cfg(feature = "std")] + { + std::process::abort() + } + + #[cfg(not(feature = "std"))] + { + struct Abort; + impl Drop for Abort { + fn drop(&mut self) { + panic!(); + } + } + let _a = Abort; + panic!("abort"); + } +} + impl Arena { /// Returns the number of bytes allocated by the arena. #[inline] pub fn size(&self) -> usize { - self.n.load(Ordering::Acquire) as usize + self.header().allocated.load(Ordering::Acquire) as usize } /// Returns the capacity of the arena. @@ -104,19 +152,53 @@ impl Arena { self.cap.saturating_sub(self.size()) } - pub(crate) fn update_max_version(&self, version: u64) { - let mut current = self.max_version.load(Ordering::Acquire); + /// Returns the height of the arena. + #[inline] + pub fn height(&self) -> u32 { + self.header().height.load(Ordering::Acquire) + } + + /// Returns the length of the arena. + #[inline] + pub(super) fn len(&self) -> u32 { + self.header().len.load(Ordering::Acquire) + } + + /// Returns the max version of the arena. + #[inline] + pub fn max_version(&self) -> u64 { + self.header().max_version.load(Ordering::Acquire) + } + + /// Returns the min version of the arena. 
+ #[inline] + pub fn min_version(&self) -> u64 { + self.header().min_version.load(Ordering::Acquire) + } + + #[inline] + pub(super) const fn atomic_height(&self) -> &AtomicU32 { + &self.header().height + } + + #[inline] + pub(super) fn incr_len(&self) { + self.header().len.fetch_add(1, Ordering::Release); + } + + pub(super) fn update_max_version(&self, version: u64) { + let mut current = self.header().max_version.load(Ordering::Acquire); loop { if version <= current { return; } - match self.max_version.compare_exchange_weak( + match self.header().max_version.compare_exchange_weak( current, version, Ordering::SeqCst, - Ordering::SeqCst, + Ordering::Acquire, ) { Ok(_) => break, Err(v) => current = v, @@ -124,74 +206,94 @@ impl Arena { } } - pub(crate) fn update_min_version(&self, version: u64) { - let mut current = self.min_version.load(Ordering::Acquire); + pub(super) fn update_min_version(&self, version: u64) { + let mut current = self.header().min_version.load(Ordering::Acquire); loop { if version >= current { return; } - match self.min_version.compare_exchange_weak( + match self.header().min_version.compare_exchange_weak( current, version, Ordering::SeqCst, - Ordering::SeqCst, + Ordering::Acquire, ) { Ok(_) => break, Err(v) => current = v, } } } + + #[inline] + pub(super) const fn header(&self) -> &Header { + // Safety: + // The header is always non-null, we only deallocate it when the arena is dropped. 
+ unsafe { &*self.header_ptr } + } + + #[inline] + pub(crate) fn clear(&self) { + let header = self.header(); + header.len.store(0, Ordering::Release); + header.height.store(1, Ordering::Release); + header.max_version.store(0, Ordering::Release); + header.min_version.store(0, Ordering::Release); + } } impl Arena { #[inline] - pub(super) fn new_vec(n: usize, min_cap: usize) -> Self { - Self::new( - Shared::new_vec( - n.max(min_cap), - mem::align_of::().max(NODE_ALIGNMENT_FACTOR), - ), - None, - ) + pub(super) fn new_vec(n: usize, min_cap: usize, alignment: usize) -> Self { + Self::new(Shared::new_vec( + n.max(min_cap.saturating_add(MMAP_OVERHEAD)), + alignment, + )) } #[cfg(all(feature = "memmap", not(target_family = "wasm")))] #[inline] pub(super) fn mmap_mut>( - n: usize, - min_cap: usize, path: P, - lock: bool, + open_options: OpenOptions, + mmap_options: MmapOptions, + min_cap: usize, + alignment: usize, ) -> std::io::Result { - let n = n.saturating_add(MMAP_OVERHEAD); - Shared::mmap_mut(n.max(min_cap.saturating_add(MMAP_OVERHEAD)), path, lock) - .map(|shared| Self::new(shared, None)) + let min_cap = min_cap.saturating_add(MMAP_OVERHEAD); + Shared::mmap_mut(path, open_options, mmap_options, min_cap, alignment).map(Self::new) } #[cfg(all(feature = "memmap", not(target_family = "wasm")))] #[inline] pub(super) fn mmap>( - min_cap: usize, path: P, - lock: bool, + open_options: OpenOptions, + mmap_options: MmapOptions, + min_cap: usize, + alignment: usize, ) -> std::io::Result { - Shared::mmap(min_cap + MMAP_OVERHEAD, path, lock) - .map(|(meta, shared)| Self::new(shared, Some(meta))) + let min_cap = min_cap.saturating_add(MMAP_OVERHEAD); + Shared::mmap(path, open_options, mmap_options, min_cap, alignment).map(Self::new) } #[cfg(all(feature = "memmap", not(target_family = "wasm")))] #[inline] - pub(super) fn new_anonymous_mmap(n: usize, min_cap: usize) -> std::io::Result { - Shared::new_mmaped_anon(n.max(min_cap)).map(|shared| Self::new(shared, None)) + pub(super) fn 
new_anonymous_mmap( + mmap_options: MmapOptions, + min_cap: usize, + alignment: usize, + ) -> std::io::Result { + let min_cap = min_cap.saturating_add(MMAP_OVERHEAD); + Shared::new_mmaped_anon(mmap_options, min_cap, alignment).map(Self::new) } #[inline] fn head_offset(&self, max_node_size: u32, align: u32) -> u32 { // Pad the allocation with enough bytes to ensure the requested alignment. let padded = max_node_size as u64 + align as u64 - 1; - let new_size = 1 + padded; + let new_size = 1 + self.data_offset + padded; // Return the aligned offset. (new_size as u32 - max_node_size) & !(align - 1) @@ -218,46 +320,25 @@ impl Arena { } #[inline] - fn new(mut shared: Shared, meta: Option) -> Self { + fn new(mut shared: Shared) -> Self { // Safety: // The ptr is always non-null, we just initialized it. // And this ptr is only deallocated when the arena is dropped. let read_data_ptr = shared.as_ptr(); + let header_ptr = shared.header_ptr(); let write_data_ptr = shared .as_mut_ptr() .map(|p| unsafe { NonNull::new_unchecked(p) }) .unwrap_or_else(NonNull::dangling); - - let (height, allocated, len, max_version, min_version) = match meta { - Some(meta) => ( - meta.height, - meta.allocated, - meta.len, - meta.max_version, - meta.min_version, - ), - None => { - let height = 1; - let len = 0; - let max_version = 0; - let min_version = 0; - let allocated = 1; - (height, allocated, len, max_version, min_version) - } - }; + let data_offset = shared.data_offset as u64; Self { cap: shared.cap(), inner: AtomicPtr::new(Box::into_raw(Box::new(shared)) as _), write_data_ptr, read_data_ptr, - height: CachePadded::new(AtomicU32::new(height as u32)), - len: AtomicU32::new(len), - // Don't store data at position 0 in order to reserve offset=0 as a kind - // of nil pointer. 
- n: CachePadded::new(AtomicU64::new(allocated)), - max_version: AtomicU64::new(max_version), - min_version: AtomicU64::new(min_version), + header_ptr, + data_offset, } } @@ -289,14 +370,15 @@ impl Arena { // Pad the allocation with enough bytes to ensure the requested alignment. let padded = size as u64 + align as u64 - 1; - let mut current_allocated = self.n.load(Ordering::Acquire); + let header = self.header(); + let mut current_allocated = header.allocated.load(Ordering::Acquire); if current_allocated + padded + overflow as u64 > self.cap as u64 { return Err(ArenaError); } loop { let want = current_allocated + padded; - match self.n.compare_exchange_weak( + match header.allocated.compare_exchange_weak( current_allocated, want, Ordering::SeqCst, @@ -405,13 +487,7 @@ impl Drop for Arena { // Relaxed is enough here as we're in a drop, no one else can // access this memory anymore. - shared.unmount( - self.height.load(Ordering::Acquire) as u8, - self.len.load(Ordering::Acquire), - self.n.load(Ordering::Acquire), - self.max_version.load(Ordering::Acquire), - self.min_version.load(Ordering::Acquire), - ); + shared.unmount(); }); } } @@ -420,10 +496,10 @@ impl Drop for Arena { #[test] #[cfg(test)] fn test_debug() { - let arena = Arena::new_vec(1024, 1024); + let arena = Arena::new_vec(1024, 1024, 8); assert_eq!( std::format!("{:?}", arena), - "Arena { cap: 1024, allocated: 1, data: [0] }" + "Arena { cap: 1056, header: Header { max_version: 0, min_version: 0, allocated: 33, height: 1, len: 0 }, data: [0] }" ); let err = ArenaError; diff --git a/src/arena/shared.rs b/src/arena/shared.rs index 3d80e725..fefc6acf 100644 --- a/src/arena/shared.rs +++ b/src/arena/shared.rs @@ -75,172 +75,153 @@ enum SharedBackend { buf: *mut memmap2::MmapMut, file: std::fs::File, lock: bool, + shrink_on_drop: bool, }, #[cfg(all(feature = "memmap", not(target_family = "wasm")))] Mmap { - buf: memmap2::Mmap, + buf: *mut memmap2::Mmap, file: std::fs::File, lock: bool, + #[allow(dead_code)] + 
shrink_on_drop: bool, }, #[cfg(all(feature = "memmap", not(target_family = "wasm")))] AnonymousMmap(memmap2::MmapMut), } -pub(super) struct SharedMeta { - pub(super) height: u8, - pub(super) len: u32, - pub(super) max_version: u64, - pub(super) min_version: u64, - pub(super) allocated: u64, +const fn data_offset(alignment: usize) -> usize { + let header_size = mem::size_of::
(); + let offset = (alignment - (header_size % alignment)) % alignment; + header_size + offset } pub(super) struct Shared { pub(super) refs: AtomicUsize, cap: usize, + pub(super) data_offset: usize, backend: SharedBackend, } impl Shared { - pub(super) fn new_vec(cap: usize, align: usize) -> Self { - let vec = AlignedVec::new(cap, align); - Self { + pub(super) fn new_vec(cap: usize, alignment: usize) -> Self { + let vec = AlignedVec::new(cap, alignment); + let this = Self { cap: vec.cap, backend: SharedBackend::Vec(vec), refs: AtomicUsize::new(1), + data_offset: data_offset(alignment), + }; + // Safety: we have add the overhead for the header + unsafe { + this + .header_mut_ptr() + .write(Header::new(this.data_offset as u64)); } + this } #[cfg(all(feature = "memmap", not(target_family = "wasm")))] pub(super) fn mmap_mut>( - cap: usize, path: P, - lock: bool, + open_options: OpenOptions, + mmap_options: MmapOptions, + min_cap: usize, + alignment: usize, ) -> std::io::Result { - use fs4::FileExt; + let file = open_options.open(path.as_ref())?; - let file = std::fs::OpenOptions::new() - .read(true) - .write(true) - .create_new(true) - .open(path.as_ref())?; unsafe { - if lock { - file.lock_exclusive()?; - } - - file.set_len(cap as u64).and_then(|_| { - memmap2::MmapOptions::new() - .len(cap) - .map_mut(&file) - .map(|mmap| Self { - cap: cap - MMAP_OVERHEAD, - backend: SharedBackend::MmapMut { - buf: Box::into_raw(Box::new(mmap)), - file, - lock, - }, - refs: AtomicUsize::new(1), - }) + mmap_options.map_mut(&file).and_then(|mmap| { + let cap = mmap.len(); + if cap < min_cap { + return Err(std::io::Error::new( + std::io::ErrorKind::InvalidData, + "file size is less than the minimum capacity", + )); + } + let this = Self { + cap, + backend: SharedBackend::MmapMut { + buf: Box::into_raw(Box::new(mmap)), + file, + lock: open_options.is_lock(), + shrink_on_drop: open_options.is_shrink_on_drop(), + }, + refs: AtomicUsize::new(1), + data_offset: data_offset(alignment), + }; + 
// Safety: we have add the overhead for the header + this + .header_mut_ptr() + .write(Header::new(this.data_offset as u64)); + + Ok(this) }) } } #[cfg(all(feature = "memmap", not(target_family = "wasm")))] pub(super) fn mmap>( - min_cap: usize, path: P, - lock: bool, - ) -> std::io::Result<(SharedMeta, Self)> { - use fs4::FileExt; - - let file = std::fs::OpenOptions::new().read(true).open(path.as_ref())?; + open_options: OpenOptions, + mmap_options: MmapOptions, + min_cap: usize, + alignment: usize, + ) -> std::io::Result { + let file = open_options.open(path.as_ref())?; unsafe { - if lock { - file.lock_shared()?; - } + mmap_options.map(&file).and_then(|mmap| { + let len = mmap.len(); + if len < min_cap { + return Err(std::io::Error::new( + std::io::ErrorKind::InvalidData, + "file size is less than the minimum capacity", + )); + } + + Ok(Self { + cap: len, + backend: SharedBackend::Mmap { + buf: Box::into_raw(Box::new(mmap)), + file, + lock: open_options.is_lock(), + shrink_on_drop: open_options.is_shrink_on_drop(), + }, + refs: AtomicUsize::new(1), + data_offset: data_offset(alignment), + }) + }) + } + } - let allocated = file.metadata()?.len(); - if min_cap as u64 > allocated { + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + pub(super) fn new_mmaped_anon( + mmap_options: MmapOptions, + min_cap: usize, + alignment: usize, + ) -> std::io::Result { + mmap_options.map_anon().and_then(|mmap| { + if mmap.len() < min_cap { return Err(std::io::Error::new( std::io::ErrorKind::InvalidData, "file size is less than the minimum capacity", )); } - memmap2::MmapOptions::new() - .len(allocated as usize) - .map(&file) - .and_then(|mmap| { - let len = mmap.len(); - if len < MMAP_OVERHEAD { - return Err(std::io::Error::new( - std::io::ErrorKind::InvalidData, - "file size is less than the minimum capacity", - )); - } - - let cks = crc32fast::hash(&mmap[..len - CHECKSUM_ENCODED_SIZE]); - let cks2 = u32::from_le_bytes(mmap[len - 
CHECKSUM_ENCODED_SIZE..len].try_into().unwrap()); - if cks != cks2 { - return Err(std::io::Error::new( - std::io::ErrorKind::InvalidData, - "checksum mismatch", - )); - } - - let mut overhead_offset = len - MMAP_OVERHEAD; - let height = mmap[overhead_offset]; - overhead_offset += HEIGHT_ENCODED_SIZE; - let len = u32::from_le_bytes( - mmap[overhead_offset..overhead_offset + LEN_ENCODED_SIZE] - .try_into() - .unwrap(), - ); - overhead_offset += LEN_ENCODED_SIZE; - let max_version = u64::from_le_bytes( - mmap[overhead_offset..overhead_offset + MAX_VERSION_ENCODED_SIZE] - .try_into() - .unwrap(), - ); - overhead_offset += MAX_VERSION_ENCODED_SIZE; - let min_version = u64::from_le_bytes( - mmap[overhead_offset..overhead_offset + MIN_VERSION_ENCODED_SIZE] - .try_into() - .unwrap(), - ); - - Ok(( - SharedMeta { - height, - len, - max_version, - min_version, - allocated, - }, - Self { - cap: allocated as usize, - backend: SharedBackend::Mmap { - buf: mmap, - file, - lock, - }, - refs: AtomicUsize::new(1), - }, - )) - }) - } - } - - #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - pub(super) fn new_mmaped_anon(cap: usize) -> std::io::Result { - memmap2::MmapOptions::new() - .len(cap) - .map_anon() - .map(|mmap| Self { - cap, + let this = Self { + cap: mmap.len(), backend: SharedBackend::AnonymousMmap(mmap), refs: AtomicUsize::new(1), - }) + data_offset: data_offset(alignment), + }; + // Safety: we have add the overhead for the header + unsafe { + this + .header_mut_ptr() + .write(Header::new(this.data_offset as u64)); + } + Ok(this) + }) } #[cfg(all(feature = "memmap", not(target_family = "wasm")))] @@ -261,27 +242,59 @@ impl Shared { } } + #[inline] pub(super) fn as_mut_ptr(&mut self) -> Option<*mut u8> { - Some(match &mut self.backend { - SharedBackend::Vec(vec) => vec.as_mut_ptr(), + unsafe { + Some(match &mut self.backend { + SharedBackend::Vec(vec) => vec.as_mut_ptr().add(self.data_offset), + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + 
SharedBackend::MmapMut { buf: mmap, .. } => (**mmap).as_mut_ptr().add(self.data_offset), + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + SharedBackend::Mmap { .. } => return None, + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + SharedBackend::AnonymousMmap(mmap) => mmap.as_mut_ptr().add(self.data_offset), + }) + } + } + + #[inline] + pub(super) fn as_ptr(&self) -> *const u8 { + unsafe { + match &self.backend { + SharedBackend::Vec(vec) => vec.as_ptr().add(self.data_offset), + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + SharedBackend::MmapMut { buf: mmap, .. } => (**mmap).as_ptr().add(self.data_offset), + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + SharedBackend::Mmap { buf: mmap, .. } => (**mmap).as_ptr().add(self.data_offset), + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + SharedBackend::AnonymousMmap(mmap) => mmap.as_ptr().add(self.data_offset), + } + } + } + + #[inline] + pub(super) fn header_ptr(&self) -> *const Header { + match &self.backend { + SharedBackend::Vec(vec) => vec.as_ptr() as _, #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - SharedBackend::MmapMut { buf: mmap, .. } => unsafe { (**mmap).as_mut_ptr() }, + SharedBackend::MmapMut { buf: mmap, .. } => unsafe { (**mmap).as_ptr() as _ }, #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - SharedBackend::Mmap { .. } => return None, + SharedBackend::Mmap { buf: mmap, .. 
} => unsafe { (**mmap).as_ptr() as _ }, #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - SharedBackend::AnonymousMmap(mmap) => mmap.as_mut_ptr(), - }) + SharedBackend::AnonymousMmap(mmap) => mmap.as_ptr() as _, + } } - pub(super) fn as_ptr(&mut self) -> *const u8 { - match &mut self.backend { - SharedBackend::Vec(vec) => vec.as_ptr(), + #[inline] + pub(super) fn header_mut_ptr(&self) -> *mut Header { + match &self.backend { + SharedBackend::Vec(vec) => vec.as_ptr() as _, #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - SharedBackend::MmapMut { buf: mmap, .. } => unsafe { (**mmap).as_ptr() }, + SharedBackend::MmapMut { buf: mmap, .. } => unsafe { (**mmap).as_ptr() as _ }, #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - SharedBackend::Mmap { buf: mmap, .. } => mmap.as_ptr(), + SharedBackend::Mmap { buf: mmap, .. } => unsafe { (**mmap).as_ptr() as _ }, #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - SharedBackend::AnonymousMmap(mmap) => mmap.as_ptr(), + SharedBackend::AnonymousMmap(mmap) => mmap.as_ptr() as _, } } @@ -294,61 +307,71 @@ impl Shared { /// /// ## Safety: /// - This method must be invoked in the drop impl of `Arena`. - pub(super) unsafe fn unmount( - &mut self, - _height: u8, - _len: u32, - _size: u64, - _max_version: u64, - _min_version: u64, - ) { + pub(super) unsafe fn unmount(&mut self) { #[cfg(all(feature = "memmap", not(target_family = "wasm")))] match &self.backend { - SharedBackend::MmapMut { buf, file, lock } => { + SharedBackend::MmapMut { + buf, + file, + lock, + shrink_on_drop, + } => { use fs4::FileExt; // Any errors during unmapping/closing are ignored as the only way // to report them would be through panicking which is highly discouraged // in Drop impls, c.f. 
https://github.com/rust-lang/lang-team/issues/97 - { - let mmap = &mut **buf; - let mut cur = _size as usize; - mmap[cur] = _height; - cur += HEIGHT_ENCODED_SIZE; - mmap[cur..cur + LEN_ENCODED_SIZE].copy_from_slice(&_len.to_le_bytes()); - cur += LEN_ENCODED_SIZE; - mmap[cur..cur + MAX_VERSION_ENCODED_SIZE].copy_from_slice(&_max_version.to_le_bytes()); - cur += MAX_VERSION_ENCODED_SIZE; - mmap[cur..cur + MIN_VERSION_ENCODED_SIZE].copy_from_slice(&_min_version.to_le_bytes()); - cur += MIN_VERSION_ENCODED_SIZE; - - let h = crc32fast::hash(&mmap[..cur]); - mmap[cur..cur + CHECKSUM_ENCODED_SIZE].copy_from_slice(&h.to_le_bytes()); - let _ = mmap.flush(); - } - - // we must trigger the drop of the mmap before truncating the file - drop(Box::from_raw(*buf)); - - // relaxed ordering is enough here as we're in a drop, no one else can - // access this memory anymore. - let actual_size = _size + MMAP_OVERHEAD as u64; - if actual_size != self.cap as u64 { - let _ = file.set_len(_size + MMAP_OVERHEAD as u64); + let mmap = &mut **buf; + let _ = mmap.flush(); + + // we must trigger the drop of the mmap + let used = if *shrink_on_drop { + let header_ptr = self.header_ptr().cast::
(); + let header = &*header_ptr; + Some(header.allocated.load(Ordering::Acquire)) + } else { + None + }; + + let _ = Box::from_raw(*buf); + + if let Some(used) = used { + if used < self.cap as u64 { + let _ = file.set_len(used); + } } if *lock { let _ = file.unlock(); } } - SharedBackend::Mmap { lock, file, .. } => { + SharedBackend::Mmap { + lock, + file, + shrink_on_drop, + buf, + } => { use fs4::FileExt; // Any errors during unmapping/closing are ignored as the only way // to report them would be through panicking which is highly discouraged // in Drop impls, c.f. https://github.com/rust-lang/lang-team/issues/97 - // we must trigger the drop of the mmap before truncating the file + let used = if *shrink_on_drop { + let header_ptr = self.header_ptr().cast::
(); + let header = &*header_ptr; + Some(header.allocated.load(Ordering::Acquire)) + } else { + None + }; + + let _ = Box::from_raw(*buf); + + if let Some(used) = used { + if used < self.cap as u64 { + let _ = file.set_len(used); + } + } // relaxed ordering is enough here as we're in a drop, no one else can // access this memory anymore. diff --git a/src/lib.rs b/src/lib.rs index eab9e393..4e30272a 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -16,7 +16,7 @@ extern crate alloc as std; #[cfg(feature = "std")] extern crate std; -use core::{cmp, mem, ops::RangeBounds}; +use core::{cmp, ops::RangeBounds}; mod arena; /// A map implementation based on skiplist @@ -25,12 +25,22 @@ pub mod map; /// A set implementation based on skiplist pub mod set; +#[cfg(all(feature = "memmap", not(target_family = "wasm")))] +mod options; +#[cfg(all(feature = "memmap", not(target_family = "wasm")))] +#[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] +pub use options::{MmapOptions, OpenOptions}; + +#[cfg(all(feature = "memmap", not(target_family = "wasm")))] +fn invalid_data(e: E) -> std::io::Error { + std::io::Error::new(std::io::ErrorKind::InvalidData, e) +} + pub use arena::{Arena, ArenaError}; pub use map::{MapIterator, SkipMap}; pub use set::{SetIterator, SkipSet}; const MAX_HEIGHT: usize = 20; -const NODE_ALIGNMENT_FACTOR: usize = mem::align_of::(); #[cfg(feature = "std")] fn random_height() -> u32 { @@ -268,6 +278,8 @@ mod sync { #[cfg(not(loom))] pub(crate) use core::sync::atomic::*; + #[cfg(not(loom))] + pub(crate) use std::boxed::Box; #[cfg(all(not(loom), test))] pub(crate) use std::sync::Arc; diff --git a/src/map.rs b/src/map.rs index d69558b0..32c5ab71 100644 --- a/src/map.rs +++ b/src/map.rs @@ -6,6 +6,9 @@ use core::{ use crate::{OccupiedValue, Trailer}; +#[cfg(all(feature = "memmap", not(target_family = "wasm")))] +use super::{invalid_data, MmapOptions, OpenOptions}; + use super::{arena::Arena, sync::Ordering, Ascend, Comparator, MAX_HEIGHT}; 
mod node; @@ -50,13 +53,27 @@ pub struct SkipMap { unsafe impl Send for SkipMap {} unsafe impl Sync for SkipMap {} +impl Clone for SkipMap { + fn clone(&self) -> Self { + Self { + arena: self.arena.clone(), + head: self.head, + tail: self.tail, + ro: self.ro, + #[cfg(all(test, feature = "std"))] + yield_now: self.yield_now, + cmp: self.cmp.clone(), + } + } +} + // --------------------------------Public Methods-------------------------------- impl SkipMap { /// Returns the height of the highest tower within any of the nodes that /// have ever been allocated as part of this skiplist. #[inline] pub fn height(&self) -> u32 { - self.arena.height.load(Ordering::Acquire) + self.arena.height() } /// Returns the number of bytes that have allocated from the arena. @@ -80,7 +97,7 @@ impl SkipMap { /// Returns the number of entries in the skipmap. #[inline] pub fn len(&self) -> usize { - self.arena.len.load(Ordering::Acquire) as usize + self.arena.len() as usize } /// Returns true if the skipmap is empty. @@ -92,13 +109,13 @@ impl SkipMap { /// Returns the maximum version of all entries in the map. #[inline] pub fn max_version(&self) -> u64 { - self.arena.max_version.load(Ordering::Acquire) + self.arena.max_version() } /// Returns the minimum version of all entries in the map. #[inline] pub fn min_version(&self) -> u64 { - self.arena.min_version.load(Ordering::Acquire) + self.arena.min_version() } /// Returns the comparator used to compare keys. @@ -172,10 +189,10 @@ impl SkipMap { #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] pub fn mmap_mut>( path: P, - cap: usize, - lock: bool, + open_options: OpenOptions, + mmap_options: MmapOptions, ) -> std::io::Result { - Self::mmap_mut_with_comparator(path, cap, lock, Ascend) + Self::mmap_mut_with_comparator(path, open_options, mmap_options, Ascend) } /// Open an exist file and mmap it to create skipmap. 
@@ -183,8 +200,12 @@ impl SkipMap { /// `lock`: whether to lock the underlying file or not #[cfg(all(feature = "memmap", not(target_family = "wasm")))] #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - pub fn mmap>(path: P, lock: bool) -> std::io::Result { - Self::mmap_with_comparator(path, lock, Ascend) + pub fn mmap>( + path: P, + open_options: OpenOptions, + mmap_options: MmapOptions, + ) -> std::io::Result { + Self::mmap_with_comparator(path, open_options, mmap_options, Ascend) } /// Create a new skipmap according to the given capacity, and mmap anon. @@ -204,15 +225,15 @@ impl SkipMap { /// [`SkipMap::new`]: #method.new #[cfg(all(feature = "memmap", not(target_family = "wasm")))] #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - pub fn mmap_anon(cap: usize) -> std::io::Result { - Self::mmap_anon_with_comparator(cap, Ascend) + pub fn mmap_anon(mmap_options: MmapOptions) -> std::io::Result { + Self::mmap_anon_with_comparator(mmap_options, Ascend) } } impl SkipMap { /// Like [`SkipMap::new`], but with a custom comparator. 
pub fn with_comparator(cap: usize, cmp: C) -> Result { - let arena = Arena::new_vec(cap, Node::::min_cap()); + let arena = Arena::new_vec(cap, Node::::min_cap(), Node::::alignment() as usize); Self::new_in(arena, cmp, false) } @@ -221,13 +242,14 @@ impl SkipMap { #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] pub fn mmap_mut_with_comparator>( path: P, - cap: usize, - lock: bool, + open_options: OpenOptions, + mmap_options: MmapOptions, cmp: C, ) -> std::io::Result { - let arena = Arena::mmap_mut(cap, Node::::min_cap(), path, lock)?; - Self::new_in(arena, cmp, false) - .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e)) + let alignment = Node::::alignment() as usize; + let min_cap = Node::::min_cap(); + let arena = Arena::mmap_mut(path, open_options, mmap_options, min_cap, alignment)?; + Self::new_in(arena, cmp, false).map_err(invalid_data) } /// Like [`SkipMap::mmap`], but with a custom comparator. @@ -235,25 +257,31 @@ impl SkipMap { #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] pub fn mmap_with_comparator>( path: P, - lock: bool, + open_options: OpenOptions, + mmap_options: MmapOptions, cmp: C, ) -> std::io::Result { - let arena = Arena::mmap(Node::::min_cap(), path, lock)?; - Self::new_in(arena, cmp, true) - .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e)) + let alignment = Node::::alignment() as usize; + let min_cap = Node::::min_cap(); + let arena = Arena::mmap(path, open_options, mmap_options, min_cap, alignment)?; + Self::new_in(arena, cmp, true).map_err(invalid_data) } /// Like [`SkipMap::mmap_anon`], but with a custom comparator. 
#[cfg(all(feature = "memmap", not(target_family = "wasm")))] #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - pub fn mmap_anon_with_comparator(cap: usize, cmp: C) -> std::io::Result { - let arena = Arena::new_anonymous_mmap(cap, Node::::min_cap())?; - Self::new_in(arena, cmp, false) - .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e)) + pub fn mmap_anon_with_comparator(mmap_options: MmapOptions, cmp: C) -> std::io::Result { + let alignment = Node::::alignment() as usize; + let min_cap = Node::::min_cap(); + let arena = Arena::new_anonymous_mmap(mmap_options, min_cap, alignment)?; + Self::new_in(arena, cmp, false).map_err(invalid_data) } /// Clear the skiplist to empty and re-initialize. - pub fn clear(&mut self) -> Result<(), Error> { + /// + /// # Safety + /// This method is not concurrency safe; invokers must ensure that no other threads are accessing the skipmap. + pub unsafe fn clear(&mut self) -> Result<(), Error> { if self.ro { return Err(Error::Readonly); } @@ -277,8 +305,7 @@ impl SkipMap { self.head = head; self.tail = tail; - self.arena.height.store(1, Ordering::Release); - self.arena.len.store(0, Ordering::Release); + self.arena.clear(); Ok(()) } @@ -288,7 +315,6 @@ impl SkipMap { let head = NodePtr::new(ptr, offset); let (ptr, offset) = arena.tail_ptr(Node::::MAX_NODE_SIZE as u32, Node::::alignment()); let tail = NodePtr::new(ptr, offset); - return Ok(Self::construct(arena, head, tail, ro, cmp)); } @@ -418,17 +444,14 @@ impl SkipMap { return Err(Error::Readonly); } + let copy = |mut buf: OccupiedValue| { + let _ = buf.write(value); + Ok(()) + }; + let val_len = value.len() as u32; + self - .insert_in::( - trailer, - key, - value.len() as u32, - |mut buf| { - let _ = buf.write(value); - Ok(()) - }, - &mut Inserter::default(), - ) + .insert_in::(trailer, key, val_len, copy, &mut Inserter::default()) .map_err(|e| e.expect_right("must be map::Error")) } @@ -572,7 +595,7 @@ impl SkipMap { // Try to
increase self.height via CAS. let mut list_height = self.height(); while height > list_height { - match self.arena.height.compare_exchange_weak( + match self.arena.atomic_height().compare_exchange_weak( list_height, height, Ordering::SeqCst, @@ -860,7 +883,7 @@ impl SkipMap { ins.spl[i].prev = nd; } } - self.arena.len.fetch_add(1, Ordering::AcqRel); + self.arena.incr_len(); self.arena.update_max_version(version); self.arena.update_min_version(version); Ok(None) diff --git a/src/map/node.rs b/src/map/node.rs index a6a4e65f..79951495 100644 --- a/src/map/node.rs +++ b/src/map/node.rs @@ -126,8 +126,8 @@ impl Node { #[inline] pub(super) const fn alignment() -> u32 { let alignment = mem::align_of::(); - let alignment = if alignment < mem::size_of::() { - mem::size_of::() + let alignment = if alignment < mem::align_of::() { + mem::align_of::() } else { alignment }; diff --git a/src/map/tests.rs b/src/map/tests.rs index 99dfea30..342a31ec 100644 --- a/src/map/tests.rs +++ b/src/map/tests.rs @@ -65,14 +65,21 @@ fn test_empty() { fn test_empty_mmap_mut() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_empty_mmap_mut"); - empty_in(SkipMap::mmap_mut(p, 1000, true).unwrap()); + let open_options = OpenOptions::default() + .create_new(Some(1000)) + .read(true) + .write(true); + let mmap_options = MmapOptions::default(); + + empty_in(SkipMap::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_empty_mmap_anon() { - empty_in(SkipMap::mmap_anon(1000).unwrap()); + let mmap_options = MmapOptions::default().len(1000); + empty_in(SkipMap::mmap_anon(mmap_options).unwrap()); } fn full_in(l: impl FnOnce(usize) -> SkipMap) { @@ -109,14 +116,25 @@ fn test_full() { fn test_full_mmap_mut() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_full_mmap_mut"); - full_in(|n| SkipMap::mmap_mut(p, n, true).unwrap()); + + full_in(|n| { + let open_options = 
OpenOptions::default() + .create_new(Some(n as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::default(); + SkipMap::mmap_mut(p, open_options, mmap_options).unwrap() + }); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_full_mmap_anon() { - full_in(|n| SkipMap::mmap_anon(n).unwrap()); + full_in(|n| { + let mmap_options = MmapOptions::default().len(n); + SkipMap::mmap_anon(mmap_options).unwrap() + }); } fn basic_in(mut l: SkipMap) { @@ -200,8 +218,11 @@ fn basic_in(mut l: SkipMap) { assert_eq!(arr, [a2, a1, b2, b1]); } - l.clear().unwrap(); + unsafe { + l.clear().unwrap(); + } + let l = l.clone(); { let mut it = l.iter_all_versions(0); assert!(it.seek_lower_bound(Bound::Unbounded).is_none()); @@ -227,14 +248,20 @@ fn test_basic() { fn test_basic_mmap_mut() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_basic_mmap_mut"); - basic_in(SkipMap::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::default(); + basic_in(SkipMap::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_basic_mmap_anon() { - basic_in(SkipMap::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::default().len(ARENA_SIZE); + basic_in(SkipMap::mmap_anon(mmap_options).unwrap()); } fn iter_all_versions_mvcc(l: SkipMap) { @@ -342,14 +369,20 @@ fn test_iter_all_versions_mvcc_mmap_mut() { let p = dir .path() .join("test_skipmap_iter_all_versions_mvcc_mmap_mut"); - iter_all_versions_mvcc(SkipMap::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::default(); + iter_all_versions_mvcc(SkipMap::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] 
#[cfg_attr(miri, ignore)] fn test_iter_all_versions_mvcc_mmap_anon() { - iter_all_versions_mvcc(SkipMap::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::default().len(ARENA_SIZE); + iter_all_versions_mvcc(SkipMap::mmap_anon(mmap_options).unwrap()); } fn ordering() { @@ -438,14 +471,20 @@ fn test_get_mvcc() { fn test_get_mvcc_mmap_mut() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_get_mvcc_mmap_mut"); - get_mvcc(SkipMap::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::default(); + get_mvcc(SkipMap::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_get_mvcc_mmap_anon() { - get_mvcc(SkipMap::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::default().len(ARENA_SIZE); + get_mvcc(SkipMap::mmap_anon(mmap_options).unwrap()); } fn gt_in(l: SkipMap) { @@ -538,14 +577,20 @@ fn test_gt() { fn test_gt_mmap_mut() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_gt_mmap_mut"); - gt_in(SkipMap::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::default(); + gt_in(SkipMap::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_gt_mmap_anon() { - gt_in(SkipMap::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::default().len(ARENA_SIZE); + gt_in(SkipMap::mmap_anon(mmap_options).unwrap()); } fn ge_in(l: SkipMap) { @@ -636,14 +681,20 @@ fn test_ge() { fn test_ge_mmap_mut() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_ge_mmap_mut"); - ge_in(SkipMap::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = 
OpenOptions::default() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::default(); + ge_in(SkipMap::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_ge_mmap_anon() { - ge_in(SkipMap::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::default().len(ARENA_SIZE); + ge_in(SkipMap::mmap_anon(mmap_options).unwrap()); } fn le_in(l: SkipMap) { @@ -748,14 +799,20 @@ fn test_le() { fn test_le_mmap_mut() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_le_mmap_mut"); - gt_in(SkipMap::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::default(); + gt_in(SkipMap::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_le_mmap_anon() { - gt_in(SkipMap::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::default().len(ARENA_SIZE); + gt_in(SkipMap::mmap_anon(mmap_options).unwrap()); } fn lt_in(l: SkipMap) { @@ -842,14 +899,20 @@ fn test_lt() { fn test_lt_mmap_mut() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_lt_mmap_mut"); - lt_in(SkipMap::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::default(); + lt_in(SkipMap::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_lt_mmap_anon() { - lt_in(SkipMap::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::default().len(ARENA_SIZE); + lt_in(SkipMap::mmap_anon(mmap_options).unwrap()); } fn test_basic_large_testcases_in(l: Arc) { @@ -884,7 +947,12 @@ fn test_basic_large_testcases_mmap_mut() { let p 
= dir .path() .join("test_skipmap_basic_large_testcases_mmap_mut"); - let l = Arc::new(SkipMap::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::default(); + let l = Arc::new(SkipMap::mmap_mut(p, open_options, mmap_options).unwrap()); test_basic_large_testcases_in(l); } @@ -892,7 +960,8 @@ fn test_basic_large_testcases_mmap_mut() { #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_basic_large_testcases_mmap_anon() { - let l = Arc::new(SkipMap::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::default().len(ARENA_SIZE); + let l = Arc::new(SkipMap::mmap_anon(mmap_options).unwrap()); test_basic_large_testcases_in(l); } @@ -937,8 +1006,13 @@ fn test_concurrent_basic() { fn test_concurrent_basic_mmap_mut() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_concurrent_basic_mmap_mut"); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::default(); let l = Arc::new( - SkipMap::mmap_mut(p, ARENA_SIZE, true) + SkipMap::mmap_mut(p, open_options, mmap_options) .unwrap() .with_yield_now(), ); @@ -949,8 +1023,9 @@ fn test_concurrent_basic_mmap_mut() { #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_concurrent_basic_mmap_anon() { + let mmap_options = MmapOptions::default().len(ARENA_SIZE); test_concurrent_basic_runner(Arc::new( - SkipMap::mmap_anon(ARENA_SIZE).unwrap().with_yield_now(), + SkipMap::mmap_anon(mmap_options).unwrap().with_yield_now(), )); } @@ -996,8 +1071,13 @@ fn test_concurrent_basic_big_values_mmap_mut() { let p = dir .path() .join("test_skipmap_concurrent_basic_big_values_mmap_mut"); + let open_options = OpenOptions::default() + .create_new(Some(120 << 20)) + .read(true) + .write(true); + let mmap_options = MmapOptions::default(); 
test_concurrent_basic_big_values_runner(Arc::new( - SkipMap::mmap_mut(p, 120 << 20, true) + SkipMap::mmap_mut(p, open_options, mmap_options) .unwrap() .with_yield_now(), )); @@ -1007,8 +1087,9 @@ fn test_concurrent_basic_big_values_mmap_mut() { #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_concurrent_basic_big_values_mmap_anon() { + let mmap_options = MmapOptions::default().len(120 << 20); test_concurrent_basic_big_values_runner(Arc::new( - SkipMap::mmap_anon(120 << 20).unwrap().with_yield_now(), + SkipMap::mmap_anon(mmap_options).unwrap().with_yield_now(), )); } @@ -1071,8 +1152,14 @@ fn test_concurrent_one_key() { fn test_concurrent_one_key_mmap_mut() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_concurrent_one_key_mmap_mut"); + let open_options = OpenOptions::default() + .create(Some(ARENA_SIZE as u64)) + .read(true) + .write(true) + .shrink_on_drop(true); + let mmap_options = MmapOptions::default(); concurrent_one_key(Arc::new( - SkipMap::mmap_mut(p, ARENA_SIZE, true) + SkipMap::mmap_mut(p, open_options, mmap_options) .unwrap() .with_yield_now(), )); @@ -1082,8 +1169,9 @@ fn test_concurrent_one_key_mmap_mut() { #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_concurrent_one_key_mmap_anon() { + let mmap_options = MmapOptions::default().len(ARENA_SIZE); concurrent_one_key(Arc::new( - SkipMap::mmap_anon(ARENA_SIZE).unwrap().with_yield_now(), + SkipMap::mmap_anon(mmap_options).unwrap().with_yield_now(), )); } @@ -1120,14 +1208,20 @@ fn test_iter_all_versionsator_next_mmap_mut() { let p = dir .path() .join("test_skipmap_iter_all_versionsator_next_mmap_mut"); - iter_all_versionsator_next(SkipMap::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::default(); + iter_all_versionsator_next(SkipMap::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = 
"memmap")] #[cfg_attr(miri, ignore)] fn test_iter_all_versionsator_next_mmap_anon() { - iter_all_versionsator_next(SkipMap::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::default().len(ARENA_SIZE); + iter_all_versionsator_next(SkipMap::mmap_anon(mmap_options).unwrap()); } fn range_next(l: SkipMap) { @@ -1168,14 +1262,20 @@ fn test_range_next() { fn test_range_next_mmap_mut() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_range_next_mmap_mut"); - iter_all_versionsator_next(SkipMap::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::default(); + iter_all_versionsator_next(SkipMap::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_range_next_mmap_anon() { - iter_all_versionsator_next(SkipMap::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::default().len(ARENA_SIZE); + iter_all_versionsator_next(SkipMap::mmap_anon(mmap_options).unwrap()); } fn iter_all_versionsator_prev(l: SkipMap) { @@ -1211,14 +1311,20 @@ fn test_iter_all_versionsator_prev_mmap_mut() { let p = dir .path() .join("test_skipmap_iter_all_versionsator_prev_mmap_mut"); - iter_all_versionsator_prev(SkipMap::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::default(); + iter_all_versionsator_prev(SkipMap::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_iter_all_versionsator_prev_mmap_anon() { - iter_all_versionsator_prev(SkipMap::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::default().len(ARENA_SIZE); + iter_all_versionsator_prev(SkipMap::mmap_anon(mmap_options).unwrap()); } fn range_prev(l: SkipMap) { 
@@ -1259,14 +1365,20 @@ fn test_range_next_back() { fn test_range_prev_mmap_mut() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_range_prev_mmap_mut"); - range_prev(SkipMap::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::default(); + range_prev(SkipMap::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_range_prev_mmap_anon() { - range_prev(SkipMap::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::default().len(ARENA_SIZE); + range_prev(SkipMap::mmap_anon(mmap_options).unwrap()); } fn iter_all_versionsator_seek_ge(l: SkipMap) { @@ -1332,14 +1444,20 @@ fn test_iter_all_versionsator_seek_ge_mmap_mut() { let p = dir .path() .join("test_skipmap_iter_all_versionsator_seek_ge_mmap_mut"); - iter_all_versionsator_seek_ge(SkipMap::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::default(); + iter_all_versionsator_seek_ge(SkipMap::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_iter_all_versionsator_seek_ge_mmap_anon() { - iter_all_versionsator_seek_ge(SkipMap::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::default().len(ARENA_SIZE); + iter_all_versionsator_seek_ge(SkipMap::mmap_anon(mmap_options).unwrap()); } fn iter_all_versionsator_seek_lt(l: SkipMap) { @@ -1392,14 +1510,20 @@ fn test_iter_all_versionsator_seek_lt_mmap_mut() { let p = dir .path() .join("test_skipmap_iter_all_versionsator_seek_lt_mmap_mut"); - iter_all_versionsator_seek_lt(SkipMap::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + 
.write(true); + let mmap_options = MmapOptions::default(); + iter_all_versionsator_seek_lt(SkipMap::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_iter_all_versionsator_seek_lt_mmap_anon() { - iter_all_versionsator_seek_lt(SkipMap::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::default().len(ARENA_SIZE); + iter_all_versionsator_seek_lt(SkipMap::mmap_anon(mmap_options).unwrap()); } fn range(l: SkipMap) { @@ -1500,14 +1624,20 @@ fn test_range() { fn test_range_mmap_mut() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_range_mmap_mut"); - range(SkipMap::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::default(); + range(SkipMap::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_range_mmap_anon() { - range(SkipMap::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::default().len(ARENA_SIZE); + range(SkipMap::mmap_anon(mmap_options).unwrap()); } fn iter_latest(l: SkipMap) { @@ -1550,14 +1680,20 @@ fn test_iter_latest() { fn test_iter_latest_mmap_mut() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_iter_latest_mmap_mut"); - iter_latest(SkipMap::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::default(); + iter_latest(SkipMap::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_iter_latest_mmap_anon() { - iter_latest(SkipMap::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::default().len(ARENA_SIZE); + iter_latest(SkipMap::mmap_anon(mmap_options).unwrap()); } fn 
range_latest(l: SkipMap) { @@ -1600,14 +1736,20 @@ fn test_range_latest() { fn test_range_latest_mmap_mut() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_range_latest_mmap_mut"); - range_latest(SkipMap::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::default(); + range_latest(SkipMap::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_range_latest_mmap_anon() { - range_latest(SkipMap::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::default().len(ARENA_SIZE); + range_latest(SkipMap::mmap_anon(mmap_options).unwrap()); } #[test] @@ -1617,14 +1759,25 @@ fn test_reopen_mmap() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("reopen_skipmap"); { - let l = SkipMap::mmap_mut(&p, ARENA_SIZE, true).unwrap(); + let open_options = OpenOptions::default() + .create(Some(ARENA_SIZE as u64)) + .read(true) + .write(true) + .lock_exclusive(true); + let mmap_options = MmapOptions::default(); + let l = SkipMap::mmap_mut(&p, open_options, mmap_options).unwrap(); for i in 0..1000 { l.insert(0, &key(i), &new_value(i)).unwrap(); } l.flush().unwrap(); } - let l = SkipMap::mmap(&p, false).unwrap(); + let open_options = OpenOptions::default() + .read(true) + .lock_shared(true) + .shrink_on_drop(true); + let mmap_options = MmapOptions::default(); + let l = SkipMap::mmap(&p, open_options, mmap_options).unwrap(); assert_eq!(1000, l.len()); for i in 0..1000 { let k = key(i); @@ -1644,7 +1797,13 @@ fn test_reopen_mmap2() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("reopen_skipmap2"); { - let l = SkipMap::mmap_mut_with_comparator(&p, ARENA_SIZE, true, Ascend).unwrap(); + let open_options = OpenOptions::default() + .create(Some(ARENA_SIZE as u64)) + .read(true) + .write(true) + .lock_shared(true); + 
let mmap_options = MmapOptions::default(); + let l = SkipMap::mmap_mut_with_comparator(&p, open_options, mmap_options, Ascend).unwrap(); let mut data = (0..1000).collect::>(); data.shuffle(&mut rand::thread_rng()); for i in data { @@ -1655,7 +1814,10 @@ fn test_reopen_mmap2() { assert_eq!(l.min_version(), 0); } - let l = SkipMap::::mmap_with_comparator(&p, false, Ascend).unwrap(); + let open_options = OpenOptions::default().read(true); + let mmap_options = MmapOptions::default(); + let l = + SkipMap::::mmap_with_comparator(&p, open_options, mmap_options, Ascend).unwrap(); assert_eq!(1000, l.len()); let mut data = (0..1000).collect::>(); data.shuffle(&mut rand::thread_rng()); @@ -1711,7 +1873,12 @@ fn test_insert_with_panic() { fn test_insert_with_panic_mmap_mut() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_insert_with_mmap_mut"); - insert_with_panic(SkipMap::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::default(); + insert_with_panic(SkipMap::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] @@ -1719,7 +1886,8 @@ fn test_insert_with_panic_mmap_mut() { #[cfg_attr(miri, ignore)] #[should_panic] fn test_insert_with_panic_mmap_anon() { - insert_with_panic(SkipMap::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::default().len(ARENA_SIZE); + insert_with_panic(SkipMap::mmap_anon(mmap_options).unwrap()); } fn insert_with(l: SkipMap) { @@ -1762,12 +1930,18 @@ fn test_insert_with() { fn test_insert_with_mmap_mut() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipmap_insert_with_mmap_mut"); - insert_with(SkipMap::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = OpenOptions::default() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::default(); + insert_with(SkipMap::mmap_mut(p, 
open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_insert_with_mmap_anon() { - insert_with(SkipMap::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::default().len(ARENA_SIZE); + insert_with(SkipMap::mmap_anon(mmap_options).unwrap()); } diff --git a/src/options.rs b/src/options.rs new file mode 100644 index 00000000..d79aa363 --- /dev/null +++ b/src/options.rs @@ -0,0 +1,536 @@ +use fs4::FileExt; +use memmap2::MmapOptions as Mmap2Options; +use std::{ + fs::{File, OpenOptions as StdOpenOptions}, + io, + path::Path, +}; + +/// Options for opening a file for memory mapping. +pub struct OpenOptions { + opts: StdOpenOptions, + create: Option, + create_new: Option, + shrink_on_drop: bool, + lock_shared: bool, + lock_exclusive: bool, +} + +impl From for OpenOptions { + fn from(opts: StdOpenOptions) -> Self { + Self { + opts, + create_new: None, + create: None, + shrink_on_drop: false, + lock_shared: false, + lock_exclusive: true, + } + } +} + +impl Default for OpenOptions { + /// Creates a blank new set of options ready for configuration. + /// + /// All options are initially set to `false`. + /// + /// # Examples + /// + /// ```rust + /// use skl::OpenOptions; + /// + /// let options = OpenOptions::default(); + /// ``` + fn default() -> Self { + Self::new() + } +} + +impl OpenOptions { + /// Creates a blank new set of options ready for configuration. + /// + /// All options are initially set to `false`. + /// + /// # Examples + /// + /// ```rust + /// use skl::OpenOptions; + /// + /// let mut options = OpenOptions::new(); + /// ``` + #[inline] + pub fn new() -> Self { + Self { + opts: StdOpenOptions::new(), + create: None, + create_new: None, + shrink_on_drop: false, + lock_shared: false, + lock_exclusive: true, + } + } + + /// Sets the option for read access. + /// + /// This option, when true, will indicate that the file should be + /// `read`-able if opened. 
+ /// + /// # Examples + /// + /// ```rust + /// use skl::OpenOptions; + /// + /// let opts = OpenOptions::new().read(true); + /// ``` + #[inline] + pub fn read(mut self, read: bool) -> Self { + self.opts.read(read); + self + } + + /// Sets the option for write access. + /// + /// This option, when true, will indicate that the file should be + /// `write`-able if opened. + /// + /// If the file already exists, any write calls on it will overwrite its + /// contents, without truncating it. + /// + /// # Examples + /// + /// ```rust + /// use skl::OpenOptions; + /// + /// let opts = OpenOptions::new().write(true); + /// ``` + #[inline] + pub fn write(mut self, write: bool) -> Self { + self.opts.write(write); + self + } + + /// Sets the option for the append mode. + /// + /// This option, when true, means that writes will append to a file instead + /// of overwriting previous contents. + /// Note that setting `.write(true).append(true)` has the same effect as + /// setting only `.append(true)`. + /// + /// For most filesystems, the operating system guarantees that all writes are + /// atomic: no writes get mangled because another process writes at the same + /// time. + /// + /// One maybe obvious note when using append-mode: make sure that all data + /// that belongs together is written to the file in one operation. This + /// can be done by concatenating strings before passing them to [`write()`], + /// or using a buffered writer (with a buffer of adequate size), + /// and calling [`flush()`] when the message is complete. + /// + /// If a file is opened with both read and append access, beware that after + /// opening, and after every write, the position for reading may be set at the + /// end of the file. So, before writing, save the current position (using + /// [seek]\([SeekFrom](std::io::SeekFrom)::[Current]\(0))), and restore it before the next read. + /// + /// ## Note + /// + /// This function doesn't create the file if it doesn't exist.
Use the + /// [`OpenOptions::create`] method to do so. + /// + /// [`write()`]: std::io::Write::write "io::Write::write" + /// [`flush()`]: std::io::Write::flush "io::Write::flush" + /// [seek]: std::io::Seek::seek "io::Seek::seek" + /// [Current]: std::io::SeekFrom::Current "io::SeekFrom::Current" + /// + /// # Examples + /// + /// ```rust + /// use skl::OpenOptions; + /// + /// let opts = OpenOptions::new().append(true); + /// ``` + #[inline] + pub fn append(mut self, append: bool) -> Self { + self.opts.append(append); + self + } + + /// Sets the option for truncating a previous file. + /// + /// If a file is successfully opened with this option set it will truncate + /// the file to 0 length if it already exists. + /// + /// The file must be opened with write access for truncate to work. + /// + /// # Examples + /// + /// ```rust + /// use skl::OpenOptions; + /// + /// let opts = OpenOptions::new().write(true).truncate(true); + /// ``` + #[inline] + pub fn truncate(mut self, truncate: bool) -> Self { + self.opts.truncate(truncate); + self + } + + /// Sets the option to create a new file, or open it if it already exists. + /// If the file does not exist, it is created and the length of the file is set to the given size. + /// + /// In order for the file to be created, [`OpenOptions::write`] or + /// [`OpenOptions::append`] access must be used. + /// + /// See also [`std::fs::write()`][std::fs::write] for a simple function to + /// create a file with some given data.
+ /// + /// # Examples + /// + /// ```rust + /// use skl::OpenOptions; + /// + /// let opts = OpenOptions::new().write(true).create(Some(1000)); + /// ``` + /// + /// ```rust + /// use skl::OpenOptions; + /// + /// let opts = OpenOptions::new().write(true).create(None); + /// ``` + #[inline] + pub fn create(mut self, size: Option) -> Self { + match size { + Some(size) => { + self.opts.create(true); + self.create = Some(size); + } + None => { + self.opts.create(false); + self.create = None; + } + } + self + } + + /// Sets the option to create a new file and set the file length to the given value, failing if it already exists. + /// + /// No file is allowed to exist at the target location, also no (dangling) symlink. In this + /// way, if the call succeeds, the file returned is guaranteed to be new. + /// + /// This option is useful because it is atomic. Otherwise between checking + /// whether a file exists and creating a new one, the file may have been + /// created by another process (a TOCTOU race condition / attack). + /// + /// If `.create_new(true)` is set, [`.create()`] and [`.truncate()`] are + /// ignored. + /// + /// The file must be opened with write or append access in order to create + /// a new file. + /// + /// [`.create()`]: OpenOptions::create + /// [`.truncate()`]: OpenOptions::truncate + /// + /// # Examples + /// + /// ```rust + /// use skl::OpenOptions; + /// + /// let file = OpenOptions::new() + /// .write(true) + /// .create_new(Some(1000)); + /// ``` + /// + /// ```rust + /// use skl::OpenOptions; + /// + /// let opts = OpenOptions::new() + /// .write(true) + /// .create_new(None); + /// ``` + #[inline] + pub fn create_new(mut self, size: Option) -> Self { + match size { + Some(size) => { + self.opts.create_new(true); + self.create_new = Some(size); + } + None => { + self.opts.create_new(false); + self.create_new = None; + } + } + self + } + + /// Sets the option to make the file shrink to the used size when dropped. 
+ /// + /// This option, when true, will indicate that the file should be shrunk to + /// the size of the data written to it when the file is dropped. + /// + /// # Examples + /// + /// ```rust + /// use skl::OpenOptions; + /// + /// let opts = OpenOptions::new().shrink_on_drop(true); + /// ``` + #[inline] + pub fn shrink_on_drop(mut self, shrink_on_drop: bool) -> Self { + self.shrink_on_drop = shrink_on_drop; + self + } + + /// Sets the option to lock the file for shared read access. + /// + /// This option, when true, will indicate that the file should be locked for + /// shared read access. + /// + /// # Examples + /// + /// ```rust + /// use skl::OpenOptions; + /// + /// let opts = OpenOptions::new().lock_shared(true); + /// ``` + #[inline] + pub fn lock_shared(mut self, lock_shared: bool) -> Self { + self.lock_shared = lock_shared; + self + } + + /// Sets the option to lock the file for exclusive write access. + /// + /// This option, when true, will indicate that the file should be locked for + /// exclusive write access. 
+ /// + /// # Examples + /// + /// ```rust + /// use skl::OpenOptions; + /// + /// let opts = OpenOptions::new().lock_exclusive(true); + /// ``` + #[inline] + pub fn lock_exclusive(mut self, lock_exclusive: bool) -> Self { + self.lock_exclusive = lock_exclusive; + self + } + + pub(crate) fn open>(&self, path: P) -> io::Result { + if let Some(size) = self.create_new { + return self.opts.open(path).and_then(|f| { + if self.lock_exclusive { + f.lock_exclusive()?; + } else if self.lock_shared { + f.lock_shared()?; + } + + f.set_len(size).map(|_| f) + }); + } + + if let Some(size) = self.create { + return if path.as_ref().exists() { + self.opts.open(path).and_then(|f| { + if self.lock_exclusive { + f.lock_exclusive()?; + } else if self.lock_shared { + f.lock_shared()?; + } + Ok(f) + }) + } else { + self.opts.open(path).and_then(|f| { + if self.lock_exclusive { + f.lock_exclusive()?; + } else if self.lock_shared { + f.lock_shared()?; + } + + f.set_len(size).map(|_| f) + }) + }; + } + + self.opts.open(path).and_then(|f| { + if self.lock_exclusive { + f.lock_exclusive()?; + } else if self.lock_shared { + f.lock_shared()?; + } + Ok(f) + }) + } + + #[inline] + pub(crate) const fn is_lock(&self) -> bool { + self.lock_shared || self.lock_exclusive + } + + #[inline] + pub(crate) const fn is_shrink_on_drop(&self) -> bool { + self.shrink_on_drop + } +} + +/// A memory map options for file backed [`SkipMap`](super::SkipMap) and [`SkipSet`](super::SkipSet), +/// providing advanced options and flags for specifying memory map behavior. +#[derive(Clone, Debug)] +pub struct MmapOptions(Mmap2Options); + +impl Default for MmapOptions { + fn default() -> Self { + Self::new() + } +} + +impl From for MmapOptions { + fn from(opts: Mmap2Options) -> Self { + Self(opts) + } +} + +impl MmapOptions { + /// Creates a new set of options for configuring and creating a memory map. + /// + /// # Example + /// + /// ```rust + /// use skl::MmapOptions; + /// + /// // Create a new memory map options. 
+ /// let mut mmap_options = MmapOptions::new(); + /// ``` + #[inline] + pub fn new() -> Self { + Self(Mmap2Options::new()) + } + + /// Configures the created memory mapped buffer to be `len` bytes long. + /// + /// This option is mandatory for anonymous memory maps. + /// + /// For file-backed memory maps, the length will default to the file length. + /// + /// # Example + /// + /// ``` + /// use skl::MmapOptions; + /// + /// let opts = MmapOptions::new().len(9); + /// ``` + #[inline] + pub fn len(mut self, len: usize) -> Self { + self.0.len(len); + self + } + + /// Configures the memory map to start at byte `offset` from the beginning of the file. + /// + /// This option has no effect on anonymous memory maps. + /// + /// By default, the offset is 0. + /// + /// # Example + /// + /// ``` + /// use skl::MmapOptions; + /// + /// let opts = MmapOptions::new().offset(30); + /// ``` + #[inline] + pub fn offset(mut self, offset: u64) -> Self { + self.0.offset(offset); + self + } + + /// Configures the anonymous memory map to be suitable for a process or thread stack. + /// + /// This option corresponds to the `MAP_STACK` flag on Linux. It has no effect on Windows. + /// + /// This option has no effect on file-backed memory maps. + /// + /// # Example + /// + /// ``` + /// use skl::MmapOptions; + /// + /// let stack = MmapOptions::new().stack(); + /// ``` + #[inline] + pub fn stack(mut self) -> Self { + self.0.stack(); + self + } + + /// Configures the anonymous memory map to be allocated using huge pages. + /// + /// This option corresponds to the `MAP_HUGETLB` flag on Linux. It has no effect on Windows. + /// + /// The size of the requested page can be specified in page bits. If not provided, the system + /// default is requested. The requested length should be a multiple of this, or the mapping + /// will fail. + /// + /// This option has no effect on file-backed memory maps. 
+ /// + /// # Example + /// + /// ``` + /// use skl::MmapOptions; + /// + /// let stack = MmapOptions::new().huge(Some(21)).len(2*1024*1024); + /// ``` + #[inline] + pub fn huge(mut self, page_bits: Option) -> Self { + self.0.huge(page_bits); + self + } + + /// Populate (prefault) page tables for a mapping. + /// + /// For a file mapping, this causes read-ahead on the file. This will help to reduce blocking on page faults later. + /// + /// This option corresponds to the `MAP_POPULATE` flag on Linux. It has no effect on Windows. + /// + /// # Example + /// + /// ``` + /// use skl::MmapOptions; + /// + /// + /// let opts = MmapOptions::new().populate(); + /// ``` + #[inline] + pub fn populate(mut self) -> Self { + self.0.populate(); + self + } + + #[inline] + pub(crate) unsafe fn map(&self, file: &File) -> io::Result { + self.0.map(file) + } + + #[inline] + pub(crate) unsafe fn map_mut(&self, file: &File) -> io::Result { + self.0.map_mut(file) + } + + #[inline] + pub(crate) fn map_anon(&self) -> io::Result { + self.0.map_anon() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_from() { + let opts = StdOpenOptions::new(); + let _open_opts = OpenOptions::from(opts); + + let opts = Mmap2Options::new(); + let _mmap_opts = MmapOptions::from(opts); + } +} diff --git a/src/set.rs b/src/set.rs index cc64e29c..87b46ec5 100644 --- a/src/set.rs +++ b/src/set.rs @@ -3,6 +3,8 @@ use core::{ ops::{Bound, RangeBounds}, }; +#[cfg(all(feature = "memmap", not(target_family = "wasm")))] +use super::{invalid_data, MmapOptions, OpenOptions}; use crate::Trailer; use super::{arena::Arena, sync::Ordering, Ascend, Comparator, MAX_HEIGHT}; @@ -48,12 +50,26 @@ unsafe impl Send for SkipSet {} unsafe impl Sync for SkipSet {} // --------------------------------Public Methods-------------------------------- +impl Clone for SkipSet { + fn clone(&self) -> Self { + Self { + arena: self.arena.clone(), + head: self.head, + tail: self.tail, + ro: self.ro, + #[cfg(all(test, 
feature = "std"))] + yield_now: self.yield_now, + cmp: self.cmp.clone(), + } + } +} + impl SkipSet { /// Returns the height of the highest tower within any of the nodes that /// have ever been allocated as part of this skiplist. #[inline] pub fn height(&self) -> u32 { - self.arena.height.load(Ordering::Acquire) + self.arena.height() } /// Returns the number of bytes that have allocated from the arena. @@ -77,7 +93,7 @@ impl SkipSet { /// Returns the number of entries in the skipset. #[inline] pub fn len(&self) -> usize { - self.arena.len.load(Ordering::Acquire) as usize + self.arena.len() as usize } /// Returns true if the skipset is empty. @@ -95,13 +111,13 @@ impl SkipSet { /// Returns the maximum version of all entries in the set. #[inline] pub fn max_version(&self) -> u64 { - self.arena.max_version.load(Ordering::Acquire) + self.arena.max_version() } /// Returns the minimum version of all entries in the set. #[inline] pub fn min_version(&self) -> u64 { - self.arena.min_version.load(Ordering::Acquire) + self.arena.min_version() } /// Flushes outstanding memory map modifications to disk. @@ -169,10 +185,10 @@ impl SkipSet { #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] pub fn mmap_mut>( path: P, - cap: usize, - lock: bool, + open_options: OpenOptions, + mmap_options: MmapOptions, ) -> std::io::Result { - Self::mmap_mut_with_comparator(path, cap, lock, Ascend) + Self::mmap_mut_with_comparator(path, open_options, mmap_options, Ascend) } /// Open an exist file and mmap it to create skipset. 
@@ -180,8 +196,12 @@ impl SkipSet { /// `lock`: whether to lock the underlying file or not #[cfg(all(feature = "memmap", not(target_family = "wasm")))] #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - pub fn mmap>(path: P, lock: bool) -> std::io::Result { - Self::mmap_with_comparator(path, lock, Ascend) + pub fn mmap>( + path: P, + open_options: OpenOptions, + mmap_options: MmapOptions, + ) -> std::io::Result { + Self::mmap_with_comparator(path, open_options, mmap_options, Ascend) } /// Create a new skipset according to the given capacity, and mmap anon. @@ -201,15 +221,15 @@ impl SkipSet { /// [`SkipSet::new`]: #method.new #[cfg(all(feature = "memmap", not(target_family = "wasm")))] #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - pub fn mmap_anon(cap: usize) -> std::io::Result { - Self::mmap_anon_with_comparator(cap, Ascend) + pub fn mmap_anon(opts: MmapOptions) -> std::io::Result { + Self::mmap_anon_with_comparator(opts, Ascend) } } impl SkipSet { /// Like [`SkipSet::new`], but with a custom comparator. 
pub fn with_comparator(cap: usize, cmp: C) -> Result { - let arena = Arena::new_vec(cap, Node::::min_cap()); + let arena = Arena::new_vec(cap, Node::::min_cap(), Node::::alignment() as usize); Self::new_in(arena, cmp, false) } @@ -218,13 +238,14 @@ impl SkipSet { #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] pub fn mmap_mut_with_comparator>( path: P, - cap: usize, - lock: bool, + open_options: OpenOptions, + mmap_options: MmapOptions, cmp: C, ) -> std::io::Result { - let arena = Arena::mmap_mut(cap, Node::::min_cap(), path, lock)?; - Self::new_in(arena, cmp, false) - .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e)) + let alignment = Node::::alignment() as usize; + let min_cap = Node::::min_cap(); + let arena = Arena::mmap_mut(path, open_options, mmap_options, min_cap, alignment)?; + Self::new_in(arena, cmp, false).map_err(invalid_data) } /// Like [`SkipSet::mmap`], but with a custom comparator. @@ -232,25 +253,31 @@ impl SkipSet { #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] pub fn mmap_with_comparator>( path: P, - lock: bool, + open_options: OpenOptions, + mmap_options: MmapOptions, cmp: C, ) -> std::io::Result { - let arena = Arena::mmap(Node::::min_cap(), path, lock)?; - Self::new_in(arena, cmp, true) - .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e)) + let alignment = Node::::alignment() as usize; + let min_cap = Node::::min_cap(); + let arena = Arena::mmap(path, open_options, mmap_options, min_cap, alignment)?; + Self::new_in(arena, cmp, true).map_err(invalid_data) } /// Like [`SkipSet::mmap_anon`], but with a custom comparator. 
#[cfg(all(feature = "memmap", not(target_family = "wasm")))] #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - pub fn mmap_anon_with_comparator(cap: usize, cmp: C) -> std::io::Result { - let arena = Arena::new_anonymous_mmap(cap, Node::::min_cap())?; - Self::new_in(arena, cmp, false) - .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e)) + pub fn mmap_anon_with_comparator(opts: MmapOptions, cmp: C) -> std::io::Result { + let alignment = Node::::alignment() as usize; + let min_cap = Node::::min_cap(); + let arena = Arena::new_anonymous_mmap(opts, min_cap, alignment)?; + Self::new_in(arena, cmp, false).map_err(invalid_data) } /// Clear the skiplist to empty and re-initialize. - pub fn clear(&mut self) { + /// + /// # Safety + /// This method is not concurrency safe; invokers must ensure that no other threads are accessing the skipset. + pub unsafe fn clear(&mut self) { let head = Node::new_empty_node_ptr(&self.arena) .expect("arena is not large enough to hold the head node"); let tail = Node::new_empty_node_ptr(&self.arena) @@ -270,8 +297,7 @@ impl SkipSet { self.head = head; self.tail = tail; - self.arena.height.store(1, Ordering::Release); - self.arena.len.store(0, Ordering::Release); + self.arena.clear(); } fn new_in(arena: Arena, cmp: C, ro: bool) -> Result { @@ -489,7 +515,7 @@ impl SkipSet { // Try to increase self.height via CAS.
let mut list_height = self.height(); while height > list_height { - match self.arena.height.compare_exchange_weak( + match self.arena.atomic_height().compare_exchange_weak( list_height, height, Ordering::SeqCst, @@ -776,7 +802,7 @@ impl SkipSet { ins.spl[i].prev = nd; } } - self.arena.len.fetch_add(1, Ordering::AcqRel); + self.arena.incr_len(); self.arena.update_max_version(version); self.arena.update_min_version(version); Ok(None) diff --git a/src/set/tests.rs b/src/set/tests.rs index 3baa97f6..705be074 100644 --- a/src/set/tests.rs +++ b/src/set/tests.rs @@ -56,14 +56,21 @@ fn test_empty() { fn test_empty_mmap_mut() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipset_empty_mmap_mut"); - empty_in(SkipSet::mmap_mut(p, 1000, true).unwrap()); + let open_options = OpenOptions::new() + .create_new(Some(1000)) + .read(true) + .write(true) + .lock_shared(true); + let mmap_options = MmapOptions::new(); + empty_in(SkipSet::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_empty_mmap_anon() { - empty_in(SkipSet::mmap_anon(1000).unwrap()); + let mmap_options = MmapOptions::new().len(1000); + empty_in(SkipSet::mmap_anon(mmap_options).unwrap()); } fn full_in(l: impl FnOnce(usize) -> SkipSet) { @@ -98,14 +105,26 @@ fn test_full() { fn test_full_mmap_mut() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipset_full_mmap_mut"); - full_in(|n| SkipSet::mmap_mut(p, n, true).unwrap()); + + full_in(|n| { + let open_options = OpenOptions::new() + .create_new(Some(n as u64)) + .read(true) + .write(true) + .lock_exclusive(true); + let mmap_options = MmapOptions::new(); + SkipSet::mmap_mut(p, open_options, mmap_options).unwrap() + }); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_full_mmap_anon() { - full_in(|n| SkipSet::mmap_anon(n).unwrap()); + full_in(|n| { + let opts = MmapOptions::new().len(n); + SkipSet::mmap_anon(opts).unwrap() + }); 
} fn basic_in(mut l: SkipSet) { @@ -181,8 +200,9 @@ fn basic_in(mut l: SkipSet) { assert_eq!(arr, [a2, a1, b2, b1]); } - l.clear(); + unsafe { l.clear() }; + let l = l.clone(); { let it = l.iter_all_versions(0); assert!(it.min().is_none()); @@ -208,14 +228,20 @@ fn test_basic() { fn test_basic_mmap_mut() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipset_basic_mmap_mut"); - basic_in(SkipSet::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = OpenOptions::new() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::new(); + basic_in(SkipSet::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_basic_mmap_anon() { - basic_in(SkipSet::mmap_anon(ARENA_SIZE).unwrap()); + let opts = MmapOptions::new().len(ARENA_SIZE); + basic_in(SkipSet::mmap_anon(opts).unwrap()); } fn iter_all_versions_mvcc(l: SkipSet) { @@ -313,14 +339,20 @@ fn test_iter_all_versions_mvcc_mmap_mut() { let p = dir .path() .join("test_skipset_iter_all_versions_mvcc_mmap_mut"); - iter_all_versions_mvcc(SkipSet::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = OpenOptions::new() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::new(); + iter_all_versions_mvcc(SkipSet::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_iter_all_versions_mvcc_mmap_anon() { - iter_all_versions_mvcc(SkipSet::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::new().len(ARENA_SIZE); + iter_all_versions_mvcc(SkipSet::mmap_anon(mmap_options).unwrap()); } fn ordering() { @@ -400,14 +432,21 @@ fn test_get_mvcc() { fn test_get_mvcc_mmap_mut() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipset_get_mvcc_mmap_mut"); - get_mvcc(SkipSet::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = 
OpenOptions::new() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::new(); + + get_mvcc(SkipSet::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_get_mvcc_mmap_anon() { - get_mvcc(SkipSet::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::new().len(ARENA_SIZE); + get_mvcc(SkipSet::mmap_anon(mmap_options).unwrap()); } fn gt_in(l: SkipSet) { @@ -488,14 +527,21 @@ fn test_gt() { fn test_gt_mmap_mut() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipset_gt_mmap_mut"); - gt_in(SkipSet::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = OpenOptions::new() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::new(); + + gt_in(SkipSet::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_gt_mmap_anon() { - gt_in(SkipSet::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::new().len(ARENA_SIZE); + gt_in(SkipSet::mmap_anon(mmap_options).unwrap()); } fn ge_in(l: SkipSet) { @@ -574,14 +620,21 @@ fn test_ge() { fn test_ge_mmap_mut() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipset_ge_mmap_mut"); - ge_in(SkipSet::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = OpenOptions::new() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::new(); + + ge_in(SkipSet::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_ge_mmap_anon() { - ge_in(SkipSet::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::new().len(ARENA_SIZE); + ge_in(SkipSet::mmap_anon(mmap_options).unwrap()); } fn le_in(l: SkipSet) { @@ -686,14 +739,20 @@ fn test_le() { fn test_le_mmap_mut() { let dir = tempfile::tempdir().unwrap(); let 
p = dir.path().join("test_skipset_le_mmap_mut"); - gt_in(SkipSet::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = OpenOptions::new() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::new(); + gt_in(SkipSet::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_le_mmap_anon() { - gt_in(SkipSet::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::new().len(ARENA_SIZE); + gt_in(SkipSet::mmap_anon(mmap_options).unwrap()); } fn lt_in(l: SkipSet) { @@ -780,14 +839,20 @@ fn test_lt() { fn test_lt_mmap_mut() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipset_lt_mmap_mut"); - lt_in(SkipSet::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = OpenOptions::new() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::new(); + lt_in(SkipSet::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_lt_mmap_anon() { - lt_in(SkipSet::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::new().len(ARENA_SIZE); + lt_in(SkipSet::mmap_anon(mmap_options).unwrap()); } fn test_basic_large_testcases_in(l: Arc) { @@ -821,7 +886,13 @@ fn test_basic_large_testcases_mmap_mut() { let p = dir .path() .join("test_skipset_basic_large_testcases_mmap_mut"); - let l = Arc::new(SkipSet::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = OpenOptions::new() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::new(); + + let l = Arc::new(SkipSet::mmap_mut(p, open_options, mmap_options).unwrap()); test_basic_large_testcases_in(l); } @@ -829,7 +900,8 @@ fn test_basic_large_testcases_mmap_mut() { #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_basic_large_testcases_mmap_anon() { - let l = 
Arc::new(SkipSet::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::new().len(ARENA_SIZE); + let l = Arc::new(SkipSet::mmap_anon(mmap_options).unwrap()); test_basic_large_testcases_in(l); } @@ -874,8 +946,14 @@ fn test_concurrent_basic() { fn test_concurrent_basic_mmap_mut() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipset_concurrent_basic_mmap_mut"); + let open_options = OpenOptions::new() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::new(); + let l = Arc::new( - SkipSet::mmap_mut(p, ARENA_SIZE, true) + SkipSet::mmap_mut(p, open_options, mmap_options) .unwrap() .with_yield_now(), ); @@ -886,8 +964,9 @@ fn test_concurrent_basic_mmap_mut() { #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_concurrent_basic_mmap_anon() { + let mmap_options = MmapOptions::new().len(ARENA_SIZE); test_concurrent_basic_runner(Arc::new( - SkipSet::mmap_anon(ARENA_SIZE).unwrap().with_yield_now(), + SkipSet::mmap_anon(mmap_options).unwrap().with_yield_now(), )); } @@ -933,8 +1012,14 @@ fn test_concurrent_basic_big_keys_mmap_mut() { let p = dir .path() .join("test_skipset_concurrent_basic_big_keys_mmap_mut"); + let open_options = OpenOptions::new() + .create_new(Some(120 << 20)) + .read(true) + .write(true); + let mmap_options = MmapOptions::new(); + test_concurrent_basic_big_keys_runner(Arc::new( - SkipSet::mmap_mut(p, 120 << 20, true) + SkipSet::mmap_mut(p, open_options, mmap_options) .unwrap() .with_yield_now(), )); @@ -944,8 +1029,10 @@ fn test_concurrent_basic_big_keys_mmap_mut() { #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_concurrent_basic_big_keys_mmap_anon() { + let mmap_options = MmapOptions::new().len(120 << 20); + test_concurrent_basic_big_keys_runner(Arc::new( - SkipSet::mmap_anon(120 << 20).unwrap().with_yield_now(), + SkipSet::mmap_anon(mmap_options).unwrap().with_yield_now(), )); } @@ -1000,8 +1087,13 @@ fn test_concurrent_one_key() { fn 
test_concurrent_one_key_mmap_mut() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipset_concurrent_one_key_mmap_mut"); + let open_options = OpenOptions::new() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::new(); concurrent_one_key(Arc::new( - SkipSet::mmap_mut(p, ARENA_SIZE, true) + SkipSet::mmap_mut(p, open_options, mmap_options) .unwrap() .with_yield_now(), )); @@ -1011,8 +1103,9 @@ fn test_concurrent_one_key_mmap_mut() { #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_concurrent_one_key_mmap_anon() { + let mmap_options = MmapOptions::new().len(ARENA_SIZE); concurrent_one_key(Arc::new( - SkipSet::mmap_anon(ARENA_SIZE).unwrap().with_yield_now(), + SkipSet::mmap_anon(mmap_options).unwrap().with_yield_now(), )); } @@ -1048,14 +1141,20 @@ fn test_iter_all_versionsator_next_mmap_mut() { let p = dir .path() .join("test_skipset_iter_all_versionsator_next_mmap_mut"); - iter_all_versionsator_next(SkipSet::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = OpenOptions::new() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::new(); + iter_all_versionsator_next(SkipSet::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_iter_all_versionsator_next_mmap_anon() { - iter_all_versionsator_next(SkipSet::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::new().len(ARENA_SIZE); + iter_all_versionsator_next(SkipSet::mmap_anon(mmap_options).unwrap()); } fn range_next(l: SkipSet) { @@ -1095,14 +1194,20 @@ fn test_range_next() { fn test_range_next_mmap_mut() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipset_range_next_mmap_mut"); - iter_all_versionsator_next(SkipSet::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = OpenOptions::new() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); 
+ let mmap_options = MmapOptions::new(); + iter_all_versionsator_next(SkipSet::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_range_next_mmap_anon() { - iter_all_versionsator_next(SkipSet::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::new().len(ARENA_SIZE); + iter_all_versionsator_next(SkipSet::mmap_anon(mmap_options).unwrap()); } fn iter_all_versionsator_prev(l: SkipSet) { @@ -1138,14 +1243,20 @@ fn test_iter_all_versionsator_prev_mmap_mut() { let p = dir .path() .join("test_skipset_iter_all_versionsator_prev_mmap_mut"); - iter_all_versionsator_prev(SkipSet::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = OpenOptions::new() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::new(); + iter_all_versionsator_prev(SkipSet::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_iter_all_versionsator_prev_mmap_anon() { - iter_all_versionsator_prev(SkipSet::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::new().len(ARENA_SIZE); + iter_all_versionsator_prev(SkipSet::mmap_anon(mmap_options).unwrap()); } fn range_prev(l: SkipSet) { @@ -1185,14 +1296,20 @@ fn test_range_prev() { fn test_range_prev_mmap_mut() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipset_range_prev_mmap_mut"); - range_prev(SkipSet::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = OpenOptions::new() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::new(); + range_prev(SkipSet::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_range_prev_mmap_anon() { - range_prev(SkipSet::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::new().len(ARENA_SIZE); + 
range_prev(SkipSet::mmap_anon(mmap_options).unwrap()); } fn iter_all_versionsator_seek_ge(l: SkipSet) { @@ -1249,14 +1366,20 @@ fn test_iter_all_versionsator_seek_ge_mmap_mut() { let p = dir .path() .join("test_skipset_iter_all_versionsator_seek_ge_mmap_mut"); - iter_all_versionsator_seek_ge(SkipSet::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = OpenOptions::new() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::new(); + iter_all_versionsator_seek_ge(SkipSet::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_iter_all_versionsator_seek_ge_mmap_anon() { - iter_all_versionsator_seek_ge(SkipSet::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::new().len(ARENA_SIZE); + iter_all_versionsator_seek_ge(SkipSet::mmap_anon(mmap_options).unwrap()); } fn iter_all_versionsator_seek_lt(l: SkipSet) { @@ -1305,14 +1428,20 @@ fn test_iter_all_versionsator_seek_lt_mmap_mut() { let p = dir .path() .join("test_skipset_iter_all_versionsator_seek_lt_mmap_mut"); - iter_all_versionsator_seek_lt(SkipSet::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = OpenOptions::new() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::new(); + iter_all_versionsator_seek_lt(SkipSet::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_iter_all_versionsator_seek_lt_mmap_anon() { - iter_all_versionsator_seek_lt(SkipSet::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::new().len(ARENA_SIZE); + iter_all_versionsator_seek_lt(SkipSet::mmap_anon(mmap_options).unwrap()); } fn range(l: SkipSet) { @@ -1403,14 +1532,20 @@ fn test_range() { fn test_range_mmap_mut() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipset_range_mmap_mut"); - range(SkipSet::mmap_mut(p, ARENA_SIZE, 
true).unwrap()); + let open_options = OpenOptions::new() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::new(); + range(SkipSet::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_range_mmap_anon() { - range(SkipSet::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::new().len(ARENA_SIZE); + range(SkipSet::mmap_anon(mmap_options).unwrap()); } fn iter_latest(l: SkipSet) { @@ -1449,14 +1584,20 @@ fn test_iter_latest() { fn test_iter_latest_mmap_mut() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipset_iter_latest_mmap_mut"); - iter_latest(SkipSet::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = OpenOptions::new() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::new(); + iter_latest(SkipSet::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_iter_latest_mmap_anon() { - iter_latest(SkipSet::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::new().len(ARENA_SIZE); + iter_latest(SkipSet::mmap_anon(mmap_options).unwrap()); } fn range_latest(l: SkipSet) { @@ -1495,14 +1636,20 @@ fn test_range_latest() { fn test_range_latest_mmap_mut() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("test_skipset_range_latest_mmap_mut"); - range_latest(SkipSet::mmap_mut(p, ARENA_SIZE, true).unwrap()); + let open_options = OpenOptions::new() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::new(); + range_latest(SkipSet::mmap_mut(p, open_options, mmap_options).unwrap()); } #[test] #[cfg(feature = "memmap")] #[cfg_attr(miri, ignore)] fn test_range_latest_mmap_anon() { - range_latest(SkipSet::mmap_anon(ARENA_SIZE).unwrap()); + let mmap_options = MmapOptions::new().len(ARENA_SIZE); + 
range_latest(SkipSet::mmap_anon(mmap_options).unwrap()); } #[test] @@ -1512,14 +1659,21 @@ fn test_reopen_mmap() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("reopen_skipmap"); { - let l = SkipSet::mmap_mut(&p, ARENA_SIZE, true).unwrap(); + let open_options = OpenOptions::new() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::new(); + let l = SkipSet::mmap_mut(&p, open_options, mmap_options).unwrap(); for i in 0..1000 { l.insert(0, &key(i)).unwrap(); } l.flush().unwrap(); } - let l = SkipSet::mmap(&p, false).unwrap(); + let open_options = OpenOptions::new().read(true); + let mmap_options = MmapOptions::new(); + let l = SkipSet::mmap(&p, open_options, mmap_options).unwrap(); assert_eq!(1000, l.len()); for i in 0..1000 { let k = key(i); @@ -1538,7 +1692,12 @@ fn test_reopen_mmap2() { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join("reopen_skipset2"); { - let l = SkipSet::mmap_mut_with_comparator(&p, ARENA_SIZE, true, Ascend).unwrap(); + let open_options = OpenOptions::new() + .create_new(Some(ARENA_SIZE as u64)) + .read(true) + .write(true); + let mmap_options = MmapOptions::new(); + let l = SkipSet::mmap_mut_with_comparator(&p, open_options, mmap_options, Ascend).unwrap(); let mut data = (0..1000).collect::>(); data.shuffle(&mut rand::thread_rng()); for i in data { @@ -1549,7 +1708,10 @@ fn test_reopen_mmap2() { assert_eq!(l.min_version(), 0); } - let l = SkipSet::::mmap_with_comparator(&p, false, Ascend).unwrap(); + let open_options = OpenOptions::new().read(true); + let mmap_options = MmapOptions::new(); + let l = + SkipSet::::mmap_with_comparator(&p, open_options, mmap_options, Ascend).unwrap(); assert_eq!(1000, l.len()); let mut data = (0..1000).collect::>(); data.shuffle(&mut rand::thread_rng());