From e3466cb4f3cd46b4a3f296552e4c528f01a21d74 Mon Sep 17 00:00:00 2001 From: Al Liu Date: Tue, 12 Nov 2024 17:52:56 +0800 Subject: [PATCH] 0.20.0 (#48) - Add dynamic `SkipMap`s - Support create multiple `SkipMap`s on the same Arena - Improve docs on some constructor methods --- .github/workflows/ci.yml | 54 +- CHANGELOG.md | 6 + Cargo.toml | 41 +- README.md | 8 +- benches/bench.rs | 14 +- ci/sanitizer.sh | 8 +- examples/heap.rs | 11 +- examples/mmap.rs | 12 +- examples/mmap_anon.rs | 11 +- examples/multiple_maps.rs | 78 + integration/src/bin/test-mmap-anon.rs | 15 +- integration/src/bin/test-mmap.rs | 15 +- integration/src/bin/test-vec.rs | 15 +- src/allocator.rs | 84 +- src/dynamic.rs | 24 + src/dynamic/builder.rs | 190 ++ src/dynamic/builder/memmap.rs | 228 ++ src/dynamic/list.rs | 1252 +++++++++ src/dynamic/list/api.rs | 336 +++ src/dynamic/list/api/update.rs | 456 ++++ src/dynamic/list/entry.rs | 298 +++ src/{base => dynamic/list}/iterator.rs | 0 src/dynamic/list/iterator/all_versions.rs | 692 +++++ src/dynamic/list/iterator/iter.rs | 162 ++ src/dynamic/multiple_version.rs | 1460 ++++++++++ src/dynamic/unique.rs | 1179 ++++++++ src/generic.rs | 22 + src/generic/builder.rs | 138 + src/generic/builder/memmap.rs | 228 ++ src/{base.rs => generic/list.rs} | 260 +- src/{base => generic/list}/api.rs | 133 +- src/{base => generic/list}/api/update.rs | 44 +- src/{base => generic/list}/entry.rs | 57 +- src/generic/list/iterator.rs | 5 + .../list}/iterator/all_versions.rs | 74 +- src/{base => generic/list}/iterator/iter.rs | 52 +- src/{traits => generic}/multiple_version.rs | 543 +++- src/{traits/map.rs => generic/unique.rs} | 470 +++- src/lib.rs | 134 +- src/options.rs | 979 +++++-- src/options/open_options.rs | 254 +- src/ref_counter.rs | 65 + src/sync.rs | 10 +- src/sync/map.rs | 81 +- src/sync/multiple_version.rs | 84 +- src/tests.rs | 91 +- src/tests/dynamic.rs | 62 + src/tests/dynamic/map.rs | 1686 ++++++++++++ src/tests/dynamic/multiple_version.rs | 2363 +++++++++++++++++ src/tests/generic.rs | 62 + src/tests/{ => generic}/map.rs | 265 +- src/tests/{ => generic}/multiple_version.rs | 291 +- src/traits.rs | 348 ++- src/types.rs | 156 +- src/unsync.rs | 9 +- src/unsync/map.rs | 58 +- src/unsync/multiple_version.rs | 67 +- 57 files changed, 14007 insertions(+), 1733 deletions(-) create mode 100644 examples/multiple_maps.rs create mode 100644 src/dynamic.rs create mode 100644 src/dynamic/builder.rs create mode 100644 src/dynamic/builder/memmap.rs create mode 100644 src/dynamic/list.rs create mode 100644 src/dynamic/list/api.rs create mode 100644 src/dynamic/list/api/update.rs create mode 100644 src/dynamic/list/entry.rs rename src/{base => dynamic/list}/iterator.rs (100%) create mode 100644 src/dynamic/list/iterator/all_versions.rs create mode 100644 src/dynamic/list/iterator/iter.rs create mode 100644 src/dynamic/multiple_version.rs create mode 100644 src/dynamic/unique.rs create mode 100644 src/generic.rs create mode 100644 src/generic/builder.rs create mode 100644 src/generic/builder/memmap.rs rename src/{base.rs => generic/list.rs} (89%) rename src/{base => generic/list}/api.rs (77%) rename src/{base => generic/list}/api/update.rs (94%) rename src/{base => generic/list}/entry.rs (86%) create mode 100644 src/generic/list/iterator.rs rename src/{base => generic/list}/iterator/all_versions.rs (90%) rename src/{base => generic/list}/iterator/iter.rs (70%) rename src/{traits => generic}/multiple_version.rs (67%) rename src/{traits/map.rs => generic/unique.rs} (66%) create mode 100644 
src/ref_counter.rs create mode 100644 src/tests/dynamic.rs create mode 100644 src/tests/dynamic/map.rs create mode 100644 src/tests/dynamic/multiple_version.rs create mode 100644 src/tests/generic.rs rename src/tests/{ => generic}/map.rs (86%) rename src/tests/{ => generic}/multiple_version.rs (89%) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 993059f..41914ca 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -250,15 +250,24 @@ jobs: - x86_64-apple-darwin - aarch64-apple-darwin cfg: - - unsync_map - - unsync_versioned - - sync_map - - sync_versioned - - sync_map_concurrent - - sync_multiple_version_concurrent - - sync_map_concurrent_with_optimistic_freelist - - sync_map_concurrent_with_pessimistic_freelist - - sync_multiple_version_concurrent_with_pessimistic_freelist + - generic_unsync_map + - generic_unsync_versioned + - generic_sync_map + - generic_sync_versioned + - generic_sync_map_concurrent + - generic_sync_multiple_version_concurrent + - generic_sync_map_concurrent_with_optimistic_freelist + - generic_sync_map_concurrent_with_pessimistic_freelist + - generic_sync_multiple_version_concurrent_with_pessimistic_freelist + - dynamic_unsync_map + - dynamic_unsync_versioned + - dynamic_sync_map + - dynamic_sync_versioned + - dynamic_sync_map_concurrent + - dynamic_sync_multiple_version_concurrent + - dynamic_sync_map_concurrent_with_optimistic_freelist + - dynamic_sync_map_concurrent_with_pessimistic_freelist + - dynamic_sync_multiple_version_concurrent_with_pessimistic_freelist # Exclude invalid combinations exclude: - os: ubuntu-latest @@ -302,15 +311,24 @@ jobs: - x86_64-apple-darwin - aarch64-apple-darwin cfg: - - unsync_map - - unsync_versioned - - sync_map - - sync_versioned - - sync_map_concurrent - - sync_multiple_version_concurrent - - sync_map_concurrent_with_optimistic_freelist - - sync_map_concurrent_with_pessimistic_freelist - - sync_multiple_version_concurrent_with_pessimistic_freelist + - generic_unsync_map + - generic_unsync_versioned + - generic_sync_map + - generic_sync_versioned + - generic_sync_map_concurrent + - generic_sync_multiple_version_concurrent + - generic_sync_map_concurrent_with_optimistic_freelist + - generic_sync_map_concurrent_with_pessimistic_freelist + - generic_sync_multiple_version_concurrent_with_pessimistic_freelist + - dynamic_unsync_map + - dynamic_unsync_versioned + - dynamic_sync_map + - dynamic_sync_versioned + - dynamic_sync_map_concurrent + - dynamic_sync_multiple_version_concurrent + - dynamic_sync_map_concurrent_with_optimistic_freelist + - dynamic_sync_map_concurrent_with_pessimistic_freelist + - dynamic_sync_multiple_version_concurrent_with_pessimistic_freelist # Exclude invalid combinations exclude: - os: ubuntu-latest diff --git a/CHANGELOG.md b/CHANGELOG.md index 80de7a4..52011a6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,11 @@ # CHANGELOG +## 0.20.0 + +- Add dynamic `SkipMap`s +- Support create multiple `SkipMap`s on the same `Allocator` +- Improve docs on some constructor methods + ## 0.19.0 - Cleanup structures and remove `Trailer`, `TrailedMap` and `FullMap` diff --git a/Cargo.toml b/Cargo.toml index 53ff5d6..02db04e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "skl" -version = "0.19.2" +version = "0.20.0" edition = "2021" rust-version = "1.81.0" repository = "https://github.com/al8n/skl" @@ -30,6 +30,11 @@ name = "mmap-anon" path = "examples/mmap_anon.rs" required-features = ["memmap"] +[[example]] +name = "multiple_maps" +path = 
"examples/multiple_maps.rs" +required-features = ["memmap"] + [features] default = ["std"] alloc = ["rarena-allocator/alloc", "dbutils/alloc"] @@ -46,7 +51,7 @@ getrandom = { version = "0.2", features = ["js"] } among = { version = "0.1", default-features = false, features = ["either"] } arbitrary-int = { version = "1.2", default-features = false } bitflags = "2" -dbutils = { version = "0.9", default-features = false } +dbutils = { version = "0.10", default-features = false } either = { version = "1", default-features = false } memchr = { version = "2", default-features = false, optional = true } rand = { version = "0.8", default-features = false, features = ["getrandom"] } @@ -84,15 +89,25 @@ members = ["integration"] rust_2018_idioms = "warn" single_use_lifetimes = "warn" unexpected_cfgs = { level = "warn", check-cfg = [ - 'cfg(all_tests)', - 'cfg(test_unsync_versioned)', - 'cfg(test_unsync_map)', - 'cfg(test_sync_versioned)', - 'cfg(test_sync_map)', - 'cfg(test_sync_multiple_version_concurrent)', - 'cfg(test_sync_map_concurrent)', - 'cfg(test_sync_multiple_version_concurrent_with_optimistic_freelist)', - 'cfg(test_sync_map_concurrent_with_optimistic_freelist)', - 'cfg(test_sync_multiple_version_concurrent_with_pessimistic_freelist)', - 'cfg(test_sync_map_concurrent_with_pessimistic_freelist)', + 'cfg(all_skl_tests)', + 'cfg(test_generic_unsync_versioned)', + 'cfg(test_generic_unsync_map)', + 'cfg(test_generic_sync_versioned)', + 'cfg(test_generic_sync_map)', + 'cfg(test_generic_sync_multiple_version_concurrent)', + 'cfg(test_generic_sync_map_concurrent)', + 'cfg(test_generic_sync_multiple_version_concurrent_with_optimistic_freelist)', + 'cfg(test_generic_sync_map_concurrent_with_optimistic_freelist)', + 'cfg(test_generic_sync_multiple_version_concurrent_with_pessimistic_freelist)', + 'cfg(test_generic_sync_map_concurrent_with_pessimistic_freelist)', + 'cfg(test_dynamic_unsync_versioned)', + 'cfg(test_dynamic_unsync_map)', + 'cfg(test_dynamic_sync_versioned)', + 'cfg(test_dynamic_sync_map)', + 'cfg(test_dynamic_sync_multiple_version_concurrent)', + 'cfg(test_dynamic_sync_map_concurrent)', + 'cfg(test_dynamic_sync_multiple_version_concurrent_with_optimistic_freelist)', + 'cfg(test_dynamic_sync_map_concurrent_with_optimistic_freelist)', + 'cfg(test_dynamic_sync_multiple_version_concurrent_with_pessimistic_freelist)', + 'cfg(test_dynamic_sync_map_concurrent_with_pessimistic_freelist)', ] } diff --git a/README.md b/README.md index e1ce22f..662cda9 100644 --- a/README.md +++ b/README.md @@ -28,14 +28,14 @@ ```toml [dependencies] - skl = "0.19" + skl = "0.20" ``` - Enable memory map backend ```toml [dependencies] - skl = { version = "0.19", features = ["memmap"] } + skl = { version = "0.20", features = ["memmap"] } ``` ## Features @@ -88,7 +88,7 @@ Please see [examples](https://github.com/al8n/skl/tree/main/examples) folder for ```sh MIRIFLAGS="-Zmiri-strict-provenance -Zmiri-disable-isolation -Zmiri-symbolic-alignment-check" \ - RUSTFLAGS = "--cfg all_tests" \ + RUSTFLAGS = "--cfg all_skl_tests" \ cargo miri test --all-features ``` @@ -96,7 +96,7 @@ Please see [examples](https://github.com/al8n/skl/tree/main/examples) folder for ```sh MIRIFLAGS="-Zmiri-strict-provenance -Zmiri-disable-isolation -Zmiri-symbolic-alignment-check -Zmiri-tree-borrows" \ - RUSTFLAGS = "--cfg all_tests" \ + RUSTFLAGS = "--cfg all_skl_tests" \ cargo miri test --all-features ``` diff --git a/benches/bench.rs b/benches/bench.rs index e19e8f7..55cd433 100644 --- a/benches/bench.rs +++ b/benches/bench.rs @@ -1,9 +1,9 @@ 
use criterion::*; use parking_lot::Mutex; use rand::prelude::*; -use skl::{ - map::{sync::SkipMap, Map}, - *, +use skl::generic::{ + unique::{sync::SkipMap, Map}, + Builder, }; use std::{ collections::*, @@ -45,9 +45,9 @@ fn random_key(rng: &mut ThreadRng) -> Vec { fn bench_read_write_fixed_skiplist_frac(b: &mut Bencher<'_>, frac: &usize) { let frac = *frac; let value = b"00123".to_vec(); - let list = Options::new() + let list = Builder::new() .with_capacity(512 << 20) - .alloc::<_, _, SkipMap<[u8], [u8]>>() + .alloc::>() .unwrap(); let l = list.clone(); let stop = Arc::new(AtomicBool::new(false)); @@ -170,9 +170,9 @@ fn bench_write_fixed_map(c: &mut Criterion) { } fn bench_write_fixed_skiplist(c: &mut Criterion) { - let list = Options::new() + let list = Builder::new() .with_capacity(512 << 20) - .alloc::<_, _, SkipMap<[u8], [u8]>>() + .alloc::>() .unwrap(); let l = list.clone(); let value = b"00123".to_vec(); diff --git a/ci/sanitizer.sh b/ci/sanitizer.sh index 8bfea94..56279a8 100755 --- a/ci/sanitizer.sh +++ b/ci/sanitizer.sh @@ -5,19 +5,19 @@ set -ex export ASAN_OPTIONS="detect_odr_violation=0 detect_leaks=0" # Run address sanitizer -RUSTFLAGS="--cfg all_tests -Z sanitizer=address" \ +RUSTFLAGS="--cfg all_skl_tests -Z sanitizer=address" \ cargo test --lib --all-features --target x86_64-unknown-linux-gnu # Run leak sanitizer -RUSTFLAGS="--cfg all_tests -Z sanitizer=leak" \ +RUSTFLAGS="--cfg all_skl_tests -Z sanitizer=leak" \ cargo test --lib --all-features --target x86_64-unknown-linux-gnu # Run memory sanitizer -RUSTFLAGS="--cfg all_tests -Zsanitizer=memory -Zsanitizer-memory-track-origins" \ +RUSTFLAGS="--cfg all_skl_tests -Zsanitizer=memory -Zsanitizer-memory-track-origins" \ RUSTDOCFLAGS="-Zsanitizer=memory -Zsanitizer-memory-track-origins" \ cargo test -Zbuild-std --release --tests --target x86_64-unknown-linux-gnu --features memmap # Run thread sanitizer -RUSTFLAGS="--cfg all_tests -Z sanitizer=thread" \ +RUSTFLAGS="--cfg all_skl_tests -Z sanitizer=thread" \ cargo -Zbuild-std test --lib --target x86_64-unknown-linux-gnu --features memmap diff --git a/examples/heap.rs b/examples/heap.rs index a509da0..169a933 100644 --- a/examples/heap.rs +++ b/examples/heap.rs @@ -1,6 +1,9 @@ use skl::{ - map::{sync::SkipMap, Map}, - Arena, Options, + generic::{ + unique::{sync::SkipMap, Map}, + Builder, + }, + Arena, }; pub fn key(i: usize) -> Vec { @@ -14,9 +17,9 @@ pub fn new_value(i: usize) -> Vec { fn main() { const N: usize = 1000; - let l = Options::new() + let l = Builder::new() .with_capacity(1 << 20) - .alloc::<_, _, SkipMap<[u8], [u8]>>() + .alloc::>() .unwrap(); for i in 0..N { diff --git a/examples/mmap.rs b/examples/mmap.rs index ad5a955..5e48672 100644 --- a/examples/mmap.rs +++ b/examples/mmap.rs @@ -1,6 +1,9 @@ use skl::{ - map::{sync::SkipMap, Map}, - Arena, Options, + generic::{ + unique::{sync::SkipMap, Map}, + Builder, + }, + Arena, }; pub fn key(i: usize) -> Vec { @@ -10,16 +13,17 @@ pub fn key(i: usize) -> Vec { pub fn new_value(i: usize) -> Vec { format!("{:05}", i).into_bytes() } + fn main() { const N: usize = 1000; let l = unsafe { - Options::new() + Builder::new() .with_capacity(1 << 20) .with_read(true) .with_write(true) .with_create_new(true) - .map_mut::<[u8], [u8], SkipMap<[u8], [u8]>, _>("test.wal") + .map_mut::, _>("test.wal") .unwrap() }; diff --git a/examples/mmap_anon.rs b/examples/mmap_anon.rs index dfae666..6a661d4 100644 --- a/examples/mmap_anon.rs +++ b/examples/mmap_anon.rs @@ -1,6 +1,9 @@ use skl::{ - map::{sync::SkipMap, Map}, - Arena, Options, + 
generic::{ + unique::{sync::SkipMap, Map}, + Builder, + }, + Arena, }; pub fn key(i: usize) -> Vec { @@ -14,9 +17,9 @@ pub fn new_value(i: usize) -> Vec { fn main() { const N: usize = 1000; - let l = Options::new() + let l = Builder::new() .with_capacity(1 << 20) - .map_anon::<[u8], [u8], SkipMap<[u8], [u8]>>() + .map_anon::>() .unwrap(); for i in 0..N { diff --git a/examples/multiple_maps.rs b/examples/multiple_maps.rs new file mode 100644 index 0000000..00e27ac --- /dev/null +++ b/examples/multiple_maps.rs @@ -0,0 +1,78 @@ +use skl::{ + generic::{ + unique::{sync::SkipMap, Map}, + Builder, + }, + Arena, +}; + +pub fn key(i: usize) -> Vec { + format!("{:05}", i).into_bytes() +} + +pub fn new_value(i: usize) -> Vec { + format!("{:05}", i).into_bytes() +} + +/// This example demonstrates how to create multiple SkipMaps on the same ARENA allocator. +fn main() { + unsafe { + let header = { + let l = Builder::new() + .with_create_new(true) + .with_read(true) + .with_write(true) + .with_capacity(1024 * 1024) + .map_mut::, _>("multiple_maps.wal") + .unwrap(); + let l2 = SkipMap::<[u8], [u8]>::create_from_allocator(l.allocator().clone()).unwrap(); + let h2 = l2.header().copied().unwrap(); + + + let t1 = std::thread::spawn(move || { + for i in 0..500 { + l.get_or_insert(key(i).as_slice(), new_value(i).as_slice()) + .unwrap(); + } + l.flush().unwrap(); + }); + + let t2 = std::thread::spawn(move || { + for i in 500..1000 { + l2.get_or_insert(key(i).as_slice(), new_value(i).as_slice()) + .unwrap(); + } + l2.flush().unwrap(); + }); + + t1.join().unwrap(); + t2.join().unwrap(); + + h2 + }; + + let l = Builder::new() + .with_read(true) + .with_write(true) + .with_capacity((1024 * 1024 * 2) as u32) + .map_mut::, _>("multiple_maps.wal") + .unwrap(); + let l2 = SkipMap::<[u8], [u8]>::open_from_allocator(header, l.allocator().clone()).unwrap(); + assert_eq!(500, l.len()); + assert_eq!(500, l2.len()); + + for i in 0..500 { + let k = key(i); + let ent = l.get(k.as_slice()).unwrap(); + assert_eq!(new_value(i).as_slice(), ent.value()); + assert_eq!(ent.key(), k.as_slice()); + } + + for i in 500..1000 { + let k = key(i); + let ent = l2.get(k.as_slice()).unwrap(); + assert_eq!(new_value(i).as_slice(), ent.value()); + assert_eq!(ent.key(), k.as_slice()); + } + } +} diff --git a/integration/src/bin/test-mmap-anon.rs b/integration/src/bin/test-mmap-anon.rs index 171e4c6..62d344a 100644 --- a/integration/src/bin/test-mmap-anon.rs +++ b/integration/src/bin/test-mmap-anon.rs @@ -1,16 +1,19 @@ use integration::{big_value, key, new_value}; use skl::{ - map::{sync::SkipMap, Map}, - *, + generic::{ + unique::{sync::SkipMap, Map}, + Builder, + }, + Arena, }; fn main() { { const N: usize = 10; - let l = Options::new() + let l = Builder::new() .with_capacity(1 << 20) - .map_anon::<[u8], [u8], SkipMap<[u8], [u8]>>() + .map_anon::>() .unwrap(); for i in 0..N { let l = l.clone(); @@ -43,9 +46,9 @@ fn main() { { const N2: usize = 100; - let l = Options::new() + let l = Builder::new() .with_capacity(120 << 20) - .map_anon::<[u8], [u8], SkipMap<[u8], [u8]>>() + .map_anon::>() .unwrap(); for i in 0..N2 { let l = l.clone(); diff --git a/integration/src/bin/test-mmap.rs b/integration/src/bin/test-mmap.rs index fbd1572..e978a3e 100644 --- a/integration/src/bin/test-mmap.rs +++ b/integration/src/bin/test-mmap.rs @@ -1,7 +1,10 @@ use integration::{key, new_value}; use skl::{ - map::{sync::SkipMap, Map}, - *, + generic::{ + unique::{sync::SkipMap, Map}, + Builder, + }, + Arena, }; fn main() { @@ -11,12 +14,12 @@ fn main() { const N: usize 
= 10; let l = unsafe { - Options::new() + Builder::new() .with_capacity(1 << 20) .with_create_new(true) .with_read(true) .with_write(true) - .map_mut::<[u8], [u8], SkipMap<[u8], [u8]>, _>(&p) + .map_mut::, _>(&p) .unwrap() }; @@ -52,11 +55,11 @@ fn main() { const N2: usize = 10; let l = unsafe { - Options::new() + Builder::new() .with_capacity(120 << 20) .with_read(true) .with_write(true) - .map_mut::<[u8], [u8], SkipMap<[u8], [u8]>, _>(&p) + .map_mut::, _>(&p) .unwrap() }; diff --git a/integration/src/bin/test-vec.rs b/integration/src/bin/test-vec.rs index fde5d25..c168550 100644 --- a/integration/src/bin/test-vec.rs +++ b/integration/src/bin/test-vec.rs @@ -1,15 +1,18 @@ use integration::{big_value, key, new_value}; use skl::{ - map::{sync::SkipMap, Map}, - *, + generic::{ + unique::{sync::SkipMap, Map}, + Builder, + }, + Arena, }; fn main() { { const N: usize = 10; - let l = Options::new() + let l = Builder::new() .with_capacity(1 << 20) - .alloc::<_, _, SkipMap<[u8], [u8]>>() + .alloc::>() .unwrap(); for i in 0..N { let l = l.clone(); @@ -41,9 +44,9 @@ fn main() { { const N2: usize = 10; - let l = Options::new() + let l = Builder::new() .with_capacity(120 << 20) - .alloc::<_, _, SkipMap<[u8], [u8]>>() + .alloc::>() .unwrap(); for i in 0..N2 { let l = l.clone(); diff --git a/src/allocator.rs b/src/allocator.rs index 741b25e..e39b35c 100644 --- a/src/allocator.rs +++ b/src/allocator.rs @@ -12,6 +12,7 @@ use super::{ use core::{marker::PhantomData, mem, ptr::NonNull, sync::atomic::Ordering}; +/// The allocator used to allocate nodes in the `SkipMap`. pub trait Allocator: Sealed {} impl Allocator for T where T: Sealed {} @@ -23,7 +24,7 @@ mod sealed { use among::Among; - use crate::internal::Flags; + use crate::{internal::Flags, Header}; use super::*; @@ -440,7 +441,7 @@ mod sealed { } } - pub trait Header: core::fmt::Debug { + pub trait Meta: core::fmt::Debug { fn new(magic_version: u16) -> Self; fn magic_version(&self) -> u16; @@ -473,18 +474,15 @@ mod sealed { impl AllocatorExt for T {} pub trait AllocatorExt: Allocator { - /// Checks if the arena has enough capacity to store the skiplist, - /// and returns the data offset. + /// Returns the header of the arena. 
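+  // Worked example of the rounding used below: with `data_offset() == 20` and
+  // an 8-byte `Meta` alignment, `(offset + alignment - 1) & !(alignment - 1)`
+  // gives `(20 + 7) & !7 == 24`, i.e. the first 8-byte boundary at or after
+  // the data offset; the head and tail nodes are then placed with the same
+  // rounding against the node type's alignment.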
#[inline] - fn check_capacity(&self, max_height: u8) -> Result { + fn calculate_header(&self, max_height: u8) -> Result { let offset = self.data_offset(); - let meta_end = if self.options().unify() { - let alignment = mem::align_of::(); + let meta_end = { + let alignment = mem::align_of::(); let meta_offset = (offset + alignment - 1) & !(alignment - 1); - meta_offset + mem::size_of::() - } else { - offset + meta_offset + mem::size_of::() }; let alignment = mem::align_of::(); @@ -501,33 +499,35 @@ mod sealed { return Err(Error::ArenaTooSmall); } - Ok(tail_end as u32) + Ok(Header::new( + offset as u32, + head_offset as u32, + tail_offset as u32, + )) } #[inline] fn get_pointers( &self, + header: Header, ) -> ( - NonNull, + NonNull, ::Pointer, ::Pointer, ) { unsafe { - let offset = self.data_offset(); - let meta = self.get_aligned_pointer::(offset); + let offset = header.meta_offset() as usize; + let meta = self.get_aligned_pointer::(offset); - let offset = self.offset(meta as _) + mem::size_of::(); - let head_ptr = self.get_aligned_pointer::(offset); - let head_offset = self.offset(head_ptr as _); + let head_offset = header.head_node_offset() as usize; + let head_ptr = self.get_aligned_pointer::(head_offset); let head = <::Pointer as NodePointer>::new( head_offset as u32, NonNull::new_unchecked(head_ptr as _), ); - let (value_offset, _) = head.value_pointer().load(); - let offset = value_offset; - let tail_ptr = self.get_aligned_pointer::(offset as usize); - let tail_offset = self.offset(tail_ptr as _); + let tail_offset = header.tail_node_offset() as usize; + let tail_ptr = self.get_aligned_pointer::(tail_offset); let tail = <::Pointer as NodePointer>::new( tail_offset as u32, NonNull::new_unchecked(tail_ptr as _), @@ -540,7 +540,7 @@ mod sealed { pub trait Sealed: Sized + Clone + core::fmt::Debug + core::ops::Deref { - type Header: Header; + type Meta: Meta; type Node: Node; @@ -548,6 +548,8 @@ mod sealed { fn options(&self) -> &Options; + fn arena(&self) -> &Self::Allocator; + #[inline] fn reserved_bytes(&self) -> usize { ArenaAllocator::reserved_bytes(Deref::deref(self)) @@ -586,7 +588,7 @@ mod sealed { fn fetch_vacant_key<'a, 'b: 'a, E>( &'a self, key_size: u32, - key: impl FnOnce(&mut VacantBuffer<'a>) -> Result<(), E>, + key: impl FnOnce(&mut VacantBuffer<'a>) -> Result, ) -> Result<(u32, VacantBuffer<'a>), Either> { let (key_offset, key_size) = self .alloc_bytes(key_size) @@ -619,7 +621,7 @@ mod sealed { &'a self, size: u32, offset: u32, - f: impl FnOnce(&mut VacantBuffer<'a>) -> Result<(), E>, + f: impl FnOnce(&mut VacantBuffer<'a>) -> Result, ) -> Result<(u32, Pointer), E> { let buf = self.get_pointer_mut(offset as usize); let mut oval = VacantBuffer::new(size as usize, NonNull::new_unchecked(buf)); @@ -649,7 +651,7 @@ mod sealed { &'a self, offset: u32, size: u32, - f: impl FnOnce(&mut VacantBuffer<'a>) -> Result<(), E>, + f: impl FnOnce(&mut VacantBuffer<'a>) -> Result, ) -> Result<(u32, Pointer), E> { let buf = self.get_pointer_mut(offset as usize); let mut oval = VacantBuffer::new(size as usize, NonNull::new_unchecked(buf)); @@ -677,14 +679,17 @@ mod sealed { } #[inline] - fn allocate_header(&self, magic_version: u16) -> Result, ArenaError> { + fn allocate_header( + &self, + magic_version: u16, + ) -> Result<(usize, NonNull), ArenaError> { // Safety: meta does not need to be dropped, and it is recoverable. 
unsafe { - let mut meta = self.alloc::()?; + let mut meta = self.alloc::()?; meta.detach(); - meta.write(Self::Header::new(magic_version)); - Ok(meta.as_mut_ptr()) + meta.write(Self::Meta::new(magic_version)); + Ok((meta.offset(), meta.as_mut_ptr())) } } @@ -703,8 +708,8 @@ mod sealed { &'a self, version: Version, height: u32, - key_builder: KeyBuilder) -> Result<(), KE>>, - value_builder: ValueBuilder) -> Result<(), VE>>, + key_builder: KeyBuilder) -> Result>, + value_builder: ValueBuilder) -> Result>, ) -> Result<(::Pointer, Deallocator), Among> { let (key_size, kf) = key_builder.into_components(); let (value_size, vf) = value_builder.into_components(); @@ -806,7 +811,7 @@ mod sealed { version: Version, height: u32, key_size: usize, - kf: impl FnOnce(&mut VacantBuffer<'a>) -> Result<(), E>, + kf: impl FnOnce(&mut VacantBuffer<'a>) -> Result, ) -> Result<(::Pointer, Deallocator), Either> { let key_size = key_size as u32; @@ -863,7 +868,7 @@ mod sealed { height: u32, key_size: usize, key_offset: u32, - value_builder: ValueBuilder) -> Result<(), E>>, + value_builder: ValueBuilder) -> Result>, ) -> Result<(::Pointer, Deallocator), Either> { let (value_size, vf) = value_builder.into_components(); @@ -940,7 +945,7 @@ mod sealed { fn allocate_and_update_value<'a, E>( &'a self, node: &::Pointer, - value_builder: ValueBuilder) -> Result<(), E>>, + value_builder: ValueBuilder) -> Result>, ) -> Result<(), Either> { let (value_size, f) = value_builder.into_components(); let value_size = value_size as u32; @@ -1015,10 +1020,8 @@ impl core::ops::Deref for GenericAllocator { } } -impl Sealed - for GenericAllocator -{ - type Header = H; +impl Sealed for GenericAllocator { + type Meta = H; type Node = N; @@ -1038,6 +1041,11 @@ impl Sealed } } + #[inline] + fn arena(&self) -> &Self::Allocator { + &self.arena + } + #[inline] fn max_key_size(&self) -> usize { self.opts.max_key_size().into() diff --git a/src/dynamic.rs b/src/dynamic.rs new file mode 100644 index 0000000..f5ddb18 --- /dev/null +++ b/src/dynamic.rs @@ -0,0 +1,24 @@ +#![allow(single_use_lifetimes)] + +mod list; + +/// Dynamic key-value `SkipMap` implementation with multiple versions support. +pub mod multiple_version; + +/// Dynamic key-value `SkipMap` implementation without multiple versions support. +pub mod unique; + +mod builder; +pub use builder::Builder; + +/// Iterators for the skipmaps. +pub mod iter { + pub use super::list::iterator::{Iter, IterAll}; +} + +/// Entry references for the skipmaps. +pub mod entry { + pub use super::list::{EntryRef, VersionedEntryRef}; +} + +pub use dbutils::equivalentor::*; diff --git a/src/dynamic/builder.rs b/src/dynamic/builder.rs new file mode 100644 index 0000000..42ad5e8 --- /dev/null +++ b/src/dynamic/builder.rs @@ -0,0 +1,190 @@ +use core::mem; + +use dbutils::equivalentor::{Ascend, Comparator}; + +use crate::{ + allocator::Sealed, + error::Error, + options::{CompressionPolicy, Freelist}, + traits::Constructable, + types::{Height, KeySize}, + Arena, Options, +}; + +#[cfg(all(feature = "memmap", not(target_family = "wasm")))] +#[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] +mod memmap; + +/// A builder for creating a dynamic key-value `SkipMap`. 
+#[derive(Debug, Clone)]
+pub struct Builder<C = Ascend> {
+  options: Options,
+  cmp: C,
+}
+
+impl<C> Default for Builder<C>
+where
+  C: Default,
+{
+  #[inline]
+  fn default() -> Self {
+    Self {
+      options: Options::new(),
+      cmp: C::default(),
+    }
+  }
+}
+
+impl<C> From<Options> for Builder<C>
+where
+  C: Default,
+{
+  #[inline]
+  fn from(options: Options) -> Self {
+    Self {
+      options,
+      cmp: C::default(),
+    }
+  }
+}
+
+impl<C> From<Builder<C>> for Options {
+  #[inline]
+  fn from(builder: Builder<C>) -> Self {
+    builder.options
+  }
+}
+
+impl Builder {
+  /// Create a new builder with the given options.
+  ///
+  /// ## Example
+  ///
+  /// ```rust
+  /// use skl::dynamic::Builder;
+  ///
+  /// let builder = Builder::new();
+  /// ```
+  #[inline]
+  pub const fn new() -> Self {
+    Self {
+      options: Options::new(),
+      cmp: Ascend,
+    }
+  }
+}
+
+impl<C> Builder<C> {
+  /// Get the options of the builder.
+  ///
+  /// ## Example
+  ///
+  /// ```rust
+  /// use skl::dynamic::Builder;
+  ///
+  /// let builder = Builder::new();
+  /// let options = builder.options();
+  /// ```
+  #[inline]
+  pub const fn options(&self) -> &Options {
+    &self.options
+  }
+
+  /// Set the options for the builder.
+  ///
+  /// ## Example
+  ///
+  /// ```rust
+  /// use skl::{dynamic::Builder, Options};
+  ///
+  /// let builder = Builder::new().with_options(Options::default());
+  /// ```
+  #[inline]
+  pub fn with_options(self, options: Options) -> Self {
+    Self {
+      options,
+      cmp: self.cmp,
+    }
+  }
+
+  /// Get the comparator of the builder.
+  ///
+  /// ## Example
+  ///
+  /// ```rust
+  /// use skl::dynamic::Builder;
+  ///
+  /// let builder = Builder::new();
+  /// let comparator = builder.comparator();
+  /// ```
+  #[inline]
+  pub const fn comparator(&self) -> &C {
+    &self.cmp
+  }
+
+  /// Set the comparator for the builder.
+  ///
+  /// ## Example
+  ///
+  /// ```rust
+  /// use skl::dynamic::{Builder, Descend};
+  ///
+  /// let builder = Builder::new().with_comparator(Descend);
+  ///
+  /// assert_eq!(builder.comparator(), &Descend);
+  /// ```
+  #[inline]
+  pub fn with_comparator<NC>(self, cmp: NC) -> Builder<NC> {
+    Builder {
+      options: self.options,
+      cmp,
+    }
+  }
+
+  crate::__builder_opts!(dynamic::Builder);
+}
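+
+// A hedged, illustrative configuration chain for this builder. `with_capacity`
+// is one of the option setters generated by `__builder_opts!` above, and
+// `Descend` is the reverse-order comparator from `dbutils` (re-exported via
+// `skl::dynamic`). This sketch is not used by the crate itself; it only
+// demonstrates that `with_comparator` changes the builder's type parameter
+// mid-chain.
+#[cfg(test)]
+#[allow(dead_code)]
+fn _builder_configuration_sketch() -> Builder<dbutils::equivalentor::Descend> {
+  Builder::new()
+    .with_comparator(dbutils::equivalentor::Descend) // order raw-byte keys in reverse
+    .with_capacity(1 << 20) // reserve 1 MiB in the backing arena
+}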
+
+impl<C: Comparator> Builder<C> {
+  /// Create a new map which is backed by an `AlignedVec`.
+  ///
+  /// **Note:** The capacity stands for how much memory is allocated;
+  /// it does not mean the skiplist can store `cap` entries.
+  ///
+  /// **What is the difference between this method and [`Builder::map_anon`]?**
+  ///
+  /// 1. This method uses an `AlignedVec`, which ensures we are working within Rust's memory safety guarantees.
+  ///    Even if we are working with raw pointers with `Box::into_raw`,
+  ///    the backend ARENA will reclaim the ownership of this memory by converting it back to a `Box`
+  ///    when dropping the backend ARENA. Since `AlignedVec` uses heap memory, the data might be more cache-friendly,
+  ///    especially if you're frequently accessing or modifying it.
+  ///
+  /// 2. Whereas [`Builder::map_anon`] uses anonymous mmap to request memory from the OS.
+  ///    If you require very large contiguous memory regions, `mmap` might be more suitable because
+  ///    it's more direct in requesting large chunks of memory from the OS.
+  ///
+  /// ## Example
+  ///
+  /// ```rust
+  /// use skl::dynamic::{unique::sync, multiple_version::unsync, Builder};
+  ///
+  /// let map = Builder::new().with_capacity(1024).alloc::<sync::SkipMap>().unwrap();
+  ///
+  /// let arena = Builder::new().with_capacity(1024).alloc::<unsync::SkipMap>().unwrap();
+  /// ```
+  #[inline]
+  pub fn alloc<T>(self) -> Result<T, Error>
+  where
+    T: Arena,
+    T::Constructable: Constructable<Comparator = C>,
+  {
+    let node_align =
+      mem::align_of::<<<T::Constructable as Constructable>::Allocator as Sealed>::Node>();
+
+    let Self { options, cmp } = self;
+    options
+      .to_arena_options()
+      .with_maximum_alignment(node_align)
+      .alloc::<<<T::Constructable as Constructable>::Allocator as Sealed>::Allocator>()
+      .map_err(Into::into)
+      .and_then(|arena| T::construct(arena, options, false, cmp))
+  }
+}
diff --git a/src/dynamic/builder/memmap.rs b/src/dynamic/builder/memmap.rs
new file mode 100644
index 0000000..b639e45
--- /dev/null
+++ b/src/dynamic/builder/memmap.rs
@@ -0,0 +1,228 @@
+use core::mem;
+
+use dbutils::equivalentor::Comparator;
+use either::Either;
+
+use super::Builder;
+use crate::{
+  allocator::{Node, Sealed},
+  error::{bad_magic_version, bad_version, flags_mismtach, invalid_data},
+  options::CURRENT_VERSION,
+  traits::Constructable,
+  Arena,
+};
+
+impl<C: Comparator> Builder<C> {
+  /// Create a new map which is backed by an anonymous memory map.
+  ///
+  /// **What is the difference between this method and [`Builder::alloc`]?**
+  ///
+  /// 1. This method uses anonymous mmap to request memory from the OS directly.
+  ///    If you require very large contiguous memory regions, this method might be more suitable because
+  ///    it's more direct in requesting large chunks of memory from the OS.
+  ///
+  /// 2. Whereas [`Builder::alloc`] uses an `AlignedVec`, which ensures we are working within Rust's memory safety guarantees.
+  ///    Even if we are working with raw pointers with `Box::into_raw`,
+  ///    the backend ARENA will reclaim the ownership of this memory by converting it back to a `Box`
+  ///    when dropping the backend ARENA. Since `AlignedVec` uses heap memory, the data might be more cache-friendly,
+  ///    especially if you're frequently accessing or modifying it.
+  ///
+  /// ## Example
+  ///
+  /// ```rust
+  /// use skl::dynamic::{unique::sync, multiple_version::unsync, Builder};
+  ///
+  /// let map = Builder::new().with_capacity(1024).map_anon::<sync::SkipMap>().unwrap();
+  ///
+  /// let arena = Builder::new().with_capacity(1024).map_anon::<unsync::SkipMap>().unwrap();
+  /// ```
+  #[cfg(all(feature = "memmap", not(target_family = "wasm")))]
+  #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))]
+  #[inline]
+  pub fn map_anon<T>(self) -> std::io::Result<T>
+  where
+    T: Arena,
+    T::Constructable: Constructable<Comparator = C>,
+  {
+    let node_align =
+      mem::align_of::<<<T::Constructable as Constructable>::Allocator as Sealed>::Node>();
+    let Builder { options, cmp } = self;
+
+    options
+      .to_arena_options()
+      .with_maximum_alignment(node_align)
+      .map_anon::<<<T::Constructable as Constructable>::Allocator as Sealed>::Allocator>()
+      .map_err(Into::into)
+      .and_then(|arena| T::construct(arena, options, false, cmp).map_err(invalid_data))
+  }
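+
+  // Note the error-surface asymmetry with `alloc`: the mmap-backed
+  // constructors in this file report failures as `std::io::Error`, so errors
+  // from `T::construct` are wrapped with `invalid_data`, while `alloc` returns
+  // the crate's own error type directly.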
+
+  /// Opens a read-only map which is backed by a file-backed memory map.
+  ///
+  /// ## Safety
+  /// - The file must be created with the same [`Comparator`].
+  /// - All file-backed memory map constructors are marked `unsafe` because of the potential for
+  ///   *Undefined Behavior* (UB) using the map if the underlying file is subsequently modified, in or
+  ///   out of process. Applications must consider the risk and take appropriate precautions when
+  ///   using file-backed maps. Solutions such as file permissions, locks or process-private (e.g.
+  ///   unlinked) files exist but are platform specific and limited.
+  #[cfg(all(feature = "memmap", not(target_family = "wasm")))]
+  #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))]
+  #[inline]
+  pub unsafe fn map<T, P>(self, path: P) -> std::io::Result<T>
+  where
+    T: Arena,
+    T::Constructable: Constructable<Comparator = C>,
+    P: AsRef<std::path::Path>,
+  {
+    self
+      .map_with_path_builder::<T, _, ()>(|| Ok(path.as_ref().to_path_buf()))
+      .map_err(Either::unwrap_right)
+  }
+
+  /// Opens a read-only map which is backed by a file-backed memory map with a path builder.
+  ///
+  /// ## Safety
+  /// - The file must be created with the same [`Comparator`].
+  /// - All file-backed memory map constructors are marked `unsafe` because of the potential for
+  ///   *Undefined Behavior* (UB) using the map if the underlying file is subsequently modified, in or
+  ///   out of process. Applications must consider the risk and take appropriate precautions when
+  ///   using file-backed maps. Solutions such as file permissions, locks or process-private (e.g.
+  ///   unlinked) files exist but are platform specific and limited.
+  #[cfg(all(feature = "memmap", not(target_family = "wasm")))]
+  #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))]
+  #[inline]
+  pub unsafe fn map_with_path_builder<T, PB, E>(
+    self,
+    path_builder: PB,
+  ) -> Result<T, Either<E, std::io::Error>>
+  where
+    T: Arena,
+    T::Constructable: Constructable<Comparator = C>,
+    PB: FnOnce() -> Result<std::path::PathBuf, E>,
+  {
+    use crate::allocator::Meta as _;
+
+    let node_align =
+      mem::align_of::<<<T::Constructable as Constructable>::Allocator as Sealed>::Node>();
+
+    let Self { options, cmp } = self;
+    let magic_version = options.magic_version();
+
+    #[allow(clippy::bind_instead_of_map)]
+    options
+      .to_arena_options()
+      .with_unify(true)
+      .with_read(true)
+      .with_create(false)
+      .with_create_new(false)
+      .with_write(false)
+      .with_truncate(false)
+      .with_append(false)
+      .with_maximum_alignment(node_align)
+      .map_with_path_builder::<<<T::Constructable as Constructable>::Allocator as Sealed>::Allocator, _, _>(path_builder)
+      .and_then(|arena| {
+        T::construct(arena, options, true, cmp)
+          .map_err(invalid_data)
+          .and_then(|map| {
+            let flags = map.meta().flags();
+            let node_flags = <<<T::Constructable as Constructable>::Allocator as Sealed>::Node as Node>::flags();
+
+            if flags != node_flags {
+              return Err(flags_mismtach(flags, node_flags));
+            }
+
+            if Arena::magic_version(&map) != magic_version {
+              Err(bad_magic_version())
+            } else if map.as_ref().version() != CURRENT_VERSION {
+              Err(bad_version())
+            } else {
+              Ok(map)
+            }
+          })
+          .map_err(Either::Right)
+      })
+  }
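+
+  // Reopen-verification order shared by the constructors in this file:
+  // 1. the arena flags must match the node type's flags, so e.g. a file
+  //    created by a sync map cannot be reopened as an unsync map
+  //    (`flags_mismtach`);
+  // 2. the caller-supplied `magic_version` must match the one written at
+  //    creation time (`bad_magic_version`);
+  // 3. the on-disk format version must equal `CURRENT_VERSION` (`bad_version`).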
+ #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_mut(self, path: P) -> std::io::Result + where + T: Arena, + T::Constructable: Constructable, + P: AsRef, + { + self + .map_mut_with_path_builder::(|| Ok(path.as_ref().to_path_buf())) + .map_err(Either::unwrap_right) + } + + /// Creates a new map or reopens a map which backed by a file backed memory map with path builder. + /// + /// # Safety + /// - If you are reopening a file, then the file must be created with the same [`Comparator`]. + /// - All file-backed memory map constructors are marked `unsafe` because of the potential for + /// *Undefined Behavior* (UB) using the map if the underlying file is subsequently modified, in or + /// out of process. Applications must consider the risk and take appropriate precautions when + /// using file-backed maps. Solutions such as file permissions, locks or process-private (e.g. + /// unlinked) files exist but are platform specific and limited. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub unsafe fn map_mut_with_path_builder( + self, + path_builder: PB, + ) -> Result> + where + T: Arena, + T::Constructable: Constructable, + PB: FnOnce() -> Result, + { + use crate::allocator::Meta as _; + + let node_align = + mem::align_of::<<::Allocator as Sealed>::Node>(); + let Self { options, cmp } = self; + let magic_version = options.magic_version(); + let path = path_builder().map_err(Either::Left)?; + let exist = path.exists(); + + #[allow(clippy::bind_instead_of_map)] + options + .to_arena_options() + .with_maximum_alignment(node_align) + .with_unify(true) + .map_mut::<<::Allocator as Sealed>::Allocator, _>(path) + .map_err(Either::Right) + .and_then(|arena| { + T::construct(arena, options, exist, cmp) + .map_err(invalid_data) + .and_then(|map| { + let flags = map.meta().flags(); + let node_flags = + <<::Allocator as Sealed>::Node as Node>::flags(); + + if flags != node_flags { + return Err(flags_mismtach(flags, node_flags)); + } + + if Arena::magic_version(&map) != magic_version { + Err(bad_magic_version()) + } else if map.as_ref().version() != CURRENT_VERSION { + Err(bad_version()) + } else { + Ok(map) + } + }) + .map_err(Either::Right) + }) + } +} diff --git a/src/dynamic/list.rs b/src/dynamic/list.rs new file mode 100644 index 0000000..6547ca9 --- /dev/null +++ b/src/dynamic/list.rs @@ -0,0 +1,1252 @@ +use core::{cmp, ptr::NonNull, sync::atomic::Ordering}; + +use among::Among; +use dbutils::{ + buffer::VacantBuffer, + equivalentor::{Ascend, Comparator}, +}; +use either::Either; +use rarena_allocator::Allocator as _; + +use crate::{ + allocator::{Allocator, Deallocator, Meta, Node, NodePointer, Pointer, ValuePointer}, + encode_key_size_and_height, + error::Error, + internal::RefMeta, + options::CompressionPolicy, + random_height, + ref_counter::RefCounter, + traits::Constructable, + types::{internal::ValuePointer as ValuePointerType, Height, KeyBuilder, ValueBuilder}, + FindResult, Header, Inserter, Splice, Version, +}; + +mod entry; +pub use entry::{EntryRef, VersionedEntryRef}; + +mod api; +pub(super) mod iterator; + +type UpdateOk<'a, 'b, A, RC, C> = Either< + Option>, + Result, VersionedEntryRef<'a, A, RC, C>>, +>; + +/// A fast, cocnurrent map implementation based on skiplist that supports forward +/// and backward iteration. 
+
+/// A fast, concurrent map implementation based on skiplist that supports forward
+/// and backward iteration.
+#[derive(Debug)]
+pub struct SkipList<A, R, C = Ascend>
+where
+  A: Allocator,
+  R: RefCounter,
+{
+  pub(crate) arena: A,
+  meta: RefMeta<A::Meta, R>,
+  head: <A::Node as Node>::Pointer,
+  tail: <A::Node as Node>::Pointer,
+  header: Option<Header>,
+  #[cfg(all(feature = "memmap", not(target_family = "wasm")))]
+  on_disk: bool,
+  /// If set to true by tests, then extra delays are added to make it easier to
+  /// detect unusual race conditions.
+  #[cfg(all(test, feature = "std"))]
+  yield_now: bool,
+
+  cmp: C,
+}
+
+unsafe impl<A, R, C> Send for SkipList<A, R, C>
+where
+  C: Send,
+  A: Allocator + Send,
+  R: RefCounter + Send,
+{
+}
+
+unsafe impl<A, R, C> Sync for SkipList<A, R, C>
+where
+  C: Sync,
+  A: Allocator + Sync,
+  R: RefCounter + Sync,
+{
+}
+
+impl<A, R, C> Clone for SkipList<A, R, C>
+where
+  C: Clone,
+  A: Allocator,
+  R: RefCounter,
+{
+  fn clone(&self) -> Self {
+    Self {
+      arena: self.arena.clone(),
+      meta: self.meta.clone(),
+      #[cfg(all(feature = "memmap", not(target_family = "wasm")))]
+      on_disk: self.on_disk,
+      head: self.head,
+      tail: self.tail,
+      header: self.header,
+      #[cfg(all(test, feature = "std"))]
+      yield_now: self.yield_now,
+      cmp: self.cmp.clone(),
+    }
+  }
+}
+
+impl<A, R, C> SkipList<A, R, C>
+where
+  A: Allocator,
+  R: RefCounter,
+{
+  #[inline]
+  pub(crate) fn meta(&self) -> &A::Meta {
+    &self.meta
+  }
+}
+
+impl<A, R, C> Constructable for SkipList<A, R, C>
+where
+  A: Allocator,
+  R: RefCounter,
+{
+  type Allocator = A;
+  type Comparator = C;
+
+  #[inline]
+  fn allocator(&self) -> &Self::Allocator {
+    &self.arena
+  }
+
+  #[inline]
+  fn allocator_mut(&mut self) -> &mut Self::Allocator {
+    &mut self.arena
+  }
+
+  #[inline]
+  fn magic_version(&self) -> u16 {
+    self.meta().magic_version()
+  }
+
+  #[inline]
+  fn len(&self) -> usize {
+    self.meta().len() as usize
+  }
+
+  #[inline]
+  fn height(&self) -> u8 {
+    self.meta().height()
+  }
+
+  #[inline]
+  fn random_height(&self) -> crate::Height {
+    random_height(self.arena.max_height())
+  }
+
+  #[inline]
+  fn header(&self) -> Option<&Header> {
+    self.header.as_ref()
+  }
+
+  #[inline]
+  fn construct(
+    arena: Self::Allocator,
+    meta: core::ptr::NonNull<<Self::Allocator as crate::allocator::Sealed>::Meta>,
+    head: <<Self::Allocator as crate::allocator::Sealed>::Node as crate::allocator::Node>::Pointer,
+    tail: <<Self::Allocator as crate::allocator::Sealed>::Node as crate::allocator::Node>::Pointer,
+    header: Option<Header>
, + cmp: Self::Comparator, + ) -> Self { + Self { + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + on_disk: arena.is_ondisk(), + meta: RefMeta::new(meta, arena.unify()), + arena, + head, + tail, + header, + #[cfg(all(test, feature = "std"))] + yield_now: false, + cmp, + } + } +} + +impl SkipList +where + A: Allocator, + R: RefCounter, +{ + fn new_node<'a, E>( + &'a self, + version: Version, + height: u32, + key: &Key<'a, '_, A>, + value_builder: Option) -> Result>>, + ) -> Result<(::Pointer, Deallocator), Either> { + let (nd, deallocator) = match key { + Key::Occupied(key) => { + let klen = key.len(); + let kb = KeyBuilder::new(klen, |buf: &mut VacantBuffer<'_>| { + buf.put_slice_unchecked(key); + Ok(klen) + }); + let vb = value_builder.unwrap(); + self + .arena + .allocate_entry_node::<(), E>(version, height, kb, vb) + .map_err(Among::into_middle_right)? + } + Key::Vacant { buf: key, offset } => self.arena.allocate_value_node::( + version, + height, + key.len(), + *offset, + value_builder.unwrap(), + )?, + Key::Pointer { offset, len, .. } => self.arena.allocate_value_node::( + version, + height, + *len as usize, + *offset, + value_builder.unwrap(), + )?, + Key::Remove(key) => { + let klen = key.len(); + self + .arena + .allocate_tombstone_node_with_key_builder::<()>(version, height, klen, |buf| { + buf + .put_slice(key) + .expect("buffer must be large enough for key"); + Ok(klen) + }) + .map_err(|e| Either::Right(e.unwrap_right()))? + } + Key::RemoveVacant { buf: key, offset } => self + .arena + .allocate_tombstone_node::<()>(version, height, *offset, key.len()) + .map_err(|e| Either::Right(e.unwrap_right()))?, + Key::RemovePointer { offset, len, .. } => self + .arena + .allocate_tombstone_node::<()>(version, height, *offset, *len as usize) + .map_err(|e| Either::Right(e.unwrap_right()))?, + }; + + // Try to increase self.height via CAS. + let meta = self.meta(); + let mut list_height = meta.height(); + while height as u8 > list_height { + match meta.compare_exchange_height_weak( + list_height, + height as u8, + Ordering::SeqCst, + Ordering::Acquire, + ) { + // Successfully increased skiplist.height. + Ok(_) => break, + Err(h) => list_height = h, + } + } + Ok((nd, deallocator)) + } +} + +impl SkipList +where + A: Allocator, + R: RefCounter, +{ + /// ## Safety + /// + /// - The caller must ensure that the node is allocated by the arena. + #[inline] + unsafe fn get_prev( + &self, + nd: ::Pointer, + height: usize, + ) -> ::Pointer { + if nd.is_null() { + return ::Pointer::NULL; + } + + if nd.offset() == self.head.offset() { + return self.head; + } + + let offset = nd.prev_offset(&self.arena, height); + ::Pointer::new(offset, unsafe { + NonNull::new_unchecked(self.arena.raw_mut_ptr().add(offset as usize)) + }) + } + + /// ## Safety + /// + /// - The caller must ensure that the node is allocated by the arena. 
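+  // Navigation note: each node stores a tower of per-level prev/next links as
+  // `u32` arena offsets, so both `get_prev` above and `get_next` below amount
+  // to "load the offset at that level, then rebase it onto the arena's raw
+  // pointer". The head/tail sentinel checks short-circuit at the list bounds.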
+ #[inline] + unsafe fn get_next( + &self, + nptr: ::Pointer, + height: usize, + ) -> ::Pointer { + if nptr.is_null() { + return ::Pointer::NULL; + } + + if nptr.offset() == self.tail.offset() { + return self.tail; + } + + let offset = nptr.next_offset(&self.arena, height); + ::Pointer::new(offset, unsafe { + NonNull::new_unchecked(self.arena.raw_mut_ptr().add(offset as usize)) + }) + } +} + +impl SkipList +where + A: Allocator, + C: Comparator, + R: RefCounter, +{ + unsafe fn move_to_prev<'a>( + &'a self, + nd: &mut ::Pointer, + version: Version, + contains_key: impl Fn(&[u8]) -> bool, + ) -> Option> { + loop { + unsafe { + if nd.is_null() || nd.offset() == self.head.offset() { + return None; + } + + if nd.version() > version { + *nd = self.get_prev(*nd, 0); + continue; + } + + let nk = nd.get_key(&self.arena); + if contains_key(nk) { + let pointer = nd.get_value_pointer::(); + let ent = + VersionedEntryRef::from_node_with_pointer(version, *nd, self, pointer, Some(nk)); + return Some(ent); + } + + *nd = self.get_prev(*nd, 0); + } + } + } + + unsafe fn move_to_prev_maximum_version<'a>( + &'a self, + nd: &mut ::Pointer, + version: Version, + contains_key: impl Fn(&[u8]) -> bool, + ) -> Option> { + loop { + unsafe { + if nd.is_null() || nd.offset() == self.head.offset() { + return None; + } + + if nd.version() > version { + *nd = self.get_prev(*nd, 0); + continue; + } + + let prev = self.get_prev(*nd, 0); + + if prev.is_null() || prev.offset() == self.head.offset() { + // prev is null or the head, we should try to see if we can return the current node. + if !nd.is_removed() { + // the current node is valid, we should return it. + let nk = nd.get_key(&self.arena); + + if contains_key(nk) { + let pointer = nd.get_value_pointer::(); + let ent = + VersionedEntryRef::from_node_with_pointer(version, *nd, self, pointer, Some(nk)); + return Some(ent); + } + } + + return None; + } + + // At this point, prev is not null and not the head. + // if the prev's version is greater than the query version or the prev's key is different from the current key, + // we should try to return the current node. + if prev.version() > version || nd.get_key(&self.arena).ne(prev.get_key(&self.arena)) { + let nk = nd.get_key(&self.arena); + + if !nd.is_removed() && contains_key(nk) { + let pointer = nd.get_value_pointer::(); + let ent = + VersionedEntryRef::from_node_with_pointer(version, *nd, self, pointer, Some(nk)); + return Some(ent); + } + } + + *nd = prev; + } + } + } + + unsafe fn move_to_next<'a>( + &'a self, + nd: &mut ::Pointer, + version: Version, + contains_key: impl Fn(&[u8]) -> bool, + ) -> Option> { + loop { + unsafe { + if nd.is_null() || nd.offset() == self.tail.offset() { + return None; + } + + if nd.version() > version { + *nd = self.get_next(*nd, 0); + continue; + } + + let nk = nd.get_key(&self.arena); + if contains_key(nk) { + let pointer = nd.get_value_pointer::(); + let ent = + VersionedEntryRef::from_node_with_pointer(version, *nd, self, pointer, Some(nk)); + return Some(ent); + } + + *nd = self.get_next(*nd, 0); + } + } + } + + unsafe fn move_to_next_maximum_version<'a>( + &'a self, + nd: &mut ::Pointer, + version: Version, + contains_key: impl Fn(&[u8]) -> bool, + ) -> Option> { + loop { + unsafe { + if nd.is_null() || nd.offset() == self.tail.offset() { + return None; + } + + // if the current version is larger than the query version, we should move next to find a smaller version. 
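+        // Worked example: with two versions of the same key, (k, v7) and
+        // (k, v3), stored newest-first, a query at `version == 5` skips the
+        // v7 node here and then considers the v3 node (the largest version
+        // <= 5) below, unless that node turns out to be a tombstone.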
+ let curr_version = nd.version(); + if curr_version > version { + *nd = self.get_next(*nd, 0); + continue; + } + + // if the entry with largest version is removed, we should skip this key. + if nd.is_removed() { + let mut next = self.get_next(*nd, 0); + let curr_key = nd.get_key(&self.arena); + loop { + if next.is_null() || next.offset() == self.tail.offset() { + return None; + } + + // if next's key is different from the current key, we should break the loop + if next.get_key(&self.arena) != curr_key { + *nd = next; + break; + } + + next = self.get_next(next, 0); + } + + continue; + } + + let nk = nd.get_key(&self.arena); + if contains_key(nk) { + let pointer = nd.get_value_pointer::(); + let ent = + VersionedEntryRef::from_node_with_pointer(version, *nd, self, pointer, Some(nk)); + return Some(ent); + } + + *nd = self.get_next(*nd, 0); + } + } + } + + /// finds the node near to key. + /// If less=true, it finds rightmost node such that node.key < key (if allow_equal=false) or + /// node.key <= key (if allow_equal=true). + /// If less=false, it finds leftmost node such that node.key > key (if allow_equal=false) or + /// node.key >= key (if allow_equal=true). + /// Returns the node found. The bool returned is true if the node has key equal to given key. + unsafe fn find_near( + &self, + version: Version, + key: &[u8], + less: bool, + allow_equal: bool, + ) -> (Option<::Pointer>, bool) { + let mut x = self.head; + let mut level = self.meta().height() as usize - 1; + + loop { + // Assume x.key < key. + let next = self.get_next(x, level); + let is_next_null = next.is_null(); + + if is_next_null || next.offset() == self.tail.offset() { + // x.key < key < END OF LIST + if level > 0 { + // Can descend further to iterate closer to the end. + level -= 1; + continue; + } + + // level == 0. Can't descend further. Let's return something that makes sense. + if !less { + return (None, false); + } + + // Try to return x. Make sure it is not a head node. + if x.offset() == self.head.offset() { + return (None, false); + } + + return (Some(x), false); + } + + // let next_node = next.as_ref(&self.arena); + let next_key = next.get_key(&self.arena); + let cmp = self + .cmp + .compare(key, next_key) + .then_with(|| next.version().cmp(&version)); + + match cmp { + cmp::Ordering::Greater => { + // x.key < next.key < key. We can continue to move right. + x = next; + continue; + } + cmp::Ordering::Equal => { + // x.key < key == next.key. + if allow_equal { + return (Some(next), true); + } + + if !less { + // We want >, so go to base level to grab the next bigger node. + return (Some(self.get_next(next, 0)), false); + } + + // We want <. If not base level, we should go closer in the next level. + if level > 0 { + level -= 1; + continue; + } + + // On base level, Return x. + return (Some(x), false); + } + // In other words, x.key < key < next. + cmp::Ordering::Less => { + if level > 0 { + level -= 1; + continue; + } + + // On base level. Need to return something. + if !less { + return (Some(next), false); + } + + // Try to return x. Make sure it is not a head node. + if x.offset() == self.head.offset() { + return (None, false); + } + + return (Some(x), false); + } + } + } + } + + /// Find the place to insert the key. + /// + /// ## Safety: + /// - All of splices in the inserter must be contains node ptrs are allocated by the current skip map. 
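+  // The `Inserter` acts as a splice cache: when it was filled by an earlier
+  // search and the list height has not grown, each cached per-level
+  // (prev, next) pair is revalidated below and reused, so the descent does
+  // not need to restart from the head unless a concurrent insert has
+  // invalidated the bracket.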
+ unsafe fn find_splice<'a, 'b: 'a>( + &'a self, + version: Version, + key: Either<&'a [u8], &'b [u8]>, + ins: &mut Inserter<'a, ::Pointer>, + returned_when_found: bool, + ) -> (bool, Option, Option<::Pointer>) { + let list_height = self.meta().height() as u32; + let mut level = 0; + + let mut prev = self.head; + if ins.height < list_height { + // Our cached height is less than the list height, which means there were + // inserts that increased the height of the list. Recompute the splice from + // scratch. + ins.height = list_height; + level = ins.height as usize; + } else { + // Our cached height is equal to the list height. + while level < list_height as usize { + let spl = &ins.spl[level]; + if self.get_next(spl.prev, level).offset() != spl.next.offset() { + level += 1; + // One or more nodes have been inserted between the splice at this + // level. + continue; + } + + if spl.prev.offset() != self.head.offset() + && !self.key_is_after_node(spl.prev, version, key) + { + // Key lies before splice. + level = list_height as usize; + break; + } + + if spl.next.offset() != self.tail.offset() + && !self.key_is_after_node(spl.next, version, key) + { + // Key lies after splice. + level = list_height as usize; + break; + } + + // The splice brackets the key! + prev = spl.prev; + break; + } + } + + let mut found = false; + let mut found_key = None; + for lvl in (0..level).rev() { + let mut fr = self.find_splice_for_level(version, key, lvl, prev); + if fr.splice.next.is_null() { + fr.splice.next = self.tail; + } + + found = fr.found; + if let Some(key) = fr.found_key { + found_key.get_or_insert(key); + } + if found && returned_when_found { + return (found, found_key, fr.curr); + } + ins.spl[lvl] = fr.splice; + } + + (found, found_key, None) + } + + /// Find the splice for the given level. + /// + /// ## Safety + /// - `level` is less than `MAX_HEIGHT`. + /// - `start` must be allocated by self's arena. + unsafe fn find_splice_for_level<'a, 'b: 'a>( + &'a self, + version: Version, + key: Either<&'a [u8], &'b [u8]>, + level: usize, + start: ::Pointer, + ) -> FindResult<::Pointer> { + let mut prev = start; + + loop { + // Assume prev.key < key. + let next = self.get_next(prev, level); + if next.offset() == self.tail.offset() { + // Tail node, so done. + return FindResult { + splice: Splice { prev, next }, + found: false, + found_key: None, + curr: None, + }; + } + + // offset is not zero, so we can safely dereference the next node ptr. + // let next_node = next.as_ref(&self.arena); + let next_key = next.get_key(&self.arena); + + let cmp = Key::<'a, '_, A>::compare(key, next_key, &self.cmp); + + let mut found_key = None; + + match cmp { + cmp::Ordering::Equal => { + found_key = Some(Pointer { + offset: next.key_offset(), + size: next.key_size(), + height: Some(next.height()), + }); + } + cmp::Ordering::Greater | cmp::Ordering::Less if found_key.is_none() => { + found_key = self.try_get_pointer(&next, next_key, key); + } + _ => {} + } + + match cmp.then_with(|| next.version().cmp(&version)) { + // We are done for this level, since prev.key < key < next.key. + cmp::Ordering::Less => { + return FindResult { + splice: Splice { prev, next }, + found: false, + found_key, + curr: None, + }; + } + // Keep moving right on this level. 
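+        // `Greater` means the probe is positioned after `next`: either its key
+        // is larger, or the keys are equal and `next` holds a newer version
+        // (same-key nodes are ordered newest-version-first).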
+ cmp::Ordering::Greater => prev = next, + cmp::Ordering::Equal => { + return FindResult { + splice: Splice { prev, next }, + found: true, + found_key, + curr: Some(next), + }; + } + } + } + } + + fn try_get_pointer<'a, 'b: 'a>( + &'a self, + next_node: &::Pointer, + next_key: &[u8], + key: Either<&'a [u8], &'b [u8]>, + ) -> Option { + match key { + Either::Left(key) | Either::Right(key) => match self.arena.options().compression_policy() { + CompressionPolicy::Fast => { + if next_key.starts_with(key) { + return Some(Pointer { + offset: next_node.key_offset(), + size: key.len() as u32, + height: Some(next_node.height()), + }); + } + } + #[cfg(feature = "experimental")] + CompressionPolicy::High => { + if let Some(idx) = memchr::memmem::find(next_key, key) { + return Some(Pointer { + offset: next_node.key_offset() + idx as u32, + size: key.len() as u32, + height: Some(next_node.height()), + }); + } + } + }, + } + + None + } + + /// ## Safety + /// - The caller must ensure that the node is allocated by the arena. + /// - The caller must ensure that the node is not null. + unsafe fn key_is_after_node<'a, 'b: 'a>( + &'a self, + nd: ::Pointer, + version: Version, + key: Either<&'a [u8], &'b [u8]>, + ) -> bool { + let nd_key = self + .arena + .get_bytes(nd.key_offset() as usize, nd.key_size() as usize); + match Key::<'a, '_, A>::compare(key, nd_key, &self.cmp) { + cmp::Ordering::Less => false, + cmp::Ordering::Greater => true, + cmp::Ordering::Equal => { + matches!(version.cmp(&nd.version()), cmp::Ordering::Less) + } + } + } + + #[inline] + fn validate(&self, height: Height, klen: usize, vlen: usize) -> Result<(), Error> { + if self.arena.read_only() { + return Err(Error::read_only()); + } + + let max_height = self.arena.max_height(); + if height < 1 || height > max_height { + return Err(Error::invalid_height(height, max_height)); + } + + let max_key_size = self.arena.max_key_size(); + if klen > max_key_size { + return Err(Error::invalid_key_size(klen, max_key_size)); + } + + let vlen = if vlen == <::ValuePointer as ValuePointer>::REMOVE as usize { + 0 + } else { + vlen + }; + + let max_value_size = self.arena.max_value_size(); + if vlen > max_value_size { + return Err(Error::invalid_value_size(vlen, max_value_size)); + } + + let entry_size = (vlen as u64 + klen as u64) + ::size(height.to_u8()) as u64; + if entry_size > u32::MAX as u64 { + return Err(Error::invalid_entry_size(entry_size, u32::MAX as u64)); + } + + Ok(()) + } + + #[allow(clippy::too_many_arguments)] + fn update<'a, 'b: 'a, E>( + &'a self, + version: Version, + height: u32, + key: Key<'a, 'b, A>, + value_builder: Option) -> Result>>, + success: Ordering, + failure: Ordering, + mut ins: Inserter<'a, ::Pointer>, + upsert: bool, + ) -> Result, Either> { + let is_remove = key.is_remove(); + + // Safety: a fresh new Inserter, so safe here + let found_key = unsafe { + let (found, found_key, ptr) = self.find_splice(version, key.as_slice(), &mut ins, true); + if found_key.is_some() { + key.on_fail(&self.arena); + } + + if found { + let node_ptr = ptr.expect("the NodePtr cannot be `None` when we found"); + let k = found_key.expect("the key cannot be `None` when we found"); + let old = VersionedEntryRef::from_node(version, node_ptr, self, None); + + if upsert { + return self.upsert( + version, + old, + node_ptr, + &if is_remove { + Key::remove_pointer(&self.arena, k) + } else { + Key::pointer(&self.arena, k) + }, + value_builder, + success, + failure, + ); + } + + return Ok(Either::Left(if old.is_removed() { + None + } else { + Some(old) + 
})); + } + + found_key + }; + + #[cfg(all(test, feature = "std"))] + if self.yield_now { + // Add delay to make it easier to test race between this thread + // and another thread that sees the intermediate state between + // finding the splice and using it. + std::thread::yield_now(); + } + + let k = match found_key { + None => key, + Some(k) => { + if is_remove { + Key::remove_pointer(&self.arena, k) + } else { + Key::pointer(&self.arena, k) + } + } + }; + + let (unlinked_node, mut deallocator) = self + .new_node(version, height, &k, value_builder) + .inspect_err(|_| { + k.on_fail(&self.arena); + })?; + + let is_removed = unsafe { unlinked_node.get_value(&self.arena).is_none() }; + + // We always insert from the base level and up. After you add a node in base + // level, we cannot create a node in the level above because it would have + // discovered the node in the base level. + let mut invalid_data_splice = false; + + for i in 0..(height as usize) { + let mut prev = ins.spl[i].prev; + let mut next = ins.spl[i].next; + + if prev.is_null() { + // New node increased the height of the skiplist, so assume that the + // new level has not yet been populated. + if !next.is_null() { + panic!("next is expected to be nil, since prev is nil"); + } + + prev = self.head; + next = self.tail; + } + + // +----------------+ +------------+ +----------------+ + // | prev | | nd | | next | + // | prevNextOffset |---->| | | | + // | |<----| prevOffset | | | + // | | | nextOffset |---->| | + // | | | |<----| nextPrevOffset | + // +----------------+ +------------+ +----------------+ + // + // 1. Initialize prevOffset and nextOffset to point to prev and next. + // 2. CAS prevNextOffset to repoint from next to nd. + // 3. CAS nextPrevOffset to repoint from prev to nd. + unsafe { + loop { + let prev_offset = prev.offset(); + let next_offset = next.offset(); + unlinked_node.write_tower(&self.arena, i, prev_offset, next_offset); + + // Check whether next has an updated link to prev. If it does not, + // that can mean one of two things: + // 1. The thread that added the next node hasn't yet had a chance + // to add the prev link (but will shortly). + // 2. Another thread has added a new node between prev and next. + // + // Safety: we already check next is not null + let next_prev_offset = next.prev_offset(&self.arena, i); + if next_prev_offset != prev_offset { + // Determine whether #1 or #2 is true by checking whether prev + // is still pointing to next. As long as the atomic operations + // have at least acquire/release semantics (no need for + // sequential consistency), this works, as it is equivalent to + // the "publication safety" pattern. + let prev_next_offset = prev.next_offset(&self.arena, i); + if prev_next_offset == next_offset { + // Ok, case #1 is true, so help the other thread along by + // updating the next node's prev link. + + let _ = next.cas_prev_offset( + &self.arena, + i, + next_prev_offset, + prev_offset, + Ordering::SeqCst, + Ordering::Acquire, + ); + } + } + + match prev.cas_next_offset( + &self.arena, + i, + next.offset(), + unlinked_node.offset(), + Ordering::SeqCst, + Ordering::Acquire, + ) { + Ok(_) => { + // Managed to insert nd between prev and next, so update the next + // node's prev link and go to the next level. + #[cfg(all(test, feature = "std"))] + if self.yield_now { + // Add delay to make it easier to test race between this thread + // and another thread that sees the intermediate state between + // setting next and setting prev. 
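
The diagram above describes the heart of the lock-free insert: prepare the new node's links, CAS the predecessor's next offset, then repair the successor's prev offset, helping a racing inserter along the way if needed. A condensed sketch of the same pattern on bare atomics, outside the arena (illustrative only; offset `0` stands in for null, and `Ordering` is the `core::sync::atomic::Ordering` already used in this file):

struct Link {
  next: core::sync::atomic::AtomicU32,
  prev: core::sync::atomic::AtomicU32,
}

fn link_after(prev: &Link, next: &Link, node: &Link, node_off: u32, prev_off: u32, next_off: u32) -> bool {
  // 1. Aim the new node at its neighbours; it is not yet reachable.
  node.next.store(next_off, Ordering::Relaxed);
  node.prev.store(prev_off, Ordering::Relaxed);

  // 2. Publish: repoint prev.next from `next` to the new node.
  if prev
    .next
    .compare_exchange(next_off, node_off, Ordering::SeqCst, Ordering::Acquire)
    .is_err()
  {
    // Lost a race; the caller recomputes the splice and retries.
    return false;
  }

  // 3. Best-effort repair of the back link. A failed CAS only means a
  //    racing thread already helped, which is fine.
  let _ = next
    .prev
    .compare_exchange(prev_off, node_off, Ordering::SeqCst, Ordering::Acquire);
  true
}

In the real list this runs once per tower level inside a retry loop, with the splice recomputed via `find_splice_for_level` whenever the publishing CAS fails.
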
+ std::thread::yield_now(); + } + + let _ = next.cas_prev_offset( + &self.arena, + i, + prev_offset, + unlinked_node.offset(), + Ordering::SeqCst, + Ordering::Acquire, + ); + + break; + } + + Err(_) => { + // let unlinked_node = nd; + + // CAS failed. We need to recompute prev and next. It is unlikely to + // be helpful to try to use a different level as we redo the search, + // because it is unlikely that lots of nodes are inserted between prev + // and next. + let fr = self.find_splice_for_level( + version, + Either::Left(unlinked_node.get_key(&self.arena)), + i, + prev, + ); + if fr.found { + if i != 0 { + panic!("how can another thread have inserted a node at a non-base level?"); + } + + let node_ptr = fr + .curr + .expect("the current should not be `None` when we found"); + let old = VersionedEntryRef::from_node(version, node_ptr, self, None); + + if upsert { + // let curr = nd.as_ref(&self.arena); + let (new_value_offset, new_value_size) = unlinked_node.value_pointer().load(); + deallocator.dealloc_node_and_key(&self.arena); + + return self + .upsert_value( + version, + old, + node_ptr, + &if is_removed { + Key::::remove_pointer(&self.arena, fr.found_key.unwrap()) + } else { + Key::::pointer(&self.arena, fr.found_key.unwrap()) + }, + new_value_offset, + new_value_size, + success, + failure, + ) + .map_err(Either::Right); + } + + deallocator.dealloc(&self.arena); + return Ok(Either::Left(if old.is_removed() { + None + } else { + Some(old) + })); + } + + if let Some(p) = fr.found_key { + // if key is already in the underlying allocator, we should deallocate the key + // in deallocator, and let the underlying allocator reclaim it, so that we do not store the same key twice. + if deallocator.key.is_some() { + unlinked_node.set_key_offset(p.offset); + unlinked_node + .set_key_size_and_height(encode_key_size_and_height(p.size, p.height.unwrap())); + deallocator.dealloc_key_by_ref(&self.arena) + } + } + + invalid_data_splice = true; + prev = fr.splice.prev; + next = fr.splice.next; + } + } + } + } + } + + // If we had to recompute the splice for a level, invalidate the entire + // cached splice. + if invalid_data_splice { + ins.height = 0; + } else { + // The splice was valid. We inserted a node between spl[i].prev and + // spl[i].next. Optimistically update spl[i].prev for use in a subsequent + // call to add. + for i in 0..(height as usize) { + ins.spl[i].prev = unlinked_node; + } + } + let meta = self.meta(); + meta.increase_len(); + meta.update_maximum_version(version); + meta.update_minimum_version(version); + + Ok(Either::Left(None)) + } + + #[allow(clippy::too_many_arguments)] + unsafe fn upsert_value<'a, 'b: 'a>( + &'a self, + version: Version, + old: VersionedEntryRef<'a, A, R, C>, + old_node: ::Pointer, + key: &Key<'a, 'b, A>, + value_offset: u32, + value_size: u32, + success: Ordering, + failure: Ordering, + ) -> Result, Error> { + match key { + Key::Occupied(_) | Key::Vacant { .. } | Key::Pointer { .. } => { + old_node.update_value(&self.arena, value_offset, value_size); + + Ok(Either::Left(if old.is_removed() { + None + } else { + Some(old) + })) + } + Key::Remove(_) | Key::RemoveVacant { .. } | Key::RemovePointer { .. 
} => { + match old_node.clear_value(&self.arena, success, failure) { + Ok(_) => Ok(Either::Left(None)), + Err((offset, len)) => Ok(Either::Right(Err( + VersionedEntryRef::from_node_with_pointer( + version, + old_node, + self, + ValuePointerType::new(offset, len), + None, + ), + ))), + } + } + } + } + + #[allow(clippy::too_many_arguments)] + unsafe fn upsert<'a, 'b: 'a, E>( + &'a self, + version: Version, + old: VersionedEntryRef<'a, A, R, C>, + old_node: ::Pointer, + key: &Key<'a, 'b, A>, + value_builder: Option) -> Result>>, + success: Ordering, + failure: Ordering, + ) -> Result, Either> { + match key { + Key::Occupied(_) | Key::Vacant { .. } | Key::Pointer { .. } => self + .arena + .allocate_and_update_value(&old_node, value_builder.unwrap()) + .map(|_| Either::Left(if old.is_removed() { None } else { Some(old) })), + Key::Remove(_) | Key::RemoveVacant { .. } | Key::RemovePointer { .. } => { + match old_node.clear_value(&self.arena, success, failure) { + Ok(_) => Ok(Either::Left(None)), + Err((offset, len)) => Ok(Either::Right(Err( + VersionedEntryRef::from_node_with_pointer( + version, + old_node, + self, + ValuePointerType::new(offset, len), + None, + ), + ))), + } + } + } + } +} + +pub(crate) enum Key<'a, 'b: 'a, A> { + Occupied(&'b [u8]), + Vacant { + buf: VacantBuffer<'a>, + offset: u32, + }, + Pointer { + arena: &'a A, + offset: u32, + len: u32, + }, + Remove(&'b [u8]), + #[allow(dead_code)] + RemoveVacant { + buf: VacantBuffer<'a>, + offset: u32, + }, + RemovePointer { + arena: &'a A, + offset: u32, + len: u32, + }, +} + +impl Key<'_, '_, A> { + #[inline] + pub(crate) fn on_fail(&self, arena: &A) { + match self { + Self::Occupied(_) | Self::Remove(_) | Self::Pointer { .. } | Self::RemovePointer { .. } => {} + Self::Vacant { buf, offset } | Self::RemoveVacant { buf, offset } => unsafe { + arena.dealloc(*offset, buf.capacity() as u32); + }, + } + } +} + +impl<'a, 'b: 'a, A: Allocator> Key<'a, 'b, A> { + #[inline] + fn as_slice(&self) -> Either<&'a [u8], &'b [u8]> { + match self { + Self::Occupied(key) | Self::Remove(key) => Either::Right(key), + Self::Vacant { buf, .. } | Self::RemoveVacant { buf, .. } => Either::Left(buf.as_slice()), + Self::Pointer { arena, offset, len } | Self::RemovePointer { arena, offset, len } => unsafe { + Either::Left(arena.get_bytes(*offset as usize, *len as usize)) + }, + } + } +} + +impl<'a, 'b: 'a, A> Key<'a, 'b, A> +where + A: Allocator, +{ + #[inline] + fn compare(this: Either<&'a [u8], &'b [u8]>, other: &'a [u8], cmp: &C) -> cmp::Ordering + where + C: Comparator, + { + match this { + Either::Left(key) | Either::Right(key) => cmp.compare(key, other), + } + } +} + +impl Key<'_, '_, A> { + /// Returns `true` if the key is a remove operation. + #[inline] + pub(crate) fn is_remove(&self) -> bool { + matches!( + self, + Self::Remove(_) | Self::RemoveVacant { .. } | Self::RemovePointer { .. 
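
`Key::on_fail` above exists because the `Vacant` and `RemoveVacant` variants have already reserved arena space for the key before the insert is attempted; when the insert ends up reusing an existing key or failing, that reservation must be handed back so the allocator can reclaim it. A sketch of the reserve-then-release discipline against a hypothetical arena trait (not this crate's `Allocator`):

// Hypothetical arena with explicit reserve/release, mirroring `on_fail`.
trait ToyArena {
  fn reserve(&self, len: u32) -> u32; // returns the offset of the reservation
  fn release(&self, offset: u32, len: u32);
}

fn insert_with_reserved_key<A: ToyArena>(arena: &A, key_len: u32, insert: impl FnOnce(u32) -> bool) {
  let offset = arena.reserve(key_len);
  if !insert(offset) {
    // The reservation was not consumed: give it back.
    arena.release(offset, key_len);
  }
}
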
} + ) + } +} + +impl<'a, A> Key<'a, '_, A> { + #[inline] + const fn pointer(arena: &'a A, pointer: Pointer) -> Self { + Self::Pointer { + arena, + offset: pointer.offset, + len: pointer.size, + } + } + + #[inline] + const fn remove_pointer(arena: &'a A, pointer: Pointer) -> Self { + Self::RemovePointer { + arena, + offset: pointer.offset, + len: pointer.size, + } + } +} diff --git a/src/dynamic/list/api.rs b/src/dynamic/list/api.rs new file mode 100644 index 0000000..8e2ce7d --- /dev/null +++ b/src/dynamic/list/api.rs @@ -0,0 +1,336 @@ +use core::{ + borrow::Borrow, + mem, + ops::{Bound, RangeBounds}, +}; + +use dbutils::{buffer::VacantBuffer, equivalentor::Comparator}; +use rarena_allocator::Allocator as _; + +use crate::{ + allocator::{Allocator, Meta, Node, NodePointer}, + error::Error, + random_height, + types::{Height, ValueBuilder}, + Header, Version, +}; + +use super::{iterator, EntryRef, RefCounter, SkipList, VersionedEntryRef}; + +mod update; + +type RemoveValueBuilder = + ValueBuilder) -> Result>>; + +impl SkipList +where + A: Allocator, + R: RefCounter, +{ + /// Sets remove on drop, only works on mmap with a file backend. + /// + /// Default is `false`. + /// + /// > **WARNING:** Once set to `true`, the backed file will be removed when the allocator is dropped, even though the file is opened in + /// > read-only mode. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub fn remove_on_drop(&self, val: bool) { + self.arena.remove_on_drop(val); + } + + /// Returns the header of the `SkipList`. + /// + /// By default, `SkipList` will allocate meta, head node, and tail node in the ARENA, + /// and the data section will be allocated after the tail node. + /// + /// This method will return the header of the `SkipList`. + #[inline] + pub const fn header(&self) -> Option<&Header> { + self.header.as_ref() + } + + /// Returns the version number of the [`SkipList`]. + #[inline] + pub fn version(&self) -> u16 { + self.arena.magic_version() + } + + /// Returns the magic version number of the [`SkipList`]. + /// + /// This value can be used to check the compatibility for application using [`SkipList`]. + #[inline] + pub fn magic_version(&self) -> u16 { + self.meta().magic_version() + } + + /// Returns the height of the highest tower within any of the nodes that + /// have ever been allocated as part of this skiplist. + #[inline] + pub fn height(&self) -> u8 { + self.meta().height() + } + + /// Returns the number of entries in the skipmap. + #[inline] + pub fn len(&self) -> usize { + self.meta().len() as usize + } + + /// Returns true if the skipmap is empty. + #[inline] + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + + /// Gets the number of pointers to this `SkipList` similar to [`Arc::strong_count`](std::sync::Arc::strong_count). + #[inline] + pub fn refs(&self) -> usize { + self.arena.refs() + } + + /// Returns the maximum version of all entries in the map. + #[inline] + pub fn maximum_version(&self) -> u64 { + self.meta().maximum_version() + } + + /// Returns the minimum version of all entries in the map. + #[inline] + pub fn minimum_version(&self) -> u64 { + self.meta().minimum_version() + } + + /// Returns `true` if the map may contain an entry whose version is less or equal to the given version. + #[inline] + pub fn may_contain_version(&self, version: Version) -> bool { + self.minimum_version() <= version + } + + /// Returns a random generated height. 
+ /// + /// This method is useful when you want to check if the underlying allocator can allocate a node. + #[inline] + pub fn random_height(&self) -> Height { + random_height(self.arena.options().max_height()) + } + + /// Returns the estimated size of a node with the given height and key/value sizes. + /// + /// **Note**: The returned size is only an estimate and may not be accurate, which means that the actual size is less than or equal to the returned size. + #[inline] + pub fn estimated_node_size(height: Height, key_size: usize, value_size: usize) -> usize { + let height: usize = height.into(); + 7 // max padding + + mem::size_of::() + + mem::size_of::<::Link>() * height + + key_size + + value_size + } + + /// Flushes outstanding memory map modifications to disk. + /// + /// When this method returns with a non-error result, + /// all outstanding changes to a file-backed memory map are guaranteed to be durably stored. + /// The file's metadata (including last modification timestamp) may not be updated. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn flush(&self) -> std::io::Result<()> { + self.arena.flush() + } + + /// Asynchronously flushes outstanding memory map modifications to disk. + /// + /// This method initiates flushing modified pages to durable storage, but it will not wait for + /// the operation to complete before returning. The file's metadata (including last + /// modification timestamp) may not be updated. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn flush_async(&self) -> std::io::Result<()> { + self.arena.flush_async() + } +} + +impl SkipList +where + A: Allocator, + C: Comparator, + RC: RefCounter, +{ + /// Returns `true` if the key exists in the map. + /// + /// This method will return `false` if the entry is marked as removed. If you want to check if the key exists even if it is marked as removed, + /// you can use [`contains_key_versioned`](SkipList::contains_key_versioned). + #[inline] + pub fn contains_key(&self, version: Version, key: &[u8]) -> bool { + self.get(version, key).is_some() + } + + /// Returns `true` if the key exists in the map, even if it is marked as removed. + #[inline] + pub fn contains_key_versioned(&self, version: Version, key: &[u8]) -> bool { + self.get_versioned(version, key).is_some() + } + + /// Returns the first entry in the map. + pub fn first(&self, version: Version) -> Option> { + self.iter(version).next() + } + + /// Returns the last entry in the map. + pub fn last(&self, version: Version) -> Option> { + self.iter(version).last() + } + + /// Returns the first entry in the map. + pub fn first_versioned(&self, version: Version) -> Option> { + self.iter_all_versions(version).next() + } + + /// Returns the last entry in the map. + pub fn last_versioned(&self, version: Version) -> Option> { + self.iter_all_versions(version).last() + } + + /// Returns the value associated with the given key, if it exists. + /// + /// This method will return `None` if the entry is marked as removed. If you want to get the entry even if it is marked as removed, + /// you can use [`get_versioned`](SkipList::get_versioned). + pub fn get(&self, version: Version, key: &[u8]) -> Option> { + unsafe { + let (n, eq) = self.find_near(version, key, false, true); // findLessOrEqual. 
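
Because the estimate above is an upper bound, summing it over an expected workload gives a safe arena-capacity figure. A sketch of that arithmetic with placeholder node and link sizes (the real values come from the allocator's `Node` and `Link` types):

// Upper-bound budget for `n` entries with bounded key/value sizes.
// `node_size` and `link_size` are placeholders for the allocator's types.
fn arena_budget(n: usize, node_size: usize, link_size: usize, max_height: usize, max_key: usize, max_value: usize) -> usize {
  // Per-node upper bound, mirroring the formula above:
  // max padding + node header + one link per level + key + value.
  let per_node = 7 + node_size + link_size * max_height + max_key + max_value;
  n * per_node
}
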
+
+      let node = n?;
+      let raw_node_key = node.get_key(&self.arena);
+      let (value, pointer) = node.get_value_with_pointer(&self.arena);
+      if eq {
+        return value.map(|_| {
+          EntryRef(VersionedEntryRef::from_node_with_pointer(
+            version,
+            node,
+            self,
+            pointer,
+            Some(raw_node_key),
+          ))
+        });
+      }
+
+      if !self.cmp.equivalent(key, raw_node_key) {
+        return None;
+      }
+
+      if node.version() > version {
+        return None;
+      }
+
+      value.map(|_| {
+        EntryRef(VersionedEntryRef::from_node_with_pointer(
+          version,
+          node,
+          self,
+          pointer,
+          Some(raw_node_key),
+        ))
+      })
+    }
+  }
+
+  /// Returns the value associated with the given key, if it exists.
+  ///
+  /// The difference between `get` and `get_versioned` is that `get_versioned` will return the value even if the entry is marked as removed.
+  pub fn get_versioned(
+    &self,
+    version: Version,
+    key: &[u8],
+  ) -> Option<VersionedEntryRef<'_, A, RC, C>> {
+    unsafe {
+      let (n, eq) = self.find_near(version, key, false, true); // findLessOrEqual.
+
+      let node = n?;
+      let raw_node_key = node.get_key(&self.arena);
+      let (_, pointer) = node.get_value_with_pointer(&self.arena);
+      if eq {
+        return Some(VersionedEntryRef::from_node_with_pointer(
+          version,
+          node,
+          self,
+          pointer,
+          Some(raw_node_key),
+        ));
+      }
+
+      if !self.cmp.equivalent(key, raw_node_key) {
+        return None;
+      }
+
+      if node.version() > version {
+        return None;
+      }
+
+      Some(VersionedEntryRef::from_node_with_pointer(
+        version,
+        node,
+        self,
+        pointer,
+        Some(raw_node_key),
+      ))
+    }
+  }
+
+  /// Returns an `EntryRef` pointing to the highest element whose key is below the given bound.
+  /// If no such element is found then `None` is returned.
+  pub fn upper_bound(
+    &self,
+    version: Version,
+    upper: Bound<&[u8]>,
+  ) -> Option<EntryRef<'_, A, RC, C>> {
+    self.iter(version).seek_upper_bound(upper)
+  }
+
+  /// Returns an `EntryRef` pointing to the lowest element whose key is above the given bound.
+  /// If no such element is found then `None` is returned.
+  pub fn lower_bound(
+    &self,
+    version: Version,
+    lower: Bound<&[u8]>,
+  ) -> Option<EntryRef<'_, A, RC, C>> {
+    self.iter(version).seek_lower_bound(lower)
+  }
+
+  /// Returns a new iterator that yields the latest version of every entry in the map whose version is less than or equal to the given version.
+  #[inline]
+  pub fn iter(&self, version: Version) -> iterator::Iter<'_, A, RC, C> {
+    iterator::Iter::new(version, self)
+  }
+
+  /// Returns a new iterator that yields all versions of every entry in the map whose version is less than or equal to the given version.
+  #[inline]
+  pub fn iter_all_versions(&self, version: Version) -> iterator::IterAll<'_, A, RC, C> {
+    iterator::IterAll::new(version, self, true)
+  }
+
+  /// Returns an iterator over the given range that yields the latest version of every entry in the range whose version is less than or equal to the given version.
+  #[inline]
+  pub fn range<Q, R>(&self, version: Version, range: R) -> iterator::Iter<'_, A, RC, C, Q, R>
+  where
+    Q: ?Sized + Borrow<[u8]>,
+    R: RangeBounds<Q>,
+  {
+    iterator::Iter::range(version, self, range)
+  }
+
+  /// Returns an iterator over the given range that yields all versions of every entry in the range whose version is less than or equal to the given version.
+ #[inline] + pub fn range_all_versions( + &self, + version: Version, + range: R, + ) -> iterator::IterAll<'_, A, RC, C, Q, R> + where + Q: ?Sized + Borrow<[u8]>, + R: RangeBounds, + { + iterator::IterAll::range(version, self, range, true) + } +} diff --git a/src/dynamic/list/api/update.rs b/src/dynamic/list/api/update.rs new file mode 100644 index 0000000..bb5f50c --- /dev/null +++ b/src/dynamic/list/api/update.rs @@ -0,0 +1,456 @@ +use super::{ + super::{Inserter, Key, RefCounter}, + Allocator, EntryRef, Error, Height, RemoveValueBuilder, SkipList, ValueBuilder, Version, +}; +use crate::KeyBuilder; +use among::Among; +use core::sync::atomic::Ordering; +use dbutils::{buffer::VacantBuffer, equivalentor::Comparator}; +use either::Either; + +impl SkipList +where + A: Allocator, + C: Comparator, + R: RefCounter, +{ + /// Upserts a new key-value pair if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert`](SkipList::get_or_insert), this method will update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + #[inline] + pub fn insert<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + value: &'b [u8], + ) -> Result>, Error> { + self.insert_at_height(version, self.random_height(), key, value) + } + + /// Upserts a new key-value pair at the given height if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_at_height`](SkipList::get_or_insert_at_height), this method will update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + pub fn insert_at_height<'a, 'b: 'a>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + value: &'b [u8], + ) -> Result>, Error> { + self.validate(height, key.len(), value.len())?; + + let val_len = value.len(); + let copy = |buf: &mut VacantBuffer<'_>| { + buf.put_slice(value)?; + Result::<_, dbutils::error::InsufficientBuffer>::Ok(val_len) + }; + + self + .update( + version, + height.into(), + Key::Occupied(key), + Some(ValueBuilder::new(val_len, copy)), + Ordering::Relaxed, + Ordering::Relaxed, + Inserter::default(), + true, + ) + .map(|old| { + old.expect_left("insert must get InsertOk").and_then(|old| { + if old.is_removed() { + None + } else { + Some(EntryRef(old)) + } + }) + }) + .map_err(Either::unwrap_right) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_at_height_with_value_builder`](SkipList::get_or_insert_at_height_with_value_builder), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. 
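
Taken together, the methods above upsert: a fresh key returns `None`, while an existing key at the same version has its value updated in place and the previous entry returned. A short sketch (construction elided; the `SkipList` generic parameter order follows the list type in this patch):

fn upsert_twice<A, R, C>(map: &SkipList<A, R, C>) -> Result<(), Error>
where
  A: Allocator,
  R: RefCounter,
  C: Comparator,
{
  // Fresh key: nothing to return.
  assert!(map.insert(0, b"key", b"v1")?.is_none());
  // Same key and version: the value is replaced and the old entry comes back.
  let old = map.insert(0, b"key", b"v2")?.expect("previous entry");
  assert_eq!(old.value(), b"v1");
  Ok(())
}
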
+ /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + #[allow(single_use_lifetimes)] + pub fn insert_at_height_with_value_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + value_builder: ValueBuilder) -> Result>, + ) -> Result>, Either> { + self + .validate(height, key.len(), value_builder.size()) + .map_err(Either::Right)?; + + self + .update( + version, + height.into(), + Key::Occupied(key), + Some(value_builder), + Ordering::Relaxed, + Ordering::Relaxed, + Inserter::default(), + true, + ) + .map(|old| { + old.expect_left("insert must get InsertOk").and_then(|old| { + if old.is_removed() { + None + } else { + Some(EntryRef(old)) + } + }) + }) + } + + /// Inserts a new key-value pair at height if it does not yet exist. + /// + /// Unlike [`insert_at_height`](SkipList::insert_at_height), this method will not update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. + pub fn get_or_insert_at_height<'a, 'b: 'a>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + value: &'b [u8], + ) -> Result>, Error> { + self.validate(height, key.len(), value.len())?; + + let val_len = value.len(); + let copy = |buf: &mut VacantBuffer<'_>| { + buf.put_slice(value)?; + Result::<_, dbutils::error::InsufficientBuffer>::Ok(val_len) + }; + + self + .update( + version, + height.into(), + Key::Occupied(key), + Some(ValueBuilder::new(val_len, copy)), + Ordering::Relaxed, + Ordering::Relaxed, + Inserter::default(), + false, + ) + .map(|old| { + old.expect_left("insert must get InsertOk").and_then(|old| { + if old.is_removed() { + None + } else { + Some(EntryRef(old)) + } + }) + }) + .map_err(Either::unwrap_right) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_at_height_with_value_builder`](SkipList::insert_at_height_with_value_builder), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. + #[allow(single_use_lifetimes)] + pub fn get_or_insert_at_height_with_value_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + value_builder: ValueBuilder) -> Result>, + ) -> Result>, Either> { + self + .validate(height, key.len(), value_builder.size()) + .map_err(Either::Right)?; + + self + .update( + version, + height.into(), + Key::Occupied(key), + Some(value_builder), + Ordering::Relaxed, + Ordering::Relaxed, + Inserter::default(), + false, + ) + .map(|old| { + old.expect_left("insert must get InsertOk").and_then(|old| { + if old.is_removed() { + None + } else { + Some(EntryRef(old)) + } + }) + }) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. 
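
The `*_with_value_builder` methods reserve the value's space first and let a closure fill it in place, which suits values that are encoded on the fly. A sketch of a builder writing a length-prefixed value, following the `put_slice` pattern used above (`ValueBuilder` and `VacantBuffer` as imported in this file; the `FnOnce` closure bound is an assumption):

// Encode `(len as u32 LE, payload)` directly into the reserved buffer.
fn framed_value(payload: &[u8]) -> ValueBuilder<impl FnOnce(&mut VacantBuffer<'_>) -> Result<usize, dbutils::error::InsufficientBuffer> + '_> {
  let size = 4 + payload.len();
  ValueBuilder::new(size, move |buf: &mut VacantBuffer<'_>| {
    buf.put_slice(&(payload.len() as u32).to_le_bytes())?;
    buf.put_slice(payload)?;
    Ok(size)
  })
}

The result is handed to `insert_at_height_with_value_builder`, which allocates `size` bytes in the arena and invokes the closure on the vacant slice.
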
+ /// Unlike [`get_or_insert_with_builders`](SkipList::get_or_insert_with_builders), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the key size and value size but you do not have the key and value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + pub fn insert_at_height_with_builders<'a, 'b: 'a, KE, VE>( + &'a self, + version: Version, + height: Height, + key_builder: KeyBuilder) -> Result>, + value_builder: ValueBuilder) -> Result>, + ) -> Result>, Among> { + self + .validate(height, key_builder.size(), value_builder.size()) + .map_err(Among::Right)?; + + let (key_size, key) = key_builder.into_components(); + let (offset, vk) = self + .arena + .fetch_vacant_key(key_size as u32, key) + .map_err(Among::from_either_to_left_right)?; + + self + .update( + version, + height.into(), + Key::Vacant { offset, buf: vk }, + Some(value_builder), + Ordering::Relaxed, + Ordering::Relaxed, + Inserter::default(), + true, + ) + .map(|old| { + old.expect_left("insert must get InsertOk").and_then(|old| { + if old.is_removed() { + None + } else { + Some(EntryRef(old)) + } + }) + }) + .map_err(Among::from_either_to_middle_right) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_at_height_with_builders`](SkipList::insert_at_height_with_builders), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + pub fn get_or_insert_at_height_with_builders<'a, KE, VE>( + &'a self, + version: Version, + height: Height, + key_builder: KeyBuilder) -> Result>, + value_builder: ValueBuilder) -> Result>, + ) -> Result>, Among> { + self + .validate(height, key_builder.size(), value_builder.size()) + .map_err(Among::Right)?; + + let (key_size, key) = key_builder.into_components(); + let (offset, vk) = self + .arena + .fetch_vacant_key(key_size as u32, key) + .map_err(Among::from_either_to_left_right)?; + + self + .update( + version, + height.into(), + Key::Vacant { offset, buf: vk }, + Some(value_builder), + Ordering::Relaxed, + Ordering::Relaxed, + Inserter::default(), + false, + ) + .map(|old| { + old.expect_left("insert must get InsertOk").and_then(|old| { + if old.is_removed() { + None + } else { + Some(EntryRef(old)) + } + }) + }) + .map_err(Among::from_either_to_middle_right) + } + + /// Removes the key-value pair if it exists. A CAS operation will be used to ensure the operation is atomic. + /// + /// Unlike [`get_or_remove_at_height`](SkipList::get_or_remove_at_height), this method will remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)`: + /// - if the remove operation is successful or the key is marked in remove status by other threads. 
+ /// - Returns `Ok(Either::Right(current))` if the key with the given version already exists + /// and the entry is not successfully removed because of an update on this entry happens in another thread. + #[allow(single_use_lifetimes)] + pub fn compare_remove_at_height<'a, 'b: 'a>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + success: Ordering, + failure: Ordering, + ) -> Result>, Error> { + self.validate(height, key.len(), 0)?; + + self + .update( + version, + height.into(), + Key::Remove(key), + Option::>::None, + success, + failure, + Inserter::default(), + true, + ) + .map(|res| match res { + Either::Left(_) => None, + Either::Right(res) => match res { + Ok(old) => { + if old.is_removed() { + None + } else { + Some(EntryRef(old)) + } + } + Err(current) => { + if current.is_removed() { + None + } else { + Some(EntryRef(current)) + } + } + }, + }) + .map_err(Either::unwrap_right) + } + + /// Gets or removes the key-value pair if it exists. + /// Unlike [`compare_remove_at_height`](SkipList::compare_remove_at_height), this method will not remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key does not exist. + /// - Returns `Ok(Some(old))` if the key with the given version already exists. + #[allow(single_use_lifetimes)] + pub fn get_or_remove_at_height<'a, 'b: 'a>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + ) -> Result>, Error> { + self.validate(height, key.len(), 0)?; + + self + .update( + version, + height.into(), + Key::Remove(key), + Option::>::None, + Ordering::Relaxed, + Ordering::Relaxed, + Inserter::default(), + false, + ) + .map(|res| match res { + Either::Left(old) => match old { + Some(old) => { + if old.is_removed() { + None + } else { + Some(EntryRef(old)) + } + } + None => None, + }, + _ => unreachable!("get_or_remove does not use CAS, so it must return `Either::Left`"), + }) + .map_err(Either::unwrap_right) + } + + /// Gets or removes the key-value pair if it exists. + /// Unlike [`compare_remove_at_height`](SkipList::compare_remove_at_height), this method will not remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key does not exist. + /// - Returns `Ok(Some(old))` if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_remove a key and you know the key size but you do not have the key + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. 
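
A remove here is a versioned tombstone, not a physical delete: reads at or after the tombstone's version see the key as absent, while reads pinned to an older version still see the prior value. A sketch (same generic assumptions as the sketch above):

fn remove_is_versioned<A, R, C>(map: &SkipList<A, R, C>) -> Result<(), Error>
where
  A: Allocator,
  R: RefCounter,
  C: Comparator,
{
  let _ = map.insert(1, b"k", b"v")?;
  // Write a tombstone at version 2.
  let _ = map.compare_remove_at_height(2, map.random_height(), b"k", Ordering::Relaxed, Ordering::Relaxed)?;
  assert!(map.get(2, b"k").is_none()); // hidden from version 2 onwards
  assert!(map.get(1, b"k").is_some()); // still visible at version 1
  Ok(())
}
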
+ pub fn get_or_remove_at_height_with_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + height: Height, + key_builder: KeyBuilder) -> Result>, + ) -> Result>, Either> { + self + .validate(height, key_builder.size(), 0) + .map_err(Either::Right)?; + + let (key_size, key) = key_builder.into_components(); + let (offset, vk) = self.arena.fetch_vacant_key(key_size as u32, key)?; + let key = Key::RemoveVacant { offset, buf: vk }; + self + .update( + version, + height.into(), + key, + Option::>::None, + Ordering::Relaxed, + Ordering::Relaxed, + Inserter::default(), + false, + ) + .map(|res| match res { + Either::Left(old) => match old { + Some(old) => { + if old.is_removed() { + None + } else { + Some(EntryRef(old)) + } + } + None => None, + }, + _ => unreachable!("get_or_remove does not use CAS, so it must return `Either::Left`"), + }) + .map_err(|e| match e { + Either::Right(e) => Either::Right(e), + _ => unreachable!(), + }) + } +} diff --git a/src/dynamic/list/entry.rs b/src/dynamic/list/entry.rs new file mode 100644 index 0000000..cb31b31 --- /dev/null +++ b/src/dynamic/list/entry.rs @@ -0,0 +1,298 @@ +use crate::{ + allocator::{Allocator, Node, NodePointer, WithVersion}, + dynamic::list::SkipList, + ref_counter::RefCounter, + types::internal::ValuePointer, + Version, +}; +use dbutils::equivalentor::Comparator; + +/// A versioned entry reference of the skipmap. +/// +/// Compared to the [`EntryRef`], this one's value can be `None` which means the entry is removed. +pub struct VersionedEntryRef<'a, A, R, C> +where + A: Allocator, + R: RefCounter, +{ + pub(super) list: &'a SkipList, + pub(super) key: &'a [u8], + pub(super) value: Option<&'a [u8]>, + pub(super) version: Version, + pub(super) query_version: Version, + pub(super) ptr: ::Pointer, +} + +impl core::fmt::Debug for VersionedEntryRef<'_, A, R, C> +where + A: Allocator, + R: RefCounter, +{ + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + f.debug_struct("VersionedEntryRef") + .field("key", &self.key()) + .field("value", &self.value()) + .field("version", &self.version) + .finish() + } +} + +impl Clone for VersionedEntryRef<'_, A, R, C> +where + A: Allocator, + R: RefCounter, +{ + fn clone(&self) -> Self { + *self + } +} + +impl Copy for VersionedEntryRef<'_, A, R, C> +where + A: Allocator, + R: RefCounter, +{ +} + +impl<'a, A, R, C> VersionedEntryRef<'a, A, R, C> +where + A: Allocator, + R: RefCounter, +{ + /// Returns the reference to the key + #[inline] + pub const fn key(&self) -> &'a [u8] { + self.key + } + + /// Returns the reference to the value, `None` means the entry is removed. + #[inline] + pub const fn value(&self) -> Option<&'a [u8]> { + self.value + } + + /// Returns if the entry is marked as removed + #[inline] + pub fn is_removed(&self) -> bool { + self.value().is_none() + } +} + +impl VersionedEntryRef<'_, A, R, C> +where + C: Comparator, + A: Allocator, + R: RefCounter, +{ + /// Returns the next entry in the map. + #[inline] + pub fn next(&self) -> Option { + self.next_in(true) + } + + /// Returns the previous entry in the map. 
+ #[inline] + pub fn prev(&self) -> Option { + self.prev_in(true) + } + + fn next_in(&self, all_versions: bool) -> Option { + let mut nd = self.ptr; + if all_versions { + unsafe { + nd = self.list.get_next(nd, 0); + self + .list + .move_to_next(&mut nd, self.query_version, |_| true) + } + } else { + unsafe { + nd = self.list.get_next(nd, 0); + self + .list + .move_to_next_maximum_version(&mut nd, self.query_version, |_| true) + } + } + } + + fn prev_in(&self, all_versions: bool) -> Option { + let mut nd = self.ptr; + if all_versions { + unsafe { + nd = self.list.get_prev(nd, 0); + self + .list + .move_to_prev(&mut nd, self.query_version, |_| true) + } + } else { + unsafe { + nd = self.list.get_prev(nd, 0); + self + .list + .move_to_prev_maximum_version(&mut nd, self.query_version, |_| true) + } + } + } +} + +impl VersionedEntryRef<'_, A, R, C> +where + A: Allocator, + A::Node: WithVersion, + R: RefCounter, +{ + /// Returns the version of the entry + #[inline] + pub const fn version(&self) -> Version { + self.version + } +} + +impl<'a, A, R, C> VersionedEntryRef<'a, A, R, C> +where + A: Allocator, + R: RefCounter, +{ + #[inline] + pub(crate) fn from_node( + query_version: Version, + node: ::Pointer, + list: &'a SkipList, + key: Option<&'a [u8]>, + ) -> Self { + unsafe { + let (value, _) = node.get_value_with_pointer(&list.arena); + + let key = match key { + Some(key) => key, + None => node.get_key(&list.arena), + }; + + Self { + list, + key, + value, + version: node.version(), + query_version, + ptr: node, + } + } + } + + #[inline] + pub(crate) fn from_node_with_pointer( + query_version: Version, + node: ::Pointer, + list: &'a SkipList, + pointer: ValuePointer, + key: Option<&'a [u8]>, + ) -> Self { + unsafe { + let value = + node.get_value_by_value_offset(&list.arena, pointer.value_offset, pointer.value_len); + + let key = match key { + Some(key) => key, + None => node.get_key(&list.arena), + }; + + Self { + list, + key, + value, + version: node.version(), + query_version, + ptr: node, + } + } + } +} + +/// An entry reference to the skipmap's entry. +/// +/// Compared to the [`VersionedEntryRef`], this one's value cannot be `None`. +pub struct EntryRef<'a, A, R, C>(pub(crate) VersionedEntryRef<'a, A, R, C>) +where + A: Allocator, + R: RefCounter; + +impl core::fmt::Debug for EntryRef<'_, A, R, C> +where + A: Allocator, + R: RefCounter, +{ + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + f.debug_struct("EntryRef") + .field("key", &self.key()) + .field("value", &self.value()) + .finish() + } +} + +impl Clone for EntryRef<'_, A, R, C> +where + A: Allocator, + R: RefCounter, +{ + #[inline] + fn clone(&self) -> Self { + *self + } +} + +impl Copy for EntryRef<'_, A, R, C> +where + A: Allocator, + R: RefCounter, +{ +} + +impl EntryRef<'_, A, R, C> +where + C: Comparator, + A: Allocator, + R: RefCounter, +{ + /// Returns the next entry in the map. + #[inline] + pub fn next(&self) -> Option { + self.0.next_in(false).map(Self) + } + + /// Returns the previous entry in the map. 
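
Because `next` and `prev` above step through raw node order, walking forward from an entry visits each key's versions (those at or below the query version) before moving to the next key. A sketch that drains the list from a starting entry (bounds mirror the impl blocks above):

fn drain_forward<A, R, C>(mut ent: VersionedEntryRef<'_, A, R, C>)
where
  A: Allocator,
  A::Node: WithVersion,
  R: RefCounter,
  C: Comparator,
{
  loop {
    // `value()` is `None` when this version is a tombstone.
    let _ = (ent.key(), ent.value(), ent.version());
    match ent.next() {
      Some(next) => ent = next,
      None => return,
    }
  }
}
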
+ #[inline] + pub fn prev(&self) -> Option { + self.0.prev_in(false).map(Self) + } +} + +impl EntryRef<'_, A, R, C> +where + A: Allocator, + A::Node: WithVersion, + R: RefCounter, +{ + /// Returns the version of the entry + #[inline] + pub const fn version(&self) -> Version { + self.0.version() + } +} + +impl<'a, A, R, C> EntryRef<'a, A, R, C> +where + A: Allocator, + R: RefCounter, +{ + /// Returns the reference to the key + #[inline] + pub fn key(&self) -> &'a [u8] { + self.0.key() + } + + /// Returns the reference to the value + #[inline] + pub fn value(&self) -> &'a [u8] { + self.0.value().expect("EntryRef's value cannot be `None`") + } +} diff --git a/src/base/iterator.rs b/src/dynamic/list/iterator.rs similarity index 100% rename from src/base/iterator.rs rename to src/dynamic/list/iterator.rs diff --git a/src/dynamic/list/iterator/all_versions.rs b/src/dynamic/list/iterator/all_versions.rs new file mode 100644 index 0000000..6b9955c --- /dev/null +++ b/src/dynamic/list/iterator/all_versions.rs @@ -0,0 +1,692 @@ +use super::super::{Allocator, NodePointer, RefCounter, SkipList, Version, VersionedEntryRef}; +use crate::allocator::Node; +use core::{ + borrow::Borrow, + ops::{Bound, RangeBounds}, +}; +use dbutils::equivalentor::{Comparator, RangeComparator}; + +/// An iterator over the skipmap (this iterator will yields all versions). The current state of the iterator can be cloned by +/// simply value copying the struct. +pub struct IterAll<'a, A, RC, C, Q = [u8], R = core::ops::RangeFull> +where + A: Allocator, + RC: RefCounter, + Q: ?Sized, +{ + pub(super) map: &'a SkipList, + pub(super) version: Version, + pub(super) range: Option, + pub(super) all_versions: bool, + pub(super) head: Option>, + pub(super) tail: Option>, + pub(super) _phantom: core::marker::PhantomData, +} + +impl Clone for IterAll<'_, A, RC, C, Q, R> +where + A: Allocator, + RC: RefCounter, + Q: ?Sized, + R: Clone, +{ + fn clone(&self) -> Self { + Self { + map: self.map, + head: self.head, + tail: self.tail, + version: self.version, + range: self.range.clone(), + all_versions: self.all_versions, + _phantom: core::marker::PhantomData, + } + } +} + +impl Copy for IterAll<'_, A, RC, C, Q, R> +where + A: Allocator, + RC: RefCounter, + Q: ?Sized, + R: Copy, +{ +} + +impl<'a, A, RC, C> IterAll<'a, A, RC, C> +where + A: Allocator, + RC: RefCounter, +{ + #[inline] + pub(crate) const fn new( + version: Version, + map: &'a SkipList, + all_versions: bool, + ) -> Self { + Self { + map, + head: None, + tail: None, + version, + range: None, + all_versions, + _phantom: core::marker::PhantomData, + } + } +} + +impl<'a, A, RC, C, Q, R> IterAll<'a, A, RC, C, Q, R> +where + A: Allocator, + RC: RefCounter, + Q: ?Sized, +{ + #[inline] + pub(crate) fn range( + version: Version, + map: &'a SkipList, + r: R, + all_versions: bool, + ) -> Self { + Self { + map, + head: None, + tail: None, + version, + range: Some(r), + all_versions, + _phantom: core::marker::PhantomData, + } + } +} + +impl<'a, A, RC, C, Q, R> IterAll<'a, A, RC, C, Q, R> +where + A: Allocator, + RC: RefCounter, + R: RangeBounds, + Q: ?Sized, +{ + /// Returns the start bound of the iterator. + #[inline] + pub fn start_bound(&self) -> Bound<&Q> { + self + .range + .as_ref() + .map(|r| r.start_bound()) + .unwrap_or(Bound::Unbounded) + } + + /// Returns the end bound of the iterator. 
+ #[inline] + pub fn end_bound(&self) -> Bound<&Q> { + self + .range + .as_ref() + .map(|r| r.end_bound()) + .unwrap_or(Bound::Unbounded) + } + + /// Returns the entry at the current head position of the iterator. + #[inline] + pub const fn head(&self) -> Option<&VersionedEntryRef<'a, A, RC, C>> { + self.head.as_ref() + } + + /// Returns the entry at the current tail position of the iterator. + #[inline] + pub const fn tail(&self) -> Option<&VersionedEntryRef<'a, A, RC, C>> { + self.tail.as_ref() + } +} + +impl<'a, A, RC, C, Q, R> IterAll<'a, A, RC, C, Q, R> +where + A: Allocator, + C: Comparator, + RC: RefCounter, + Q: ?Sized + Borrow<[u8]>, + R: RangeBounds, +{ + /// Advances to the next position. Returns the key and value if the + /// iterator is pointing at a valid entry, and `None` otherwise. + fn next_in(&mut self) -> Option> { + unsafe { + let mut next_head = match self.head.as_ref() { + Some(head) => self.map.get_next(head.ptr, 0), + None => self.map.get_next(self.map.head, 0), + }; + + let next_head = if self.all_versions { + self + .map + .move_to_next(&mut next_head, self.version, |nk| self.check_bounds(nk)) + } else { + self + .map + .move_to_next_maximum_version(&mut next_head, self.version, |nk| { + if let Some(ref head) = self.head { + head.key().ne(nk) && self.check_bounds(nk) + } else { + self.check_bounds(nk) + } + }) + }; + + match (&next_head, &self.tail) { + (Some(next), Some(t)) + if next + .key() + .cmp(t.key()) + .then_with(|| t.version.cmp(&next.version)) + .is_ge() => + { + self.head = next_head; + None + } + (Some(_), _) => { + self.head = next_head; + next_head + } + (None, _) => { + self.head = next_head; + None + } + } + } + } + + /// Advances to the prev position. Returns the key and value if the + /// iterator is pointing at a valid entry, and `None` otherwise. + fn prev(&mut self) -> Option> { + unsafe { + let mut next_tail = match self.tail.as_ref() { + Some(tail) => self.map.get_prev(tail.ptr, 0), + None => self.map.get_prev(self.map.tail, 0), + }; + + let next_tail = if self.all_versions { + self + .map + .move_to_prev(&mut next_tail, self.version, |nk| self.check_bounds(nk)) + } else { + self + .map + .move_to_prev_maximum_version(&mut next_tail, self.version, |nk| { + if let Some(ref tail) = self.tail { + tail.key().ne(nk) && self.check_bounds(nk) + } else { + self.check_bounds(nk) + } + }) + }; + + match (&self.head, &next_tail) { + // The prev key is smaller than the latest head key we observed with this iterator. 
+ (Some(h), Some(next)) + if h + .key() + .cmp(next.key()) + .then_with(|| h.version.cmp(&next.version)) + .is_ge() => + { + self.tail = next_tail; + None + } + (_, Some(_)) => { + self.tail = next_tail; + next_tail + } + (_, None) => { + self.tail = next_tail; + None + } + } + } + } + + fn range_next_in(&mut self) -> Option> { + unsafe { + let mut next_head = match self.head.as_ref() { + Some(head) => self.map.get_next(head.ptr, 0), + None => match self.range.as_ref().unwrap().start_bound() { + Bound::Included(key) => self + .map + .find_near(self.version, key.borrow(), false, true) + .0 + .unwrap_or(::Pointer::NULL), + Bound::Excluded(key) => self + .map + .find_near(Version::MIN, key.borrow(), false, false) + .0 + .unwrap_or(::Pointer::NULL), + Bound::Unbounded => self.map.get_next(self.map.head, 0), + }, + }; + + self.head = if self.all_versions { + self + .map + .move_to_next(&mut next_head, self.version, |nk| self.check_bounds(nk)) + } else { + self + .map + .move_to_next_maximum_version(&mut next_head, self.version, |nk| { + if let Some(ref head) = self.head { + head.key().ne(nk) && self.check_bounds(nk) + } else { + self.check_bounds(nk) + } + }) + }; + + if let Some(ref h) = self.head { + match &self.tail { + Some(t) => { + let bound = Bound::Excluded(t.key()); + if !below_upper_bound(&self.map.cmp, bound, h.key()) { + self.head = None; + self.tail = None; + } + } + None => { + let bound = self.range.as_ref().unwrap().end_bound().map(|b| b.borrow()); + if !below_upper_bound_compare(&self.map.cmp, bound, h.key()) { + self.head = None; + self.tail = None; + } + } + } + } + + self.head + } + } + + fn range_prev(&mut self) -> Option> { + unsafe { + let mut next_tail = match self.tail.as_ref() { + Some(tail) => self.map.get_prev(tail.ptr, 0), + None => match self.range.as_ref().unwrap().end_bound() { + Bound::Included(key) => self + .map + .find_near(Version::MIN, key.borrow(), true, true) + .0 + .unwrap_or(::Pointer::NULL), + Bound::Excluded(key) => self + .map + .find_near(self.version, key.borrow(), true, false) + .0 + .unwrap_or(::Pointer::NULL), + Bound::Unbounded => self.map.get_prev(self.map.tail, 0), + }, + }; + + self.tail = if self.all_versions { + self + .map + .move_to_prev(&mut next_tail, self.version, |nk| self.check_bounds(nk)) + } else { + self + .map + .move_to_prev_maximum_version(&mut next_tail, self.version, |nk| { + if let Some(ref tail) = self.tail { + tail.key().ne(nk) && self.check_bounds(nk) + } else { + self.check_bounds(nk) + } + }) + }; + + if let Some(ref t) = self.tail { + match &self.head { + Some(h) => { + let bound = Bound::Excluded(h.key()); + if !above_lower_bound(&self.map.cmp, bound, t.key()) { + self.head = None; + self.tail = None; + } + } + None => { + let bound = self + .range + .as_ref() + .unwrap() + .start_bound() + .map(|b| b.borrow()); + if !above_lower_bound_compare(&self.map.cmp, bound, t.key()) { + self.head = None; + self.tail = None; + } + } + } + } + + self.tail + } + } +} + +impl<'a, A, RC, C, Q, R> IterAll<'a, A, RC, C, Q, R> +where + A: Allocator, + C: Comparator, + RC: RefCounter, + Q: ?Sized + Borrow<[u8]>, + R: RangeBounds, +{ + /// Moves the iterator to the highest element whose key is below the given bound. + /// If no such element is found then `None` is returned. + /// + /// **Note:** This method will clear the current state of the iterator. 
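
The head/tail bookkeeping above lets one iterator be consumed from both ends without yielding the same entry twice: each cursor move is checked against the opposite cursor and stops when the two would cross. A usage sketch:

fn meet_in_the_middle<A, RC, C>(mut it: IterAll<'_, A, RC, C>)
where
  A: Allocator,
  RC: RefCounter,
  C: Comparator,
{
  // Alternate front and back until the cursors meet.
  while let Some(front) = it.next() {
    let _ = front.key();
    let Some(back) = it.next_back() else { break };
    let _ = back.key();
  }
}
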
+ pub fn seek_upper_bound( + &mut self, + upper: Bound<&QR>, + ) -> Option> + where + QR: ?Sized + Borrow<[u8]>, + { + self.head = None; + self.tail = None; + + match upper { + Bound::Included(key) => self.seek_le(key).inspect(|ent| { + self.head = Some(*ent); + }), + Bound::Excluded(key) => self.seek_lt(key).inspect(|ent| { + self.head = Some(*ent); + }), + Bound::Unbounded => self.last(), + } + } + + /// Moves the iterator to the lowest element whose key is above the given bound. + /// If no such element is found then `None` is returned. + /// + /// **Note:** This method will clear the current state of the iterator. + pub fn seek_lower_bound( + &mut self, + lower: Bound<&QR>, + ) -> Option> + where + QR: ?Sized + Borrow<[u8]>, + { + self.head = None; + self.tail = None; + + match lower { + Bound::Included(key) => self.seek_ge(key).inspect(|ent| { + self.head = Some(*ent); + }), + Bound::Excluded(key) => self.seek_gt(key).inspect(|ent| { + self.head = Some(*ent); + }), + Bound::Unbounded => self.first(), + } + } + + /// Moves the iterator to the first entry whose key is greater than or + /// equal to the given key. Returns the key and value if the iterator is + /// pointing at a valid entry, and `None` otherwise. + fn seek_ge(&self, key: &QR) -> Option> + where + QR: ?Sized + Borrow<[u8]>, + { + unsafe { + let (n, _) = self.map.find_near(self.version, key.borrow(), false, true); + + let mut n = n?; + if n.is_null() || n.offset() == self.map.tail.offset() { + return None; + } + + if self.all_versions { + self.map.move_to_next(&mut n, self.version, |nk| { + if let Some(ref range) = self.range { + self.map.cmp.compare_contains(range, nk) + } else { + true + } + }) + } else { + self + .map + .move_to_next_maximum_version(&mut n, self.version, |nk| { + if let Some(ref range) = self.range { + self.map.cmp.compare_contains(range, nk) + } else { + true + } + }) + } + } + } + + /// Moves the iterator to the first entry whose key is greater than + /// the given key. Returns the key and value if the iterator is + /// pointing at a valid entry, and `None` otherwise. + fn seek_gt(&self, key: &QR) -> Option> + where + QR: ?Sized + Borrow<[u8]>, + { + unsafe { + let (n, _) = self.map.find_near(Version::MIN, key.borrow(), false, false); + + let mut n = n?; + if n.is_null() || n.offset() == self.map.tail.offset() { + return None; + } + + if self.all_versions { + self.map.move_to_next(&mut n, self.version, |nk| { + if let Some(ref range) = self.range { + self.map.cmp.compare_contains(range, nk) + } else { + true + } + }) + } else { + self + .map + .move_to_next_maximum_version(&mut n, self.version, |nk| { + if let Some(ref range) = self.range { + self.map.cmp.compare_contains(range, nk) + } else { + true + } + }) + } + } + } + + /// Moves the iterator to the first entry whose key is less than or + /// equal to the given key. Returns the key and value if the iterator is + /// pointing at a valid entry, and `None` otherwise. + fn seek_le(&self, key: &QR) -> Option> + where + QR: ?Sized + Borrow<[u8]>, + { + unsafe { + let (n, _) = self.map.find_near(Version::MIN, key.borrow(), true, true); // find less or equal. 
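
`seek_lower_bound` and `seek_upper_bound` above do double duty: they return the entry at the seek position and also seed the iterator's head or tail, so subsequent `next()` calls continue from the seek point. A sketch (`Bound` as already imported in this module):

fn scan_from<A, RC, C>(mut it: IterAll<'_, A, RC, C>, start: &[u8])
where
  A: Allocator,
  RC: RefCounter,
  C: Comparator,
{
  if let Some(first) = it.seek_lower_bound(Bound::Included(start)) {
    let _ = first.value();
    // Every entry from here on has a key >= `start`.
    while let Some(ent) = it.next() {
      let _ = ent.key();
    }
  }
}
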
+ + let mut n = n?; + if n.is_null() || n.offset() == self.map.head.offset() { + return None; + } + + if self.all_versions { + self.map.move_to_prev(&mut n, self.version, |nk| { + if let Some(ref range) = self.range { + self.map.cmp.compare_contains(range, nk) + } else { + true + } + }) + } else { + self + .map + .move_to_prev_maximum_version(&mut n, self.version, |nk| { + if let Some(ref range) = self.range { + self.map.cmp.compare_contains(range, nk) + } else { + true + } + }) + } + } + } + + /// Moves the iterator to the last entry whose key is less than the given + /// key. Returns the key and value if the iterator is pointing at a valid entry, + /// and `None` otherwise. + fn seek_lt(&self, key: &QR) -> Option> + where + QR: ?Sized + Borrow<[u8]>, + { + unsafe { + let (n, _) = self.map.find_near(self.version, key.borrow(), true, false); // find less or equal. + + let mut n = n?; + if n.is_null() || n.offset() == self.map.head.offset() { + return None; + } + + if self.all_versions { + self.map.move_to_prev(&mut n, self.version, |nk| { + if let Some(ref range) = self.range { + self.map.cmp.compare_contains(range, nk) + } else { + true + } + }) + } else { + self + .map + .move_to_prev_maximum_version(&mut n, self.version, |nk| self.check_bounds(nk)) + } + } + } + + #[inline] + fn first(&mut self) -> Option> { + self.head = None; + self.tail = None; + self.next() + } + + #[inline] + fn last(&mut self) -> Option> { + self.tail = None; + self.head = None; + self.prev() + } + + #[inline] + fn check_bounds(&self, nk: &'a [u8]) -> bool { + if let Some(ref range) = self.range { + self.map.cmp.compare_contains(range, nk) + } else { + true + } + } +} + +impl<'a, A, RC, C, Q, R> Iterator for IterAll<'a, A, RC, C, Q, R> +where + A: Allocator, + C: Comparator, + RC: RefCounter, + Q: ?Sized + Borrow<[u8]>, + R: RangeBounds, +{ + type Item = VersionedEntryRef<'a, A, RC, C>; + + #[inline] + fn next(&mut self) -> Option { + if self.range.is_some() { + self.range_next_in() + } else { + self.next_in() + } + } + + #[inline] + fn last(mut self) -> Option + where + Self: Sized, + { + IterAll::last(&mut self) + } + + #[inline] + fn max(self) -> Option + where + Self: Sized, + Self::Item: Ord, + { + self.last() + } + + #[inline] + fn min(mut self) -> Option + where + Self: Sized, + Self::Item: Ord, + { + self.first() + } +} + +impl DoubleEndedIterator for IterAll<'_, A, RC, C, Q, R> +where + A: Allocator, + C: Comparator, + RC: RefCounter, + Q: ?Sized + Borrow<[u8]>, + R: RangeBounds, +{ + #[inline] + fn next_back(&mut self) -> Option { + if self.range.is_some() { + self.range_prev() + } else { + self.prev() + } + } +} + +/// Helper function to check if a value is above a lower bound +fn above_lower_bound_compare(cmp: &C, bound: Bound<&[u8]>, other: &[u8]) -> bool { + match bound { + Bound::Unbounded => true, + Bound::Included(key) => cmp.compare(key, other).is_le(), + Bound::Excluded(key) => cmp.compare(key, other).is_lt(), + } +} + +/// Helper function to check if a value is above a lower bound +fn above_lower_bound(cmp: &C, bound: Bound<&[u8]>, other: &[u8]) -> bool { + match bound { + Bound::Unbounded => true, + Bound::Included(key) => cmp.compare(key, other).is_le(), + Bound::Excluded(key) => cmp.compare(key, other).is_lt(), + } +} + +/// Helper function to check if a value is below an upper bound +fn below_upper_bound_compare(cmp: &C, bound: Bound<&[u8]>, other: &[u8]) -> bool { + match bound { + Bound::Unbounded => true, + Bound::Included(key) => cmp.compare(key, other).is_ge(), + Bound::Excluded(key) 
=> cmp.compare(key, other).is_gt(), + } +} + +/// Helper function to check if a value is below an upper bound +fn below_upper_bound(cmp: &C, bound: Bound<&[u8]>, other: &[u8]) -> bool { + match bound { + Bound::Unbounded => true, + Bound::Included(key) => cmp.compare(key, other).is_ge(), + Bound::Excluded(key) => cmp.compare(key, other).is_gt(), + } +} diff --git a/src/dynamic/list/iterator/iter.rs b/src/dynamic/list/iterator/iter.rs new file mode 100644 index 0000000..8e9e4da --- /dev/null +++ b/src/dynamic/list/iterator/iter.rs @@ -0,0 +1,162 @@ +use core::{ + borrow::Borrow, + ops::{Bound, RangeBounds}, +}; + +use dbutils::equivalentor::Comparator; + +use super::{ + super::{Allocator, EntryRef, RefCounter, SkipList, Version}, + IterAll, +}; + +/// An iterator over the skipmap. The current state of the iterator can be cloned by +/// simply value copying the struct. +pub struct Iter<'a, A, RC, C, Q = [u8], R = core::ops::RangeFull>(IterAll<'a, A, RC, C, Q, R>) +where + A: Allocator, + RC: RefCounter, + Q: ?Sized; + +impl Clone for Iter<'_, A, RC, C, Q, R> +where + A: Allocator, + RC: RefCounter, + Q: ?Sized, + R: Clone, +{ + fn clone(&self) -> Self { + Self(self.0.clone()) + } +} + +impl<'a, A, RC, C> Iter<'a, A, RC, C> +where + A: Allocator, + RC: RefCounter, +{ + #[inline] + pub(crate) const fn new(version: Version, map: &'a SkipList) -> Self { + Self(IterAll::new(version, map, false)) + } +} + +impl<'a, A, RC, C, Q, R> Iter<'a, A, RC, C, Q, R> +where + A: Allocator, + RC: RefCounter, + Q: ?Sized, +{ + #[inline] + pub(crate) fn range(version: Version, map: &'a SkipList, r: R) -> Self { + Self(IterAll::range(version, map, r, false)) + } +} + +impl Iter<'_, A, RC, C, Q, R> +where + A: Allocator, + RC: RefCounter, + Q: ?Sized, + R: RangeBounds, +{ + /// Returns the start bound of the iterator. + #[inline] + pub fn start_bound(&self) -> Bound<&Q> { + self.0.start_bound() + } + + /// Returns the end bound of the iterator. + #[inline] + pub fn end_bound(&self) -> Bound<&Q> { + self.0.end_bound() + } +} + +impl<'a, A, RC, C, Q, R> Iter<'a, A, RC, C, Q, R> +where + A: Allocator, + RC: RefCounter, + Q: ?Sized, + R: RangeBounds, +{ + /// Returns the entry at the current head position of the iterator. + #[inline] + pub fn head(&self) -> Option> { + self.0.head().map(|e| EntryRef::(*e)) + } + + /// Returns the entry at the current tail position of the iterator. + #[inline] + pub fn tail(&self) -> Option> { + self.0.tail().map(|e| EntryRef::(*e)) + } +} + +impl<'a, A, RC, C, Q, R> Iter<'a, A, RC, C, Q, R> +where + A: Allocator, + RC: RefCounter, + Q: ?Sized + Borrow<[u8]>, + C: Comparator, + R: RangeBounds, +{ + /// Moves the iterator to the highest element whose key is below the given bound. + /// If no such element is found then `None` is returned. + /// + /// **Note**: This method will clear the current state of the iterator. + pub fn seek_upper_bound(&mut self, upper: Bound<&QR>) -> Option> + where + QR: ?Sized + Borrow<[u8]>, + { + self.0.seek_upper_bound(upper).map(EntryRef) + } + + /// Moves the iterator to the lowest element whose key is above the given bound. + /// If no such element is found then `None` is returned. + /// + /// **Note**: This method will clear the current state of the iterator. 
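
The four helpers above reduce every range-membership test to single comparator calls. The combined check below is equivalent, specialised to plain byte ordering instead of a user-supplied `Comparator` (`Bound` as imported above):

fn in_range(lower: Bound<&[u8]>, upper: Bound<&[u8]>, key: &[u8]) -> bool {
  let above = match lower {
    Bound::Unbounded => true,
    Bound::Included(b) => b.cmp(key).is_le(), // b <= key
    Bound::Excluded(b) => b.cmp(key).is_lt(), // b <  key
  };
  let below = match upper {
    Bound::Unbounded => true,
    Bound::Included(b) => b.cmp(key).is_ge(), // b >= key
    Bound::Excluded(b) => b.cmp(key).is_gt(), // b >  key
  };
  above && below
}
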
+  pub fn seek_lower_bound<QR>(&mut self, lower: Bound<&QR>) -> Option<EntryRef<'a, A, RC, C>>
+  where
+    QR: ?Sized + Borrow<[u8]>,
+  {
+    self.0.seek_lower_bound(lower).map(EntryRef)
+  }
+}
+
+impl<'a, A, RC, C, Q, R> Iterator for Iter<'a, A, RC, C, Q, R>
+where
+  A: Allocator,
+  C: Comparator,
+  RC: RefCounter,
+  Q: ?Sized + Borrow<[u8]>,
+  R: RangeBounds<Q>,
+{
+  type Item = EntryRef<'a, A, RC, C>;
+
+  #[inline]
+  fn next(&mut self) -> Option<Self::Item> {
+    self.0.next().map(EntryRef)
+  }
+
+  #[inline]
+  fn last(self) -> Option<Self::Item>
+  where
+    Self: Sized,
+  {
+    self.0.last().map(EntryRef)
+  }
+}
+
+impl<A, RC, C, Q, R> DoubleEndedIterator for Iter<'_, A, RC, C, Q, R>
+where
+  A: Allocator,
+  RC: RefCounter,
+  Q: ?Sized + Borrow<[u8]>,
+  C: Comparator,
+  R: RangeBounds<Q>,
+{
+  fn next_back(&mut self) -> Option<Self::Item> {
+    self.0.next_back().map(EntryRef)
+  }
+}
diff --git a/src/dynamic/multiple_version.rs b/src/dynamic/multiple_version.rs
new file mode 100644
index 0000000..27fae9a
--- /dev/null
+++ b/src/dynamic/multiple_version.rs
@@ -0,0 +1,1460 @@
+use core::{
+  borrow::Borrow,
+  ops::{Bound, RangeBounds},
+  sync::atomic::Ordering,
+};
+
+use among::Among;
+use dbutils::{buffer::VacantBuffer, equivalentor::Comparator};
+use either::Either;
+
+use crate::{
+  allocator::{Allocator, Sealed, WithVersion},
+  error::Error,
+  ref_counter::RefCounter,
+  Arena, Header, Height, KeyBuilder, ValueBuilder, Version,
+};
+
+use super::list::{
+  iterator::{Iter, IterAll},
+  EntryRef, VersionedEntryRef,
+};
+
+/// Implementations for single-threaded environments.
+pub mod unsync {
+  use dbutils::equivalentor::{Ascend, Comparator};
+
+  pub use crate::unsync::{multiple_version::Allocator, RefCounter};
+
+  #[cfg(any(all(test, not(miri)), all_skl_tests, test_dynamic_unsync_versioned,))]
+  mod tests {
+    crate::__dynamic_multiple_version_map_tests!("dynamic_unsync_multiple_version_map": super::SkipMap);
+  }
+
+  type SkipList<C> = super::super::list::SkipList<Allocator, RefCounter, C>;
+
+  /// Iterator over the [`SkipMap`].
+  pub type Iter<'a, C> = super::super::iter::Iter<'a, Allocator, RefCounter, C>;
+
+  /// Iterator over a subset of the [`SkipMap`].
+  pub type Range<'a, C, Q, R> = super::super::iter::Iter<'a, Allocator, RefCounter, C, Q, R>;
+
+  /// The entry reference of the [`SkipMap`].
+  pub type Entry<'a, C> = super::super::entry::EntryRef<'a, Allocator, RefCounter, C>;
+
+  /// The versioned entry reference of the [`SkipMap`].
+  pub type VersionedEntry<'a, C> =
+    super::super::entry::VersionedEntryRef<'a, Allocator, RefCounter, C>;
+
+  /// Iterator over the [`SkipMap`], yielding all versions of the entries.
+  pub type IterAll<'a, C> = super::super::iter::IterAll<'a, Allocator, RefCounter, C>;
+
+  /// Iterator over a subset of the [`SkipMap`], yielding all versions of the entries.
+  pub type RangeAll<'a, C, Q, R> = super::super::iter::IterAll<'a, Allocator, RefCounter, C, Q, R>;
+
+  /// A fast, ARENA based `SkipMap` that supports multiple versions, forward and backward iteration.
+  ///
+  /// If you want to use it in a concurrent environment, use [`multiple_version::sync::SkipMap`](crate::dynamic::multiple_version::sync::SkipMap) instead.
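+  ///
+  /// ## Example
+  ///
+  /// A minimal sketch mirroring the `sync` doctests elsewhere in this module;
+  /// nothing beyond the documented API is assumed:
+  ///
+  /// ```rust
+  /// use skl::dynamic::{multiple_version::{unsync::SkipMap, Map}, Builder};
+  ///
+  /// let map = Builder::new().with_capacity(1024).alloc::<SkipMap>().unwrap();
+  ///
+  /// map.insert(0, b"hello", b"world").unwrap();
+  /// assert_eq!(map.get(0, b"hello").unwrap().value(), b"world");
+  /// ```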
+ #[repr(transparent)] + pub struct SkipMap(SkipList); + + impl Clone for SkipMap { + #[inline] + fn clone(&self) -> Self { + Self(self.0.clone()) + } + } + + impl From> for SkipMap { + #[inline] + fn from(list: SkipList) -> Self { + Self(list) + } + } + + impl crate::traits::List for SkipMap { + type Constructable = SkipList; + + #[inline] + fn as_ref(&self) -> &Self::Constructable { + &self.0 + } + + #[inline] + fn as_mut(&mut self) -> &mut Self::Constructable { + &mut self.0 + } + + #[inline] + fn meta( + &self, + ) -> &<::Allocator as super::Sealed>::Meta + { + self.0.meta() + } + } + + impl super::Map for SkipMap { + type Allocator = Allocator; + type Comparator = C; + type RefCounter = RefCounter; + } +} + +/// Implementations for concurrent environments. +pub mod sync { + use dbutils::equivalentor::{Ascend, Comparator}; + + pub use crate::sync::{multiple_version::Allocator, RefCounter}; + + #[cfg(any(all(test, not(miri)), all_skl_tests, test_dynamic_sync_versioned,))] + mod tests { + crate::__dynamic_multiple_version_map_tests!("dynamic_sync_multiple_version_map": super::SkipMap); + } + + #[cfg(any( + all(test, not(miri)), + all_skl_tests, + test_dynamic_sync_multiple_version_concurrent, + ))] + mod concurrent_tests { + crate::__dynamic_multiple_version_map_tests!(go "dynamic_sync_multiple_version_map": super::SkipMap => crate::tests::dynamic::TEST_OPTIONS); + } + + #[cfg(any( + all(test, not(miri)), + all_skl_tests, + test_dynamic_sync_multiple_version_concurrent_with_optimistic_freelist, + ))] + mod concurrent_tests_with_optimistic_freelist { + crate::__dynamic_multiple_version_map_tests!(go "dynamic_sync_multiple_version_map": super::SkipMap => crate::tests::dynamic::TEST_OPTIONS_WITH_OPTIMISTIC_FREELIST); + } + + #[cfg(any( + all(test, not(miri)), + all_skl_tests, + test_dynamic_sync_multiple_version_concurrent_with_pessimistic_freelist, + ))] + mod concurrent_tests_with_pessimistic_freelist { + crate::__dynamic_multiple_version_map_tests!(go "dynamic_sync_multiple_version_map": super::SkipMap => crate::tests::dynamic::TEST_OPTIONS_WITH_PESSIMISTIC_FREELIST); + } + + type SkipList = super::super::list::SkipList; + + /// Iterator over the [`SkipMap`]. + pub type Iter<'a, C> = super::super::iter::Iter<'a, Allocator, RefCounter, C>; + + /// Iterator over a subset of the [`SkipMap`]. + pub type Range<'a, C, Q, R> = super::super::iter::Iter<'a, Allocator, RefCounter, C, Q, R>; + + /// Iterator over the [`SkipMap`]. + pub type IterAll<'a, C> = super::super::iter::IterAll<'a, Allocator, RefCounter, C>; + + /// Iterator over a subset of the [`SkipMap`]. + pub type RangeAll<'a, C, Q, R> = super::super::iter::IterAll<'a, Allocator, RefCounter, C, Q, R>; + + /// The entry reference of the [`SkipMap`]. + pub type Entry<'a, C> = super::super::entry::EntryRef<'a, Allocator, RefCounter, C>; + + /// The versioned entry reference of the [`SkipMap`]. + pub type VersionedEntry<'a, C> = + super::super::entry::VersionedEntryRef<'a, Allocator, RefCounter, C>; + + /// A fast, lock-free, thread-safe ARENA based `SkipMap` that supports multiple versions, forward and backward iteration. + /// + /// If you want to use in non-concurrent environment, you can use [`multiple_version::unsync::SkipMap`](crate::dynamic::multiple_version::unsync::SkipMap). 
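+  ///
+  /// ## Example
+  ///
+  /// A small sketch; it assumes (as the `RefCounter` parameter suggests) that
+  /// cloned handles share the same underlying ARENA:
+  ///
+  /// ```rust
+  /// use skl::dynamic::{multiple_version::{sync::SkipMap, Map}, Builder};
+  ///
+  /// let map = Builder::new().with_capacity(1024).alloc::<SkipMap>().unwrap();
+  /// let reader = map.clone();
+  ///
+  /// map.insert(0, b"hello", b"world").unwrap();
+  /// assert_eq!(reader.get(0, b"hello").unwrap().value(), b"world");
+  /// ```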
+ #[repr(transparent)] + pub struct SkipMap(SkipList); + + impl Clone for SkipMap { + #[inline] + fn clone(&self) -> Self { + Self(self.0.clone()) + } + } + + impl From> for SkipMap { + #[inline] + fn from(list: SkipList) -> Self { + Self(list) + } + } + + impl crate::traits::List for SkipMap { + type Constructable = SkipList; + + #[inline] + fn as_ref(&self) -> &Self::Constructable { + &self.0 + } + + #[inline] + fn as_mut(&mut self) -> &mut Self::Constructable { + &mut self.0 + } + + #[inline] + fn meta( + &self, + ) -> &<::Allocator as super::Sealed>::Meta + { + self.0.meta() + } + } + + impl super::Map for SkipMap { + type Allocator = Allocator; + type RefCounter = RefCounter; + type Comparator = C; + } +} + +/// A fast, ARENA based `SkipMap` that supports multiple versions, forward and backward iteration. +/// +/// - For concurrent environment, use [`sync::SkipMap`]. +/// - For non-concurrent environment, use [`unsync::SkipMap`]. +pub trait Map +where + Self: Arena< + Constructable = super::list::SkipList, + >, + ::Node: WithVersion, +{ + /// The allocator type used to allocate nodes in the map. + type Allocator: Allocator; + /// The comparator type used to compare keys in the map. + type Comparator: Comparator; + /// The reference counter type of the map. + type RefCounter: RefCounter; + + /// Try creates from a `SkipMap` from an allocator directly. + /// + /// This method is not the ideal constructor, it is recommended to use [`Builder`](super::Builder) to create a `SkipMap`, + /// if you are not attempting to create multiple `SkipMap`s on the same allocator. + /// + /// Besides, the only way to reconstruct `SkipMap`s created by this method is to use the [`open_from_allocator(header: Header, arena: Self::Allocator, cmp: Self::Comparator)`](Map::open_from_allocator) method, + /// users must save the header to reconstruct the `SkipMap` by their own. + /// The header can be obtained by calling [`header`](Map::header) method. + #[inline] + fn create_from_allocator(arena: Self::Allocator, cmp: Self::Comparator) -> Result { + Self::try_create_from_allocator(arena, cmp) + } + + /// Try open a `SkipMap` from an allocator directly. + /// + /// See documentation for [`create_from_allocator`](Map::create_from_allocator) for more information. + /// + /// ## Safety + /// - The `header` must be the same as the one obtained from `SkipMap` when it was created. + /// - The `cmp` must be the same as the one used to create the `SkipMap`. + #[inline] + unsafe fn open_from_allocator( + header: Header, + arena: Self::Allocator, + cmp: Self::Comparator, + ) -> Result { + Self::try_open_from_allocator(arena, cmp, header) + } + + /// Returns the header of the `SkipMap`, which can be used to reconstruct the `SkipMap`. + /// + /// By default, `SkipMap` will allocate meta, head node, and tail node in the ARENA, + /// and the data section will be allocated after the tail node. + /// + /// This method will return the header in the ARENA. + #[inline] + fn header(&self) -> Option<&Header> { + self.as_ref().header() + } + + /// Returns the height of the highest tower within any of the nodes that + /// have ever been allocated as part of this skiplist. + #[inline] + fn height(&self) -> u8 { + self.as_ref().height() + } + + /// Returns the number of entries in the skipmap. + #[inline] + fn len(&self) -> usize { + self.as_ref().len() + } + + /// Returns true if the skipmap is empty. + #[inline] + fn is_empty(&self) -> bool { + self.len() == 0 + } + + /// Returns the maximum version of all entries in the map. 
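+  ///
+  /// ## Example
+  ///
+  /// A small sketch following the other doctests in this trait, where versions
+  /// are plain integers:
+  ///
+  /// ```rust
+  /// use skl::dynamic::{multiple_version::{sync::SkipMap, Map}, Builder};
+  ///
+  /// let map = Builder::new().with_capacity(1024).alloc::<SkipMap>().unwrap();
+  ///
+  /// map.insert(1, b"a", b"v1").unwrap();
+  /// map.insert(3, b"a", b"v2").unwrap();
+  ///
+  /// assert_eq!(map.maximum_version(), 3);
+  /// ```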
+ #[inline] + fn maximum_version(&self) -> Version { + self.as_ref().maximum_version() + } + + /// Returns the minimum version of all entries in the map. + #[inline] + fn minimum_version(&self) -> Version { + self.as_ref().minimum_version() + } + + /// Returns a random generated height. + /// + /// This method is useful when you want to check if the underlying allocator can allocate a node. + /// + /// ## Example + /// + /// ```rust + /// use skl::{dynamic::{multiple_version::{sync::SkipMap, Map}, Builder, Ascend}, Arena}; + /// + /// let map = Builder::new().with_capacity(1024).alloc::().unwrap(); + /// let height = map.random_height(); + /// + /// let needed = SkipMap::::estimated_node_size(height, b"k1".len(), b"k2".len()); + /// ``` + #[inline] + fn random_height(&self) -> Height { + self.as_ref().random_height() + } + + /// Returns `true` if the map may contains an entry whose version is less than or equal to the given version. + #[inline] + fn may_contain_version(&self, v: Version) -> bool { + self.as_ref().may_contain_version(v) + } + + /// Returns `true` if the key exists in the map. + /// + /// This method will return `false` if the entry is marked as removed. If you want to check if the key exists even if it is marked as removed, + /// you can use [`contains_key_versioned`](Map::contains_key_versioned). + /// + /// ## Example + /// + /// ```rust + /// use skl::dynamic::{multiple_version::{sync::SkipMap, Map}, Builder}; + /// + /// let map = Builder::new().with_capacity(1024).alloc::().unwrap(); + /// + /// map.insert(0, b"hello", b"world").unwrap(); + /// + /// map.get_or_remove(1, b"hello").unwrap(); + /// + /// assert!(!map.contains_key(1, b"hello")); + /// assert!(map.contains_key_versioned(1, b"hello")); + /// ``` + #[inline] + fn contains_key(&self, version: Version, key: &Q) -> bool + where + Q: ?Sized + Borrow<[u8]>, + { + if !self.may_contain_version(version) { + return false; + } + + self.as_ref().get(version, key.borrow()).is_some() + } + + /// Returns `true` if the key exists in the map, even if it is marked as removed. + /// + /// ## Example + /// + /// ```rust + /// use skl::dynamic::{multiple_version::{sync::SkipMap, Map}, Builder}; + /// + /// let map = Builder::new().with_capacity(1024).alloc::().unwrap(); + /// + /// map.insert(0, b"hello", b"world").unwrap(); + /// + /// map.get_or_remove(1, b"hello").unwrap(); + /// + /// assert!(!map.contains_key(1, b"hello")); + /// assert!(map.contains_key_versioned(1, b"hello")); + /// ``` + #[inline] + fn contains_key_versioned(&self, version: Version, key: &Q) -> bool + where + Q: ?Sized + Borrow<[u8]>, + { + if !self.may_contain_version(version) { + return false; + } + + self.as_ref().contains_key_versioned(version, key.borrow()) + } + + /// Returns the first entry in the map. + #[inline] + fn first( + &self, + version: Version, + ) -> Option> { + if !self.may_contain_version(version) { + return None; + } + + self.as_ref().first(version) + } + + /// Returns the last entry in the map. + #[inline] + fn last( + &self, + version: Version, + ) -> Option> { + if !self.may_contain_version(version) { + return None; + } + + self.as_ref().last(version) + } + + /// Returns the first entry in the map. The returned entry may not be in valid state. (i.e. the entry is removed) + /// + /// The difference between [`first`](Map::first) and `first_versioned` is that `first_versioned` will return the value even if + /// the entry is removed or not in a valid state. 
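+  ///
+  /// ## Example
+  ///
+  /// A minimal sketch in the style of the `get_versioned` example below:
+  ///
+  /// ```rust
+  /// use skl::dynamic::{multiple_version::{sync::SkipMap, Map}, Builder};
+  ///
+  /// let map = Builder::new().with_capacity(1024).alloc::<SkipMap>().unwrap();
+  ///
+  /// map.insert(0, b"a", b"v").unwrap();
+  /// map.get_or_remove(1, b"a").unwrap();
+  ///
+  /// // `first` skips the tombstone, while `first_versioned` still yields it.
+  /// assert!(map.first(1).is_none());
+  /// let ent = map.first_versioned(1).unwrap();
+  /// assert!(ent.value().is_none());
+  /// ```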
+ #[inline] + fn first_versioned( + &self, + version: Version, + ) -> Option> { + if !self.may_contain_version(version) { + return None; + } + + self.as_ref().first_versioned(version) + } + + /// Returns the last entry in the map. The returned entry may not be in valid state. (i.e. the entry is removed) + /// + /// The difference between [`last`](Map::last) and `last_versioned` is that `last_versioned` will return the value even if + /// the entry is removed or not in a valid state. + #[inline] + fn last_versioned( + &self, + version: Version, + ) -> Option> { + if !self.may_contain_version(version) { + return None; + } + + self.as_ref().last_versioned(version) + } + + /// Returns the value associated with the given key, if it exists. + /// + /// This method will return `None` if the entry is marked as removed. If you want to get the entry even if it is marked as removed, + /// you can use [`get_versioned`](Map::get_versioned). + /// + /// ## Example + /// + /// ```rust + /// use skl::dynamic::{multiple_version::{sync::SkipMap, Map}, Builder}; + /// + /// let map = Builder::new().with_capacity(1024).alloc::().unwrap(); + /// + /// map.insert(0, b"hello", b"world").unwrap(); + /// + /// let ent = map.get(0, b"hello").unwrap(); + /// assert_eq!(ent.value(), b"world"); + /// + /// map.get_or_remove(1, b"hello").unwrap(); + /// + /// assert!(map.get(1, b"hello").is_none()); + /// ``` + #[inline] + fn get( + &self, + version: Version, + key: &Q, + ) -> Option> + where + Q: ?Sized + Borrow<[u8]>, + { + if !self.may_contain_version(version) { + return None; + } + + self.as_ref().get(version, key.borrow()) + } + + /// Returns the value associated with the given key, if it exists. + /// + /// The difference between `get` and `get_versioned` is that `get_versioned` will return the value even if the entry is removed. + /// + /// ## Example + /// + /// ```rust + /// use skl::dynamic::{multiple_version::{sync::SkipMap, Map}, Builder}; + /// + /// let map = Builder::new().with_capacity(1024).alloc::().unwrap(); + /// + /// map.insert(0, b"hello", b"world").unwrap(); + /// + /// map.get_or_remove(1, b"hello").unwrap(); + /// + /// assert!(map.get(1, b"hello").is_none()); + /// + /// let ent = map.get_versioned(1, b"hello").unwrap(); + /// // value is None because the entry is marked as removed. + /// assert!(ent.value().is_none()); + /// ``` + #[inline] + fn get_versioned( + &self, + version: Version, + key: &Q, + ) -> Option> + where + Q: ?Sized + Borrow<[u8]>, + { + if !self.may_contain_version(version) { + return None; + } + + self.as_ref().get_versioned(version, key.borrow()) + } + + /// Returns an `EntryRef` pointing to the highest element whose key is below the given bound. + /// If no such element is found then `None` is returned. + #[inline] + fn upper_bound( + &self, + version: Version, + upper: Bound<&Q>, + ) -> Option> + where + Q: ?Sized + Borrow<[u8]>, + { + if !self.may_contain_version(version) { + return None; + } + + self.as_ref().iter(version).seek_upper_bound(upper) + } + + /// Returns an `EntryRef` pointing to the lowest element whose key is above the given bound. + /// If no such element is found then `None` is returned. + #[inline] + fn lower_bound( + &self, + version: Version, + lower: Bound<&Q>, + ) -> Option> + where + Q: ?Sized + Borrow<[u8]>, + { + if !self.may_contain_version(version) { + return None; + } + + self.as_ref().iter(version).seek_lower_bound(lower) + } + + /// Returns an `VersionedEntryRef` pointing to the highest element whose key is below the given bound. 
+ /// If no such element is found then `None` is returned. + /// + /// The difference between [`upper_bound`](Map::upper_bound) and `upper_bound_versioned` is that `upper_bound_versioned` will return the value even if the entry is removed. + #[inline] + fn upper_bound_versioned( + &self, + version: Version, + upper: Bound<&Q>, + ) -> Option> + where + Q: ?Sized + Borrow<[u8]>, + { + if !self.may_contain_version(version) { + return None; + } + + self + .as_ref() + .iter_all_versions(version) + .seek_upper_bound(upper) + } + + /// Returns an `VersionedEntryRef` pointing to the lowest element whose key is above the given bound. + /// If no such element is found then `None` is returned. + /// + /// The difference between [`lower_bound`](Map::lower_bound) and `lower_bound_versioned` is that `lower_bound_versioned` will return the value even if the entry is removed. + #[inline] + fn lower_bound_versioned( + &self, + version: Version, + lower: Bound<&Q>, + ) -> Option> + where + Q: ?Sized + Borrow<[u8]>, + { + if !self.may_contain_version(version) { + return None; + } + + self + .as_ref() + .iter_all_versions(version) + .seek_lower_bound(lower) + } + + /// Returns a new iterator, this iterator will yield the latest version of all entries in the map less or equal to the given version. + #[inline] + fn iter( + &self, + version: Version, + ) -> Iter<'_, Self::Allocator, Self::RefCounter, Self::Comparator> { + self.as_ref().iter(version) + } + + /// Returns a new iterator, this iterator will yield all versions for all entries in the map less or equal to the given version. + #[inline] + fn iter_all_versions( + &self, + version: Version, + ) -> IterAll<'_, Self::Allocator, Self::RefCounter, Self::Comparator> { + self.as_ref().iter_all_versions(version) + } + + /// Returns a iterator that within the range, this iterator will yield the latest version of all entries in the range less or equal to the given version. + #[inline] + fn range( + &self, + version: Version, + range: R, + ) -> Iter<'_, Self::Allocator, Self::RefCounter, Self::Comparator, Q, R> + where + Q: ?Sized + Borrow<[u8]>, + R: RangeBounds, + { + self.as_ref().range(version, range) + } + + /// Returns a iterator that within the range, this iterator will yield all versions for all entries in the range less or equal to the given version. + #[inline] + fn range_all_versions( + &self, + version: Version, + range: R, + ) -> IterAll<'_, Self::Allocator, Self::RefCounter, Self::Comparator, Q, R> + where + Q: ?Sized + Borrow<[u8]>, + R: RangeBounds, + { + self.as_ref().range_all_versions(version, range) + } + + /// Upserts a new key-value pair if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert`](Map::get_or_insert), this method will update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + #[inline] + fn insert<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + value: &'b [u8], + ) -> Result>, Error> { + self.as_ref().insert(version, key, value) + } + + /// Upserts a new key-value pair at the given height if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_at_height`](Map::get_or_insert_at_height), this method will update the value if the key with the given version already exists. 
+ /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + /// + /// ## Example + /// + /// ```rust + /// use skl::{dynamic::{multiple_version::{sync::SkipMap, Map}, Builder}, Arena}; + /// + /// let map = Builder::new().with_capacity(1024).alloc::().unwrap(); + /// + /// let height = map.random_height(); + /// map.insert_at_height(0, height, b"hello", b"world").unwrap(); + /// ``` + #[inline] + fn insert_at_height<'a, 'b: 'a>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + value: &'b [u8], + ) -> Result>, Error> { + self.as_ref().insert_at_height(version, height, key, value) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_with_value_builder`](Map::get_or_insert_with_value_builder), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + /// + /// ## Example + /// + /// ```rust + /// use skl::{dynamic::{multiple_version::{sync::SkipMap, Map}, Builder}, ValueBuilder}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = Builder::new().with_capacity(1024).alloc::().unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size, |val: &mut skl::VacantBuffer<'_>| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(encoded_size) + /// }); + /// + /// l.insert_with_value_builder::(1, b"alice".as_slice(), vb) + /// .unwrap(); + /// ``` + #[inline] + fn insert_with_value_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + key: &'b [u8], + value_builder: ValueBuilder) -> Result>, + ) -> Result< + Option>, + Either, + > { + self.as_ref().insert_at_height_with_value_builder( + version, + self.random_height(), + key, + value_builder, + ) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_with_value_builder`](Map::get_or_insert_with_value_builder), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ /// + /// ## Example + /// + /// ```rust + /// use skl::{dynamic::{multiple_version::{sync::SkipMap, Map}, Builder}, ValueBuilder, Arena}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = Builder::new().with_capacity(1024).alloc::().unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size, |val: &mut skl::VacantBuffer<'_>| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(encoded_size) + /// }); + /// + /// let height = l.random_height(); + /// l.insert_at_height_with_value_builder::(1, height, b"alice".as_slice(), vb) + /// .unwrap(); + /// ``` + #[inline] + fn insert_at_height_with_value_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + value_builder: ValueBuilder) -> Result>, + ) -> Result< + Option>, + Either, + > { + self + .as_ref() + .insert_at_height_with_value_builder(version, height, key, value_builder) + } + + /// Inserts a new key-value pair if it does not yet exist. + /// + /// Unlike [`insert`](Map::insert), this method will not update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. + #[inline] + fn get_or_insert<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + value: &'b [u8], + ) -> Result>, Error> { + self + .as_ref() + .get_or_insert_at_height(version, self.random_height(), key, value) + } + + /// Inserts a new key-value pair at height if it does not yet exist. + /// + /// Unlike [`insert_at_height`](Map::insert_at_height), this method will not update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. + #[inline] + fn get_or_insert_at_height<'a, 'b: 'a>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + value: &'b [u8], + ) -> Result>, Error> { + self + .as_ref() + .get_or_insert_at_height(version, height, key, value) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_with_value_builder`](Map::insert_with_value_builder), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. 
+ /// + /// ## Example + /// + /// ```rust + /// use skl::{dynamic::{multiple_version::{sync::SkipMap, Map}, Builder}, ValueBuilder}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = Builder::new().with_capacity(1024).alloc::().unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size, |val: &mut skl::VacantBuffer<'_>| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(encoded_size) + /// }); + /// l.get_or_insert_with_value_builder::(1, b"alice".as_slice(), vb) + /// .unwrap(); + /// ``` + #[inline] + fn get_or_insert_with_value_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + key: &'b [u8], + value_builder: ValueBuilder) -> Result>, + ) -> Result< + Option>, + Either, + > { + self.get_or_insert_at_height_with_value_builder( + version, + self.random_height(), + key, + value_builder, + ) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_at_height_with_value_builder`](Map::insert_at_height_with_value_builder), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. + /// + /// ## Example + /// + /// ```rust + /// use skl::{dynamic::{multiple_version::{sync::SkipMap, Map}, Builder}, ValueBuilder, Arena}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = Builder::new().with_capacity(1024).alloc::().unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size, |val: &mut skl::VacantBuffer<'_>| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(encoded_size) + /// }); + /// + /// let height = l.random_height(); + /// l.get_or_insert_at_height_with_value_builder::(1, height, b"alice".as_slice(), vb) + /// .unwrap(); + /// ``` + #[inline] + fn get_or_insert_at_height_with_value_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + value_builder: ValueBuilder) -> Result>, + ) -> Result< + Option>, + Either, + > { + self + .as_ref() + .get_or_insert_at_height_with_value_builder(version, height, key, value_builder) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_with_builders`](Map::get_or_insert_with_builders), this method will update the value if the key with the given version already exists. 
+ /// + /// This method is useful when you want to insert a key and you know the key size and value size but you do not have the key and value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + /// + /// ## Example + /// + /// ```rust + /// use skl::{dynamic::{multiple_version::{sync::SkipMap, Map}, Builder}, KeyBuilder, ValueBuilder}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = Builder::new().with_capacity(1024).alloc::().unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |key: &mut skl::VacantBuffer<'_>| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(5) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size, |val: &mut skl::VacantBuffer<'_>| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(encoded_size) + /// }); + /// + /// l.insert_with_builders::<(), ()>(1, kb, vb) + /// .unwrap(); + /// ``` + #[inline] + fn insert_with_builders<'a, KE, VE>( + &'a self, + version: Version, + key_builder: KeyBuilder) -> Result>, + value_builder: ValueBuilder) -> Result>, + ) -> Result< + Option>, + Among, + > { + self.as_ref().insert_at_height_with_builders( + version, + self.random_height(), + key_builder, + value_builder, + ) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// + /// Unlike [`get_or_insert_with_builders`](Map::get_or_insert_with_builders), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the key size and value size but you do not have the key and value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ /// + /// ## Example + /// + /// ```rust + /// use skl::{dynamic::{multiple_version::{sync::SkipMap, Map}, Builder}, KeyBuilder, ValueBuilder, Arena}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = Builder::new().with_capacity(1024).alloc::().unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |key: &mut skl::VacantBuffer<'_>| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(5) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size, |val: &mut skl::VacantBuffer<'_>| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(encoded_size) + /// }); + /// + /// let height = l.random_height(); + /// l.insert_at_height_with_builders::<(), ()>(1, height, kb, vb) + /// .unwrap(); + /// ``` + #[inline] + fn insert_at_height_with_builders<'a, KE, VE>( + &'a self, + version: Version, + height: Height, + key_builder: KeyBuilder) -> Result>, + value_builder: ValueBuilder) -> Result>, + ) -> Result< + Option>, + Among, + > { + self + .as_ref() + .insert_at_height_with_builders(version, height, key_builder, value_builder) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_with_builders`](Map::insert_with_builders), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// ## Example + /// + /// ```rust + /// use skl::{dynamic::{multiple_version::{sync::SkipMap, Map}, Builder}, KeyBuilder, ValueBuilder}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = Builder::new().with_capacity(1024).alloc::().unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |key: &mut skl::VacantBuffer<'_>| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(5) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size, |val: &mut skl::VacantBuffer<'_>| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(encoded_size) + /// }); + /// + /// l.get_or_insert_with_builders::<(), ()>(1, kb, vb) + /// .unwrap(); + /// ``` + #[inline] + fn get_or_insert_with_builders<'a, KE, VE>( + &'a self, + version: Version, + key_builder: KeyBuilder) -> Result>, + value_builder: ValueBuilder) -> Result>, + ) -> Result< + Option>, + Among, + > { + self.as_ref().get_or_insert_at_height_with_builders( + version, + self.random_height(), + key_builder, + value_builder, + ) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_at_height_with_builders`](Map::insert_at_height_with_builders), this method will not update the value if the key with the given version already exists. 
+ /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// ## Example + /// + /// ```rust + /// use skl::{dynamic::{multiple_version::{sync::SkipMap, Map}, Builder}, KeyBuilder, ValueBuilder, Arena}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = Builder::new().with_capacity(1024).alloc::().unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |key: &mut skl::VacantBuffer<'_>| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(5) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size, |val: &mut skl::VacantBuffer<'_>| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(encoded_size) + /// }); + /// + /// let height = l.random_height(); + /// l.get_or_insert_at_height_with_builders::<(), ()>(1, height, kb, vb) + /// .unwrap(); + /// ``` + #[inline] + fn get_or_insert_at_height_with_builders<'a, KE, VE>( + &'a self, + version: Version, + height: Height, + key_builder: KeyBuilder) -> Result>, + value_builder: ValueBuilder) -> Result>, + ) -> Result< + Option>, + Among, + > { + self + .as_ref() + .get_or_insert_at_height_with_builders(version, height, key_builder, value_builder) + } + + /// Removes the key-value pair if it exists. A CAS operation will be used to ensure the operation is atomic. + /// + /// Unlike [`get_or_remove`](Map::get_or_remove), this method will remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)`: + /// - if the remove operation is successful or the key is marked in remove status by other threads. + /// - Returns `Ok(Either::Right(current))` if the key with the given version already exists + /// and the entry is not successfully removed because of an update on this entry happens in another thread. + #[inline] + fn compare_remove<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + success: Ordering, + failure: Ordering, + ) -> Result>, Error> { + self.compare_remove_at_height(version, self.random_height(), key, success, failure) + } + + /// Removes the key-value pair if it exists. A CAS operation will be used to ensure the operation is atomic. + /// + /// Unlike [`get_or_remove_at_height`](Map::get_or_remove_at_height), this method will remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)`: + /// - if the remove operation is successful or the key is marked in remove status by other threads. + /// - Returns `Ok(Either::Right(current))` if the key with the given version already exists + /// and the entry is not successfully removed because of an update on this entry happens in another thread. + #[inline] + fn compare_remove_at_height<'a, 'b: 'a>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + success: Ordering, + failure: Ordering, + ) -> Result>, Error> { + self + .as_ref() + .compare_remove_at_height(version, height, key, success, failure) + } + + /// Gets or removes the key-value pair if it exists. 
+ /// + /// Unlike [`compare_remove`](Map::compare_remove), this method will not remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key does not exist. + /// - Returns `Ok(Some(old))` if the key with the given version already exists. + #[inline] + fn get_or_remove<'a, 'b: 'a>( + &'a self, + version: Version, + key: &'b [u8], + ) -> Result>, Error> { + self.get_or_remove_at_height(version, self.random_height(), key) + } + + /// Gets or removes the key-value pair if it exists. + /// + /// Unlike [`compare_remove_at_height`](Map::compare_remove_at_height), this method will not remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key does not exist. + /// - Returns `Ok(Some(old))` if the key with the given version already exists. + /// + /// ## Example + /// + /// ```rust + /// use skl::{dynamic::{multiple_version::{sync::SkipMap, Map}, Builder}, Arena}; + /// + /// let map = Builder::new().with_capacity(1024).alloc::().unwrap(); + /// + /// map.insert(0, b"hello", b"world").unwrap(); + /// + /// let height = map.random_height(); + /// map.get_or_remove_at_height(0, height, b"hello").unwrap(); + /// ``` + #[inline] + fn get_or_remove_at_height<'a, 'b: 'a>( + &'a self, + version: Version, + height: Height, + key: &'b [u8], + ) -> Result>, Error> { + self.as_ref().get_or_remove_at_height(version, height, key) + } + + /// Gets or removes the key-value pair if it exists. + /// + /// - Returns `Ok(None)` if the key does not exist. + /// - Returns `Ok(Some(old))` if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_remove a key and you know the key size but you do not have the key + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// ## Example + /// + /// ```rust + /// use skl::{dynamic::{multiple_version::{sync::SkipMap, Map}, Builder}, KeyBuilder}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = Builder::new().with_capacity(1024).alloc::().unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |key: &mut skl::VacantBuffer<'_>| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(5) + /// }); + /// l.get_or_remove_with_builder::(1, kb) + /// .unwrap(); + /// ``` + #[inline] + fn get_or_remove_with_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + key_builder: KeyBuilder) -> Result>, + ) -> Result< + Option>, + Either, + > { + self + .as_ref() + .get_or_remove_at_height_with_builder(version, self.random_height(), key_builder) + } + + /// Gets or removes the key-value pair if it exists. + /// + /// - Returns `Ok(None)` if the key does not exist. + /// - Returns `Ok(Some(old))` if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_remove a key and you know the key size but you do not have the key + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. 
+ /// + /// ## Example + /// + /// ```rust + /// use skl::{dynamic::{multiple_version::{sync::SkipMap, Map}, Builder}, KeyBuilder, Arena}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = Builder::new().with_capacity(1024).alloc::().unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |key: &mut skl::VacantBuffer<'_>| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(5) + /// }); + /// let height = l.random_height(); + /// l.get_or_remove_at_height_with_builder::(1, height, kb) + /// .unwrap(); + /// ``` + #[inline] + fn get_or_remove_at_height_with_builder<'a, 'b: 'a, E>( + &'a self, + version: Version, + height: Height, + key_builder: KeyBuilder) -> Result>, + ) -> Result< + Option>, + Either, + > { + self + .as_ref() + .get_or_remove_at_height_with_builder(version, height, key_builder) + } +} diff --git a/src/dynamic/unique.rs b/src/dynamic/unique.rs new file mode 100644 index 0000000..5643d1d --- /dev/null +++ b/src/dynamic/unique.rs @@ -0,0 +1,1179 @@ +use core::{ + borrow::Borrow, + ops::{Bound, RangeBounds}, + sync::atomic::Ordering, +}; + +use among::Among; +use dbutils::{buffer::VacantBuffer, equivalentor::Comparator}; +use either::Either; + +use crate::{ + allocator::{Allocator, Sealed, WithoutVersion}, + error::Error, + ref_counter::RefCounter, + Arena, Header, Height, KeyBuilder, ValueBuilder, MIN_VERSION, +}; + +use super::list::{iterator::Iter, EntryRef}; + +/// Implementations for single-threaded environments. +pub mod unsync { + use dbutils::equivalentor::{Ascend, Comparator}; + + pub use crate::unsync::{map::Allocator, RefCounter}; + + #[cfg(any(all(test, not(miri)), all_skl_tests, test_dynamic_unsync_map,))] + mod tests { + crate::__dynamic_map_tests!("dynamic_unsync_map": super::SkipMap); + } + + type SkipList = super::super::list::SkipList; + + /// Iterator over the [`SkipMap`]. + pub type Iter<'a, C> = super::super::iter::Iter<'a, Allocator, RefCounter, C>; + + /// Iterator over a subset of the [`SkipMap`]. + pub type Range<'a, C, Q, R> = super::super::iter::Iter<'a, Allocator, RefCounter, C, Q, R>; + + /// The entry reference of the [`SkipMap`]. + pub type Entry<'a, C> = super::super::entry::EntryRef<'a, Allocator, RefCounter, C>; + + /// A fast, ARENA based `SkipMap` that supports forward and backward iteration. + /// + /// If you want to use in concurrent environment, you can use [`unique::sync::SkipMap`](crate::dynamic::unique::sync::SkipMap). + #[repr(transparent)] + pub struct SkipMap(SkipList); + + impl Clone for SkipMap { + #[inline] + fn clone(&self) -> Self { + Self(self.0.clone()) + } + } + + impl From> for SkipMap { + #[inline] + fn from(list: SkipList) -> Self { + Self(list) + } + } + + impl crate::traits::List for SkipMap { + type Constructable = SkipList; + + #[inline] + fn as_ref(&self) -> &Self::Constructable { + &self.0 + } + + #[inline] + fn as_mut(&mut self) -> &mut Self::Constructable { + &mut self.0 + } + + #[inline] + fn meta( + &self, + ) -> &<::Allocator as super::Sealed>::Meta + { + self.0.meta() + } + } + + impl super::Map for SkipMap { + type Allocator = Allocator; + type Comparator = C; + type RefCounter = RefCounter; + } +} + +/// Implementations for concurrent environments. 
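+///
+/// ## Example
+///
+/// A minimal sketch mirroring the doctests on the methods below; it assumes
+/// (as the `RefCounter` parameter suggests) that cloned handles share the same
+/// underlying ARENA:
+///
+/// ```rust
+/// use skl::dynamic::{unique::{sync::SkipMap, Map}, Builder};
+///
+/// let map = Builder::new().with_capacity(1024).alloc::<SkipMap>().unwrap();
+/// let reader = map.clone();
+///
+/// map.insert(b"hello", b"world").unwrap();
+/// assert_eq!(reader.get(b"hello").unwrap().value(), b"world");
+/// ```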
+pub mod sync { + use dbutils::equivalentor::{Ascend, Comparator}; + + pub use crate::sync::{map::Allocator, RefCounter}; + + #[cfg(any(all(test, not(miri)), all_skl_tests, test_dynamic_sync_map,))] + mod tests { + crate::__dynamic_map_tests!("sync_map": super::SkipMap); + } + + #[cfg(any(all(test, not(miri)), all_skl_tests, test_dynamic_sync_map_concurrent,))] + mod concurrent_tests { + crate::__dynamic_map_tests!(go "sync_map": super::SkipMap => crate::tests::dynamic::TEST_OPTIONS); + } + + #[cfg(any( + all(test, not(miri)), + all_skl_tests, + test_dynamic_sync_map_concurrent_with_optimistic_freelist, + ))] + mod concurrent_tests_with_optimistic_freelist { + crate::__dynamic_map_tests!(go "sync_map": super::SkipMap => crate::tests::dynamic::TEST_OPTIONS_WITH_OPTIMISTIC_FREELIST); + } + + #[cfg(any( + all(test, not(miri)), + all_skl_tests, + test_dynamic_sync_map_concurrent_with_pessimistic_freelist, + ))] + mod concurrent_tests_with_pessimistic_freelist { + crate::__dynamic_map_tests!(go "sync_map": super::SkipMap => crate::tests::dynamic::TEST_OPTIONS_WITH_PESSIMISTIC_FREELIST); + } + + type SkipList = super::super::list::SkipList; + + /// Iterator over the [`SkipMap`]. + pub type Iter<'a, C> = super::super::iter::Iter<'a, Allocator, RefCounter, C>; + + /// Iterator over a subset of the [`SkipMap`]. + pub type Range<'a, C, Q, R> = super::super::iter::Iter<'a, Allocator, RefCounter, C, Q, R>; + + /// The entry reference of the [`SkipMap`]. + pub type Entry<'a, C> = super::super::entry::EntryRef<'a, Allocator, RefCounter, C>; + + /// A fast, lock-free, thread-safe ARENA based `SkipMap` that supports forward and backward iteration. + /// + /// If you want to use in non-concurrent environment, you can use [`unique::unsync::SkipMap`](crate::dynamic::unique::unsync::SkipMap). + #[repr(transparent)] + pub struct SkipMap(SkipList); + + impl Clone for SkipMap { + #[inline] + fn clone(&self) -> Self { + Self(self.0.clone()) + } + } + + impl From> for SkipMap { + #[inline] + fn from(list: SkipList) -> Self { + Self(list) + } + } + + impl crate::traits::List for SkipMap { + type Constructable = SkipList; + + #[inline] + fn as_ref(&self) -> &Self::Constructable { + &self.0 + } + + #[inline] + fn as_mut(&mut self) -> &mut Self::Constructable { + &mut self.0 + } + + #[inline] + fn meta( + &self, + ) -> &<::Allocator as super::Sealed>::Meta + { + self.0.meta() + } + } + + impl super::Map for SkipMap { + type Allocator = Allocator; + type Comparator = C; + type RefCounter = RefCounter; + } +} + +/// A fast, ARENA based `SkipMap` that supports forward and backward iteration. +/// +/// - For concurrent environment, use [`sync::SkipMap`]. +/// - For non-concurrent environment, use [`unsync::SkipMap`]. +pub trait Map +where + Self: Arena< + Constructable = super::list::SkipList, + >, + ::Node: WithoutVersion, +{ + /// The allocator used to allocate nodes in the `SkipMap`. + type Allocator: Allocator; + /// The comparator used to compare keys in the `SkipMap`. + type Comparator: Comparator; + /// The reference counter of the `SkipMap`. + type RefCounter: RefCounter; + + /// Try creates from a `SkipMap` from an allocator directly. + /// + /// This method is not the ideal constructor, it is recommended to use [`Builder`](super::Builder) to create a `SkipMap`, + /// if you are not attempting to create multiple `SkipMap`s on the same allocator. 
+ /// + /// Besides, the only way to reconstruct `SkipMap`s created by this method is to use the [`open_from_allocator(header: Header, arena: Self::Allocator, cmp: Self::Comparator)`](Map::open_from_allocator) method, + /// users must save the header to reconstruct the `SkipMap` by their own. + /// The header can be obtained by calling [`header`](Map::header) method. + #[inline] + fn create_from_allocator(arena: Self::Allocator, cmp: Self::Comparator) -> Result { + Self::try_create_from_allocator(arena, cmp) + } + + /// Try open a `SkipMap` from an allocator directly. + /// + /// See documentation for [`create_from_allocator`](Map::create_from_allocator) for more information. + /// + /// ## Safety + /// - The `header` must be the same as the one obtained from `SkipMap` when it was created. + /// - The `cmp` must be the same as the one used to create the `SkipMap`. + #[inline] + unsafe fn open_from_allocator( + header: Header, + arena: Self::Allocator, + cmp: Self::Comparator, + ) -> Result { + Self::try_open_from_allocator(arena, cmp, header) + } + + /// Returns the header of the `SkipMap`, which can be used to reconstruct the `SkipMap`. + /// + /// By default, `SkipMap` will allocate meta, head node, and tail node in the ARENA, + /// and the data section will be allocated after the tail node. + /// + /// This method will return the header in the ARENA. + #[inline] + fn header(&self) -> Option<&Header> { + self.as_ref().header() + } + + /// Returns the height of the highest tower within any of the nodes that + /// have ever been allocated as part of this skiplist. + #[inline] + fn height(&self) -> u8 { + self.as_ref().height() + } + + /// Returns the number of entries in the skipmap. + #[inline] + fn len(&self) -> usize { + self.as_ref().len() + } + + /// Returns true if the skipmap is empty. + #[inline] + fn is_empty(&self) -> bool { + self.len() == 0 + } + + /// Returns a random generated height. + /// + /// This method is useful when you want to check if the underlying allocator can allocate a node. + /// + /// ## Example + /// + /// ```rust + /// use skl::{dynamic::{unique::{sync::SkipMap, Map}, Builder, Ascend}, Arena}; + /// + /// let map = Builder::new().with_capacity(1024).alloc::().unwrap(); + /// let height = map.random_height(); + /// + /// let needed = SkipMap::::estimated_node_size(height, b"k1".len(), b"k2".len()); + /// ``` + #[inline] + fn random_height(&self) -> Height { + self.as_ref().random_height() + } + + /// Returns `true` if the key exists in the map. + /// + /// ## Example + /// + /// ```rust + /// use skl::dynamic::{unique::{unsync::SkipMap, Map}, Builder}; + /// + /// let map = Builder::new().with_capacity(1024).alloc::().unwrap(); + /// + /// map.insert(b"hello", b"world").unwrap(); + /// + /// map.remove(b"hello").unwrap(); + /// + /// assert!(!map.contains_key(b"hello")); + /// ``` + #[inline] + fn contains_key(&self, key: &Q) -> bool + where + Q: ?Sized + Borrow<[u8]>, + { + self.as_ref().contains_key(MIN_VERSION, key.borrow()) + } + + /// Returns the first entry in the map. + #[inline] + fn first(&self) -> Option> { + self.as_ref().first(MIN_VERSION) + } + + /// Returns the last entry in the map. + #[inline] + fn last(&self) -> Option> { + self.as_ref().last(MIN_VERSION) + } + + /// Returns the value associated with the given key, if it exists. 
+ /// + /// ## Example + /// + /// ```rust + /// use skl::dynamic::{unique::{sync::SkipMap, Map}, Builder}; + /// + /// let map = Builder::new().with_capacity(1024).alloc::().unwrap(); + /// + /// map.insert(b"hello", b"world").unwrap(); + /// + /// let ent = map.get(b"hello").unwrap(); + /// assert_eq!(ent.value(), b"world"); + /// + /// map.remove(b"hello").unwrap(); + /// + /// assert!(map.get(b"hello").is_none()); + /// ``` + #[inline] + fn get( + &self, + key: &Q, + ) -> Option> + where + Q: ?Sized + Borrow<[u8]>, + { + self.as_ref().get(MIN_VERSION, key.borrow()) + } + + /// Returns an `EntryRef` pointing to the highest element whose key is below the given bound. + /// If no such element is found then `None` is returned. + #[inline] + fn upper_bound( + &self, + upper: Bound<&Q>, + ) -> Option> + where + Q: ?Sized + Borrow<[u8]>, + { + self.as_ref().iter(MIN_VERSION).seek_upper_bound(upper) + } + + /// Returns an `EntryRef` pointing to the lowest element whose key is above the given bound. + /// If no such element is found then `None` is returned. + #[inline] + fn lower_bound( + &self, + lower: Bound<&Q>, + ) -> Option> + where + Q: ?Sized + Borrow<[u8]>, + { + self.as_ref().iter(MIN_VERSION).seek_lower_bound(lower) + } + + /// Returns a new iterator, this iterator will yield the latest version of all entries in the map less or equal to the given version. + #[inline] + fn iter(&self) -> Iter<'_, Self::Allocator, Self::RefCounter, Self::Comparator> { + self.as_ref().iter(MIN_VERSION) + } + + /// Returns a iterator that within the range, this iterator will yield the latest version of all entries in the range less or equal to the given version. + #[inline] + fn range( + &self, + range: R, + ) -> Iter<'_, Self::Allocator, Self::RefCounter, Self::Comparator, Q, R> + where + Q: ?Sized + Borrow<[u8]>, + R: RangeBounds, + { + self.as_ref().range(MIN_VERSION, range) + } + + /// Upserts a new key-value pair if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert`](Map::get_or_insert), this method will update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + #[inline] + fn insert<'a, 'b: 'a>( + &'a self, + key: &'b [u8], + value: &'b [u8], + ) -> Result>, Error> { + self.insert_at_height(self.random_height(), key, value) + } + + /// Upserts a new key-value pair at the given height if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_at_height`](Map::get_or_insert_at_height), this method will update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ /// + /// ## Example + /// + /// ```rust + /// use skl::{dynamic::{unique::{sync::SkipMap, Map}, Builder}, Arena}; + /// + /// let map = Builder::new().with_capacity(1024).alloc::().unwrap(); + /// + /// let height = map.random_height(); + /// map.insert_at_height(height, b"hello", b"world").unwrap(); + /// ``` + #[inline] + fn insert_at_height<'a, 'b: 'a>( + &'a self, + height: Height, + key: &'b [u8], + value: &'b [u8], + ) -> Result>, Error> { + self + .as_ref() + .insert_at_height(MIN_VERSION, height, key, value) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_with_value_builder`](Map::get_or_insert_with_value_builder), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + /// + /// ## Example + /// + /// ```rust + /// use skl::{dynamic::{unique::{sync::SkipMap, Map}, Builder}, ValueBuilder}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = Builder::new().with_capacity(1024).alloc::().unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size, |val: &mut skl::VacantBuffer<'_>| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(encoded_size) + /// }); + /// + /// l.insert_with_value_builder::(b"alice".as_slice(), vb) + /// .unwrap(); + /// ``` + #[inline] + #[allow(single_use_lifetimes)] + fn insert_with_value_builder<'a, 'b: 'a, E>( + &'a self, + key: &'b [u8], + value_builder: ValueBuilder) -> Result>, + ) -> Result< + Option>, + Either, + > { + self.insert_at_height_with_value_builder(self.random_height(), key, value_builder) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_with_value_builder`](Map::get_or_insert_with_value_builder), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
+ /// + /// ## Example + /// + /// ```rust + /// use skl::{dynamic::{unique::{sync::SkipMap, Map}, Builder}, ValueBuilder, Arena}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = Builder::new().with_capacity(1024).alloc::().unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size, |val: &mut skl::VacantBuffer<'_>| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(encoded_size) + /// }); + /// + /// let height = l.random_height(); + /// l.insert_at_height_with_value_builder::(height, b"alice".as_slice(), vb) + /// .unwrap(); + /// ``` + #[inline] + #[allow(single_use_lifetimes)] + fn insert_at_height_with_value_builder<'a, 'b: 'a, E>( + &'a self, + height: Height, + key: &'b [u8], + value_builder: ValueBuilder) -> Result>, + ) -> Result< + Option>, + Either, + > { + self + .as_ref() + .insert_at_height_with_value_builder(MIN_VERSION, height, key, value_builder) + } + + /// Inserts a new key-value pair if it does not yet exist. + /// + /// Unlike [`insert`](Map::insert), this method will not update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. + #[inline] + fn get_or_insert<'a, 'b: 'a>( + &'a self, + key: &'b [u8], + value: &'b [u8], + ) -> Result>, Error> { + self.get_or_insert_at_height(self.random_height(), key, value) + } + + /// Inserts a new key-value pair at height if it does not yet exist. + /// + /// Unlike [`insert_at_height`](Map::insert_at_height), this method will not update the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. + #[inline] + fn get_or_insert_at_height<'a, 'b: 'a>( + &'a self, + height: Height, + key: &'b [u8], + value: &'b [u8], + ) -> Result>, Error> { + self + .as_ref() + .get_or_insert_at_height(MIN_VERSION, height, key, value) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_with_value_builder`](Map::insert_with_value_builder), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. 
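Note the error shape of the `*_with_value_builder` methods: the `Either` in the return type carries the closure's own error `E` on the left and the skiplist's `Error` (for example, arena exhaustion) on the right. A sketch, assuming the elided turbofish and that the `either` crate appearing in these signatures is available to the caller:

```rust
use either::Either;
use skl::{dynamic::{unique::{sync::SkipMap, Map}, Builder}, ValueBuilder};

let map = Builder::new().with_capacity(1024).alloc::<SkipMap>().unwrap();

let vb = ValueBuilder::new(5, |val: &mut skl::VacantBuffer<'_>| {
  val.put_slice(b"world").unwrap();
  Ok::<usize, ()>(5)
});

match map.insert_with_value_builder(b"hello".as_slice(), vb) {
  Ok(old) => assert!(old.is_none()),
  // The value closure's error type `E` surfaces on the left...
  Err(Either::Left(())) => unreachable!("value closure failed"),
  // ...and allocation failures surface on the right.
  Err(Either::Right(e)) => panic!("skl error: {e}"),
}
```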
+ /// + /// ## Example + /// + /// ```rust + /// use skl::{dynamic::{unique::{sync::SkipMap, Map}, Builder}, ValueBuilder}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = Builder::new().with_capacity(1024).alloc::().unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size, |val: &mut skl::VacantBuffer<'_>| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(encoded_size) + /// }); + /// l.get_or_insert_with_value_builder::(b"alice".as_slice(), vb) + /// .unwrap(); + /// ``` + #[inline] + #[allow(single_use_lifetimes)] + fn get_or_insert_with_value_builder<'a, 'b: 'a, E>( + &'a self, + key: &'b [u8], + value_builder: ValueBuilder) -> Result>, + ) -> Result< + Option>, + Either, + > { + self.get_or_insert_at_height_with_value_builder(self.random_height(), key, value_builder) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_at_height_with_value_builder`](Map::insert_at_height_with_value_builder), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully get_or_inserted. + /// - Returns `Ok(Some(_))` if the key with the given version already exists. + /// + /// ## Example + /// + /// ```rust + /// use skl::{dynamic::{unique::{sync::SkipMap, Map}, Builder}, ValueBuilder, Arena}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = Builder::new().with_capacity(1024).alloc::().unwrap(); + /// + /// let vb = ValueBuilder::new(encoded_size, |val: &mut skl::VacantBuffer<'_>| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(encoded_size) + /// }); + /// + /// let height = l.random_height(); + /// l.get_or_insert_at_height_with_value_builder::(height, b"alice".as_slice(), vb) + /// .unwrap(); + /// ``` + #[inline] + #[allow(single_use_lifetimes)] + fn get_or_insert_at_height_with_value_builder<'a, 'b: 'a, E>( + &'a self, + height: Height, + key: &'b [u8], + value_builder: ValueBuilder) -> Result>, + ) -> Result< + Option>, + Either, + > { + self.as_ref().get_or_insert_at_height_with_value_builder( + MIN_VERSION, + height, + key, + value_builder, + ) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// Unlike [`get_or_insert_with_builders`](Map::get_or_insert_with_builders), this method will update the value if the key with the given version already exists. 
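All of the `*_at_height` variants exist so the tower height can be drawn once with `random_height` and reused across calls, instead of being re-drawn inside each default method. A sketch mirroring the examples above (the `Arena` trait supplies `random_height`, and the `alloc` turbofish is reconstructed):

```rust
use skl::{dynamic::{unique::{sync::SkipMap, Map}, Builder}, Arena};

let map = Builder::new().with_capacity(1024).alloc::<SkipMap>().unwrap();

// Draw the height once; every `*_at_height` method can then reuse it.
let height = map.random_height();
map.insert_at_height(height, b"hello".as_slice(), b"world".as_slice()).unwrap();
// The key now exists, so this is a no-op that returns the current entry.
map.get_or_insert_at_height(height, b"hello".as_slice(), b"ignored".as_slice()).unwrap();

assert_eq!(map.get(b"hello").unwrap().value(), b"world");
```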
+ /// + /// This method is useful when you want to insert a key and you know the key size and value size but you do not have the key and value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. + /// + /// ## Example + /// + /// ```rust + /// use skl::{dynamic::{unique::{sync::SkipMap, Map}, Builder}, KeyBuilder, ValueBuilder}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = Builder::new().with_capacity(1024).alloc::().unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |key: &mut skl::VacantBuffer<'_>| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(5) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size, |val: &mut skl::VacantBuffer<'_>| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(encoded_size) + /// }); + /// + /// l.insert_with_builders::<(), ()>(kb, vb) + /// .unwrap(); + /// ``` + #[inline] + fn insert_with_builders<'a, KE, VE>( + &'a self, + key_builder: KeyBuilder) -> Result>, + value_builder: ValueBuilder) -> Result>, + ) -> Result< + Option>, + Among, + > { + self.insert_at_height_with_builders(self.random_height(), key_builder, value_builder) + } + + /// Upserts a new key if it does not yet exist, if the key with the given version already exists, it will update the value. + /// + /// Unlike [`get_or_insert_with_builders`](Map::get_or_insert_with_builders), this method will update the value if the key with the given version already exists. + /// + /// This method is useful when you want to insert a key and you know the key size and value size but you do not have the key and value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// - Returns `Ok(None)` if the key was successfully inserted. + /// - Returns `Ok(Some(old))` if the key with the given version already exists and the value is successfully updated. 
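A round trip makes the builder contract concrete: `KeyBuilder` and `ValueBuilder` write into the vacant buffers, and a later `get` sees exactly the bytes they produced. A sketch using the same encoding as the `Person` examples (turbofish reconstructed):

```rust
use skl::{dynamic::{unique::{sync::SkipMap, Map}, Builder}, KeyBuilder, ValueBuilder};

let l = Builder::new().with_capacity(1024).alloc::<SkipMap>().unwrap();

let kb = KeyBuilder::new(5u8.into(), |key: &mut skl::VacantBuffer<'_>| {
  key.put_slice(b"alice").unwrap();
  Ok(5)
});
// 4 bytes of little-endian id plus 5 bytes of name.
let vb = ValueBuilder::new(9, |val: &mut skl::VacantBuffer<'_>| {
  val.put_u32_le(1).unwrap();
  val.put_slice(b"Alice").unwrap();
  Ok(9)
});
l.insert_with_builders::<(), ()>(kb, vb).unwrap();

// Decode what the builders wrote.
let ent = l.get(b"alice".as_slice()).unwrap();
let (id, name) = ent.value().split_at(4);
assert_eq!(u32::from_le_bytes(id.try_into().unwrap()), 1);
assert_eq!(name, b"Alice");
```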
+ /// + /// ## Example + /// + /// ```rust + /// use skl::{dynamic::{unique::{sync::SkipMap, Map}, Builder}, KeyBuilder, ValueBuilder, Arena}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = Builder::new().with_capacity(1024).alloc::().unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |key: &mut skl::VacantBuffer<'_>| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(5) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size, |val: &mut skl::VacantBuffer<'_>| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(encoded_size) + /// }); + /// + /// let height = l.random_height(); + /// l.insert_at_height_with_builders::<(), ()>(height, kb, vb) + /// .unwrap(); + /// ``` + #[inline] + fn insert_at_height_with_builders<'a, KE, VE>( + &'a self, + height: Height, + key_builder: KeyBuilder) -> Result>, + value_builder: ValueBuilder) -> Result>, + ) -> Result< + Option>, + Among, + > { + self + .as_ref() + .insert_at_height_with_builders(MIN_VERSION, height, key_builder, value_builder) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_with_builders`](Map::insert_with_builders), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// ## Example + /// + /// ```rust + /// use skl::{dynamic::{unique::{sync::SkipMap, Map}, Builder}, KeyBuilder, ValueBuilder}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = Builder::new().with_capacity(1024).alloc::().unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |key: &mut skl::VacantBuffer<'_>| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(5) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size, |val: &mut skl::VacantBuffer<'_>| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(encoded_size) + /// }); + /// + /// l.get_or_insert_with_builders::<(), ()>(kb, vb) + /// .unwrap(); + /// ``` + #[inline] + fn get_or_insert_with_builders<'a, KE, VE>( + &'a self, + key_builder: KeyBuilder) -> Result>, + value_builder: ValueBuilder) -> Result>, + ) -> Result< + Option>, + Among, + > { + self.get_or_insert_at_height_with_builders(self.random_height(), key_builder, value_builder) + } + + /// Inserts a new key if it does not yet exist. + /// + /// Unlike [`insert_at_height_with_builders`](Map::insert_at_height_with_builders), this method will not update the value if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_insert a key and you know the value size but you do not have the value + /// at this moment. 
+ /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// ## Example + /// + /// ```rust + /// use skl::{dynamic::{unique::{sync::SkipMap, Map}, Builder}, KeyBuilder, ValueBuilder, Arena}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = Builder::new().with_capacity(1024).alloc::().unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |key: &mut skl::VacantBuffer<'_>| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(5) + /// }); + /// + /// let vb = ValueBuilder::new(encoded_size, |val: &mut skl::VacantBuffer<'_>| { + /// val.put_u32_le(alice.id).unwrap(); + /// val.put_slice(alice.name.as_bytes()).unwrap(); + /// Ok(encoded_size) + /// }); + /// + /// let height = l.random_height(); + /// l.get_or_insert_at_height_with_builders::<(), ()>(height, kb, vb) + /// .unwrap(); + /// ``` + #[inline] + fn get_or_insert_at_height_with_builders<'a, KE, VE>( + &'a self, + height: Height, + key_builder: KeyBuilder) -> Result>, + value_builder: ValueBuilder) -> Result>, + ) -> Result< + Option>, + Among, + > { + self.as_ref().get_or_insert_at_height_with_builders( + MIN_VERSION, + height, + key_builder, + value_builder, + ) + } + + /// Removes the key-value pair if it exists. A CAS operation will be used to ensure the operation is atomic. + /// + /// Unlike [`get_or_remove`](Map::get_or_remove), this method will remove the value if the key already exists. + /// + /// - Returns `Ok(None)`: + /// - if the remove operation is successful or the key is marked in remove status by other threads. + /// - Returns `Ok(Some(current))` if the key exists and not in remove status + /// or the entry is not successfully removed because of an update on this entry happens in another thread. + #[inline] + fn remove<'a, 'b: 'a>( + &'a self, + key: &'b [u8], + ) -> Result>, Error> { + self.remove_at_height(self.random_height(), key) + } + + /// Removes the key-value pair if it exists. A CAS operation will be used to ensure the operation is atomic. + /// + /// Unlike [`get_or_remove_at_height`](Map::get_or_remove_at_height), this method will remove the value if the key already exists. + /// + /// - Returns `Ok(None)`: + /// - if the remove operation is successful or the key is marked in remove status by other threads. + /// - Returns `Ok(Some(current))` if the key exists and not in remove status + /// or the entry is not successfully removed because of an update on this entry happens in another thread. + #[inline] + #[allow(single_use_lifetimes)] + fn remove_at_height<'a, 'b: 'a>( + &'a self, + height: Height, + key: &'b [u8], + ) -> Result>, Error> { + self.as_ref().compare_remove_at_height( + MIN_VERSION, + height, + key, + Ordering::AcqRel, + Ordering::Relaxed, + ) + } + + /// Gets or removes the key-value pair if it exists. + /// + /// Unlike [`remove`](Map::remove), this method will not remove the value if the key already exists. + /// + /// - Returns `Ok(None)` if the key does not exist. + /// - Returns `Ok(Some(old))` if the key already exists. 
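The tombstone-based contract above is worth restating in code: a successful `remove` reports `Ok(None)` rather than handing back the removed entry, and the key stops being visible to `get`. A minimal sketch (turbofish reconstructed):

```rust
use skl::dynamic::{unique::{sync::SkipMap, Map}, Builder};

let map = Builder::new().with_capacity(1024).alloc::<SkipMap>().unwrap();

map.insert(b"k".as_slice(), b"v".as_slice()).unwrap();

// Successful removal: `Ok(None)`, and the key is gone from reads.
assert!(map.remove(b"k".as_slice()).unwrap().is_none());
assert!(map.get(b"k").is_none());
```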
+ #[inline] + fn get_or_remove<'a, 'b: 'a>( + &'a self, + key: &'b [u8], + ) -> Result>, Error> { + self.get_or_remove_at_height(self.random_height(), key) + } + + /// Gets or removes the key-value pair if it exists. + /// + /// Unlike [`remove_at_height`](Map::remove_at_height), this method will not remove the value if the key with the given version already exists. + /// + /// - Returns `Ok(None)` if the key does not exist. + /// - Returns `Ok(Some(old))` if the key with the given version already exists. + /// + /// ## Example + /// + /// ```rust + /// use skl::{dynamic::{unique::{sync::SkipMap, Map}, Builder}, Arena}; + /// + /// let map = Builder::new().with_capacity(1024).alloc::().unwrap(); + /// + /// map.insert(b"hello", b"world").unwrap(); + /// + /// let height = map.random_height(); + /// map.get_or_remove_at_height(height, b"hello").unwrap(); + /// ``` + #[allow(single_use_lifetimes)] + #[inline] + fn get_or_remove_at_height<'a, 'b: 'a>( + &'a self, + height: Height, + key: &'b [u8], + ) -> Result>, Error> { + self + .as_ref() + .get_or_remove_at_height(MIN_VERSION, height, key) + } + + /// Gets or removes the key-value pair if it exists. + /// + /// - Returns `Ok(None)` if the key does not exist. + /// - Returns `Ok(Some(old))` if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_remove a key and you know the key size but you do not have the key + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. + /// + /// ## Example + /// + /// ```rust + /// use skl::{dynamic::{unique::{sync::SkipMap, Map}, Builder}, KeyBuilder}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = Builder::new().with_capacity(1024).alloc::().unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |key: &mut skl::VacantBuffer<'_>| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(5) + /// }); + /// l.get_or_remove_with_builder::(kb) + /// .unwrap(); + /// ``` + #[inline] + fn get_or_remove_with_builder<'a, 'b: 'a, E>( + &'a self, + key_builder: KeyBuilder) -> Result>, + ) -> Result< + Option>, + Either, + > { + self.get_or_remove_at_height_with_builder(self.random_height(), key_builder) + } + + /// Gets or removes the key-value pair if it exists. + /// + /// - Returns `Ok(None)` if the key does not exist. + /// - Returns `Ok(Some(old))` if the key with the given version already exists. + /// + /// This method is useful when you want to get_or_remove a key and you know the key size but you do not have the key + /// at this moment. + /// + /// A placeholder will be inserted first, then you will get an [`VacantBuffer`], + /// and you must fill the buffer with bytes later in the closure. 
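`get_or_remove` is the read-mostly twin of `remove`: when the key is live it only reads, returning the current entry and leaving the map untouched. A sketch built from the contract above:

```rust
use skl::dynamic::{unique::{sync::SkipMap, Map}, Builder};

let map = Builder::new().with_capacity(1024).alloc::<SkipMap>().unwrap();

map.insert(b"k".as_slice(), b"v".as_slice()).unwrap();

// The key exists, so nothing is removed and the live entry is returned.
let cur = map.get_or_remove(b"k".as_slice()).unwrap().unwrap();
assert_eq!(cur.value(), b"v");
assert_eq!(map.get(b"k").unwrap().value(), b"v");

// `remove`, by contrast, always records the tombstone.
assert!(map.remove(b"k".as_slice()).unwrap().is_none());
assert!(map.get(b"k").is_none());
```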
+ /// + /// ## Example + /// + /// ```rust + /// use skl::{dynamic::{unique::{sync::SkipMap, Map}, Builder}, KeyBuilder, Arena}; + /// + /// struct Person { + /// id: u32, + /// name: String, + /// } + /// + /// impl Person { + /// fn encoded_size(&self) -> usize { + /// 4 + self.name.len() + /// } + /// } + /// + /// + /// let alice = Person { + /// id: 1, + /// name: "Alice".to_string(), + /// }; + /// + /// let encoded_size = alice.encoded_size(); + /// + /// let l = Builder::new().with_capacity(1024).alloc::().unwrap(); + /// + /// let kb = KeyBuilder::new(5u8.into(), |key: &mut skl::VacantBuffer<'_>| { + /// key.put_slice(b"alice").unwrap(); + /// Ok(5) + /// }); + /// let height = l.random_height(); + /// l.get_or_remove_at_height_with_builder::(height, kb) + /// .unwrap(); + /// ``` + #[inline] + fn get_or_remove_at_height_with_builder<'a, 'b: 'a, E>( + &'a self, + height: Height, + key_builder: KeyBuilder) -> Result>, + ) -> Result< + Option>, + Either, + > { + self + .as_ref() + .get_or_remove_at_height_with_builder(MIN_VERSION, height, key_builder) + } +} diff --git a/src/generic.rs b/src/generic.rs new file mode 100644 index 0000000..91598a0 --- /dev/null +++ b/src/generic.rs @@ -0,0 +1,22 @@ +mod builder; +mod list; + +/// Generic `SkipMap` implementation with multiple versions support. +pub mod multiple_version; + +/// Generic `SkipMap` implementation without multiple versions support. +pub mod unique; + +/// Iterators for the skipmaps. +pub mod iter { + pub use super::list::iterator::{Iter, IterAll}; +} + +/// Entry references for the skipmaps. +pub mod entry { + pub use super::list::{EntryRef, VersionedEntryRef}; +} + +pub use builder::Builder; + +pub use dbutils::{equivalent::*, types::*}; diff --git a/src/generic/builder.rs b/src/generic/builder.rs new file mode 100644 index 0000000..5a66475 --- /dev/null +++ b/src/generic/builder.rs @@ -0,0 +1,138 @@ +use core::mem; + +use super::super::Options; +use crate::{ + allocator::Sealed, + error::Error, + options::{CompressionPolicy, Freelist}, + traits::Constructable, + types::{Height, KeySize}, + Arena, +}; + +#[cfg(all(feature = "memmap", not(target_family = "wasm")))] +#[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] +mod memmap; + +/// A builder for creating a generic key-value `SkipMap`. +#[derive(Debug, Clone)] +pub struct Builder { + options: Options, +} + +impl Default for Builder { + #[inline] + fn default() -> Self { + Self::new() + } +} + +impl From for Builder { + #[inline] + fn from(options: Options) -> Self { + Self { options } + } +} + +impl From for Options { + #[inline] + fn from(builder: Builder) -> Self { + builder.options + } +} + +impl Builder { + /// Create a new builder with the given options. + /// + /// ## Example + /// + /// ```rust + /// use skl::generic::Builder; + /// + /// let builder = Builder::new(); + /// ``` + #[inline] + pub const fn new() -> Self { + Self { + options: Options::new(), + } + } + + /// Get the options of the builder. + /// + /// ## Example + /// + /// ```rust + /// use skl::generic::Builder; + /// + /// let builder = Builder::new(); + /// let options = builder.options(); + /// ``` + #[inline] + pub const fn options(&self) -> &Options { + &self.options + } + + /// Set the options for the builder. 
+ /// + /// ## Example + /// + /// ```rust + /// use skl::{generic::Builder, Options}; + /// + /// let builder = Builder::new().with_options(Options::new()); + /// ``` + #[inline] + pub const fn with_options(mut self, opts: Options) -> Self { + self.options = opts; + self + } + + crate::__builder_opts!(generic::Builder); +} + +impl Builder { + /// Create a new map which is backed by a `AlignedVec`. + /// + /// **Note:** The capacity stands for how many memory allocated, + /// it does not mean the skiplist can store `cap` entries. + /// + /// **What the difference between this method and [`Builder::map_anon`]?** + /// + /// 1. This method will use an `AlignedVec` ensures we are working within Rust's memory safety guarantees. + /// Even if we are working with raw pointers with `Box::into_raw`, + /// the backend ARENA will reclaim the ownership of this memory by converting it back to a `Box` + /// when dropping the backend ARENA. Since `AlignedVec` uses heap memory, the data might be more cache-friendly, + /// especially if you're frequently accessing or modifying it. + /// + /// 2. Where as [`Builder::map_anon`] will use mmap anonymous to require memory from the OS. + /// If you require very large contiguous memory regions, `mmap` might be more suitable because + /// it's more direct in requesting large chunks of memory from the OS. + /// + /// ## Example + /// + /// ```rust + /// use skl::generic::{unique::sync, multiple_version::unsync, Builder}; + /// + /// let map = Builder::new().with_capacity(1024).alloc::>().unwrap(); + /// + /// let arena = Builder::new().with_capacity(1024).alloc::>().unwrap(); + /// ``` + #[inline] + pub fn alloc(self) -> Result + where + T: Arena, + T::Constructable: Constructable, + { + let node_align = + mem::align_of::<<::Allocator as Sealed>::Node>(); + + let Builder { options } = self; + options + .to_arena_options() + .with_maximum_alignment(node_align) + .alloc::<<::Allocator as Sealed>::Allocator>() + .map_err(Into::into) + .and_then(|arena| T::construct(arena, options, false, ())) + } +} diff --git a/src/generic/builder/memmap.rs b/src/generic/builder/memmap.rs new file mode 100644 index 0000000..e89782e --- /dev/null +++ b/src/generic/builder/memmap.rs @@ -0,0 +1,228 @@ +use core::mem; + +use either::Either; + +use super::Builder; +use crate::{ + allocator::{Node, Sealed}, + error::{bad_magic_version, bad_version, flags_mismtach, invalid_data}, + options::CURRENT_VERSION, + traits::Constructable, + Arena, +}; + +impl Builder { + /// Create a new map which is backed by a anonymous memory map. + /// + /// **What the difference between this method and [`Builder::alloc`]?** + /// + /// 1. This method will use mmap anonymous to require memory from the OS directly. + /// If you require very large contiguous memory regions, this method might be more suitable because + /// it's more direct in requesting large chunks of memory from the OS. + /// + /// 2. Where as [`Builder::alloc`] will use an `AlignedVec` ensures we are working within Rust's memory safety guarantees. + /// Even if we are working with raw pointers with `Box::into_raw`, + /// the backend ARENA will reclaim the ownership of this memory by converting it back to a `Box` + /// when dropping the backend ARENA. Since `AlignedVec` uses heap memory, the data might be more cache-friendly, + /// especially if you're frequently accessing or modifying it. 
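The trade-off described above (heap-backed `AlignedVec` versus anonymous mmap) only changes the allocation call; the resulting maps behave identically. A sketch for the generic builder with the `memmap` feature enabled, where the `SkipMap<[u8], [u8]>` instantiation is an assumption standing in for the type parameters this patch text elides:

```rust
use skl::generic::{unique::sync, Builder};

// Heap-backed arena: allocation stays inside Rust's memory-safety guarantees.
let heap_map = Builder::new()
  .with_capacity(1024)
  .alloc::<sync::SkipMap<[u8], [u8]>>()
  .unwrap();

// Anonymous-mmap arena: memory is requested directly from the OS.
let anon_map = Builder::new()
  .with_capacity(1024)
  .map_anon::<sync::SkipMap<[u8], [u8]>>()
  .unwrap();
```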
+ /// + /// ## Example + /// + /// ```rust + /// use skl::generic::{unique::sync, multiple_version::unsync, Builder}; + /// + /// let map = Builder::new().with_capacity(1024).map_anon::>().unwrap(); + /// + /// let arena = Builder::new().with_capacity(1024).map_anon::>().unwrap(); + /// ``` + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub fn map_anon(self) -> std::io::Result + where + T: Arena, + T::Constructable: Constructable, + { + let node_align = + mem::align_of::<<::Allocator as Sealed>::Node>(); + let Builder { options } = self; + + options + .to_arena_options() + .with_maximum_alignment(node_align) + .map_anon::<<::Allocator as Sealed>::Allocator>() + .map_err(Into::into) + .and_then(|arena| T::construct(arena, options, false, ()).map_err(invalid_data)) + } + + /// Opens a read-only map which backed by file-backed memory map. + /// + /// ## Safety + /// - All file-backed memory map constructors are marked `unsafe` because of the potential for + /// *Undefined Behavior* (UB) using the map if the underlying file is subsequently modified, in or + /// out of process. Applications must consider the risk and take appropriate precautions when + /// using file-backed maps. Solutions such as file permissions, locks or process-private (e.g. + /// unlinked) files exist but are platform specific and limited. + /// - The `K` and `V` types must be the same as the types used to create the map. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map(self, path: P) -> std::io::Result + where + T: Arena, + T::Constructable: Constructable, + P: AsRef, + { + self + .map_with_path_builder::(|| Ok(path.as_ref().to_path_buf())) + .map_err(Either::unwrap_right) + } + + /// Opens a read-only map which backed by file-backed memory map with a path builder. + /// + /// ## Safety + /// - All file-backed memory map constructors are marked `unsafe` because of the potential for + /// *Undefined Behavior* (UB) using the map if the underlying file is subsequently modified, in or + /// out of process. Applications must consider the risk and take appropriate precautions when + /// using file-backed maps. Solutions such as file permissions, locks or process-private (e.g. + /// unlinked) files exist but are platform specific and limited. + /// - The `K` and `V` types must be the same as the types used to create the map. 
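A typical lifecycle pairs `map_mut` (create or reopen, writable) with `map` (reopen, read-only). This sketch is illustrative only: the path is arbitrary, the `SkipMap<[u8], [u8]>` instantiation is assumed, and the same `K`/`V` types must be used on every open, per the safety notes above:

```rust
use skl::generic::{unique::sync::SkipMap, Builder};

let path = std::env::temp_dir().join("sketch.skl");

// Create (or reopen) writable...
let map = unsafe {
  Builder::new()
    .with_capacity(1024)
    .map_mut::<SkipMap<[u8], [u8]>, _>(&path)
    .unwrap()
};
drop(map);

// ...then reopen read-only; the magic version and node flags are validated.
let map = unsafe { Builder::new().map::<SkipMap<[u8], [u8]>, _>(&path).unwrap() };
drop(map);
let _ = std::fs::remove_file(&path);
```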
+ #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_with_path_builder( + self, + path_builder: PB, + ) -> Result> + where + T: Arena, + T::Constructable: Constructable, + PB: FnOnce() -> Result, + { + use crate::allocator::Meta as _; + + let node_align = + mem::align_of::<<::Allocator as Sealed>::Node>(); + + let Builder { options } = self; + let magic_version = options.magic_version(); + + #[allow(clippy::bind_instead_of_map)] + options + .to_arena_options() + .with_unify(true) + .with_read(true) + .with_create(false) + .with_create_new(false) + .with_write(false) + .with_truncate(false) + .with_append(false) + .with_maximum_alignment(node_align) + .map_with_path_builder::<<::Allocator as Sealed>::Allocator, _, _>(path_builder) + .and_then(|arena| { + T::construct(arena, options, true, ()) + .map_err(invalid_data) + .and_then(|map| { + let flags = map.meta().flags(); + let node_flags = <<::Allocator as Sealed>::Node as Node>::flags(); + + if flags != node_flags { + return Err(flags_mismtach(flags, node_flags)); + } + + if Arena::magic_version(&map) != magic_version { + Err(bad_magic_version()) + } else if map.as_ref().version() != CURRENT_VERSION { + Err(bad_version()) + } else { + Ok(map) + } + }) + .map_err(Either::Right) + }) + } + + /// Creates a new map or reopens a map which backed by a file backed memory map. + /// + /// ## Safety + /// + /// - All file-backed memory map constructors are marked `unsafe` because of the potential for + /// *Undefined Behavior* (UB) using the map if the underlying file is subsequently modified, in or + /// out of process. Applications must consider the risk and take appropriate precautions when + /// using file-backed maps. Solutions such as file permissions, locks or process-private (e.g. + /// unlinked) files exist but are platform specific and limited. + /// - The `K` and `V` types must be the same as the types used to create the map. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + pub unsafe fn map_mut(self, path: P) -> std::io::Result + where + T: Arena, + T::Constructable: Constructable, + P: AsRef, + { + self + .map_mut_with_path_builder::(|| Ok(path.as_ref().to_path_buf())) + .map_err(Either::unwrap_right) + } + + /// Creates a new map or reopens a map which backed by a file backed memory map with path builder. + /// + /// # Safety + /// - All file-backed memory map constructors are marked `unsafe` because of the potential for + /// *Undefined Behavior* (UB) using the map if the underlying file is subsequently modified, in or + /// out of process. Applications must consider the risk and take appropriate precautions when + /// using file-backed maps. Solutions such as file permissions, locks or process-private (e.g. + /// unlinked) files exist but are platform specific and limited. + /// - The `K` and `V` types must be the same as the types used to create the map. 
+ #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub unsafe fn map_mut_with_path_builder( + self, + path_builder: PB, + ) -> Result> + where + T: Arena, + T::Constructable: Constructable, + PB: FnOnce() -> Result, + { + use crate::allocator::Meta as _; + + let node_align = + mem::align_of::<<::Allocator as Sealed>::Node>(); + let Builder { options } = self; + let magic_version = options.magic_version(); + let path = path_builder().map_err(Either::Left)?; + let exist = path.exists(); + + #[allow(clippy::bind_instead_of_map)] + options + .to_arena_options() + .with_maximum_alignment(node_align) + .with_unify(true) + .map_mut::<<::Allocator as Sealed>::Allocator, _>(path) + .map_err(Either::Right) + .and_then(|arena| { + T::construct(arena, options, exist, ()) + .map_err(invalid_data) + .and_then(|map| { + let flags = map.meta().flags(); + let node_flags = + <<::Allocator as Sealed>::Node as Node>::flags(); + + if flags != node_flags { + return Err(flags_mismtach(flags, node_flags)); + } + + if Arena::magic_version(&map) != magic_version { + Err(bad_magic_version()) + } else if map.as_ref().version() != CURRENT_VERSION { + Err(bad_version()) + } else { + Ok(map) + } + }) + .map_err(Either::Right) + }) + } +} diff --git a/src/base.rs b/src/generic/list.rs similarity index 89% rename from src/base.rs rename to src/generic/list.rs index 17a8ba1..caa3890 100644 --- a/src/base.rs +++ b/src/generic/list.rs @@ -1,7 +1,5 @@ use core::{cmp, marker::PhantomData, ptr::NonNull, sync::atomic::Ordering}; -use std::boxed::Box; - use among::Among; use dbutils::{ buffer::VacantBuffer, @@ -12,12 +10,17 @@ use either::Either; use rarena_allocator::Allocator as _; use crate::{ - allocator::{Allocator, Deallocator, Header, Node, NodePointer, Pointer, ValuePointer}, + allocator::{Allocator, Deallocator, Meta, Node, NodePointer, Pointer, ValuePointer}, encode_key_size_and_height, error::Error, + internal::RefMeta, + options::CompressionPolicy, + random_height, + ref_counter::RefCounter, + traits::Constructable, ty_ref, types::{internal::ValuePointer as ValuePointerType, Height, KeyBuilder, ValueBuilder}, - CompressionPolicy, Version, + FindResult, Header, Inserter, Splice, Version, }; mod entry; @@ -26,20 +29,20 @@ pub use entry::{EntryRef, VersionedEntryRef}; mod api; pub(super) mod iterator; -type UpdateOk<'a, 'b, K, V, A> = Either< - Option>, - Result, VersionedEntryRef<'a, K, V, A>>, +type UpdateOk<'a, 'b, K, V, A, R> = Either< + Option>, + Result, VersionedEntryRef<'a, K, V, A, R>>, >; /// A fast, cocnurrent map implementation based on skiplist that supports forward /// and backward iteration. #[derive(Debug)] -pub struct SkipList { +pub struct SkipList { pub(crate) arena: A, - meta: NonNull, + meta: RefMeta, head: ::Pointer, tail: ::Pointer, - data_offset: u32, + header: Option
<Header>
, #[cfg(all(feature = "memmap", not(target_family = "wasm")))] on_disk: bool, /// If set to true by tests, then extra delays are added to make it easier to @@ -50,37 +53,41 @@ pub struct SkipList { _m: PhantomData<(fn() -> K, fn() -> V)>, } -unsafe impl Send for SkipList +unsafe impl Send for SkipList where K: ?Sized, V: ?Sized, A: Allocator + Send, + R: RefCounter + Send, { } -unsafe impl Sync for SkipList +unsafe impl Sync for SkipList where K: ?Sized, V: ?Sized, - A: Allocator + Send, + A: Allocator + Sync, + R: RefCounter + Sync, { } -impl Clone for SkipList +impl Clone for SkipList where K: ?Sized, V: ?Sized, A: Allocator, + R: RefCounter, { + #[inline] fn clone(&self) -> Self { Self { arena: self.arena.clone(), - meta: self.meta, + meta: self.meta.clone(), #[cfg(all(feature = "memmap", not(target_family = "wasm")))] on_disk: self.on_disk, head: self.head, tail: self.tail, - data_offset: self.data_offset, + header: self.header, #[cfg(all(test, feature = "std"))] yield_now: self.yield_now, _m: PhantomData, @@ -88,81 +95,106 @@ where } } -impl Drop for SkipList +impl SkipList where K: ?Sized, V: ?Sized, A: Allocator, + R: RefCounter, { - #[allow(clippy::collapsible_if)] - fn drop(&mut self) { - if self.arena.refs() == 1 { - if !self.arena.unify() { - unsafe { - let _ = Box::from_raw(self.meta.as_ptr()); - } - } - - #[cfg(all(feature = "memmap", not(target_family = "wasm"), not(miri)))] - if self.arena.is_map() && self.arena.options().lock_meta() { - let _ = unsafe { self.arena.munlock(0, self.arena.page_size()) }; - } - } + #[inline] + pub(crate) fn meta(&self) -> &A::Meta { + &self.meta } } -impl SkipList +impl Constructable for SkipList where K: ?Sized, V: ?Sized, A: Allocator, + R: RefCounter, { + type Allocator = A; + type Comparator = (); + #[inline] - pub(crate) fn construct( - arena: A, - meta: NonNull, - head: ::Pointer, - tail: ::Pointer, - data_offset: u32, + fn allocator(&self) -> &Self::Allocator { + &self.arena + } + + #[inline] + fn allocator_mut(&mut self) -> &mut Self::Allocator { + &mut self.arena + } + + #[inline] + fn magic_version(&self) -> u16 { + self.meta().magic_version() + } + + #[inline] + fn len(&self) -> usize { + self.meta().len() as usize + } + + #[inline] + fn height(&self) -> u8 { + self.meta().height() + } + + #[inline] + fn random_height(&self) -> crate::Height { + random_height(self.arena.max_height()) + } + + #[inline] + fn header(&self) -> Option<&Header> { + self.header.as_ref() + } + + #[inline] + fn construct( + arena: Self::Allocator, + meta: core::ptr::NonNull<::Meta>, + head: <::Node as crate::allocator::Node>::Pointer, + tail: <::Node as crate::allocator::Node>::Pointer, + header: Option
<Header>
, + _: Self::Comparator, ) -> Self { Self { #[cfg(all(feature = "memmap", not(target_family = "wasm")))] on_disk: arena.is_ondisk(), + meta: RefMeta::new(meta, arena.unify()), arena, - meta, head, tail, - data_offset, + header, #[cfg(all(test, feature = "std"))] yield_now: false, _m: PhantomData, } } - - #[inline] - pub(crate) const fn meta(&self) -> &A::Header { - // Safety: the pointer is well aligned and initialized. - unsafe { self.meta.as_ref() } - } } -impl SkipList +impl SkipList where K: ?Sized + Type, V: ?Sized + Type, A: Allocator, + R: RefCounter, { fn new_node<'a, E>( &'a self, version: Version, height: u32, key: &Key<'a, '_, K, A>, - value_builder: Option) -> Result<(), E>>>, + value_builder: Option) -> Result>>, ) -> Result<(::Pointer, Deallocator), Among> { let (nd, deallocator) = match key { Key::Structured(key) => { let kb = KeyBuilder::new(key.encoded_len(), |buf: &mut VacantBuffer<'_>| { - key.encode_to_buffer(buf).map(|_| ()) + key.encode_to_buffer(buf) }); let vb = value_builder.unwrap(); self @@ -170,9 +202,10 @@ where .allocate_entry_node::(version, height, kb, vb)? } Key::Occupied(key) => { + let klen = key.len(); let kb = KeyBuilder::new(key.len(), |buf: &mut VacantBuffer<'_>| { buf.put_slice_unchecked(key); - Ok(()) + Ok(klen) }); let vb = value_builder.unwrap(); self @@ -199,18 +232,21 @@ where version, height, key.encoded_len(), - |buf| key.encode_to_buffer(buf).map(|_| ()), + |buf| key.encode_to_buffer(buf), ) .map_err(Among::from_either_to_left_right)?, - Key::Remove(key) => self - .arena - .allocate_tombstone_node_with_key_builder::(version, height, key.len(), |buf| { - buf - .put_slice(key) - .expect("buffer must be large enough for key"); - Ok(()) - }) - .map_err(Among::from_either_to_left_right)?, + Key::Remove(key) => { + let klen = key.len(); + self + .arena + .allocate_tombstone_node_with_key_builder::(version, height, klen, |buf| { + buf + .put_slice(key) + .expect("buffer must be large enough for key"); + Ok(klen) + }) + .map_err(Among::from_either_to_left_right)? 
+ } Key::RemoveVacant { buf: key, offset } => self .arena .allocate_tombstone_node::(version, height, *offset, key.len()) @@ -240,11 +276,12 @@ where } } -impl SkipList +impl SkipList where K: ?Sized + Type, V: ?Sized + Type, A: Allocator, + R: RefCounter, { /// ## Safety /// @@ -293,18 +330,19 @@ where } } -impl SkipList +impl SkipList where K: ?Sized + Type, V: ?Sized + Type, A: Allocator, + R: RefCounter, { unsafe fn move_to_prev<'a>( &'a self, nd: &mut ::Pointer, version: Version, contains_key: impl Fn(&K::Ref<'a>) -> bool, - ) -> Option> + ) -> Option> where K::Ref<'a>: KeyRef<'a, K>, { @@ -344,7 +382,7 @@ where nd: &mut ::Pointer, version: Version, contains_key: impl Fn(&K::Ref<'a>) -> bool, - ) -> Option> + ) -> Option> where K::Ref<'a>: KeyRef<'a, K>, { @@ -416,7 +454,7 @@ where nd: &mut ::Pointer, version: Version, contains_key: impl Fn(&K::Ref<'a>) -> bool, - ) -> Option> + ) -> Option> where K::Ref<'a>: KeyRef<'a, K>, { @@ -456,7 +494,7 @@ where nd: &mut ::Pointer, version: Version, contains_key: impl Fn(&K::Ref<'a>) -> bool, - ) -> Option> + ) -> Option> where K::Ref<'a>: KeyRef<'a, K>, { @@ -864,12 +902,12 @@ where version: Version, height: u32, key: Key<'a, 'b, K, A>, - value_builder: Option) -> Result<(), E>>>, + value_builder: Option) -> Result>>, success: Ordering, failure: Ordering, mut ins: Inserter<'a, ::Pointer>, upsert: bool, - ) -> Result, Among> + ) -> Result, Among> where K::Ref<'a>: KeyRef<'a, K>, { @@ -1138,14 +1176,14 @@ where unsafe fn upsert_value<'a, 'b: 'a>( &'a self, version: Version, - old: VersionedEntryRef<'a, K, V, A>, + old: VersionedEntryRef<'a, K, V, A, R>, old_node: ::Pointer, key: &Key<'a, 'b, K, A>, value_offset: u32, value_size: u32, success: Ordering, failure: Ordering, - ) -> Result, Error> { + ) -> Result, Error> { match key { Key::Structured(_) | Key::Occupied(_) | Key::Vacant { .. } | Key::Pointer { .. } => { old_node.update_value(&self.arena, value_offset, value_size); @@ -1179,13 +1217,13 @@ where unsafe fn upsert<'a, 'b: 'a, E>( &'a self, version: Version, - old: VersionedEntryRef<'a, K, V, A>, + old: VersionedEntryRef<'a, K, V, A, R>, old_node: ::Pointer, key: &Key<'a, 'b, K, A>, - value_builder: Option) -> Result<(), E>>>, + value_builder: Option) -> Result>>, success: Ordering, failure: Ordering, - ) -> Result, Either> { + ) -> Result, Either> { match key { Key::Structured(_) | Key::Occupied(_) | Key::Vacant { .. } | Key::Pointer { .. } => self .arena @@ -1211,73 +1249,6 @@ where } } -/// A helper struct for caching splice information -pub struct Inserter<'a, P> { - spl: [Splice

<P>; super::MAX_HEIGHT], - height: u32, - _m: core::marker::PhantomData<&'a ()>, -} - -impl<P: NodePointer> Default for Inserter<'_, P> { - #[inline] - fn default() -> Self { - Self { - spl: [ - Splice::default(), - Splice::default(), - Splice::default(), - Splice::default(), - Splice::default(), - Splice::default(), - Splice::default(), - Splice::default(), - Splice::default(), - Splice::default(), - Splice::default(), - Splice::default(), - Splice::default(), - Splice::default(), - Splice::default(), - Splice::default(), - Splice::default(), - Splice::default(), - Splice::default(), - Splice::default(), - Splice::default(), - Splice::default(), - Splice::default(), - Splice::default(), - Splice::default(), - Splice::default(), - Splice::default(), - Splice::default(), - Splice::default(), - Splice::default(), - Splice::default(), - Splice::default(), - ], - height: 0, - _m: core::marker::PhantomData, - } - } -} - -#[derive(Debug, Clone, Copy)] -struct Splice <P>
{ - prev: P, - next: P, -} - -impl<P: NodePointer> Default for Splice <P>
{ - #[inline] - fn default() -> Self { - Self { - prev: P::NULL, - next: P::NULL, - } - } -} - pub(crate) enum Key<'a, 'b: 'a, K: ?Sized, A> { Structured(&'b K), Occupied(&'b [u8]), @@ -1414,12 +1385,3 @@ impl<'a, K: ?Sized, A> Key<'a, '_, K, A> { } } } - -struct FindResult <P>
{ - // both key and version are equal. - found: bool, - // only key is equal. - found_key: Option<Pointer>, - splice: Splice <P>
, - curr: Option <P>
, -} diff --git a/src/base/api.rs b/src/generic/list/api.rs similarity index 77% rename from src/base/api.rs rename to src/generic/list/api.rs index d4b67e8..9b0e428 100644 --- a/src/base/api.rs +++ b/src/generic/list/api.rs @@ -1,10 +1,7 @@ use core::{ mem, ops::{Bound, RangeBounds}, - ptr::NonNull, - sync::atomic::Ordering, }; -use std::boxed::Box; use dbutils::{ buffer::VacantBuffer, @@ -14,21 +11,27 @@ use dbutils::{ use rarena_allocator::Allocator as _; use crate::{ - allocator::{Allocator, Header, Link, Node, NodePointer}, + allocator::{Allocator, Meta, Node, NodePointer}, error::Error, random_height, ty_ref, types::{Height, ValueBuilder}, - Version, + Header, Version, }; -use super::{iterator, EntryRef, SkipList, VersionedEntryRef}; +use super::{iterator, EntryRef, RefCounter, SkipList, VersionedEntryRef}; mod update; type RemoveValueBuilder = - ValueBuilder) -> Result<(), E>>>; + ValueBuilder) -> Result>>; -impl SkipList { +impl SkipList +where + K: ?Sized, + V: ?Sized, + A: Allocator, + R: RefCounter, +{ /// Sets remove on drop, only works on mmap with a file backend. /// /// Default is `false`. @@ -42,15 +45,15 @@ impl SkipList { self.arena.remove_on_drop(val); } - /// Returns the offset of the data section in the `SkipList`. + /// Returns the header of the `SkipList`. /// /// By default, `SkipList` will allocate meta, head node, and tail node in the ARENA, /// and the data section will be allocated after the tail node. /// - /// This method will return the offset of the data section in the ARENA. + /// This method will return the header of the `SkipList`. #[inline] - pub const fn data_offset(&self) -> usize { - self.data_offset as usize + pub const fn header(&self) -> Option<&Header> { + self.header.as_ref() } /// Returns the version number of the [`SkipList`]. @@ -74,30 +77,6 @@ impl SkipList { self.meta().height() } - /// Returns the number of remaining bytes can be allocated by the arena. - #[inline] - pub fn remaining(&self) -> usize { - self.arena.remaining() - } - - /// Returns how many bytes are discarded by the ARENA. - #[inline] - pub fn discarded(&self) -> u32 { - self.arena.discarded() - } - - /// Returns the number of bytes that have allocated from the arena. - #[inline] - pub fn allocated(&self) -> usize { - self.arena.allocated() - } - - /// Returns the capacity of the arena. - #[inline] - pub fn capacity(&self) -> usize { - self.arena.capacity() - } - /// Returns the number of entries in the skipmap. #[inline] pub fn len(&self) -> usize { @@ -113,7 +92,7 @@ impl SkipList { /// Gets the number of pointers to this `SkipList` similar to [`Arc::strong_count`](std::sync::Arc::strong_count). #[inline] pub fn refs(&self) -> usize { - self.arena.refs() + self.meta.refs() } /// Returns the maximum version of all entries in the map. @@ -155,49 +134,6 @@ impl SkipList { + value_size } - /// Clear the skiplist to empty and re-initialize. - /// - /// ## Safety - /// - The current pointers get from the ARENA cannot be used anymore after calling this method. - /// - This method is not thread-safe. 
- pub unsafe fn clear(&mut self) -> Result<(), Error> { - self.arena.clear()?; - - let options = self.arena.options(); - - if self.arena.unify() { - self.meta = self - .arena - .allocate_header(self.meta.as_ref().magic_version())?; - } else { - let magic_version = self.meta.as_ref().magic_version(); - let _ = Box::from_raw(self.meta.as_ptr()); - self.meta = NonNull::new_unchecked(Box::into_raw(Box::new(::new( - magic_version, - )))); - } - - let max_height: u8 = options.max_height().into(); - let head = self.arena.allocate_full_node(max_height)?; - let tail = self.arena.allocate_full_node(max_height)?; - - // Safety: - // We will always allocate enough space for the head node and the tail node. - unsafe { - // Link all head/tail levels together. - for i in 0..(max_height as usize) { - let head_link = head.tower(&self.arena, i); - let tail_link = tail.tower(&self.arena, i); - head_link.store_next_offset(tail.offset(), Ordering::Relaxed); - tail_link.store_prev_offset(head.offset(), Ordering::Relaxed); - } - } - - self.head = head; - self.tail = tail; - Ok(()) - } - /// Flushes outstanding memory map modifications to disk. /// /// When this method returns with a non-error result, @@ -221,11 +157,12 @@ impl SkipList { } } -impl SkipList +impl SkipList where K: ?Sized + Type, V: ?Sized + Type, A: Allocator, + RC: RefCounter, { /// Returns `true` if the key exists in the map. /// @@ -249,7 +186,7 @@ where } /// Returns the first entry in the map. - pub fn first<'a>(&'a self, version: Version) -> Option> + pub fn first<'a>(&'a self, version: Version) -> Option> where K::Ref<'a>: KeyRef<'a, K>, { @@ -257,7 +194,7 @@ where } /// Returns the last entry in the map. - pub fn last<'a>(&'a self, version: Version) -> Option> + pub fn last<'a>(&'a self, version: Version) -> Option> where K::Ref<'a>: KeyRef<'a, K>, { @@ -265,7 +202,10 @@ where } /// Returns the first entry in the map. - pub fn first_versioned<'a>(&'a self, version: Version) -> Option> + pub fn first_versioned<'a>( + &'a self, + version: Version, + ) -> Option> where K::Ref<'a>: KeyRef<'a, K>, { @@ -273,7 +213,10 @@ where } /// Returns the last entry in the map. - pub fn last_versioned<'a>(&'a self, version: Version) -> Option> + pub fn last_versioned<'a>( + &'a self, + version: Version, + ) -> Option> where K::Ref<'a>: KeyRef<'a, K>, { @@ -284,7 +227,7 @@ where /// /// This method will return `None` if the entry is marked as removed. If you want to get the entry even if it is marked as removed, /// you can use [`get_versioned`](SkipList::get_versioned). - pub fn get<'a, Q>(&'a self, version: Version, key: &Q) -> Option> + pub fn get<'a, Q>(&'a self, version: Version, key: &Q) -> Option> where Q: ?Sized + Comparable>, { @@ -336,7 +279,7 @@ where &'a self, version: Version, key: &Q, - ) -> Option> + ) -> Option> where Q: ?Sized + Comparable>, { @@ -383,7 +326,7 @@ where &'a self, version: Version, upper: Bound<&Q>, - ) -> Option> + ) -> Option> where K::Ref<'a>: KeyRef<'a, K>, Q: ?Sized + Comparable>, @@ -397,7 +340,7 @@ where &'a self, version: Version, lower: Bound<&Q>, - ) -> Option> + ) -> Option> where K::Ref<'a>: KeyRef<'a, K>, Q: ?Sized + Comparable>, @@ -407,7 +350,7 @@ where /// Returns a new iterator, this iterator will yield the latest version of all entries in the map less or equal to the given version. 
#[inline] - pub fn iter<'a>(&'a self, version: Version) -> iterator::Iter<'a, K, V, A> + pub fn iter<'a>(&'a self, version: Version) -> iterator::Iter<'a, K, V, A, RC> where K::Ref<'a>: KeyRef<'a, K>, { @@ -416,7 +359,7 @@ where /// Returns a new iterator, this iterator will yield all versions for all entries in the map less or equal to the given version. #[inline] - pub fn iter_all_versions<'a>(&'a self, version: Version) -> iterator::IterAll<'a, K, V, A> + pub fn iter_all_versions<'a>(&'a self, version: Version) -> iterator::IterAll<'a, K, V, A, RC> where K::Ref<'a>: KeyRef<'a, K>, { @@ -425,7 +368,11 @@ where /// Returns a iterator that within the range, this iterator will yield the latest version of all entries in the range less or equal to the given version. #[inline] - pub fn range<'a, Q, R>(&'a self, version: Version, range: R) -> iterator::Iter<'a, K, V, A, Q, R> + pub fn range<'a, Q, R>( + &'a self, + version: Version, + range: R, + ) -> iterator::Iter<'a, K, V, A, RC, Q, R> where K::Ref<'a>: KeyRef<'a, K>, Q: ?Sized + Comparable>, @@ -440,7 +387,7 @@ where &'a self, version: Version, range: R, - ) -> iterator::IterAll<'a, K, V, A, Q, R> + ) -> iterator::IterAll<'a, K, V, A, RC, Q, R> where K::Ref<'a>: KeyRef<'a, K>, Q: ?Sized + Comparable>, diff --git a/src/base/api/update.rs b/src/generic/list/api/update.rs similarity index 94% rename from src/base/api/update.rs rename to src/generic/list/api/update.rs index 68e3eba..de57a3f 100644 --- a/src/base/api/update.rs +++ b/src/generic/list/api/update.rs @@ -10,15 +10,16 @@ use either::Either; use crate::KeyBuilder; use super::{ - super::{Inserter, Key}, + super::{Inserter, Key, RefCounter}, Allocator, EntryRef, Error, Height, RemoveValueBuilder, SkipList, ValueBuilder, Version, }; -impl SkipList +impl SkipList where K: ?Sized + Type + 'static, V: ?Sized + Type + 'static, A: Allocator, + R: RefCounter, { /// Upserts a new key-value pair if it does not yet exist, if the key with the given version already exists, it will update the value. /// Unlike [`get_or_insert`](SkipList::get_or_insert), this method will update the value if the key with the given version already exists. 
@@ -31,7 +32,7 @@ where version: Version, key: impl Into>, value: impl Into>, - ) -> Result>, Among> + ) -> Result>, Among> where K::Ref<'a>: KeyRef<'a, K>, { @@ -49,7 +50,7 @@ where height: Height, key: impl Into>, value: impl Into>, - ) -> Result>, Among> + ) -> Result>, Among> where K::Ref<'a>: KeyRef<'a, K>, { @@ -60,7 +61,7 @@ where .validate(height, key.encoded_len(), value.encoded_len()) .map_err(Among::Right)?; - let copy = |buf: &mut VacantBuffer<'_>| value.encode_to_buffer(buf).map(|_| ()); + let copy = |buf: &mut VacantBuffer<'_>| value.encode_to_buffer(buf); let val_len = value.encoded_len(); self @@ -102,8 +103,8 @@ where version: Version, height: Height, key: impl Into>, - value_builder: ValueBuilder) -> Result<(), E>>, - ) -> Result>, Among> + value_builder: ValueBuilder) -> Result>, + ) -> Result>, Among> where K::Ref<'a>: KeyRef<'a, K>, { @@ -146,7 +147,7 @@ where height: Height, key: impl Into>, value: impl Into>, - ) -> Result>, Among> + ) -> Result>, Among> where K::Ref<'a>: KeyRef<'a, K>, { @@ -156,7 +157,7 @@ where .validate(height, key.encoded_len(), value.encoded_len()) .map_err(Among::Right)?; - let copy = |buf: &mut VacantBuffer<'_>| value.encode_to_buffer(buf).map(|_| ()); + let copy = |buf: &mut VacantBuffer<'_>| value.encode_to_buffer(buf); let val_len = value.encoded_len(); self @@ -199,8 +200,8 @@ where version: Version, height: Height, key: impl Into>, - value_builder: ValueBuilder) -> Result<(), E>>, - ) -> Result>, Among> + value_builder: ValueBuilder) -> Result>, + ) -> Result>, Among> where K::Ref<'a>: KeyRef<'a, K>, { @@ -246,9 +247,9 @@ where &'a self, version: Version, height: Height, - key_builder: KeyBuilder) -> Result<(), KE>>, - value_builder: ValueBuilder) -> Result<(), VE>>, - ) -> Result>, Among> + key_builder: KeyBuilder) -> Result>, + value_builder: ValueBuilder) -> Result>, + ) -> Result>, Among> where K::Ref<'a>: KeyRef<'a, K>, { @@ -302,9 +303,9 @@ where &'a self, version: Version, height: Height, - key_builder: KeyBuilder) -> Result<(), KE>>, - value_builder: ValueBuilder) -> Result<(), VE>>, - ) -> Result>, Among> + key_builder: KeyBuilder) -> Result>, + value_builder: ValueBuilder) -> Result>, + ) -> Result>, Among> where K::Ref<'a>: KeyRef<'a, K>, { @@ -359,10 +360,9 @@ where version: Version, height: Height, key: impl Into>, - success: Ordering, failure: Ordering, - ) -> Result>, Either> + ) -> Result>, Either> where K::Ref<'a>: KeyRef<'a, K>, { @@ -415,7 +415,7 @@ where version: Version, height: Height, key: impl Into>, - ) -> Result>, Either> + ) -> Result>, Either> where K::Ref<'a>: KeyRef<'a, K>, { @@ -466,8 +466,8 @@ where &'a self, version: Version, height: Height, - key_builder: KeyBuilder) -> Result<(), E>>, - ) -> Result>, Either> + key_builder: KeyBuilder) -> Result>, + ) -> Result>, Either> where K::Ref<'a>: KeyRef<'a, K>, { diff --git a/src/base/entry.rs b/src/generic/list/entry.rs similarity index 86% rename from src/base/entry.rs rename to src/generic/list/entry.rs index 3c7b347..87b9742 100644 --- a/src/base/entry.rs +++ b/src/generic/list/entry.rs @@ -1,8 +1,8 @@ use dbutils::types::{KeyRef, LazyRef, Type}; +use super::{RefCounter, SkipList}; use crate::{ allocator::{Allocator, Node, NodePointer, WithVersion}, - base::SkipList, types::internal::ValuePointer, Version, }; @@ -10,13 +10,14 @@ use crate::{ /// A versioned entry reference of the skipmap. /// /// Compared to the [`EntryRef`], this one's value can be `None` which means the entry is removed. 
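The distinction matters most with the multiple-version maps: a removed key still has a versioned entry, it just carries no value. A sketch against the dynamic multi-version API, assuming (as this patch's stripped signatures suggest) that those methods take the query `Version` as their first argument and that the elided turbofish is `alloc::<SkipMap>()`:

```rust
use skl::dynamic::{multiple_version::{sync::SkipMap, Map}, Builder};

let map = Builder::new().with_capacity(1024).alloc::<SkipMap>().unwrap();

map.insert(0, b"k".as_slice(), b"v".as_slice()).unwrap();
map.get_or_remove(1, b"k".as_slice()).unwrap();

// `get` hides tombstones...
assert!(map.get(1, b"k").is_none());
// ...while `get_versioned` still surfaces the removed entry, with no value.
let ent = map.get_versioned(1, b"k").unwrap();
assert!(ent.value().is_none());
```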
-pub struct VersionedEntryRef<'a, K, V, A> +pub struct VersionedEntryRef<'a, K, V, A, R> where K: ?Sized + Type, V: ?Sized + Type, A: Allocator, + R: RefCounter, { - pub(super) list: &'a SkipList, + pub(super) list: &'a SkipList, pub(super) key: LazyRef<'a, K>, pub(super) value: Option>, pub(super) value_part_pointer: ValuePointer, @@ -25,11 +26,12 @@ where pub(super) ptr: ::Pointer, } -impl core::fmt::Debug for VersionedEntryRef<'_, K, V, A> +impl core::fmt::Debug for VersionedEntryRef<'_, K, V, A, R> where K: ?Sized + Type, V: ?Sized + Type, A: Allocator, + R: RefCounter, { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { f.debug_struct("VersionedEntryRef") @@ -40,13 +42,14 @@ where } } -impl<'a, K, V, A: Allocator> Clone for VersionedEntryRef<'a, K, V, A> +impl<'a, K, V, A, R> Clone for VersionedEntryRef<'a, K, V, A, R> where K: ?Sized + Type, K::Ref<'a>: Clone, V: ?Sized + Type, V::Ref<'a>: Clone, A: Allocator, + R: RefCounter, { fn clone(&self) -> Self { Self { @@ -61,11 +64,12 @@ where } } -impl<'a, K, V, A: Allocator> VersionedEntryRef<'a, K, V, A> +impl<'a, K, V, A, R> VersionedEntryRef<'a, K, V, A, R> where K: ?Sized + Type, V: ?Sized + Type, A: Allocator, + R: RefCounter, { /// Returns the reference to the key #[inline] @@ -101,12 +105,13 @@ where } } -impl<'a, K, V, A: Allocator> VersionedEntryRef<'a, K, V, A> +impl<'a, K, V, A, R> VersionedEntryRef<'a, K, V, A, R> where K: ?Sized + Type, K::Ref<'a>: KeyRef<'a, K>, V: ?Sized + Type, A: Allocator, + R: RefCounter, { /// Returns the next entry in the map. #[inline] @@ -159,12 +164,13 @@ where } } -impl VersionedEntryRef<'_, K, V, A> +impl VersionedEntryRef<'_, K, V, A, R> where K: ?Sized + Type, V: ?Sized + Type, A: Allocator, A::Node: WithVersion, + R: RefCounter, { /// Returns the version of the entry #[inline] @@ -173,20 +179,21 @@ where } } -impl<'a, K, V, A> VersionedEntryRef<'a, K, V, A> +impl<'a, K, V, A, R> VersionedEntryRef<'a, K, V, A, R> where K: ?Sized + Type, V: ?Sized + Type, A: Allocator, + R: RefCounter, { #[inline] pub(crate) fn from_node( query_version: Version, node: ::Pointer, - list: &'a SkipList, + list: &'a SkipList, raw_key: Option<&'a [u8]>, key: Option>, - ) -> VersionedEntryRef<'a, K, V, A> { + ) -> Self { unsafe { let (raw_value, vp) = node.get_value_with_pointer(&list.arena); @@ -204,7 +211,7 @@ where }), }; - VersionedEntryRef { + Self { list, key, value: raw_value.map(|raw_value| LazyRef::from_raw(raw_value)), @@ -220,11 +227,11 @@ where pub(crate) fn from_node_with_pointer( query_version: Version, node: ::Pointer, - list: &'a SkipList, + list: &'a SkipList, pointer: ValuePointer, raw_key: Option<&'a [u8]>, key: Option>, - ) -> VersionedEntryRef<'a, K, V, A> { + ) -> Self { unsafe { let raw_value = node.get_value_by_value_offset(&list.arena, pointer.value_offset, pointer.value_len); @@ -243,7 +250,7 @@ where }), }; - VersionedEntryRef { + Self { list, key, value: raw_value.map(|raw_value| LazyRef::from_raw(raw_value)), @@ -259,17 +266,19 @@ where /// An entry reference to the skipmap's entry. /// /// Compared to the [`VersionedEntryRef`], this one's value cannot be `None`. 
-pub struct EntryRef<'a, K, V, A>(pub(crate) VersionedEntryRef<'a, K, V, A>) +pub struct EntryRef<'a, K, V, A, R>(pub(crate) VersionedEntryRef<'a, K, V, A, R>) where K: Type + ?Sized, V: Type + ?Sized, - A: Allocator; + A: Allocator, + R: RefCounter; -impl core::fmt::Debug for EntryRef<'_, K, V, A> +impl core::fmt::Debug for EntryRef<'_, K, V, A, R> where K: ?Sized + Type, V: ?Sized + Type, A: Allocator, + R: RefCounter, { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { f.debug_struct("EntryRef") @@ -279,25 +288,27 @@ where } } -impl<'a, K, V, A: Allocator> Clone for EntryRef<'a, K, V, A> +impl<'a, K, V, A, R> Clone for EntryRef<'a, K, V, A, R> where K: ?Sized + Type, K::Ref<'a>: Clone, V: ?Sized + Type, V::Ref<'a>: Clone, A: Allocator, + R: RefCounter, { fn clone(&self) -> Self { Self(self.0.clone()) } } -impl<'a, K, V, A: Allocator> EntryRef<'a, K, V, A> +impl<'a, K, V, A, R> EntryRef<'a, K, V, A, R> where K: ?Sized + Type, K::Ref<'a>: KeyRef<'a, K>, V: ?Sized + Type, A: Allocator, + R: RefCounter, { /// Returns the next entry in the map. #[inline] @@ -312,12 +323,13 @@ where } } -impl EntryRef<'_, K, V, A> +impl EntryRef<'_, K, V, A, R> where K: ?Sized + Type, V: ?Sized + Type, A: Allocator, A::Node: WithVersion, + R: RefCounter, { /// Returns the version of the entry #[inline] @@ -326,11 +338,12 @@ where } } -impl<'a, K, V, A: Allocator> EntryRef<'a, K, V, A> +impl<'a, K, V, A, R> EntryRef<'a, K, V, A, R> where K: ?Sized + Type, V: ?Sized + Type, A: Allocator, + R: RefCounter, { /// Returns the reference to the key #[inline] diff --git a/src/generic/list/iterator.rs b/src/generic/list/iterator.rs new file mode 100644 index 0000000..da39ae8 --- /dev/null +++ b/src/generic/list/iterator.rs @@ -0,0 +1,5 @@ +mod all_versions; +pub use all_versions::*; + +mod iter; +pub use iter::*; diff --git a/src/base/iterator/all_versions.rs b/src/generic/list/iterator/all_versions.rs similarity index 90% rename from src/base/iterator/all_versions.rs rename to src/generic/list/iterator/all_versions.rs index 81bf247..88f1135 100644 --- a/src/base/iterator/all_versions.rs +++ b/src/generic/list/iterator/all_versions.rs @@ -7,33 +7,35 @@ use dbutils::{ use crate::allocator::Node; -use super::super::{Allocator, NodePointer, SkipList, Version, VersionedEntryRef}; +use super::super::{Allocator, NodePointer, RefCounter, SkipList, Version, VersionedEntryRef}; /// An iterator over the skipmap (this iterator will yields all versions). The current state of the iterator can be cloned by /// simply value copying the struct. 
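To make the `EntryRef` vs `VersionedEntryRef` distinction above concrete: a removal writes a tombstone that `get` hides but `get_versioned` still surfaces. A hedged sketch mirroring the doc examples elsewhere in this patch (constructor and method names follow those examples):

```rust
use skl::generic::{multiple_version::{sync::SkipMap, Map}, Builder};

fn main() {
  let map = Builder::new()
    .with_capacity(1024)
    .alloc::<SkipMap<str, str>>()
    .unwrap();

  map.insert(0, "hello", "world").unwrap();
  map.get_or_remove(1, "hello").unwrap(); // tombstone at version 1

  // `get` yields `EntryRef`, whose value is never `None`:
  assert!(map.get(1, "hello").is_none());

  // `get_versioned` yields `VersionedEntryRef`, which exposes the tombstone:
  let tomb = map.get_versioned(1, "hello").unwrap();
  assert!(tomb.value().is_none());
}
```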
-pub struct IterAll<'a, K, V, A, Q = ::Ref<'a>, R = core::ops::RangeFull> +pub struct IterAll<'a, K, V, A, RC, Q = ::Ref<'a>, R = core::ops::RangeFull> where A: Allocator, K: ?Sized + Type, V: ?Sized + Type, Q: ?Sized, + RC: RefCounter, { - pub(super) map: &'a SkipList, + pub(super) map: &'a SkipList, pub(super) version: Version, pub(super) range: Option, pub(super) all_versions: bool, - pub(super) head: Option>, - pub(super) tail: Option>, + pub(super) head: Option>, + pub(super) tail: Option>, pub(super) _phantom: core::marker::PhantomData, } -impl<'a, K, V, A, Q, R: Clone> Clone for IterAll<'a, K, V, A, Q, R> +impl<'a, K, V, A, RC, Q, R: Clone> Clone for IterAll<'a, K, V, A, RC, Q, R> where K: ?Sized + Type, K::Ref<'a>: Clone, V: ?Sized + Type, A: Allocator, Q: ?Sized, + RC: RefCounter, { fn clone(&self) -> Self { Self { @@ -48,28 +50,30 @@ where } } -impl<'a, K, V, A, Q, R: Copy> Copy for IterAll<'a, K, V, A, Q, R> +impl<'a, K, V, A, RC, Q, R: Copy> Copy for IterAll<'a, K, V, A, RC, Q, R> where K: ?Sized + Type, K::Ref<'a>: Copy, V: ?Sized + Type, A: Allocator, - VersionedEntryRef<'a, K, V, A>: Copy, + RC: RefCounter, + VersionedEntryRef<'a, K, V, A, RC>: Copy, Q: ?Sized, { } -impl<'a, K, V, A> IterAll<'a, K, V, A> +impl<'a, K, V, A, RC> IterAll<'a, K, V, A, RC> where K: ?Sized + Type, K::Ref<'a>: KeyRef<'a, K>, V: ?Sized + Type, A: Allocator, + RC: RefCounter, { #[inline] pub(crate) const fn new( version: Version, - map: &'a SkipList, + map: &'a SkipList, all_versions: bool, ) -> Self { Self { @@ -84,18 +88,19 @@ where } } -impl<'a, K, V, A, Q, R> IterAll<'a, K, V, A, Q, R> +impl<'a, K, V, A, RC, Q, R> IterAll<'a, K, V, A, RC, Q, R> where K: ?Sized + Type, K::Ref<'a>: KeyRef<'a, K>, V: ?Sized + Type, A: Allocator, + RC: RefCounter, Q: ?Sized, { #[inline] pub(crate) fn range( version: Version, - map: &'a SkipList, + map: &'a SkipList, r: R, all_versions: bool, ) -> Self { @@ -111,12 +116,13 @@ where } } -impl<'a, K, V, A, Q, R> IterAll<'a, K, V, A, Q, R> +impl<'a, K, V, A, RC, Q, R> IterAll<'a, K, V, A, RC, Q, R> where K: ?Sized + Type, K::Ref<'a>: KeyRef<'a, K>, V: ?Sized + Type, A: Allocator, + RC: RefCounter, R: RangeBounds, Q: ?Sized, { @@ -142,29 +148,30 @@ where /// Returns the entry at the current head position of the iterator. #[inline] - pub const fn head(&self) -> Option<&VersionedEntryRef<'a, K, V, A>> { + pub const fn head(&self) -> Option<&VersionedEntryRef<'a, K, V, A, RC>> { self.head.as_ref() } /// Returns the entry at the current tail position of the iterator. #[inline] - pub const fn tail(&self) -> Option<&VersionedEntryRef<'a, K, V, A>> { + pub const fn tail(&self) -> Option<&VersionedEntryRef<'a, K, V, A, RC>> { self.tail.as_ref() } } -impl<'a, K, V, A, Q, R> IterAll<'a, K, V, A, Q, R> +impl<'a, K, V, A, RC, Q, R> IterAll<'a, K, V, A, RC, Q, R> where K: ?Sized + Type, K::Ref<'a>: KeyRef<'a, K>, V: ?Sized + Type, A: Allocator, + RC: RefCounter, Q: ?Sized + Comparable>, R: RangeBounds, { /// Advances to the next position. Returns the key and value if the /// iterator is pointing at a valid entry, and `None` otherwise. - fn next_in(&mut self) -> Option> { + fn next_in(&mut self) -> Option> { unsafe { let mut next_head = match self.head.as_ref() { Some(head) => self.map.get_next(head.ptr, 0), @@ -212,7 +219,7 @@ where /// Advances to the prev position. Returns the key and value if the /// iterator is pointing at a valid entry, and `None` otherwise. 
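As a usage note for the `all_versions` flag handled above: `iter` yields only the newest version visible at the query version, while `iter_all_versions` walks every version at or below it. A hedged sketch (map construction as in the doc examples later in this patch):

```rust
use skl::generic::{multiple_version::{sync::SkipMap, Map}, Builder};

fn main() {
  let map = Builder::new()
    .with_capacity(1024)
    .alloc::<SkipMap<str, str>>()
    .unwrap();

  map.insert(0, "k", "v0").unwrap();
  map.insert(1, "k", "v1").unwrap();

  // Latest visible version per key only:
  assert_eq!(map.iter(1).count(), 1);

  // Every version at or below the query version:
  assert_eq!(map.iter_all_versions(1).count(), 2);
}
```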
- fn prev(&mut self) -> Option> { + fn prev(&mut self) -> Option> { unsafe { let mut next_tail = match self.tail.as_ref() { Some(tail) => self.map.get_prev(tail.ptr, 0), @@ -259,7 +266,7 @@ where } } - fn range_next_in(&mut self) -> Option> { + fn range_next_in(&mut self) -> Option> { unsafe { let mut next_head = match self.head.as_ref() { Some(head) => self.map.get_next(head.ptr, 0), @@ -317,7 +324,7 @@ where } } - fn range_prev(&mut self) -> Option> { + fn range_prev(&mut self) -> Option> { unsafe { let mut next_tail = match self.tail.as_ref() { Some(tail) => self.map.get_prev(tail.ptr, 0), @@ -376,12 +383,13 @@ where } } -impl<'a, K, V, A, Q, R> IterAll<'a, K, V, A, Q, R> +impl<'a, K, V, A, RC, Q, R> IterAll<'a, K, V, A, RC, Q, R> where K: ?Sized + Type, K::Ref<'a>: KeyRef<'a, K>, V: ?Sized + Type, A: Allocator, + RC: RefCounter, Q: ?Sized + Comparable>, R: RangeBounds, { @@ -392,7 +400,7 @@ where pub fn seek_upper_bound( &mut self, upper: Bound<&QR>, - ) -> Option> + ) -> Option> where QR: ?Sized + Comparable>, { @@ -417,7 +425,7 @@ where pub fn seek_lower_bound( &mut self, lower: Bound<&QR>, - ) -> Option> + ) -> Option> where QR: ?Sized + Comparable>, { @@ -438,7 +446,7 @@ where /// Moves the iterator to the first entry whose key is greater than or /// equal to the given key. Returns the key and value if the iterator is /// pointing at a valid entry, and `None` otherwise. - fn seek_ge(&self, key: &QR) -> Option> + fn seek_ge(&self, key: &QR) -> Option> where QR: ?Sized + Comparable>, { @@ -475,7 +483,7 @@ where /// Moves the iterator to the first entry whose key is greater than /// the given key. Returns the key and value if the iterator is /// pointing at a valid entry, and `None` otherwise. - fn seek_gt(&self, key: &QR) -> Option> + fn seek_gt(&self, key: &QR) -> Option> where QR: ?Sized + Comparable>, { @@ -512,7 +520,7 @@ where /// Moves the iterator to the first entry whose key is less than or /// equal to the given key. Returns the key and value if the iterator is /// pointing at a valid entry, and `None` otherwise. - fn seek_le(&self, key: &QR) -> Option> + fn seek_le(&self, key: &QR) -> Option> where QR: ?Sized + Comparable>, { @@ -549,7 +557,7 @@ where /// Moves the iterator to the last entry whose key is less than the given /// key. Returns the key and value if the iterator is pointing at a valid entry, /// and `None` otherwise. 
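The seek methods above reposition the iterator without consuming it: `seek_lower_bound` dispatches to `seek_ge`/`seek_gt` and `seek_upper_bound` to `seek_le`/`seek_lt`, depending on the bound. A hedged usage sketch over the public seek surface:

```rust
use core::ops::Bound;
use skl::generic::{multiple_version::{sync::SkipMap, Map}, Builder};

fn main() {
  let map = Builder::new()
    .with_capacity(1024)
    .alloc::<SkipMap<str, str>>()
    .unwrap();

  for k in ["a", "b", "d"] {
    map.insert(0, k, "v").unwrap();
  }

  let mut it = map.iter_all_versions(0);

  // First entry with key >= "b" (seek_ge under the hood):
  let ent = it.seek_lower_bound(Bound::Included("b")).unwrap();
  assert_eq!(ent.key().to_string(), "b");

  // Last entry with key < "d" (seek_lt under the hood):
  let ent = it.seek_upper_bound(Bound::Excluded("d")).unwrap();
  assert_eq!(ent.key().to_string(), "b");
}
```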
- fn seek_lt(&self, key: &QR) -> Option> + fn seek_lt(&self, key: &QR) -> Option> where QR: ?Sized + Comparable>, { @@ -578,14 +586,14 @@ where } #[inline] - fn first(&mut self) -> Option> { + fn first(&mut self) -> Option> { self.head = None; self.tail = None; self.next() } #[inline] - fn last(&mut self) -> Option> { + fn last(&mut self) -> Option> { self.tail = None; self.head = None; self.prev() @@ -601,16 +609,17 @@ where } } -impl<'a, K, V, A, Q, R> Iterator for IterAll<'a, K, V, A, Q, R> +impl<'a, K, V, A, RC, Q, R> Iterator for IterAll<'a, K, V, A, RC, Q, R> where K: ?Sized + Type, K::Ref<'a>: KeyRef<'a, K>, V: ?Sized + Type, A: Allocator, + RC: RefCounter, Q: ?Sized + Comparable>, R: RangeBounds, { - type Item = VersionedEntryRef<'a, K, V, A>; + type Item = VersionedEntryRef<'a, K, V, A, RC>; #[inline] fn next(&mut self) -> Option { @@ -648,12 +657,13 @@ where } } -impl<'a, K, V, A, Q, R> DoubleEndedIterator for IterAll<'a, K, V, A, Q, R> +impl<'a, K, V, A, RC, Q, R> DoubleEndedIterator for IterAll<'a, K, V, A, RC, Q, R> where K: ?Sized + Type, K::Ref<'a>: KeyRef<'a, K>, V: ?Sized + Type, A: Allocator, + RC: RefCounter, Q: ?Sized + Comparable>, R: RangeBounds, { diff --git a/src/base/iterator/iter.rs b/src/generic/list/iterator/iter.rs similarity index 70% rename from src/base/iterator/iter.rs rename to src/generic/list/iterator/iter.rs index c759cc2..3575a6f 100644 --- a/src/base/iterator/iter.rs +++ b/src/generic/list/iterator/iter.rs @@ -6,24 +6,26 @@ use dbutils::{ }; use super::{ - super::{Allocator, EntryRef, SkipList, Version}, + super::{Allocator, EntryRef, RefCounter, SkipList, Version}, IterAll, }; /// An iterator over the skipmap. The current state of the iterator can be cloned by /// simply value copying the struct. -pub struct Iter<'a, K, V, A: Allocator, Q = ::Ref<'a>, R = core::ops::RangeFull>( - IterAll<'a, K, V, A, Q, R>, +pub struct Iter<'a, K, V, A, RC, Q = ::Ref<'a>, R = core::ops::RangeFull>( + IterAll<'a, K, V, A, RC, Q, R>, ) where A: Allocator, + RC: RefCounter, K: ?Sized + Type, V: ?Sized + Type, Q: ?Sized; -impl Clone for Iter<'_, K, V, A, Q, R> +impl Clone for Iter<'_, K, V, A, RC, Q, R> where A: Allocator, + RC: RefCounter, K: ?Sized + Type, V: ?Sized + Type, Q: ?Sized, @@ -33,39 +35,42 @@ where } } -impl<'a, K, V, A> Iter<'a, K, V, A> +impl<'a, K, V, A, RC> Iter<'a, K, V, A, RC> where K: ?Sized + Type, K::Ref<'a>: KeyRef<'a, K>, V: ?Sized + Type, A: Allocator, + RC: RefCounter, { #[inline] - pub(crate) const fn new(version: Version, map: &'a SkipList) -> Self { + pub(crate) const fn new(version: Version, map: &'a SkipList) -> Self { Self(IterAll::new(version, map, false)) } } -impl<'a, K, V, A, Q, R> Iter<'a, K, V, A, Q, R> +impl<'a, K, V, A, RC, Q, R> Iter<'a, K, V, A, RC, Q, R> where K: ?Sized + Type, K::Ref<'a>: KeyRef<'a, K>, V: ?Sized + Type, A: Allocator, + RC: RefCounter, Q: ?Sized, { #[inline] - pub(crate) fn range(version: Version, map: &'a SkipList, r: R) -> Self { + pub(crate) fn range(version: Version, map: &'a SkipList, r: R) -> Self { Self(IterAll::range(version, map, r, false)) } } -impl<'a, K, V, A, Q, R> Iter<'a, K, V, A, Q, R> +impl<'a, K, V, A, RC, Q, R> Iter<'a, K, V, A, RC, Q, R> where K: ?Sized + Type, K::Ref<'a>: KeyRef<'a, K>, V: ?Sized + Type, A: Allocator, + RC: RefCounter, Q: ?Sized, R: RangeBounds, { @@ -82,34 +87,36 @@ where } } -impl<'a, K, V, A, Q, R> Iter<'a, K, V, A, Q, R> +impl<'a, K, V, A, RC, Q, R> Iter<'a, K, V, A, RC, Q, R> where K: ?Sized + Type, K::Ref<'a>: KeyRef<'a, K>, V: ?Sized + Type, A: Allocator, + RC: 
RefCounter, Q: ?Sized, R: RangeBounds, { /// Returns the entry at the current head position of the iterator. #[inline] - pub fn head(&self) -> Option> { - self.0.head().map(|e| EntryRef::(e.clone())) + pub fn head(&self) -> Option> { + self.0.head().map(|e| EntryRef::(e.clone())) } /// Returns the entry at the current tail position of the iterator. #[inline] - pub fn tail(&self) -> Option> { - self.0.tail().map(|e| EntryRef::(e.clone())) + pub fn tail(&self) -> Option> { + self.0.tail().map(|e| EntryRef::(e.clone())) } } -impl<'a, K, V, A, Q, R> Iter<'a, K, V, A, Q, R> +impl<'a, K, V, A, RC, Q, R> Iter<'a, K, V, A, RC, Q, R> where K: ?Sized + Type, K::Ref<'a>: KeyRef<'a, K>, V: ?Sized + Type, A: Allocator, + RC: RefCounter, Q: ?Sized + Comparable>, R: RangeBounds, { @@ -117,7 +124,7 @@ where /// If no such element is found then `None` is returned. /// /// **Note**: This method will clear the current state of the iterator. - pub fn seek_upper_bound(&mut self, upper: Bound<&QR>) -> Option> + pub fn seek_upper_bound(&mut self, upper: Bound<&QR>) -> Option> where QR: ?Sized + Comparable>, { @@ -128,7 +135,10 @@ where /// If no such element is found then `None` is returned. /// /// **Note**: This method will clear the current state of the iterator. - pub(crate) fn seek_lower_bound(&mut self, lower: Bound<&QR>) -> Option> + pub(crate) fn seek_lower_bound( + &mut self, + lower: Bound<&QR>, + ) -> Option> where QR: ?Sized + Comparable>, { @@ -136,16 +146,17 @@ where } } -impl<'a, K, V, A, Q, R> Iterator for Iter<'a, K, V, A, Q, R> +impl<'a, K, V, A, RC, Q, R> Iterator for Iter<'a, K, V, A, RC, Q, R> where K: ?Sized + Type, K::Ref<'a>: KeyRef<'a, K>, V: ?Sized + Type, A: Allocator, + RC: RefCounter, Q: ?Sized + Comparable>, R: RangeBounds, { - type Item = EntryRef<'a, K, V, A>; + type Item = EntryRef<'a, K, V, A, RC>; #[inline] fn next(&mut self) -> Option { @@ -161,12 +172,13 @@ where } } -impl<'a, K, V, A, Q, R> DoubleEndedIterator for Iter<'a, K, V, A, Q, R> +impl<'a, K, V, A, RC, Q, R> DoubleEndedIterator for Iter<'a, K, V, A, RC, Q, R> where K: ?Sized + Type, K::Ref<'a>: KeyRef<'a, K>, V: ?Sized + Type, A: Allocator, + RC: RefCounter, Q: ?Sized + Comparable>, R: RangeBounds, { diff --git a/src/traits/multiple_version.rs b/src/generic/multiple_version.rs similarity index 67% rename from src/traits/multiple_version.rs rename to src/generic/multiple_version.rs index 0921491..7972e19 100644 --- a/src/traits/multiple_version.rs +++ b/src/generic/multiple_version.rs @@ -1,27 +1,219 @@ -use core::ops::{Bound, RangeBounds}; +use core::{ + ops::{Bound, RangeBounds}, + sync::atomic::Ordering, +}; +use among::Among; use dbutils::{ + buffer::VacantBuffer, equivalent::Comparable, types::{KeyRef, MaybeStructured, Type}, }; +use either::Either; -use super::{AllocatorSealed, Arena, EntryRef, Iter, VersionedEntryRef}; -use crate::{allocator::WithVersion, iter::IterAll, Version}; +use crate::{ + allocator::{Allocator, Sealed, WithVersion}, + error::Error, + ref_counter::RefCounter, + Arena, Header, Height, KeyBuilder, ValueBuilder, Version, +}; -use super::*; +use super::list::{ + iterator::{Iter, IterAll}, + EntryRef, VersionedEntryRef, +}; -/// [`Map`] implementation for concurrent environment. -pub mod sync { - pub use crate::sync::multiple_version::{ - Entry, Iter, IterAll, Range, RangeAll, SkipMap, VersionedEntry, - }; +/// Implementations for single-threaded environments. 
+pub mod unsync { + pub use crate::unsync::{multiple_version::Allocator, RefCounter}; + + #[cfg(any(all(test, not(miri)), all_skl_tests, test_generic_unsync_versioned,))] + mod tests { + crate::__generic_multiple_version_map_tests!("unsync_multiple_version_map": super::SkipMap<[u8], [u8]>); + } + + type SkipList = super::super::list::SkipList; + + /// Iterator over the [`SkipMap`]. + pub type Iter<'a, K, V> = super::super::iter::Iter<'a, K, V, Allocator, RefCounter>; + + /// Iterator over a subset of the [`SkipMap`]. + pub type Range<'a, K, V, Q, R> = super::super::iter::Iter<'a, K, V, Allocator, RefCounter, Q, R>; + + /// The entry reference of the [`SkipMap`]. + pub type Entry<'a, K, V> = super::super::entry::EntryRef<'a, K, V, Allocator, RefCounter>; + + /// The versioned entry reference of the [`SkipMap`]. + pub type VersionedEntry<'a, K, V> = + super::super::entry::VersionedEntryRef<'a, K, V, Allocator, RefCounter>; + + /// Iterator over the [`SkipMap`]. + pub type IterAll<'a, K, V> = super::super::iter::IterAll<'a, K, V, Allocator, RefCounter>; + + /// Iterator over a subset of the [`SkipMap`]. + pub type RangeAll<'a, K, V, Q, R> = + super::super::iter::IterAll<'a, K, V, Allocator, RefCounter, Q, R>; + + /// A fast, ARENA based `SkipMap` that supports multiple versions, forward and backward iteration. + /// + /// If you want to use in concurrent environment, you can use [`multiple_version::sync::SkipMap`](crate::generic::multiple_version::sync::SkipMap). + #[repr(transparent)] + pub struct SkipMap(SkipList); + + impl Clone for SkipMap { + #[inline] + fn clone(&self) -> Self { + Self(self.0.clone()) + } + } + + impl From> for SkipMap { + #[inline] + fn from(list: SkipList) -> Self { + Self(list) + } + } + + impl crate::traits::List for SkipMap { + type Constructable = SkipList; + + #[inline] + fn as_ref(&self) -> &Self::Constructable { + &self.0 + } + + #[inline] + fn as_mut(&mut self) -> &mut Self::Constructable { + &mut self.0 + } + + #[inline] + fn meta( + &self, + ) -> &<::Allocator as super::Sealed>::Meta + { + self.0.meta() + } + } + + impl super::Map for SkipMap + where + K: ?Sized + 'static, + V: ?Sized + 'static, + { + type Allocator = Allocator; + type RefCounter = RefCounter; + } } -/// [`Map`] implementation for non-concurrent environment. -pub mod unsync { - pub use crate::unsync::multiple_version::{ - Entry, Iter, IterAll, Range, RangeAll, SkipMap, VersionedEntry, - }; +/// Implementations for concurrent environments. 
+pub mod sync { + pub use crate::sync::{multiple_version::Allocator, RefCounter}; + + #[cfg(any(all(test, not(miri)), all_skl_tests, test_generic_sync_versioned,))] + mod tests { + crate::__generic_multiple_version_map_tests!("sync_multiple_version_map": super::SkipMap<[u8], [u8]>); + } + + #[cfg(any( + all(test, not(miri)), + all_skl_tests, + test_generic_sync_multiple_version_concurrent, + ))] + mod concurrent_tests { + crate::__generic_multiple_version_map_tests!(go "sync_multiple_version_map": super::SkipMap<[u8], [u8]> => crate::tests::generic::TEST_OPTIONS); + } + + #[cfg(any( + all(test, not(miri)), + all_skl_tests, + test_generic_sync_multiple_version_concurrent_with_optimistic_freelist, + ))] + mod concurrent_tests_with_optimistic_freelist { + crate::__generic_multiple_version_map_tests!(go "sync_multiple_version_map": super::SkipMap<[u8], [u8]> => crate::tests::generic::TEST_OPTIONS_WITH_OPTIMISTIC_FREELIST); + } + + #[cfg(any( + all(test, not(miri)), + all_skl_tests, + test_generic_sync_multiple_version_concurrent_with_pessimistic_freelist, + ))] + mod concurrent_tests_with_pessimistic_freelist { + crate::__generic_multiple_version_map_tests!(go "sync_multiple_version_map": super::SkipMap<[u8], [u8]> => crate::tests::generic::TEST_OPTIONS_WITH_PESSIMISTIC_FREELIST); + } + + type SkipList = super::super::list::SkipList; + + /// Iterator over the [`SkipMap`]. + pub type Iter<'a, K, V> = super::super::iter::Iter<'a, K, V, Allocator, RefCounter>; + + /// Iterator over a subset of the [`SkipMap`]. + pub type Range<'a, K, V, Q, R> = super::super::iter::Iter<'a, K, V, Allocator, RefCounter, Q, R>; + + /// Iterator over the [`SkipMap`]. + pub type IterAll<'a, K, V> = super::super::iter::IterAll<'a, K, V, Allocator, RefCounter>; + + /// Iterator over a subset of the [`SkipMap`]. + pub type RangeAll<'a, K, V, Q, R> = + super::super::iter::IterAll<'a, K, V, Allocator, RefCounter, Q, R>; + + /// The entry reference of the [`SkipMap`]. + pub type Entry<'a, K, V> = super::super::entry::EntryRef<'a, K, V, Allocator, RefCounter>; + + /// The versioned entry reference of the [`SkipMap`]. + pub type VersionedEntry<'a, K, V> = + super::super::entry::VersionedEntryRef<'a, K, V, Allocator, RefCounter>; + + /// A fast, lock-free, thread-safe ARENA based `SkipMap` that supports multiple versions, forward and backward iteration. + /// + /// If you want to use in non-concurrent environment, you can use [`multiple_version::unsync::SkipMap`](crate::generic::multiple_version::unsync::SkipMap). + #[repr(transparent)] + pub struct SkipMap(SkipList); + + impl Clone for SkipMap { + #[inline] + fn clone(&self) -> Self { + Self(self.0.clone()) + } + } + + impl From> for SkipMap { + #[inline] + fn from(list: SkipList) -> Self { + Self(list) + } + } + + impl crate::traits::List for SkipMap { + type Constructable = SkipList; + + #[inline] + fn as_ref(&self) -> &Self::Constructable { + &self.0 + } + + #[inline] + fn as_mut(&mut self) -> &mut Self::Constructable { + &mut self.0 + } + + #[inline] + fn meta( + &self, + ) -> &<::Allocator as super::Sealed>::Meta + { + self.0.meta() + } + } + + impl super::Map for SkipMap + where + K: ?Sized + 'static, + V: ?Sized + 'static, + { + type Allocator = Allocator; + type RefCounter = RefCounter; + } } /// A fast, ARENA based `SkipMap` that supports multiple versions, forward and backward iteration. 
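Since the `sync` module above is the lock-free, thread-safe flavor, the intended sharing pattern is to clone the map handle across threads; clones share one ARENA. A hedged sketch of concurrent versioned inserts (API per the doc examples in this patch):

```rust
use skl::generic::{multiple_version::{sync::SkipMap, Map}, Builder};

fn main() {
  let map = Builder::new()
    .with_capacity(1 << 20)
    .alloc::<SkipMap<str, str>>()
    .unwrap();

  let handles: Vec<_> = (0..4u64)
    .map(|v| {
      let m = map.clone(); // clones share the same ARENA
      std::thread::spawn(move || {
        m.insert(v, "key", "value").unwrap();
      })
    })
    .collect();

  for h in handles {
    h.join().unwrap();
  }

  // All four versions of "key" are visible at version 3.
  assert_eq!(map.iter_all_versions(3).count(), 4);
}
```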
@@ -32,9 +224,75 @@ pub trait Map where K: ?Sized + 'static, V: ?Sized + 'static, - Self: Arena, - ::Node: WithVersion, + Self: Arena>, + ::Node: WithVersion, { + /// The allocator type used to allocate nodes in the map. + type Allocator: Allocator; + /// The reference counter type used in the map. + type RefCounter: RefCounter; + + /// Try creates from a `SkipMap` from an allocator directly. + /// + /// This method is not the ideal constructor, it is recommended to use [`Builder`](super::Builder) to create a `SkipMap`, + /// if you are not attempting to create multiple `SkipMap`s on the same allocator. + /// + /// Besides, the only way to reconstruct `SkipMap`s created by this method is to use the [`open_from_allocator(header: Header, arena: Self::Allocator, cmp: Self::Comparator)`](Map::open_from_allocator) method, + /// users must save the header to reconstruct the `SkipMap` by their own. + /// The header can be obtained by calling [`header`](Map::header) method. + #[inline] + fn create_from_allocator(arena: Self::Allocator) -> Result { + Self::try_create_from_allocator(arena, ()) + } + + /// Try open a `SkipMap` from an allocator directly. + /// + /// See documentation for [`create_from_allocator`](Map::create_from_allocator) for more information. + /// + /// ## Safety + /// - The `header` must be the same as the one obtained from `SkipMap` when it was created. + /// - The `K` and `V` types must be the same as the types used to create the map. + #[inline] + unsafe fn open_from_allocator(header: Header, arena: Self::Allocator) -> Result { + Self::try_open_from_allocator(arena, (), header) + } + + /// Returns the header of the `SkipMap`, which can be used to reconstruct the `SkipMap`. + /// + /// By default, `SkipMap` will allocate meta, head node, and tail node in the ARENA, + /// and the data section will be allocated after the tail node. + /// + /// This method will return the header in the ARENA. + #[inline] + fn header(&self) -> Option<&Header> { + self.as_ref().header() + } + + /// Returns the references for the `SkipMap`. + #[inline] + fn refs(&self) -> usize { + self.as_ref().refs() + } + + /// Returns the height of the highest tower within any of the nodes that + /// have ever been allocated as part of this skiplist. + #[inline] + fn height(&self) -> u8 { + self.as_ref().height() + } + + /// Returns the number of entries in the skipmap. + #[inline] + fn len(&self) -> usize { + self.as_ref().len() + } + + /// Returns true if the skipmap is empty. + #[inline] + fn is_empty(&self) -> bool { + self.len() == 0 + } + /// Returns the maximum version of all entries in the map. #[inline] fn maximum_version(&self) -> Version { @@ -47,6 +305,25 @@ where self.as_ref().minimum_version() } + /// Returns a random generated height. + /// + /// This method is useful when you want to check if the underlying allocator can allocate a node. + /// + /// ## Example + /// + /// ```rust + /// use skl::{generic::{multiple_version::{sync::SkipMap, Map}, Builder}, Arena}; + /// + /// let map = Builder::new().with_capacity(1024).alloc::>().unwrap(); + /// let height = map.random_height(); + /// + /// let needed = SkipMap::<[u8], [u8]>::estimated_node_size(height, b"k1".len(), b"k2".len()); + /// ``` + #[inline] + fn random_height(&self) -> Height { + self.as_ref().random_height() + } + /// Returns `true` if the map may contains an entry whose version is less than or equal to the given version. 
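The `create_from_allocator`/`open_from_allocator` pair above is what enables this release's "multiple `SkipMap`s on one `Allocator`" feature, and as the docs stress, the saved `Header` is the only way back to such a map. A generic sketch against the trait methods shown above; the helper name is hypothetical, and the error path and `Header: Copy` are assumptions:

```rust
use skl::{generic::multiple_version::Map, Header};

/// Hypothetical helper: create a map directly on a pre-built allocator
/// and hand back the header the caller must persist to reopen it later.
fn create_with_header<K, V, M>(arena: M::Allocator) -> Result<(M, Header), skl::error::Error>
where
  K: ?Sized + 'static,
  V: ?Sized + 'static,
  M: Map<K, V>,
{
  let map = M::create_from_allocator(arena)?;
  // Maps allocated in the ARENA always carry a header; store it somewhere
  // durable -- `open_from_allocator` needs it to reconstruct the map.
  let header = *map.header().expect("freshly created map has a header");
  Ok((map, header))
}
```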
#[inline] fn may_contain_version(&self, v: Version) -> bool { @@ -61,9 +338,9 @@ where /// ## Example /// /// ```rust - /// use skl::{multiple_version::{sync::SkipMap, Map}, Options}; + /// use skl::generic::{multiple_version::{sync::SkipMap, Map}, Builder}; /// - /// let map = Options::new().with_capacity(1024).alloc::>().unwrap(); + /// let map = Builder::new().with_capacity(1024).alloc::>().unwrap(); /// /// map.insert(0, "hello", "world").unwrap(); /// @@ -92,9 +369,9 @@ where /// ## Example /// /// ```rust - /// use skl::{multiple_version::{sync::SkipMap, Map}, Options}; + /// use skl::generic::{multiple_version::{sync::SkipMap, Map}, Builder}; /// - /// let map = Options::new().with_capacity(1024).alloc::>().unwrap(); + /// let map = Builder::new().with_capacity(1024).alloc::>().unwrap(); /// /// map.insert(0, "hello", "world").unwrap(); /// @@ -120,7 +397,10 @@ where /// Returns the first entry in the map. #[inline] - fn first<'a>(&'a self, version: Version) -> Option> + fn first<'a>( + &'a self, + version: Version, + ) -> Option> where K: Type, K::Ref<'a>: KeyRef<'a, K>, @@ -135,7 +415,10 @@ where /// Returns the last entry in the map. #[inline] - fn last<'a>(&'a self, version: Version) -> Option> + fn last<'a>( + &'a self, + version: Version, + ) -> Option> where K: Type, K::Ref<'a>: KeyRef<'a, K>, @@ -156,7 +439,7 @@ where fn first_versioned<'a>( &'a self, version: Version, - ) -> Option> + ) -> Option> where K: Type, K::Ref<'a>: KeyRef<'a, K>, @@ -177,7 +460,7 @@ where fn last_versioned<'a>( &'a self, version: Version, - ) -> Option> + ) -> Option> where K: Type, K::Ref<'a>: KeyRef<'a, K>, @@ -198,9 +481,9 @@ where /// ## Example /// /// ```rust - /// use skl::{multiple_version::{sync::SkipMap, Map}, Options}; + /// use skl::generic::{multiple_version::{sync::SkipMap, Map}, Builder}; /// - /// let map = Options::new().with_capacity(1024).alloc::>().unwrap(); + /// let map = Builder::new().with_capacity(1024).alloc::>().unwrap(); /// /// map.insert(0, "hello", "world").unwrap(); /// @@ -212,7 +495,11 @@ where /// assert!(map.get(1, "hello").is_none()); /// ``` #[inline] - fn get<'a, Q>(&'a self, version: Version, key: &Q) -> Option> + fn get<'a, Q>( + &'a self, + version: Version, + key: &Q, + ) -> Option> where K: Type, K::Ref<'a>: KeyRef<'a, K>, @@ -233,9 +520,9 @@ where /// ## Example /// /// ```rust - /// use skl::{multiple_version::{sync::SkipMap, Map}, Options}; + /// use skl::generic::{multiple_version::{sync::SkipMap, Map}, Builder}; /// - /// let map = Options::new().with_capacity(1024).alloc::>().unwrap(); + /// let map = Builder::new().with_capacity(1024).alloc::>().unwrap(); /// /// map.insert(0, "hello", "world").unwrap(); /// @@ -252,7 +539,7 @@ where &'a self, version: Version, key: &Q, - ) -> Option> + ) -> Option> where K: Type, K::Ref<'a>: KeyRef<'a, K>, @@ -273,7 +560,7 @@ where &'a self, version: Version, upper: Bound<&Q>, - ) -> Option> + ) -> Option> where K: Type, K::Ref<'a>: KeyRef<'a, K>, @@ -294,7 +581,7 @@ where &'a self, version: Version, lower: Bound<&Q>, - ) -> Option> + ) -> Option> where K: Type, K::Ref<'a>: KeyRef<'a, K>, @@ -317,7 +604,7 @@ where &'a self, version: Version, upper: Bound<&Q>, - ) -> Option> + ) -> Option> where K: Type, K::Ref<'a>: KeyRef<'a, K>, @@ -343,7 +630,7 @@ where &'a self, version: Version, lower: Bound<&Q>, - ) -> Option> + ) -> Option> where K: Type, K::Ref<'a>: KeyRef<'a, K>, @@ -362,7 +649,7 @@ where /// Returns a new iterator, this iterator will yield the latest version of all entries in the map less or equal to the 
given version. #[inline] - fn iter<'a>(&'a self, version: Version) -> Iter<'a, K, V, Self::Allocator> + fn iter<'a>(&'a self, version: Version) -> Iter<'a, K, V, Self::Allocator, Self::RefCounter> where K: Type, K::Ref<'a>: KeyRef<'a, K>, @@ -373,7 +660,10 @@ where /// Returns a new iterator, this iterator will yield all versions for all entries in the map less or equal to the given version. #[inline] - fn iter_all_versions<'a>(&'a self, version: Version) -> IterAll<'a, K, V, Self::Allocator> + fn iter_all_versions<'a>( + &'a self, + version: Version, + ) -> IterAll<'a, K, V, Self::Allocator, Self::RefCounter> where K: Type, K::Ref<'a>: KeyRef<'a, K>, @@ -384,7 +674,11 @@ where /// Returns a iterator that within the range, this iterator will yield the latest version of all entries in the range less or equal to the given version. #[inline] - fn range<'a, Q, R>(&'a self, version: Version, range: R) -> Iter<'a, K, V, Self::Allocator, Q, R> + fn range<'a, Q, R>( + &'a self, + version: Version, + range: R, + ) -> Iter<'a, K, V, Self::Allocator, Self::RefCounter, Q, R> where K: Type, K::Ref<'a>: KeyRef<'a, K>, @@ -401,7 +695,7 @@ where &'a self, version: Version, range: R, - ) -> IterAll<'a, K, V, Self::Allocator, Q, R> + ) -> IterAll<'a, K, V, Self::Allocator, Self::RefCounter, Q, R> where K: Type, K::Ref<'a>: KeyRef<'a, K>, @@ -423,7 +717,10 @@ where version: Version, key: impl Into>, value: impl Into>, - ) -> Result>, Among> + ) -> Result< + Option>, + Among, + > where K: Type + 'b, K::Ref<'a>: KeyRef<'a, K>, @@ -441,9 +738,9 @@ where /// ## Example /// /// ```rust - /// use skl::{multiple_version::{sync::SkipMap, Map}, Options, Arena}; + /// use skl::{generic::{multiple_version::{sync::SkipMap, Map}, Builder}, Arena}; /// - /// let map = Options::new().with_capacity(1024).alloc::>().unwrap(); + /// let map = Builder::new().with_capacity(1024).alloc::>().unwrap(); /// /// let height = map.random_height(); /// map.insert_at_height(0, height, "hello", "world").unwrap(); @@ -455,7 +752,10 @@ where height: Height, key: impl Into>, value: impl Into>, - ) -> Result>, Among> + ) -> Result< + Option>, + Among, + > where K: Type + 'b, K::Ref<'a>: KeyRef<'a, K>, @@ -479,7 +779,7 @@ where /// ## Example /// /// ```rust - /// use skl::{multiple_version::{sync::SkipMap, Map}, ValueBuilder, Options}; + /// use skl::{generic::{multiple_version::{sync::SkipMap, Map}, Builder}, ValueBuilder}; /// /// struct Person { /// id: u32, @@ -500,12 +800,12 @@ where /// /// let encoded_size = alice.encoded_size(); /// - /// let l = Options::new().with_capacity(1024).alloc::<[u8], [u8], SkipMap<_, _>>().unwrap(); + /// let l = Builder::new().with_capacity(1024).alloc::>().unwrap(); /// /// let vb = ValueBuilder::new(encoded_size, |val: &mut skl::VacantBuffer<'_>| { /// val.put_u32_le(alice.id).unwrap(); /// val.put_slice(alice.name.as_bytes()).unwrap(); - /// Ok(()) + /// Ok(encoded_size) /// }); /// /// l.insert_with_value_builder::(1, b"alice".as_slice(), vb) @@ -516,8 +816,11 @@ where &'a self, version: Version, key: impl Into>, - value_builder: ValueBuilder) -> Result<(), E>>, - ) -> Result>, Among> + value_builder: ValueBuilder) -> Result>, + ) -> Result< + Option>, + Among, + > where K: Type + 'b, K::Ref<'a>: KeyRef<'a, K>, @@ -546,7 +849,7 @@ where /// ## Example /// /// ```rust - /// use skl::{multiple_version::{sync::SkipMap, Map}, ValueBuilder, Options, Arena}; + /// use skl::{generic::{multiple_version::{sync::SkipMap, Map}, Builder}, ValueBuilder, Arena}; /// /// struct Person { /// id: u32, @@ -567,12 
+870,12 @@ where /// /// let encoded_size = alice.encoded_size(); /// - /// let l = Options::new().with_capacity(1024).alloc::<[u8], [u8], SkipMap<_, _>>().unwrap(); + /// let l = Builder::new().with_capacity(1024).alloc::>().unwrap(); /// /// let vb = ValueBuilder::new(encoded_size, |val: &mut skl::VacantBuffer<'_>| { /// val.put_u32_le(alice.id).unwrap(); /// val.put_slice(alice.name.as_bytes()).unwrap(); - /// Ok(()) + /// Ok(encoded_size) /// }); /// /// let height = l.random_height(); @@ -585,8 +888,11 @@ where version: Version, height: Height, key: impl Into>, - value_builder: ValueBuilder) -> Result<(), E>>, - ) -> Result>, Among> + value_builder: ValueBuilder) -> Result>, + ) -> Result< + Option>, + Among, + > where K: Type + 'b, K::Ref<'a>: KeyRef<'a, K>, @@ -609,7 +915,10 @@ where version: Version, key: impl Into>, value: impl Into>, - ) -> Result>, Among> + ) -> Result< + Option>, + Among, + > where K: Type + 'b, K::Ref<'a>: KeyRef<'a, K>, @@ -633,7 +942,10 @@ where height: Height, key: impl Into>, value: impl Into>, - ) -> Result>, Among> + ) -> Result< + Option>, + Among, + > where K: Type + 'b, K::Ref<'a>: KeyRef<'a, K>, @@ -660,7 +972,7 @@ where /// ## Example /// /// ```rust - /// use skl::{multiple_version::{sync::SkipMap, Map}, ValueBuilder, Options}; + /// use skl::{generic::{multiple_version::{sync::SkipMap, Map}, Builder}, ValueBuilder}; /// /// struct Person { /// id: u32, @@ -681,12 +993,12 @@ where /// /// let encoded_size = alice.encoded_size(); /// - /// let l = Options::new().with_capacity(1024).alloc::<[u8], [u8], SkipMap<_, _>>().unwrap(); + /// let l = Builder::new().with_capacity(1024).alloc::>().unwrap(); /// /// let vb = ValueBuilder::new(encoded_size, |val: &mut skl::VacantBuffer<'_>| { /// val.put_u32_le(alice.id).unwrap(); /// val.put_slice(alice.name.as_bytes()).unwrap(); - /// Ok(()) + /// Ok(encoded_size) /// }); /// l.get_or_insert_with_value_builder::(1, b"alice".as_slice(), vb) /// .unwrap(); @@ -696,8 +1008,11 @@ where &'a self, version: Version, key: impl Into>, - value_builder: ValueBuilder) -> Result<(), E>>, - ) -> Result>, Among> + value_builder: ValueBuilder) -> Result>, + ) -> Result< + Option>, + Among, + > where K: Type + 'b, K::Ref<'a>: KeyRef<'a, K>, @@ -727,7 +1042,7 @@ where /// ## Example /// /// ```rust - /// use skl::{multiple_version::{sync::SkipMap, Map}, ValueBuilder, Options, Arena}; + /// use skl::{generic::{multiple_version::{sync::SkipMap, Map}, Builder}, ValueBuilder, Arena}; /// /// struct Person { /// id: u32, @@ -748,12 +1063,12 @@ where /// /// let encoded_size = alice.encoded_size(); /// - /// let l = Options::new().with_capacity(1024).alloc::<[u8], [u8], SkipMap<_, _>>().unwrap(); + /// let l = Builder::new().with_capacity(1024).alloc::>().unwrap(); /// /// let vb = ValueBuilder::new(encoded_size, |val: &mut skl::VacantBuffer<'_>| { /// val.put_u32_le(alice.id).unwrap(); /// val.put_slice(alice.name.as_bytes()).unwrap(); - /// Ok(()) + /// Ok(encoded_size) /// }); /// /// let height = l.random_height(); @@ -766,8 +1081,11 @@ where version: Version, height: Height, key: impl Into>, - value_builder: ValueBuilder) -> Result<(), E>>, - ) -> Result>, Among> + value_builder: ValueBuilder) -> Result>, + ) -> Result< + Option>, + Among, + > where K: Type + 'b, K::Ref<'a>: KeyRef<'a, K>, @@ -793,7 +1111,7 @@ where /// ## Example /// /// ```rust - /// use skl::{multiple_version::{sync::SkipMap, Map}, KeyBuilder, ValueBuilder, Options}; + /// use skl::{generic::{multiple_version::{sync::SkipMap, Map}, Builder}, KeyBuilder, 
ValueBuilder}; /// /// struct Person { /// id: u32, @@ -814,17 +1132,17 @@ where /// /// let encoded_size = alice.encoded_size(); /// - /// let l = Options::new().with_capacity(1024).alloc::<[u8], [u8], SkipMap<_, _>>().unwrap(); + /// let l = Builder::new().with_capacity(1024).alloc::>().unwrap(); /// /// let kb = KeyBuilder::new(5u8.into(), |key: &mut skl::VacantBuffer<'_>| { /// key.put_slice(b"alice").unwrap(); - /// Ok(()) + /// Ok(5) /// }); /// /// let vb = ValueBuilder::new(encoded_size, |val: &mut skl::VacantBuffer<'_>| { /// val.put_u32_le(alice.id).unwrap(); /// val.put_slice(alice.name.as_bytes()).unwrap(); - /// Ok(()) + /// Ok(encoded_size) /// }); /// /// l.insert_with_builders::<(), ()>(1, kb, vb) @@ -834,9 +1152,9 @@ where fn insert_with_builders<'a, KE, VE>( &'a self, version: Version, - key_builder: KeyBuilder) -> Result<(), KE>>, - value_builder: ValueBuilder) -> Result<(), VE>>, - ) -> Result>, Among> + key_builder: KeyBuilder) -> Result>, + value_builder: ValueBuilder) -> Result>, + ) -> Result>, Among> where K: Type, K::Ref<'a>: KeyRef<'a, K>, @@ -866,7 +1184,7 @@ where /// ## Example /// /// ```rust - /// use skl::{multiple_version::{sync::SkipMap, Map}, KeyBuilder, ValueBuilder, Options, Arena}; + /// use skl::{generic::{multiple_version::{sync::SkipMap, Map}, Builder}, KeyBuilder, ValueBuilder, Arena}; /// /// struct Person { /// id: u32, @@ -887,17 +1205,17 @@ where /// /// let encoded_size = alice.encoded_size(); /// - /// let l = Options::new().with_capacity(1024).alloc::<[u8], [u8], SkipMap<_, _>>().unwrap(); + /// let l = Builder::new().with_capacity(1024).alloc::>().unwrap(); /// /// let kb = KeyBuilder::new(5u8.into(), |key: &mut skl::VacantBuffer<'_>| { /// key.put_slice(b"alice").unwrap(); - /// Ok(()) + /// Ok(5) /// }); /// /// let vb = ValueBuilder::new(encoded_size, |val: &mut skl::VacantBuffer<'_>| { /// val.put_u32_le(alice.id).unwrap(); /// val.put_slice(alice.name.as_bytes()).unwrap(); - /// Ok(()) + /// Ok(encoded_size) /// }); /// /// let height = l.random_height(); @@ -909,9 +1227,9 @@ where &'a self, version: Version, height: Height, - key_builder: KeyBuilder) -> Result<(), KE>>, - value_builder: ValueBuilder) -> Result<(), VE>>, - ) -> Result>, Among> + key_builder: KeyBuilder) -> Result>, + value_builder: ValueBuilder) -> Result>, + ) -> Result>, Among> where K: Type, K::Ref<'a>: KeyRef<'a, K>, @@ -935,7 +1253,7 @@ where /// ## Example /// /// ```rust - /// use skl::{multiple_version::{sync::SkipMap, Map}, KeyBuilder, ValueBuilder, Options}; + /// use skl::{generic::{multiple_version::{sync::SkipMap, Map}, Builder}, KeyBuilder, ValueBuilder}; /// /// struct Person { /// id: u32, @@ -956,17 +1274,17 @@ where /// /// let encoded_size = alice.encoded_size(); /// - /// let l = Options::new().with_capacity(1024).alloc::<[u8], [u8], SkipMap<_, _>>().unwrap(); + /// let l = Builder::new().with_capacity(1024).alloc::>().unwrap(); /// /// let kb = KeyBuilder::new(5u8.into(), |key: &mut skl::VacantBuffer<'_>| { /// key.put_slice(b"alice").unwrap(); - /// Ok(()) + /// Ok(5) /// }); /// /// let vb = ValueBuilder::new(encoded_size, |val: &mut skl::VacantBuffer<'_>| { /// val.put_u32_le(alice.id).unwrap(); /// val.put_slice(alice.name.as_bytes()).unwrap(); - /// Ok(()) + /// Ok(encoded_size) /// }); /// /// l.get_or_insert_with_builders::<(), ()>(1, kb, vb) @@ -976,9 +1294,9 @@ where fn get_or_insert_with_builders<'a, KE, VE>( &'a self, version: Version, - key_builder: KeyBuilder) -> Result<(), KE>>, - value_builder: ValueBuilder) -> Result<(), VE>>, - ) 
-> Result>, Among> + key_builder: KeyBuilder) -> Result>, + value_builder: ValueBuilder) -> Result>, + ) -> Result>, Among> where K: Type, K::Ref<'a>: KeyRef<'a, K>, @@ -1005,7 +1323,7 @@ where /// ## Example /// /// ```rust - /// use skl::{multiple_version::{sync::SkipMap, Map}, KeyBuilder, ValueBuilder, Options, Arena}; + /// use skl::{generic::{multiple_version::{sync::SkipMap, Map}, Builder}, KeyBuilder, ValueBuilder, Arena}; /// /// struct Person { /// id: u32, @@ -1026,17 +1344,17 @@ where /// /// let encoded_size = alice.encoded_size(); /// - /// let l = Options::new().with_capacity(1024).alloc::<[u8], [u8], SkipMap<_, _>>().unwrap(); + /// let l = Builder::new().with_capacity(1024).alloc::>().unwrap(); /// /// let kb = KeyBuilder::new(5u8.into(), |key: &mut skl::VacantBuffer<'_>| { /// key.put_slice(b"alice").unwrap(); - /// Ok(()) + /// Ok(5) /// }); /// /// let vb = ValueBuilder::new(encoded_size, |val: &mut skl::VacantBuffer<'_>| { /// val.put_u32_le(alice.id).unwrap(); /// val.put_slice(alice.name.as_bytes()).unwrap(); - /// Ok(()) + /// Ok(encoded_size) /// }); /// /// let height = l.random_height(); @@ -1048,9 +1366,9 @@ where &'a self, version: Version, height: Height, - key_builder: KeyBuilder) -> Result<(), KE>>, - value_builder: ValueBuilder) -> Result<(), VE>>, - ) -> Result>, Among> + key_builder: KeyBuilder) -> Result>, + value_builder: ValueBuilder) -> Result>, + ) -> Result>, Among> where K: Type, K::Ref<'a>: KeyRef<'a, K>, @@ -1076,7 +1394,7 @@ where key: impl Into>, success: Ordering, failure: Ordering, - ) -> Result>, Either> + ) -> Result>, Either> where K: Type + 'b, K::Ref<'a>: KeyRef<'a, K>, @@ -1101,7 +1419,7 @@ where key: impl Into>, success: Ordering, failure: Ordering, - ) -> Result>, Either> + ) -> Result>, Either> where K: Type + 'b, K::Ref<'a>: KeyRef<'a, K>, @@ -1123,7 +1441,7 @@ where &'a self, version: Version, key: impl Into>, - ) -> Result>, Either> + ) -> Result>, Either> where K: Type + 'b, K::Ref<'a>: KeyRef<'a, K>, @@ -1142,9 +1460,9 @@ where /// ## Example /// /// ```rust - /// use skl::{multiple_version::{sync::SkipMap, Map}, Options, Arena}; + /// use skl::{generic::{multiple_version::{sync::SkipMap, Map}, Builder}, Arena}; /// - /// let map = Options::new().with_capacity(1024).alloc::>().unwrap(); + /// let map = Builder::new().with_capacity(1024).alloc::>().unwrap(); /// /// map.insert(0, "hello", "world").unwrap(); /// @@ -1157,7 +1475,7 @@ where version: Version, height: Height, key: impl Into>, - ) -> Result>, Either> + ) -> Result>, Either> where K: Type + 'b, K::Ref<'a>: KeyRef<'a, K>, @@ -1180,7 +1498,7 @@ where /// ## Example /// /// ```rust - /// use skl::{multiple_version::{sync::SkipMap, Map}, KeyBuilder, Options}; + /// use skl::{generic::{multiple_version::{sync::SkipMap, Map}, Builder}, KeyBuilder}; /// /// struct Person { /// id: u32, @@ -1201,11 +1519,11 @@ where /// /// let encoded_size = alice.encoded_size(); /// - /// let l = Options::new().with_capacity(1024).alloc::<[u8], [u8], SkipMap<_, _>>().unwrap(); + /// let l = Builder::new().with_capacity(1024).alloc::>().unwrap(); /// /// let kb = KeyBuilder::new(5u8.into(), |key: &mut skl::VacantBuffer<'_>| { /// key.put_slice(b"alice").unwrap(); - /// Ok(()) + /// Ok(5) /// }); /// l.get_or_remove_with_builder::(1, kb) /// .unwrap(); @@ -1214,8 +1532,8 @@ where fn get_or_remove_with_builder<'a, 'b: 'a, E>( &'a self, version: Version, - key_builder: KeyBuilder) -> Result<(), E>>, - ) -> Result>, Either> + key_builder: KeyBuilder) -> Result>, + ) -> Result>, Either> where K: 
Type, K::Ref<'a>: KeyRef<'a, K>, @@ -1240,7 +1558,7 @@ where /// ## Example /// /// ```rust - /// use skl::{multiple_version::{sync::SkipMap, Map}, KeyBuilder, Options, Arena}; + /// use skl::{generic::{multiple_version::{sync::SkipMap, Map}, Builder}, KeyBuilder, Arena}; /// /// struct Person { /// id: u32, @@ -1261,11 +1579,11 @@ where /// /// let encoded_size = alice.encoded_size(); /// - /// let l = Options::new().with_capacity(1024).alloc::<[u8], [u8], SkipMap<_, _>>().unwrap(); + /// let l = Builder::new().with_capacity(1024).alloc::>().unwrap(); /// /// let kb = KeyBuilder::new(5u8.into(), |key: &mut skl::VacantBuffer<'_>| { /// key.put_slice(b"alice").unwrap(); - /// Ok(()) + /// Ok(5) /// }); /// let height = l.random_height(); /// l.get_or_remove_at_height_with_builder::(1, height, kb) @@ -1276,8 +1594,8 @@ where &'a self, version: Version, height: Height, - key_builder: KeyBuilder) -> Result<(), E>>, - ) -> Result>, Either> + key_builder: KeyBuilder) -> Result>, + ) -> Result>, Either> where K: Type, K::Ref<'a>: KeyRef<'a, K>, @@ -1288,12 +1606,3 @@ where .get_or_remove_at_height_with_builder(version, height, key_builder) } } - -impl Map for T -where - K: ?Sized + 'static, - V: ?Sized + 'static, - T: Arena, - ::Node: WithVersion, -{ -} diff --git a/src/traits/map.rs b/src/generic/unique.rs similarity index 66% rename from src/traits/map.rs rename to src/generic/unique.rs index cf59485..aa178b5 100644 --- a/src/traits/map.rs +++ b/src/generic/unique.rs @@ -1,20 +1,190 @@ +use core::{ + ops::{Bound, RangeBounds}, + sync::atomic::Ordering, +}; + +use among::Among; use dbutils::{ + buffer::VacantBuffer, equivalent::Comparable, types::{KeyRef, MaybeStructured, Type}, }; +use either::Either; -use crate::allocator::WithoutVersion; +use crate::{ + allocator::{Allocator, Sealed, WithoutVersion}, + error::Error, + ref_counter::RefCounter, + Arena, Header, Height, KeyBuilder, ValueBuilder, MIN_VERSION, +}; -use super::*; +use super::list::{iterator::Iter, EntryRef}; -/// [`Map`] implementation for concurrent environment. -pub mod sync { - pub use crate::sync::map::{Entry, Iter, IterAll, Range, RangeAll, SkipMap}; +/// Implementations for single-threaded environments. +pub mod unsync { + pub use crate::unsync::{map::Allocator, RefCounter}; + + #[cfg(any(all(test, not(miri)), all_skl_tests, test_generic_unsync_map,))] + mod tests { + crate::__generic_map_tests!("unsync_map": super::SkipMap<[u8], [u8]>); + } + + type SkipList = super::super::list::SkipList; + + /// Iterator over the [`SkipMap`]. + pub type Iter<'a, K, V> = super::super::iter::Iter<'a, K, V, Allocator, RefCounter>; + + /// Iterator over a subset of the [`SkipMap`]. + pub type Range<'a, K, V, Q, R> = super::super::iter::Iter<'a, K, V, Allocator, RefCounter, Q, R>; + + /// The entry reference of the [`SkipMap`]. + pub type Entry<'a, K, V> = super::super::entry::EntryRef<'a, K, V, Allocator, RefCounter>; + + /// A fast, ARENA based `SkipMap` that supports forward and backward iteration. + /// + /// If you want to use in concurrent environment, you can use [`unique::sync::SkipMap`](crate::generic::unique::sync::SkipMap). 
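The `unique` flavor defined here is the versionless counterpart to `multiple_version`: the same insert/get surface minus the `version` argument (internally pinned to `MIN_VERSION`, per the import above). A hedged sketch mirroring the doc examples in this file:

```rust
use skl::generic::{unique::{sync::SkipMap, Map}, Builder};

fn main() {
  let map = Builder::new()
    .with_capacity(1024)
    .alloc::<SkipMap<str, str>>()
    .unwrap();

  // No version argument on the unique map:
  map.insert("hello", "world").unwrap();
  assert!(map.contains_key("hello"));
  assert_eq!(map.get("hello").unwrap().value(), "world");
}
```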
+ #[repr(transparent)] + pub struct SkipMap(SkipList); + + impl Clone for SkipMap { + #[inline] + fn clone(&self) -> Self { + Self(self.0.clone()) + } + } + + impl From> for SkipMap { + #[inline] + fn from(list: SkipList) -> Self { + Self(list) + } + } + + impl crate::traits::List for SkipMap { + type Constructable = SkipList; + + #[inline] + fn as_ref(&self) -> &Self::Constructable { + &self.0 + } + + #[inline] + fn as_mut(&mut self) -> &mut Self::Constructable { + &mut self.0 + } + + #[inline] + fn meta( + &self, + ) -> &<::Allocator as super::Sealed>::Meta + { + self.0.meta() + } + } + + impl super::Map for SkipMap + where + K: ?Sized + 'static, + V: ?Sized + 'static, + { + type Allocator = Allocator; + type RefCounter = RefCounter; + } } -/// [`Map`] implementation for non-concurrent environment. -pub mod unsync { - pub use crate::unsync::map::{Entry, Iter, IterAll, Range, RangeAll, SkipMap}; +/// Implementations for concurrent environments. +pub mod sync { + pub use crate::sync::{map::Allocator, RefCounter}; + + #[cfg(any(all(test, not(miri)), all_skl_tests, test_generic_sync_map,))] + mod tests { + crate::__generic_map_tests!("sync_map": super::SkipMap<[u8], [u8]>); + } + + #[cfg(any(all(test, not(miri)), all_skl_tests, test_generic_sync_map_concurrent,))] + mod concurrent_tests { + crate::__generic_map_tests!(go "sync_map": super::SkipMap<[u8], [u8]> => crate::tests::generic::TEST_OPTIONS); + } + + #[cfg(any( + all(test, not(miri)), + all_skl_tests, + test_generic_sync_map_concurrent_with_optimistic_freelist, + ))] + mod concurrent_tests_with_optimistic_freelist { + crate::__generic_map_tests!(go "sync_map": super::SkipMap<[u8], [u8]> => crate::tests::generic::TEST_OPTIONS_WITH_OPTIMISTIC_FREELIST); + } + + #[cfg(any( + all(test, not(miri)), + all_skl_tests, + test_generic_sync_map_concurrent_with_pessimistic_freelist, + ))] + mod concurrent_tests_with_pessimistic_freelist { + crate::__generic_map_tests!(go "sync_map": super::SkipMap<[u8], [u8]> => crate::tests::generic::TEST_OPTIONS_WITH_PESSIMISTIC_FREELIST); + } + + type SkipList = super::super::list::SkipList; + + /// Iterator over the [`SkipMap`]. + pub type Iter<'a, K, V> = super::super::iter::Iter<'a, K, V, Allocator, RefCounter>; + + /// Iterator over a subset of the [`SkipMap`]. + pub type Range<'a, K, V, Q, R> = super::super::iter::Iter<'a, K, V, Allocator, RefCounter, Q, R>; + + /// The entry reference of the [`SkipMap`]. + pub type Entry<'a, K, V> = super::super::entry::EntryRef<'a, K, V, Allocator, RefCounter>; + + /// A fast, lock-free, thread-safe ARENA based `SkipMap` that supports forward and backward iteration. + /// + /// If you want to use in non-concurrent environment, you can use [`unique::unsync::SkipMap`](crate::generic::unique::unsync::SkipMap). 
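One behavioral note worth calling out for the insert family defined below: per the crate's documented contract, `insert` upserts (replacing any existing value), while `get_or_insert` leaves an existing value in place and returns it. A hedged sketch of the latter:

```rust
use skl::generic::{unique::{sync::SkipMap, Map}, Builder};

fn main() {
  let map = Builder::new()
    .with_capacity(1024)
    .alloc::<SkipMap<str, str>>()
    .unwrap();

  // Fresh insert: nothing to hand back.
  assert!(map.get_or_insert("k", "first").unwrap().is_none());

  // Key already exists: the stored value is kept and returned.
  let existing = map.get_or_insert("k", "second").unwrap().unwrap();
  assert_eq!(existing.value(), "first");
  assert_eq!(map.get("k").unwrap().value(), "first");
}
```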
+ #[repr(transparent)] + pub struct SkipMap(SkipList); + + impl Clone for SkipMap { + #[inline] + fn clone(&self) -> Self { + Self(self.0.clone()) + } + } + + impl From> for SkipMap { + #[inline] + fn from(list: SkipList) -> Self { + Self(list) + } + } + + impl crate::traits::List for SkipMap { + type Constructable = SkipList; + + #[inline] + fn as_ref(&self) -> &Self::Constructable { + &self.0 + } + + #[inline] + fn as_mut(&mut self) -> &mut Self::Constructable { + &mut self.0 + } + + #[inline] + fn meta( + &self, + ) -> &<::Allocator as super::Sealed>::Meta + { + self.0.meta() + } + } + + impl super::Map for SkipMap + where + K: ?Sized + 'static, + V: ?Sized + 'static, + { + type Allocator = Allocator; + type RefCounter = RefCounter; + } } /// A fast, ARENA based `SkipMap` that supports forward and backward iteration. @@ -25,17 +195,96 @@ pub trait Map where K: ?Sized + 'static, V: ?Sized + 'static, - Self: Arena, - ::Node: WithoutVersion, + Self: Arena>, + ::Node: WithoutVersion, { + /// The allocator used to allocate nodes in the `SkipMap`. + type Allocator: Allocator; + /// The reference counter of the `SkipMap`. + type RefCounter: RefCounter; + + /// Try creates from a `SkipMap` from an allocator directly. + /// + /// This method is not the ideal constructor, it is recommended to use [`Builder`](super::Builder) to create a `SkipMap`, + /// if you are not attempting to create multiple `SkipMap`s on the same allocator. + /// + /// Besides, the only way to reconstruct `SkipMap`s created by this method is to use the [`open_from_allocator(header: Header, arena: Self::Allocator, cmp: Self::Comparator)`](Map::open_from_allocator) method, + /// users must save the header to reconstruct the `SkipMap` by their own. + /// The header can be obtained by calling [`header`](Map::header) method. + #[inline] + fn create_from_allocator(arena: Self::Allocator) -> Result { + Self::try_create_from_allocator(arena, ()) + } + + /// Try open a `SkipMap` from an allocator directly. + /// + /// See documentation for [`create_from_allocator`](Map::create_from_allocator) for more information. + /// + /// ## Safety + /// - The `header` must be the same as the one obtained from `SkipMap` when it was created. + /// - The `K` and `V` types must be the same as the types used to create the map. + #[inline] + unsafe fn open_from_allocator(header: Header, arena: Self::Allocator) -> Result { + Self::try_open_from_allocator(arena, (), header) + } + + /// Returns the header of the `SkipMap`, which can be used to reconstruct the `SkipMap`. + /// + /// By default, `SkipMap` will allocate meta, head node, and tail node in the ARENA, + /// and the data section will be allocated after the tail node. + /// + /// This method will return the header in the ARENA. + #[inline] + fn header(&self) -> Option<&Header> { + self.as_ref().header() + } + + /// Returns the height of the highest tower within any of the nodes that + /// have ever been allocated as part of this skiplist. + #[inline] + fn height(&self) -> u8 { + self.as_ref().height() + } + + /// Returns the number of entries in the skipmap. + #[inline] + fn len(&self) -> usize { + self.as_ref().len() + } + + /// Returns true if the skipmap is empty. + #[inline] + fn is_empty(&self) -> bool { + self.len() == 0 + } + + /// Returns a random generated height. + /// + /// This method is useful when you want to check if the underlying allocator can allocate a node. 
+ /// + /// ## Example + /// + /// ```rust + /// use skl::{generic::{unique::{sync::SkipMap, Map}, Builder}, Arena}; + /// + /// let map = Builder::new().with_capacity(1024).alloc::>().unwrap(); + /// let height = map.random_height(); + /// + /// let needed = SkipMap::<[u8], [u8]>::estimated_node_size(height, b"k1".len(), b"k2".len()); + /// ``` + #[inline] + fn random_height(&self) -> Height { + self.as_ref().random_height() + } + /// Returns `true` if the key exists in the map. /// /// ## Example /// /// ```rust - /// use skl::{map::{unsync::SkipMap, Map}, Options}; + /// use skl::generic::{unique::{unsync::SkipMap, Map}, Builder}; /// - /// let map = Options::new().with_capacity(1024).alloc::<_, _, SkipMap::>().unwrap(); + /// let map = Builder::new().with_capacity(1024).alloc::>().unwrap(); /// /// map.insert("hello", "world").unwrap(); /// @@ -55,7 +304,7 @@ where /// Returns the first entry in the map. #[inline] - fn first<'a>(&'a self) -> Option> + fn first<'a>(&'a self) -> Option> where K: Type, K::Ref<'a>: KeyRef<'a, K>, @@ -66,7 +315,7 @@ where /// Returns the last entry in the map. #[inline] - fn last<'a>(&'a self) -> Option> + fn last<'a>(&'a self) -> Option> where K: Type, K::Ref<'a>: KeyRef<'a, K>, @@ -80,9 +329,9 @@ where /// ## Example /// /// ```rust - /// use skl::{map::{sync::SkipMap, Map}, Options}; + /// use skl::generic::{unique::{sync::SkipMap, Map}, Builder}; /// - /// let map = Options::new().with_capacity(1024).alloc::<_, _, SkipMap>().unwrap(); + /// let map = Builder::new().with_capacity(1024).alloc::>().unwrap(); /// /// map.insert("hello", "world").unwrap(); /// @@ -94,7 +343,7 @@ where /// assert!(map.get("hello").is_none()); /// ``` #[inline] - fn get<'a, Q>(&'a self, key: &Q) -> Option> + fn get<'a, Q>(&'a self, key: &Q) -> Option> where K: Type, K::Ref<'a>: KeyRef<'a, K>, @@ -107,7 +356,10 @@ where /// Returns an `EntryRef` pointing to the highest element whose key is below the given bound. /// If no such element is found then `None` is returned. #[inline] - fn upper_bound<'a, Q>(&'a self, upper: Bound<&Q>) -> Option> + fn upper_bound<'a, Q>( + &'a self, + upper: Bound<&Q>, + ) -> Option> where K: Type, K::Ref<'a>: KeyRef<'a, K>, @@ -120,7 +372,10 @@ where /// Returns an `EntryRef` pointing to the lowest element whose key is above the given bound. /// If no such element is found then `None` is returned. #[inline] - fn lower_bound<'a, Q>(&'a self, lower: Bound<&Q>) -> Option> + fn lower_bound<'a, Q>( + &'a self, + lower: Bound<&Q>, + ) -> Option> where K: Type, K::Ref<'a>: KeyRef<'a, K>, @@ -132,7 +387,7 @@ where /// Returns a new iterator, this iterator will yield the latest version of all entries in the map less or equal to the given version. #[inline] - fn iter<'a>(&'a self) -> Iter<'a, K, V, Self::Allocator> + fn iter<'a>(&'a self) -> Iter<'a, K, V, Self::Allocator, Self::RefCounter> where K: Type, K::Ref<'a>: KeyRef<'a, K>, @@ -143,7 +398,7 @@ where /// Returns a iterator that within the range, this iterator will yield the latest version of all entries in the range less or equal to the given version. 
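A quick sketch of the `range` call whose signature follows: the bounds go through the standard `RangeBounds` machinery over any `Q` comparable to `K::Ref`, so plain `str` ranges work for string keys. Assumes the same builder setup as the doc examples above:

```rust
use skl::generic::{unique::{sync::SkipMap, Map}, Builder};

fn main() {
  let map = Builder::new()
    .with_capacity(1024)
    .alloc::<SkipMap<str, str>>()
    .unwrap();

  for k in ["a", "b", "c"] {
    map.insert(k, "v").unwrap();
  }

  // Half-open range ["a", "c"): yields "a" and "b".
  let keys: Vec<String> = map
    .range("a".."c")
    .map(|ent| ent.key().to_string())
    .collect();
  assert_eq!(keys, ["a", "b"]);
}
```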
#[inline] - fn range<'a, Q, R>(&'a self, range: R) -> Iter<'a, K, V, Self::Allocator, Q, R> + fn range<'a, Q, R>(&'a self, range: R) -> Iter<'a, K, V, Self::Allocator, Self::RefCounter, Q, R> where K: Type, K::Ref<'a>: KeyRef<'a, K>, @@ -164,7 +419,10 @@ where &'a self, key: impl Into>, value: impl Into>, - ) -> Result>, Among> + ) -> Result< + Option>, + Among, + > where K: Type + 'b, K::Ref<'a>: KeyRef<'a, K>, @@ -182,9 +440,9 @@ where /// ## Example /// /// ```rust - /// use skl::{map::{sync::SkipMap, Map}, Options, Arena}; + /// use skl::{generic::{unique::{sync::SkipMap, Map}, Builder}, Arena}; /// - /// let map = Options::new().with_capacity(1024).alloc::>().unwrap(); + /// let map = Builder::new().with_capacity(1024).alloc::>().unwrap(); /// /// let height = map.random_height(); /// map.insert_at_height(height, "hello", "world").unwrap(); @@ -195,7 +453,10 @@ where height: Height, key: impl Into>, value: impl Into>, - ) -> Result>, Among> + ) -> Result< + Option>, + Among, + > where K: Type + 'b, K::Ref<'a>: KeyRef<'a, K>, @@ -221,7 +482,7 @@ where /// ## Example /// /// ```rust - /// use skl::{map::{sync::SkipMap, Map}, ValueBuilder, Options}; + /// use skl::{generic::{unique::{sync::SkipMap, Map}, Builder}, ValueBuilder}; /// /// struct Person { /// id: u32, @@ -242,12 +503,12 @@ where /// /// let encoded_size = alice.encoded_size(); /// - /// let l = Options::new().with_capacity(1024).alloc::<[u8], [u8], SkipMap<_, _>>().unwrap(); + /// let l = Builder::new().with_capacity(1024).alloc::>().unwrap(); /// /// let vb = ValueBuilder::new(encoded_size, |val: &mut skl::VacantBuffer<'_>| { /// val.put_u32_le(alice.id).unwrap(); /// val.put_slice(alice.name.as_bytes()).unwrap(); - /// Ok(()) + /// Ok(encoded_size) /// }); /// /// l.insert_with_value_builder::(b"alice".as_slice(), vb) @@ -258,8 +519,11 @@ where fn insert_with_value_builder<'a, 'b: 'a, E>( &'a self, key: impl Into>, - value_builder: ValueBuilder) -> Result<(), E>>, - ) -> Result>, Among> + value_builder: ValueBuilder) -> Result>, + ) -> Result< + Option>, + Among, + > where K: Type + 'b, K::Ref<'a>: KeyRef<'a, K>, @@ -283,7 +547,7 @@ where /// ## Example /// /// ```rust - /// use skl::{map::{sync::SkipMap, Map}, ValueBuilder, Options, Arena}; + /// use skl::{generic::{unique::{sync::SkipMap, Map}, Builder}, ValueBuilder, Arena}; /// /// struct Person { /// id: u32, @@ -304,12 +568,12 @@ where /// /// let encoded_size = alice.encoded_size(); /// - /// let l = Options::new().with_capacity(1024).alloc::<[u8], [u8], SkipMap<_, _>>().unwrap(); + /// let l = Builder::new().with_capacity(1024).alloc::>().unwrap(); /// /// let vb = ValueBuilder::new(encoded_size, |val: &mut skl::VacantBuffer<'_>| { /// val.put_u32_le(alice.id).unwrap(); /// val.put_slice(alice.name.as_bytes()).unwrap(); - /// Ok(()) + /// Ok(encoded_size) /// }); /// /// let height = l.random_height(); @@ -322,8 +586,11 @@ where &'a self, height: Height, key: impl Into>, - value_builder: ValueBuilder) -> Result<(), E>>, - ) -> Result>, Among> + value_builder: ValueBuilder) -> Result>, + ) -> Result< + Option>, + Among, + > where K: Type + 'b, K::Ref<'a>: KeyRef<'a, K>, @@ -345,7 +612,10 @@ where &'a self, key: impl Into>, value: impl Into>, - ) -> Result>, Among> + ) -> Result< + Option>, + Among, + > where K: Type + 'b, K::Ref<'a>: KeyRef<'a, K>, @@ -366,7 +636,10 @@ where height: Height, key: impl Into>, value: impl Into>, - ) -> Result>, Among> + ) -> Result< + Option>, + Among, + > where K: Type + 'b, K::Ref<'a>: KeyRef<'a, K>, @@ -393,7 +666,7 @@ where /// ## 
Example /// /// ```rust - /// use skl::{map::{sync::SkipMap, Map}, ValueBuilder, Options}; + /// use skl::{generic::{unique::{sync::SkipMap, Map}, Builder}, ValueBuilder}; /// /// struct Person { /// id: u32, @@ -414,12 +687,12 @@ where /// /// let encoded_size = alice.encoded_size(); /// - /// let l = Options::new().with_capacity(1024).alloc::<[u8], [u8], SkipMap<_, _>>().unwrap(); + /// let l = Builder::new().with_capacity(1024).alloc::>().unwrap(); /// /// let vb = ValueBuilder::new(encoded_size, |val: &mut skl::VacantBuffer<'_>| { /// val.put_u32_le(alice.id).unwrap(); /// val.put_slice(alice.name.as_bytes()).unwrap(); - /// Ok(()) + /// Ok(encoded_size) /// }); /// l.get_or_insert_with_value_builder::(b"alice".as_slice(), vb) /// .unwrap(); @@ -429,8 +702,11 @@ where fn get_or_insert_with_value_builder<'a, 'b: 'a, E>( &'a self, key: impl Into>, - value_builder: ValueBuilder) -> Result<(), E>>, - ) -> Result>, Among> + value_builder: ValueBuilder) -> Result>, + ) -> Result< + Option>, + Among, + > where K: Type + 'b, K::Ref<'a>: KeyRef<'a, K>, @@ -455,7 +731,7 @@ where /// ## Example /// /// ```rust - /// use skl::{map::{sync::SkipMap, Map}, ValueBuilder, Options, Arena}; + /// use skl::{generic::{unique::{sync::SkipMap, Map}, Builder}, ValueBuilder, Arena}; /// /// struct Person { /// id: u32, @@ -476,12 +752,12 @@ where /// /// let encoded_size = alice.encoded_size(); /// - /// let l = Options::new().with_capacity(1024).alloc::<[u8], [u8], SkipMap<_, _>>().unwrap(); + /// let l = Builder::new().with_capacity(1024).alloc::>().unwrap(); /// /// let vb = ValueBuilder::new(encoded_size, |val: &mut skl::VacantBuffer<'_>| { /// val.put_u32_le(alice.id).unwrap(); /// val.put_slice(alice.name.as_bytes()).unwrap(); - /// Ok(()) + /// Ok(encoded_size) /// }); /// /// let height = l.random_height(); @@ -494,8 +770,11 @@ where &'a self, height: Height, key: impl Into>, - value_builder: ValueBuilder) -> Result<(), E>>, - ) -> Result>, Among> + value_builder: ValueBuilder) -> Result>, + ) -> Result< + Option>, + Among, + > where K: Type + 'b, K::Ref<'a>: KeyRef<'a, K>, @@ -524,7 +803,7 @@ where /// ## Example /// /// ```rust - /// use skl::{map::{sync::SkipMap, Map}, KeyBuilder, ValueBuilder, Options}; + /// use skl::{generic::{unique::{sync::SkipMap, Map}, Builder}, KeyBuilder, ValueBuilder}; /// /// struct Person { /// id: u32, @@ -545,17 +824,17 @@ where /// /// let encoded_size = alice.encoded_size(); /// - /// let l = Options::new().with_capacity(1024).alloc::<[u8], [u8], SkipMap<_, _>>().unwrap(); + /// let l = Builder::new().with_capacity(1024).alloc::>().unwrap(); /// /// let kb = KeyBuilder::new(5u8.into(), |key: &mut skl::VacantBuffer<'_>| { /// key.put_slice(b"alice").unwrap(); - /// Ok(()) + /// Ok(5) /// }); /// /// let vb = ValueBuilder::new(encoded_size, |val: &mut skl::VacantBuffer<'_>| { /// val.put_u32_le(alice.id).unwrap(); /// val.put_slice(alice.name.as_bytes()).unwrap(); - /// Ok(()) + /// Ok(encoded_size) /// }); /// /// l.insert_with_builders::<(), ()>(kb, vb) @@ -564,9 +843,9 @@ where #[inline] fn insert_with_builders<'a, KE, VE>( &'a self, - key_builder: KeyBuilder) -> Result<(), KE>>, - value_builder: ValueBuilder) -> Result<(), VE>>, - ) -> Result>, Among> + key_builder: KeyBuilder) -> Result>, + value_builder: ValueBuilder) -> Result>, + ) -> Result>, Among> where K: Type, K::Ref<'a>: KeyRef<'a, K>, @@ -591,7 +870,7 @@ where /// ## Example /// /// ```rust - /// use skl::{map::{sync::SkipMap, Map}, KeyBuilder, ValueBuilder, Options, Arena}; + /// use 
skl::{generic::{unique::{sync::SkipMap, Map}, Builder}, KeyBuilder, ValueBuilder, Arena}; /// /// struct Person { /// id: u32, @@ -612,17 +891,17 @@ where /// /// let encoded_size = alice.encoded_size(); /// - /// let l = Options::new().with_capacity(1024).alloc::<[u8], [u8], SkipMap<_, _>>().unwrap(); + /// let l = Builder::new().with_capacity(1024).alloc::>().unwrap(); /// /// let kb = KeyBuilder::new(5u8.into(), |key: &mut skl::VacantBuffer<'_>| { /// key.put_slice(b"alice").unwrap(); - /// Ok(()) + /// Ok(5) /// }); /// /// let vb = ValueBuilder::new(encoded_size, |val: &mut skl::VacantBuffer<'_>| { /// val.put_u32_le(alice.id).unwrap(); /// val.put_slice(alice.name.as_bytes()).unwrap(); - /// Ok(()) + /// Ok(encoded_size) /// }); /// /// let height = l.random_height(); @@ -633,9 +912,9 @@ where fn insert_at_height_with_builders<'a, KE, VE>( &'a self, height: Height, - key_builder: KeyBuilder) -> Result<(), KE>>, - value_builder: ValueBuilder) -> Result<(), VE>>, - ) -> Result>, Among> + key_builder: KeyBuilder) -> Result>, + value_builder: ValueBuilder) -> Result>, + ) -> Result>, Among> where K: Type, K::Ref<'a>: KeyRef<'a, K>, @@ -659,7 +938,7 @@ where /// ## Example /// /// ```rust - /// use skl::{map::{sync::SkipMap, Map}, KeyBuilder, ValueBuilder, Options}; + /// use skl::{generic::{unique::{sync::SkipMap, Map}, Builder}, KeyBuilder, ValueBuilder}; /// /// struct Person { /// id: u32, @@ -680,17 +959,17 @@ where /// /// let encoded_size = alice.encoded_size(); /// - /// let l = Options::new().with_capacity(1024).alloc::<[u8], [u8], SkipMap<_, _>>().unwrap(); + /// let l = Builder::new().with_capacity(1024).alloc::>().unwrap(); /// /// let kb = KeyBuilder::new(5u8.into(), |key: &mut skl::VacantBuffer<'_>| { /// key.put_slice(b"alice").unwrap(); - /// Ok(()) + /// Ok(5) /// }); /// /// let vb = ValueBuilder::new(encoded_size, |val: &mut skl::VacantBuffer<'_>| { /// val.put_u32_le(alice.id).unwrap(); /// val.put_slice(alice.name.as_bytes()).unwrap(); - /// Ok(()) + /// Ok(encoded_size) /// }); /// /// l.get_or_insert_with_builders::<(), ()>(kb, vb) @@ -699,9 +978,9 @@ where #[inline] fn get_or_insert_with_builders<'a, KE, VE>( &'a self, - key_builder: KeyBuilder) -> Result<(), KE>>, - value_builder: ValueBuilder) -> Result<(), VE>>, - ) -> Result>, Among> + key_builder: KeyBuilder) -> Result>, + value_builder: ValueBuilder) -> Result>, + ) -> Result>, Among> where K: Type, K::Ref<'a>: KeyRef<'a, K>, @@ -723,7 +1002,7 @@ where /// ## Example /// /// ```rust - /// use skl::{map::{sync::SkipMap, Map}, KeyBuilder, ValueBuilder, Options, Arena}; + /// use skl::{generic::{unique::{sync::SkipMap, Map}, Builder}, KeyBuilder, ValueBuilder, Arena}; /// /// struct Person { /// id: u32, @@ -744,17 +1023,17 @@ where /// /// let encoded_size = alice.encoded_size(); /// - /// let l = Options::new().with_capacity(1024).alloc::<[u8], [u8], SkipMap<_, _>>().unwrap(); + /// let l = Builder::new().with_capacity(1024).alloc::>().unwrap(); /// /// let kb = KeyBuilder::new(5u8.into(), |key: &mut skl::VacantBuffer<'_>| { /// key.put_slice(b"alice").unwrap(); - /// Ok(()) + /// Ok(5) /// }); /// /// let vb = ValueBuilder::new(encoded_size, |val: &mut skl::VacantBuffer<'_>| { /// val.put_u32_le(alice.id).unwrap(); /// val.put_slice(alice.name.as_bytes()).unwrap(); - /// Ok(()) + /// Ok(encoded_size) /// }); /// /// let height = l.random_height(); @@ -765,9 +1044,9 @@ where fn get_or_insert_at_height_with_builders<'a, KE, VE>( &'a self, height: Height, - key_builder: KeyBuilder) -> Result<(), KE>>, - 
value_builder: ValueBuilder) -> Result<(), VE>>, - ) -> Result>, Among> + key_builder: KeyBuilder) -> Result>, + value_builder: ValueBuilder) -> Result>, + ) -> Result>, Among> where K: Type, K::Ref<'a>: KeyRef<'a, K>, @@ -793,7 +1072,7 @@ where fn remove<'a, 'b: 'a>( &'a self, key: impl Into>, - ) -> Result>, Either> + ) -> Result>, Either> where K: Type + 'b, K::Ref<'a>: KeyRef<'a, K>, @@ -816,7 +1095,7 @@ where &'a self, height: Height, key: impl Into>, - ) -> Result>, Either> + ) -> Result>, Either> where K: Type + 'b, K::Ref<'a>: KeyRef<'a, K>, @@ -841,7 +1120,7 @@ where fn get_or_remove<'a, 'b: 'a>( &'a self, key: impl Into>, - ) -> Result>, Either> + ) -> Result>, Either> where K: Type + 'b, K::Ref<'a>: KeyRef<'a, K>, @@ -860,9 +1139,9 @@ where /// ## Example /// /// ```rust - /// use skl::{map::{sync::SkipMap, Map}, Options, Arena}; + /// use skl::{generic::{unique::{sync::SkipMap, Map}, Builder}, Arena}; /// - /// let map = Options::new().with_capacity(1024).alloc::>().unwrap(); + /// let map = Builder::new().with_capacity(1024).alloc::>().unwrap(); /// /// map.insert("hello", "world").unwrap(); /// @@ -875,7 +1154,7 @@ where &'a self, height: Height, key: impl Into>, - ) -> Result>, Either> + ) -> Result>, Either> where K: Type + 'b, K::Ref<'a>: KeyRef<'a, K>, @@ -900,7 +1179,7 @@ where /// ## Example /// /// ```rust - /// use skl::{map::{sync::SkipMap, Map}, KeyBuilder, Options}; + /// use skl::{generic::{unique::{sync::SkipMap, Map}, Builder}, KeyBuilder}; /// /// struct Person { /// id: u32, @@ -921,11 +1200,11 @@ where /// /// let encoded_size = alice.encoded_size(); /// - /// let l = Options::new().with_capacity(1024).alloc::<[u8], [u8], SkipMap<_, _>>().unwrap(); + /// let l = Builder::new().with_capacity(1024).alloc::>().unwrap(); /// /// let kb = KeyBuilder::new(5u8.into(), |key: &mut skl::VacantBuffer<'_>| { /// key.put_slice(b"alice").unwrap(); - /// Ok(()) + /// Ok(5) /// }); /// l.get_or_remove_with_builder::(kb) /// .unwrap(); @@ -933,8 +1212,8 @@ where #[inline] fn get_or_remove_with_builder<'a, 'b: 'a, E>( &'a self, - key_builder: KeyBuilder) -> Result<(), E>>, - ) -> Result>, Either> + key_builder: KeyBuilder) -> Result>, + ) -> Result>, Either> where K: Type, K::Ref<'a>: KeyRef<'a, K>, @@ -957,7 +1236,7 @@ where /// ## Example /// /// ```rust - /// use skl::{map::{sync::SkipMap, Map}, KeyBuilder, Options, Arena}; + /// use skl::{generic::{unique::{sync::SkipMap, Map}, Builder}, KeyBuilder, Arena}; /// /// struct Person { /// id: u32, @@ -978,11 +1257,11 @@ where /// /// let encoded_size = alice.encoded_size(); /// - /// let l = Options::new().with_capacity(1024).alloc::<[u8], [u8], SkipMap<_, _>>().unwrap(); + /// let l = Builder::new().with_capacity(1024).alloc::>().unwrap(); /// /// let kb = KeyBuilder::new(5u8.into(), |key: &mut skl::VacantBuffer<'_>| { /// key.put_slice(b"alice").unwrap(); - /// Ok(()) + /// Ok(5) /// }); /// let height = l.random_height(); /// l.get_or_remove_at_height_with_builder::(height, kb) @@ -992,8 +1271,8 @@ where fn get_or_remove_at_height_with_builder<'a, 'b: 'a, E>( &'a self, height: Height, - key_builder: KeyBuilder) -> Result<(), E>>, - ) -> Result>, Either> + key_builder: KeyBuilder) -> Result>, + ) -> Result>, Either> where K: Type, K::Ref<'a>: KeyRef<'a, K>, @@ -1004,12 +1283,3 @@ where .get_or_remove_at_height_with_builder(MIN_VERSION, height, key_builder) } } - -impl Map for T -where - K: ?Sized + 'static, - V: ?Sized + 'static, - T: Arena, - ::Node: WithoutVersion, -{ -} diff --git a/src/lib.rs b/src/lib.rs index 
99e1848..183124b 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -19,53 +19,137 @@ extern crate std; mod allocator; pub use allocator::GenericAllocator; -/// Skiplist implementation -mod base; +/// The dynamic key-value type `SkipMap`s. +pub mod dynamic; + +/// The generic key-value type `SkipMap`s. +pub mod generic; /// Error types for the `SkipMap`s. pub mod error; -mod options; -pub use options::*; +/// Options for the `SkipMap`s. +#[macro_use] +pub mod options; +pub use options::Options; mod traits; -pub use traits::{map, multiple_version, Arena}; +pub use traits::Arena; mod types; pub use types::*; -pub use dbutils::equivalent::*; - -/// Iterators for the skipmaps. -pub mod iter { - pub use super::base::iterator::{Iter, IterAll}; -} - #[cfg(any( all(test, not(miri)), - all_tests, - test_unsync_map, - test_unsync_versioned, - test_sync_map, - test_sync_versioned, - test_sync_map_concurrent, - test_sync_multiple_version_concurrent, - test_sync_map_concurrent_with_optimistic_freelist, - test_sync_multiple_version_concurrent_with_optimistic_freelist, - test_sync_map_concurrent_with_pessimistic_freelist, - test_sync_multiple_version_concurrent_with_pessimistic_freelist, + all_skl_tests, + test_generic_unsync_map, + test_generic_unsync_versioned, + test_generic_sync_map, + test_generic_sync_versioned, + test_generic_sync_map_concurrent, + test_generic_sync_multiple_version_concurrent, + test_generic_sync_map_concurrent_with_optimistic_freelist, + test_generic_sync_multiple_version_concurrent_with_optimistic_freelist, + test_generic_sync_map_concurrent_with_pessimistic_freelist, + test_generic_sync_multiple_version_concurrent_with_pessimistic_freelist, + test_dynamic_unsync_map, + test_dynamic_unsync_versioned, + test_dynamic_sync_map, + test_dynamic_sync_versioned, + test_dynamic_sync_map_concurrent, + test_dynamic_sync_multiple_version_concurrent, + test_dynamic_sync_map_concurrent_with_optimistic_freelist, + test_dynamic_sync_multiple_version_concurrent_with_optimistic_freelist, + test_dynamic_sync_map_concurrent_with_pessimistic_freelist, + test_dynamic_sync_multiple_version_concurrent_with_pessimistic_freelist, ))] mod tests; pub use among; pub use either; -pub use rarena_allocator::{Allocator as ArenaAllocator, ArenaPosition}; +pub use rarena_allocator::Allocator; const MAX_HEIGHT: usize = 1 << 5; const MIN_VERSION: Version = Version::MIN; /// The tombstone value size, if a node's value size is equal to this value, then it is a tombstone. const REMOVE: u32 = u32::MAX; +/// A helper struct for caching splice information +pub struct Inserter<'a, P> { + spl: [Splice
<P>; crate::MAX_HEIGHT],
+  height: u32,
+  _m: core::marker::PhantomData<&'a ()>,
+}
+
+impl<P: NodePointer> Default for Inserter<'_, P> {
+  #[inline]
+  fn default() -> Self {
+    Self {
+      spl: [
+        Splice::default(),
+        Splice::default(),
+        Splice::default(),
+        Splice::default(),
+        Splice::default(),
+        Splice::default(),
+        Splice::default(),
+        Splice::default(),
+        Splice::default(),
+        Splice::default(),
+        Splice::default(),
+        Splice::default(),
+        Splice::default(),
+        Splice::default(),
+        Splice::default(),
+        Splice::default(),
+        Splice::default(),
+        Splice::default(),
+        Splice::default(),
+        Splice::default(),
+        Splice::default(),
+        Splice::default(),
+        Splice::default(),
+        Splice::default(),
+        Splice::default(),
+        Splice::default(),
+        Splice::default(),
+        Splice::default(),
+        Splice::default(),
+        Splice::default(),
+        Splice::default(),
+        Splice::default(),
+      ],
+      height: 0,
+      _m: core::marker::PhantomData,
+    }
+  }
+}
+
+#[derive(Debug, Clone, Copy)]
+struct Splice<P> {
+  prev: P,
+  next: P,
+}
+
+impl<P: NodePointer> Default for Splice<P> {
+  #[inline]
+  fn default() -> Self {
+    Self {
+      prev: P::NULL,
+      next: P::NULL,
+    }
+  }
+}
+
+struct FindResult<P> {
+  // both key and version are equal.
+  found: bool,
+  // only key is equal.
+  found_key: Option<Pointer>,
+  splice: Splice<P>,
+  curr: Option<P>
, +} + /// Utility function to generate a random height for a new node. #[cfg(feature = "std")] pub fn random_height(max_height: Height) -> Height { @@ -277,6 +361,8 @@ mod sync; /// Implementations for single-threaded environments. mod unsync; +mod ref_counter; + #[inline] fn ty_ref<'a, T: dbutils::types::Type + ?Sized>(src: &'a [u8]) -> T::Ref<'a> { unsafe { as dbutils::types::TypeRef<'a>>::from_slice(src) } diff --git a/src/options.rs b/src/options.rs index c51129d..2784d98 100644 --- a/src/options.rs +++ b/src/options.rs @@ -1,14 +1,7 @@ -use core::mem; - pub use rarena_allocator::Freelist; use rarena_allocator::Options as ArenaOptions; -use super::{ - allocator::{Node, Sealed as AllocatorSealed}, - error::Error, - types::{Height, KeySize}, - Arena, -}; +use crate::types::{Height, KeySize}; /// The memory format version. pub(crate) const CURRENT_VERSION: u16 = 0; @@ -88,7 +81,7 @@ impl Options { freelist: Freelist::None, policy: CompressionPolicy::Fast, reserved: 0, - lock_meta: true, + lock_meta: false, #[cfg(all(feature = "memmap", not(target_family = "wasm")))] create_new: false, @@ -186,7 +179,7 @@ impl Options { /// ## Example /// /// ``` - /// use skl::{Options, Freelist}; + /// use skl::{Options, options::Freelist}; /// /// let opts = Options::new().with_freelist(Freelist::Optimistic); /// ``` @@ -203,7 +196,7 @@ impl Options { /// ## Example /// /// ``` - /// use skl::{Options, CompressionPolicy}; + /// use skl::{Options, options::CompressionPolicy}; /// /// let opts = Options::new().with_compression_policy(CompressionPolicy::Fast); /// ``` @@ -471,7 +464,7 @@ impl Options { /// ## Example /// /// ```rust - /// use skl::{Options, Freelist}; + /// use skl::{Options, options::Freelist}; /// /// let opts = Options::new().with_freelist(Freelist::Optimistic); /// @@ -489,7 +482,7 @@ impl Options { /// ## Example /// /// ```rust - /// use skl::{Options, CompressionPolicy}; + /// use skl::{Options, options::CompressionPolicy}; /// /// let opts = Options::new().with_compression_policy(CompressionPolicy::Fast); /// @@ -499,88 +492,6 @@ impl Options { pub const fn compression_policy(&self) -> CompressionPolicy { self.policy } - - /// Returns the data offset of the `SkipMap` if the `SkipMap` is in unified memory layout. - /// - /// See also [`Options::data_offset`]. - /// - /// ## Example - /// - /// ```rust - /// use skl::{map::sync, multiple_version::unsync, Options, Arena}; - /// - /// let opts = Options::new().with_capacity(1024); - /// let data_offset_from_opts = opts.data_offset::<_, _, sync::SkipMap<[u8], [u8]>>(); - /// let map = opts.alloc::<_, _, sync::SkipMap<[u8], [u8]>>().unwrap(); - /// assert_eq!(data_offset_from_opts, map.data_offset()); - /// - /// let data_offset_from_opts = opts.data_offset_unify::<_, _, sync::SkipMap<[u8], [u8]>>(); - /// let map = opts.with_unify(true).alloc::<_, _, sync::SkipMap<[u8], [u8]>>().unwrap(); - /// assert_eq!(data_offset_from_opts, map.data_offset()); - /// - /// // Create a unsync ARENA. 
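An aside on the new `Inserter`/`Splice`/`FindResult` types added to `src/lib.rs` above: each `Splice` caches the (prev, next) node pair discovered at one level during a search, so a follow-up insert can splice the new node in without re-walking the list from the head. A standalone sketch of that caching idea, with a plain `u32` standing in for the crate's node-pointer type (illustrative names only, not skl's internals):

```rust
// Illustrative sketch only: `u32` stands in for a real node pointer.
#[derive(Clone, Copy, Debug, Default)]
struct Splice {
    prev: u32,
    next: u32,
}

// Caches the (prev, next) pair found at each level by the last search.
struct Inserter {
    spl: [Splice; 32], // one slot per possible level (MAX_HEIGHT = 1 << 5)
    height: u32,       // height at which the cached search ran
}

impl Inserter {
    fn new() -> Self {
        Self {
            spl: [Splice::default(); 32],
            height: 0,
        }
    }
}

fn main() {
    let mut ins = Inserter::new();
    // A search at height 3 records, per level, where a new node would link in.
    ins.height = 3;
    ins.spl[0] = Splice { prev: 10, next: 42 };
    assert_eq!(ins.spl[0].next, 42);
}
```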
- /// let opts = Options::new().with_capacity(1024); - /// let data_offset_from_opts = opts.data_offset::<_, _, unsync::SkipMap<[u8], [u8]>>(); - /// let map = opts.alloc::<_, _, unsync::SkipMap<[u8], [u8]>>().unwrap(); - /// assert_eq!(data_offset_from_opts, map.data_offset()); - /// - /// let data_offset_from_opts = opts.data_offset_unify::<_, _, unsync::SkipMap<[u8], [u8]>>(); - /// let map = opts.with_unify(true).alloc::<_, _, unsync::SkipMap<[u8], [u8]>>().unwrap(); - /// assert_eq!(data_offset_from_opts, map.data_offset()); - /// ``` - pub fn data_offset_unify(&self) -> usize - where - K: ?Sized + 'static, - V: ?Sized + 'static, - A: Arena, - { - let arena_opts = self.to_arena_options(); - let arena_data_offset = - arena_opts.data_offset_unify::<::Allocator>(); - - data_offset_in::(arena_data_offset, self.max_height(), true) - } - - /// Returns the data offset of the `SkipMap` if the `SkipMap` is not in unified memory layout. - /// - /// As the file backed `SkipMap` will only use the unified memory layout and ignore the unify configuration of `Options`, - /// so see also [`Options::data_offset_unify`], if you want to get the data offset of the `SkipMap` in unified memory layout. - /// - /// ## Example - /// - /// ```rust - /// use skl::{map::sync, multiple_version::unsync, Options, Arena}; - /// - /// let opts = Options::new().with_capacity(1024); - /// let data_offset_from_opts = opts.data_offset::<_, _, sync::SkipMap<[u8], [u8]>>(); - /// let map = opts.alloc::<_, _, sync::SkipMap<[u8], [u8]>>().unwrap(); - /// assert_eq!(data_offset_from_opts, map.data_offset()); - /// - /// let data_offset_from_opts = opts.data_offset_unify::<_, _, sync::SkipMap<[u8], [u8]>>(); - /// let map = opts.with_unify(true).alloc::<_, _, sync::SkipMap<[u8], [u8]>>().unwrap(); - /// assert_eq!(data_offset_from_opts, map.data_offset()); - /// - /// // Create a unsync ARENA. - /// let opts = Options::new().with_capacity(1024); - /// let data_offset_from_opts = opts.data_offset::<_, _, unsync::SkipMap<[u8], [u8]>>(); - /// let map = opts.alloc::<_, _, unsync::SkipMap<[u8], [u8]>>().unwrap(); - /// assert_eq!(data_offset_from_opts, map.data_offset()); - /// - /// let data_offset_from_opts = opts.data_offset_unify::<_, _, unsync::SkipMap<[u8], [u8]>>(); - /// let map = opts.with_unify(true).alloc::<_, _, unsync::SkipMap<[u8], [u8]>>().unwrap(); - /// assert_eq!(data_offset_from_opts, map.data_offset()); - /// ``` - pub fn data_offset(&self) -> usize - where - K: ?Sized + 'static, - V: ?Sized + 'static, - A: Arena, - { - let arena_opts = self.to_arena_options(); - let arena_data_offset = - arena_opts.data_offset::<::Allocator>(); - data_offset_in::(arena_data_offset, self.max_height(), false) - } } impl Options { @@ -597,7 +508,7 @@ impl Options { #[cfg(all(feature = "memmap", not(target_family = "wasm")))] { opts - .with_lock_meta(false) // we need to avoid arena's lock_meta + .with_lock_meta(self.lock_meta()) .with_create(self.create()) .with_create_new(self.create_new()) .with_read(self.read()) @@ -615,69 +526,823 @@ impl Options { } } -impl Options { - /// Create a new map which is backed by a `AlignedVec`. - /// - /// **Note:** The capacity stands for how many memory allocated, - /// it does not mean the skiplist can store `cap` entries. - /// - /// **What the difference between this method and [`Options::map_anon`]?** - /// - /// 1. This method will use an `AlignedVec` ensures we are working within Rust's memory safety guarantees. 
- /// Even if we are working with raw pointers with `Box::into_raw`, - /// the backend ARENA will reclaim the ownership of this memory by converting it back to a `Box` - /// when dropping the backend ARENA. Since `AlignedVec` uses heap memory, the data might be more cache-friendly, - /// especially if you're frequently accessing or modifying it. - /// - /// 2. Where as [`Options::map_anon`] will use mmap anonymous to require memory from the OS. - /// If you require very large contiguous memory regions, `mmap` might be more suitable because - /// it's more direct in requesting large chunks of memory from the OS. - /// - /// ## Example - /// - /// ```rust - /// use skl::{map::sync, multiple_version::unsync, Options}; - /// - /// let map = Options::new().with_capacity(1024).alloc::<_, _, sync::SkipMap<[u8], [u8]>>().unwrap(); - /// - /// let arena = Options::new().with_capacity(1024).alloc::<_, _, unsync::SkipMap<[u8], [u8]>>().unwrap(); - /// ``` - #[inline] - pub fn alloc(self) -> Result - where - K: ?Sized + 'static, - V: ?Sized + 'static, - T: Arena, - { - let node_align = mem::align_of::<::Node>(); +#[macro_export] +#[doc(hidden)] +macro_rules! __builder_opts { + ($mod:ident::$name:ident) => { + /// Set the reserved bytes of the ARENA. + /// + /// The reserved is used to configure the start position of the ARENA. This is useful + /// when you want to add some bytes before the ARENA, e.g. when using the memory map file backed ARENA, + /// you can set the reserved to the size to `8` to store a 8 bytes checksum. + /// + /// The default reserved is `0`. + /// + /// ## Example + /// + /// ```rust + #[doc = concat!("use skl::", stringify!($mod), "::", stringify!($name), ";")] + /// + #[doc = concat!("let opts = ", stringify!($name), "::new().with_reserved(8);")] + /// ``` + #[inline] + pub const fn with_reserved(mut self, reserved: u32) -> Self { + self.options.reserved = reserved; + self + } - self - .to_arena_options() - .with_maximum_alignment(node_align) - .alloc::<::Allocator>() - .map_err(Into::into) - .and_then(|arena| T::construct(arena, self, false)) - } -} + /// Set if lock the meta of the ARENA in the memory to prevent OS from swapping out the first page of ARENA. + /// When using memory map backed ARENA, the meta of the ARENA + /// is in the first page, meta is frequently accessed, + /// lock (`mlock` on the first page) the meta can reduce the page fault, + /// but yes, this means that one `SkipMap` will have one page are locked in memory, + /// and will not be swapped out. So, this is a trade-off between performance and memory usage. + /// + /// Default is `true`. + /// + /// This configuration has no effect on windows and vec backed ARENA. + /// + /// ## Example + /// + /// ```rust + #[doc = concat!("use skl::", stringify!($mod), "::", stringify!($name), ";")] + /// + #[doc = concat!("let opts = ", stringify!($name), "::new().with_lock_meta(false);")] + /// ``` + #[inline] + pub const fn with_lock_meta(mut self, lock_meta: bool) -> Self { + self.options.lock_meta = lock_meta; + self + } -#[inline] -fn data_offset_in(offset: usize, max_height: Height, unify: bool) -> usize { - let meta_end = if unify { - let alignment = mem::align_of::(); - let meta_offset = (offset + alignment - 1) & !(alignment - 1); - meta_offset + mem::size_of::() - } else { - offset - }; + /// Set the magic version of the [`Arena`](crate::traits::Arena). 
+ /// + /// This is used by the application using [`Arena`](crate::traits::Arena) + /// to ensure that it doesn't open the [`Arena`](crate::traits::Arena) + /// with incompatible data format. + /// + /// The default value is `0`. + /// + /// ## Example + /// + /// ``` + #[doc = concat!("use skl::", stringify!($mod), "::", stringify!($name), ";")] + /// + #[doc = concat!("let opts = ", stringify!($name), "::new().with_magic_version(1);")] + /// ``` + #[inline] + pub const fn with_magic_version(mut self, magic_version: u16) -> Self { + self.options.magic_version = magic_version; + self + } + + /// Set the [`Freelist`] kind of the [`Arena`](crate::traits::Arena). + /// + /// The default value is [`Freelist::Optimistic`]. + /// + /// ## Example + /// + /// ``` + #[doc = concat!("use skl::{", stringify!($mod), "::", stringify!($name), ", options::Freelist};")] + /// + #[doc = concat!("let opts = ", stringify!($name), "::new().with_freelist(Freelist::Optimistic);")] + /// ``` + #[inline] + pub const fn with_freelist(mut self, freelist: Freelist) -> Self { + self.options.freelist = freelist; + self + } + + /// Set the compression policy of the key in [`Arena`](crate::traits::Arena). + /// + /// The default value is [`CompressionPolicy::Fast`]. + /// + /// ## Example + /// + /// ``` + #[doc = concat!("use skl::{", stringify!($mod), "::", stringify!($name), ", options::CompressionPolicy};")] + /// + #[doc = concat!("let opts = ", stringify!($name), "::new().with_compression_policy(CompressionPolicy::Fast);")] + /// ``` + #[inline] + pub const fn with_compression_policy(mut self, policy: CompressionPolicy) -> Self { + self.options.policy = policy; + self + } + + /// Set if use the unify memory layout of the [`Arena`](crate::traits::Arena). + /// + /// File backed [`Arena`](crate::traits::Arena) has different memory layout with other kind backed [`Arena`](crate::traits::Arena), + /// set this value to `true` will unify the memory layout of the [`Arena`](crate::traits::Arena), which means + /// all kinds of backed [`Arena`](crate::traits::Arena) will have the same memory layout. + /// + /// This value will be ignored if the [`Arena`](crate::traits::Arena) is backed by a file backed memory map. + /// + /// The default value is `false`. + /// + /// ## Example + /// + /// ``` + #[doc = concat!("use skl::", stringify!($mod), "::", stringify!($name), ";")] + /// + #[doc = concat!("let opts = ", stringify!($name), "::new().with_unify(true);")] + /// ``` + #[inline] + pub const fn with_unify(mut self, unify: bool) -> Self { + self.options.unify = unify; + self + } + + /// Sets the maximum size of the value. + /// + /// Default is `u32::MAX`. + /// + /// ## Example + /// + /// ``` + #[doc = concat!("use skl::", stringify!($mod), "::", stringify!($name), ";")] + /// + #[doc = concat!("let options = ", stringify!($name), "::new().with_max_value_size(1024);")] + /// ``` + #[inline] + pub const fn with_max_value_size(mut self, size: u32) -> Self { + self.options.max_value_size = size; + self + } + + /// Sets the maximum size of the key. + /// + /// The maximum size of the key is `u27::MAX`. + /// + /// Default is `65535`. 
+ /// + /// ## Example + /// + /// ``` + #[doc = concat!("use skl::{", stringify!($mod), "::", stringify!($name), ", KeySize};")] + /// + #[doc = concat!("let options = ", stringify!($name), "::new().with_max_key_size(KeySize::new());")] + /// ``` + #[inline] + pub const fn with_max_key_size(mut self, size: KeySize) -> Self { + self.options.max_key_size = size; + self + } + + /// Sets the maximum height. + /// + /// Default is `20`. The maximum height is `31`. The minimum height is `1`. + /// + /// ## Example + /// + /// ```rust + #[doc = concat!("use skl::{", stringify!($mod), "::", stringify!($name), ", Height};")] + /// + #[doc = concat!("let options = ", stringify!($name), "::new().with_max_height(Height::new());")] + /// ``` + #[inline] + pub const fn with_max_height(mut self, height: Height) -> Self { + self.options.max_height = height; + self + } + + /// Sets the capacity of the underlying ARENA. + /// + /// ## Example + /// + /// ``` + #[doc = concat!("use skl::", stringify!($mod), "::", stringify!($name), ";")] + /// + #[doc = concat!("let options = ", stringify!($name), "::new().with_capacity(1024);")] + /// ``` + #[inline] + pub const fn with_capacity(mut self, capacity: u32) -> Self { + self.options.capacity = Some(capacity); + self + } + + /// Get the reserved of the ARENA. + /// + /// The reserved is used to configure the start position of the ARENA. This is useful + /// when you want to add some bytes before the ARENA, e.g. when using the memory map file backed ARENA, + /// you can set the reserved to the size to `8` to store a 8 bytes checksum. + /// + /// The default reserved is `0`. + /// + /// ## Example + /// + /// ```rust + #[doc = concat!("use skl::", stringify!($mod), "::", stringify!($name), ";")] + /// + #[doc = concat!("let opts = ", stringify!($name), "::new().with_reserved(8);")] + /// + /// assert_eq!(opts.reserved(), 8); + /// ``` + #[inline] + pub const fn reserved(&self) -> u32 { + self.options.reserved + } + + /// Get if lock the meta of the ARENA in the memory to prevent OS from swapping out the first page of ARENA. + /// When using memory map backed ARENA, the meta of the ARENA + /// is in the first page, meta is frequently accessed, + /// lock (`mlock` on the first page) the meta can reduce the page fault, + /// but yes, this means that one `SkipMap` will have one page are locked in memory, + /// and will not be swapped out. So, this is a trade-off between performance and memory usage. + /// + /// ## Example + /// + /// ```rust + #[doc = concat!("use skl::", stringify!($mod), "::", stringify!($name), ";")] + /// + #[doc = concat!("let opts = ", stringify!($name), "::new().with_lock_meta(false);")] + /// + /// assert_eq!(opts.lock_meta(), false); + /// ``` + #[inline] + pub const fn lock_meta(&self) -> bool { + self.options.lock_meta + } + + /// Returns the maximum size of the value. + /// + /// Default is `u32::MAX`. The maximum size of the value is `u32::MAX - header`. + /// + /// ## Example + /// + /// ``` + #[doc = concat!("use skl::", stringify!($mod), "::", stringify!($name), ";")] + /// + #[doc = concat!("let options = ", stringify!($name), "::new().with_max_value_size(1024);")] + /// ``` + #[inline] + pub const fn max_value_size(&self) -> u32 { + self.options.max_value_size + } + + /// Returns the maximum size of the key. + /// + /// The maximum size of the key is `u27::MAX`. + /// + /// Default is `65535`. 
+ /// + /// ## Example + /// + /// ``` + #[doc = concat!("use skl::{", stringify!($mod), "::", stringify!($name), ", KeySize};")] + /// + #[doc = concat!("let options = ", stringify!($name), "::new().with_max_key_size(KeySize::new());")] + /// + /// assert_eq!(options.max_key_size(), u16::MAX); + /// ``` + #[inline] + pub const fn max_key_size(&self) -> KeySize { + self.options.max_key_size + } + + /// Returns the maximum height. + /// + /// Default is `20`. The maximum height is `31`. The minimum height is `1`. + /// + /// ## Example + /// + /// ``` + #[doc = concat!("use skl::{", stringify!($mod), "::", stringify!($name), ", Height};")] + /// + #[doc = concat!("let options = ", stringify!($name), "::new().with_max_height(Height::from_u8_unchecked(5));")] + /// + /// assert_eq!(options.max_height(), 5); + /// ``` + #[inline] + pub const fn max_height(&self) -> Height { + self.options.max_height + } + + /// Returns the configuration of underlying ARENA size. + /// + /// ## Example + /// + /// ```rust + #[doc = concat!("use skl::", stringify!($mod), "::", stringify!($name), ";")] + /// + #[doc = concat!("let options = ", stringify!($name), "::new().with_capacity(1024);")] + /// ``` + #[inline] + pub const fn capacity(&self) -> u32 { + match self.options.capacity { + Some(capacity) => capacity, + None => 0, + } + } + + /// Get if use the unify memory layout of the [`Arena`](crate::traits::Arena). + /// + /// File backed [`Arena`](crate::traits::Arena) has different memory layout with other kind backed [`Arena`](crate::traits::Arena), + /// set this value to `true` will unify the memory layout of the [`Arena`](crate::traits::Arena), which means + /// all kinds of backed [`Arena`](crate::traits::Arena) will have the same memory layout. + /// + /// This value will be ignored if the [`Arena`](crate::traits::Arena) is backed by a file backed memory map. + /// + /// The default value is `false`. + /// + /// ## Example + /// + /// ```rust + #[doc = concat!("use skl::", stringify!($mod), "::", stringify!($name), ";")] + /// + #[doc = concat!("let opts = ", stringify!($name), "::new().with_unify(true);")] + /// + /// assert_eq!(opts.unify(), true); + /// ``` + #[inline] + pub const fn unify(&self) -> bool { + self.options.unify + } - let alignment = mem::align_of::(); - let head_offset = (meta_end + alignment - 1) & !(alignment - 1); - let head_end = head_offset - + mem::size_of::() - + mem::size_of::<::Link>() * max_height.to_usize(); + /// Get the magic version of the [`Arena`](crate::traits::Arena). + /// + /// This is used by the application using [`Arena`](crate::traits::Arena) + /// to ensure that it doesn't open the [`Arena`](crate::traits::Arena) + /// with incompatible data format. + /// + /// The default value is `0`. + /// + /// ## Example + /// + /// ```rust + #[doc = concat!("use skl::", stringify!($mod), "::", stringify!($name), ";")] + /// + #[doc = concat!("let opts = ", stringify!($name), "::new().with_magic_version(1);")] + /// + /// assert_eq!(opts.magic_version(), 1); + /// ``` + #[inline] + pub const fn magic_version(&self) -> u16 { + self.options.magic_version + } + + /// Get the [`Freelist`] kind of the [`Arena`](crate::traits::Arena). + /// + /// The default value is [`Freelist::Optimistic`]. 
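The `__builder_opts!` macro above stamps these setter/getter pairs onto each builder, so they chain exactly like the plain `Options` API they mirror. A doctest-style sketch using only methods that appear elsewhere in this patch:

```rust
use skl::{Options, options::{CompressionPolicy, Freelist}};

// Chain the builder-style setters, then read the values back with the
// matching getters generated alongside them.
let opts = Options::new()
    .with_capacity(1024)
    .with_freelist(Freelist::Optimistic)
    .with_compression_policy(CompressionPolicy::Fast);

assert_eq!(opts.freelist(), Freelist::Optimistic);
assert_eq!(opts.compression_policy(), CompressionPolicy::Fast);
```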
+ /// + /// ## Example + /// + /// ```rust + #[doc = concat!("use skl::{", stringify!($mod), "::", stringify!($name), ", options::Freelist};")] + /// + #[doc = concat!("let opts = ", stringify!($name), "::new().with_freelist(Freelist::Optimistic);")] + /// + /// assert_eq!(opts.freelist(), Freelist::Optimistic); + /// ``` + #[inline] + pub const fn freelist(&self) -> Freelist { + self.options.freelist + } + + /// Get the compression policy of the keys in [`Arena`](crate::traits::Arena). + /// + /// The default value is [`CompressionPolicy::Fast`]. + /// + /// ## Example + /// + /// ```rust + #[doc = concat!("use skl::{", stringify!($mod), "::", stringify!($name), ", options::CompressionPolicy};")] + /// + #[doc = concat!("let opts = ", stringify!($name), "::new().with_compression_policy(CompressionPolicy::Fast);")] + /// + /// assert_eq!(opts.compression_policy(), CompressionPolicy::Fast); + /// ``` + #[inline] + pub const fn compression_policy(&self) -> CompressionPolicy { + self.options.policy + } + + /// Sets the option for read access. + /// + /// This option, when true, will indicate that the file should be + /// `read`-able if opened. + /// + /// ## Examples + /// + /// ```rust + #[doc = concat!("use skl::", stringify!($mod), "::", stringify!($name), ";")] + /// + #[doc = concat!("let opts = ", stringify!($name), "::new().with_read(true);")] + /// ``` + #[inline] + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn with_read(mut self, read: bool) -> Self { + self.options.read = read; + self + } + + /// Sets the option for write access. + /// + /// This option, when true, will indicate that the file should be + /// `write`-able if opened. + /// + /// If the file already exists, any write calls on it will overwrite its + /// contents, without truncating it. + /// + /// ## Examples + /// + /// ```rust + #[doc = concat!("use skl::", stringify!($mod), "::", stringify!($name), ";")] + /// + #[doc = concat!("let opts = ", stringify!($name), "::new().with_write(true);")] + /// ``` + #[inline] + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn with_write(mut self, write: bool) -> Self { + self.options.write = write; + self + } + + /// Sets the option for the append mode. + /// + /// This option, when true, means that writes will append to a file instead + /// of overwriting previous contents. + /// Note that setting `.write(true).append(true)` has the same effect as + /// setting only `.append(true)`. + /// + /// For most filesystems, the operating system guarantees that all writes are + /// atomic: no writes get mangled because another process writes at the same + /// time. + /// + /// One maybe obvious note when using append-mode: make sure that all data + /// that belongs together is written to the file in one operation. This + /// can be done by concatenating strings before passing them to [`write()`], + /// or using a buffered writer (with a buffer of adequate size), + /// and calling [`flush()`] when the message is complete. + /// + /// If a file is opened with both read and append access, beware that after + /// opening, and after every write, the position for reading may be set at the + /// end of the file. So, before writing, save the current position (using + /// [seek]\([SeekFrom](std::io::SeekFrom)::[Current]\(opts))), and restore it before the next read. 
+ /// + /// ## Note + /// + /// This function doesn't create the file if it doesn't exist. Use the + /// [`Options::with_create`] method to do so. + /// + /// [`write()`]: std::io::Write::write "io::Write::write" + /// [`flush()`]: std::io::Write::flush "io::Write::flush" + /// [seek]: std::io::Seek::seek "io::Seek::seek" + /// [Current]: std::io::SeekFrom::Current "io::SeekFrom::Current" + /// + /// ## Examples + /// + /// ```rust + #[doc = concat!("use skl::", stringify!($mod), "::", stringify!($name), ";")] + /// + #[doc = concat!("let opts = ", stringify!($name), "::new().with_append(true);")] + /// ``` + #[inline] + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn with_append(mut self, append: bool) -> Self { + self.options.write = true; + self.options.append = append; + self + } + + /// Sets the option for truncating a previous file. + /// + /// If a file is successfully opened with this option set it will truncate + /// the file to opts length if it already exists. + /// + /// The file must be opened with write access for truncate to work. + /// + /// ## Examples + /// + /// ```rust + #[doc = concat!("use skl::", stringify!($mod), "::", stringify!($name), ";")] + /// + #[doc = concat!("let opts = ", stringify!($name), "::new().with_write(true).with_truncate(true);")] + /// ``` + #[inline] + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn with_truncate(mut self, truncate: bool) -> Self { + self.options.truncate = truncate; + self.options.write = true; + self + } - let tail_offset = (head_end + alignment - 1) & !(alignment - 1); - tail_offset - + mem::size_of::() - + mem::size_of::<::Link>() * max_height.to_usize() + /// Sets the option to create a new file, or open it if it already exists. + /// If the file does not exist, it is created and set the lenght of the file to the given size. + /// + /// In order for the file to be created, [`Options::with_write`] or + /// [`Options::with_append`] access must be used. + /// + /// See also [`std::fs::write()`][std::fs::write] for a simple function to + /// create a file with some given data. + /// + /// ## Examples + /// + /// ```rust + #[doc = concat!("use skl::", stringify!($mod), "::", stringify!($name), ";")] + /// + #[doc = concat!("let opts = ", stringify!($name), "::new().with_write(true).with_create(true);")] + /// ``` + #[inline] + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn with_create(mut self, val: bool) -> Self { + self.options.create = val; + self + } + + /// Sets the option to create a new file and set the file length to the given value, failing if it already exists. + /// + /// No file is allowed to exist at the target location, also no (dangling) symlink. In this + /// way, if the call succeeds, the file returned is guaranteed to be new. + /// + /// This option is useful because it is atomic. Otherwise between checking + /// whether a file exists and creating a new one, the file may have been + /// created by another process (a TOCTOU race condition / attack). + /// + /// If `.with_create_new(true)` is set, [`.with_create()`](Options::with_create) and [`.with_truncate()`](Options::with_truncate) are + /// ignored. 
+ /// + /// The file must be opened with write or append access in order to create + /// a new file. + /// + /// ## Examples + /// + /// ```rust + #[doc = concat!("use skl::", stringify!($mod), "::", stringify!($name), ";")] + /// + #[doc = concat!("let opts = ", stringify!($name), "::new().with_write(true).with_create_new(true);")] + /// ``` + #[inline] + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn with_create_new(mut self, val: bool) -> Self { + self.options.create_new = val; + self + } + + /// Configures the memory map to start at byte `offset` from the beginning of the file. + /// + /// This option has no effect on anonymous memory maps or vec backed [`Arena`](crate::traits::Arena). + /// + /// By default, the offset is 0. + /// + /// ## Example + /// + /// ``` + #[doc = concat!("use skl::", stringify!($mod), "::", stringify!($name), ";")] + /// + #[doc = concat!("let opts = ", stringify!($name), "::new().with_offset(30);")] + /// ``` + #[inline] + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn with_offset(mut self, offset: u64) -> Self { + self.options.offset = offset; + self + } + + /// Configures the anonymous memory map to be suitable for a process or thread stack. + /// + /// This option corresponds to the `MAP_STACK` flag on Linux. It has no effect on Windows. + /// + /// This option has no effect on file-backed memory maps and vec backed [`Arena`](crate::traits::Arena). + /// + /// ## Example + /// + /// ``` + #[doc = concat!("use skl::", stringify!($mod), "::", stringify!($name), ";")] + /// + #[doc = concat!("let stack = ", stringify!($name), "::new().with_stack(true);")] + /// ``` + #[inline] + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn with_stack(mut self, stack: bool) -> Self { + self.options.stack = stack; + self + } + + /// Configures the anonymous memory map to be allocated using huge pages. + /// + /// This option corresponds to the `MAP_HUGETLB` flag on Linux. It has no effect on Windows. + /// + /// The size of the requested page can be specified in page bits. If not provided, the system + /// default is requested. The requested length should be a multiple of this, or the mapping + /// will fail. + /// + /// This option has no effect on file-backed memory maps and vec backed [`Arena`](crate::traits::Arena). + /// + /// ## Example + /// + /// ``` + #[doc = concat!("use skl::", stringify!($mod), "::", stringify!($name), ";")] + /// + #[doc = concat!("let huge = ", stringify!($name), "::new().with_huge(Some(8));")] + /// ``` + #[inline] + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn with_huge(mut self, page_bits: Option) -> Self { + self.options.huge = page_bits; + self + } + + /// Populate (prefault) page tables for a mapping. + /// + /// For a file mapping, this causes read-ahead on the file. This will help to reduce blocking on page faults later. + /// + /// This option corresponds to the `MAP_POPULATE` flag on Linux. It has no effect on Windows. + /// + /// This option has no effect on vec backed [`Arena`](crate::traits::Arena). 
+ /// + /// ## Example + /// + /// ``` + #[doc = concat!("use skl::", stringify!($mod), "::", stringify!($name), ";")] + /// + #[doc = concat!("let populate = ", stringify!($name), "::new().with_populate(true);")] + /// ``` + #[inline] + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub fn with_populate(mut self, populate: bool) -> Self { + self.options.populate = populate; + self + } + + /// Returns `true` if the file should be opened with read access. + /// + /// ## Examples + /// + /// ```rust + #[doc = concat!("use skl::", stringify!($mod), "::", stringify!($name), ";")] + /// + #[doc = concat!("let opts = ", stringify!($name), "::new().with_read(true);")] + /// assert_eq!(opts.read(), true); + /// ``` + #[inline] + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub const fn read(&self) -> bool { + self.options.read + } + + /// Returns `true` if the file should be opened with write access. + /// + /// ## Examples + /// + /// ```rust + #[doc = concat!("use skl::", stringify!($mod), "::", stringify!($name), ";")] + /// + #[doc = concat!("let opts = ", stringify!($name), "::new().with_write(true);")] + /// assert_eq!(opts.write(), true); + /// ``` + #[inline] + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub const fn write(&self) -> bool { + self.options.write + } + + /// Returns `true` if the file should be opened with append access. + /// + /// ## Examples + /// + /// ```rust + #[doc = concat!("use skl::", stringify!($mod), "::", stringify!($name), ";")] + /// + #[doc = concat!("let opts = ", stringify!($name), "::new().with_append(true);")] + /// assert_eq!(opts.append(), true); + /// ``` + #[inline] + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub const fn append(&self) -> bool { + self.options.append + } + + /// Returns `true` if the file should be opened with truncate access. + /// + /// ## Examples + /// + /// ```rust + #[doc = concat!("use skl::", stringify!($mod), "::", stringify!($name), ";")] + /// + #[doc = concat!("let opts = ", stringify!($name), "::new().with_truncate(true);")] + /// assert_eq!(opts.truncate(), true); + /// ``` + #[inline] + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub const fn truncate(&self) -> bool { + self.options.truncate + } + + /// Returns `true` if the file should be created if it does not exist. + /// + /// ## Examples + /// + /// ```rust + #[doc = concat!("use skl::", stringify!($mod), "::", stringify!($name), ";")] + /// + #[doc = concat!("let opts = ", stringify!($name), "::new().with_create(true);")] + /// assert_eq!(opts.create(), true); + /// ``` + #[inline] + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub const fn create(&self) -> bool { + self.options.create + } + + /// Returns `true` if the file should be created if it does not exist and fail if it does. 
+ /// + /// ## Examples + /// + /// ```rust + #[doc = concat!("use skl::", stringify!($mod), "::", stringify!($name), ";")] + /// + #[doc = concat!("let opts = ", stringify!($name), "::new().with_create_new(true);")] + /// assert_eq!(opts.create_new(), true); + /// ``` + #[inline] + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub const fn create_new(&self) -> bool { + self.options.create_new + } + + /// Returns the offset of the memory map. + /// + /// ## Examples + /// + /// ```rust + #[doc = concat!("use skl::", stringify!($mod), "::", stringify!($name), ";")] + /// + #[doc = concat!("let opts = ", stringify!($name), "::new().with_offset(30);")] + /// assert_eq!(opts.offset(), 30); + /// ``` + #[inline] + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub const fn offset(&self) -> u64 { + self.options.offset + } + + /// Returns `true` if the memory map should be suitable for a process or thread stack. + /// + /// ## Examples + /// + /// ```rust + #[doc = concat!("use skl::", stringify!($mod), "::", stringify!($name), ";")] + /// + #[doc = concat!("let opts = ", stringify!($name), "::new().with_stack(true);")] + /// assert_eq!(opts.stack(), true); + /// ``` + #[inline] + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub const fn stack(&self) -> bool { + self.options.stack + } + + /// Returns the page bits of the memory map. + /// + /// ## Examples + /// + /// ```rust + #[doc = concat!("use skl::", stringify!($mod), "::", stringify!($name), ";")] + /// + #[doc = concat!("let opts = ", stringify!($name), "::new().with_huge(Some(8));")] + /// assert_eq!(opts.huge(), Some(8)); + /// ``` + #[inline] + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub const fn huge(&self) -> Option { + self.options.huge + } + + /// Returns `true` if the memory map should populate (prefault) page tables for a mapping. + /// + /// ## Examples + /// + /// ```rust + #[doc = concat!("use skl::", stringify!($mod), "::", stringify!($name), ";")] + /// + #[doc = concat!("let opts = ", stringify!($name), "::new().with_populate(true);")] + /// assert_eq!(opts.populate(), true); + /// ``` + #[inline] + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + pub const fn populate(&self) -> bool { + self.options.populate + } + }; } diff --git a/src/options/open_options.rs b/src/options/open_options.rs index 0170194..8585987 100644 --- a/src/options/open_options.rs +++ b/src/options/open_options.rs @@ -1,254 +1,4 @@ -use core::mem; - -use either::Either; -use rarena_allocator::Allocator; - -use super::{super::Options, Arena, CURRENT_VERSION}; -use crate::{ - allocator::{Header, Node, Sealed}, - error::{bad_magic_version, bad_version, flags_mismtach, invalid_data}, -}; - -impl Options { - /// Create a new map which is backed by a anonymous memory map. - /// - /// **What the difference between this method and [`Options::alloc`]?** - /// - /// 1. This method will use mmap anonymous to require memory from the OS directly. 
- /// If you require very large contiguous memory regions, this method might be more suitable because - /// it's more direct in requesting large chunks of memory from the OS. - /// - /// 2. Where as [`Options::alloc`] will use an `AlignedVec` ensures we are working within Rust's memory safety guarantees. - /// Even if we are working with raw pointers with `Box::into_raw`, - /// the backend ARENA will reclaim the ownership of this memory by converting it back to a `Box` - /// when dropping the backend ARENA. Since `AlignedVec` uses heap memory, the data might be more cache-friendly, - /// especially if you're frequently accessing or modifying it. - /// - /// ## Example - /// - /// ```rust - /// use skl::{map::sync, multiple_version::unsync, Options}; - /// - /// let map = Options::new().with_capacity(1024).map_anon::<_, _, sync::SkipMap<[u8], [u8]>>().unwrap(); - /// - /// let arena = Options::new().with_capacity(1024).map_anon::<_, _, unsync::SkipMap<[u8], [u8]>>().unwrap(); - /// ``` - /// - /// [`Options::alloc`]: #method.alloc - #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - #[inline] - pub fn map_anon(self) -> std::io::Result - where - K: ?Sized + 'static, - V: ?Sized + 'static, - T: Arena, - { - let node_align = mem::align_of::<::Node>(); - - #[allow(clippy::bind_instead_of_map)] - self - .to_arena_options() - .with_maximum_alignment(node_align) - .map_anon::<::Allocator>() - .map_err(Into::into) - .and_then(|arena| { - T::construct(arena, self, false) - .map_err(invalid_data) - .and_then(|map| { - // Lock the memory of first page to prevent it from being swapped out. - #[cfg(not(miri))] - if self.lock_meta { - unsafe { - let arena = map.allocator(); - arena.mlock(0, arena.page_size().min(arena.capacity()))?; - } - } - - Ok(map) - }) - }) - } - - /// Opens a read-only map which backed by file-backed memory map. - /// - /// ## Safety - /// - All file-backed memory map constructors are marked `unsafe` because of the potential for - /// *Undefined Behavior* (UB) using the map if the underlying file is subsequently modified, in or - /// out of process. Applications must consider the risk and take appropriate precautions when - /// using file-backed maps. Solutions such as file permissions, locks or process-private (e.g. - /// unlinked) files exist but are platform specific and limited. - #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - #[inline] - pub unsafe fn map(self, path: P) -> std::io::Result - where - K: ?Sized + 'static, - V: ?Sized + 'static, - T: Arena, - P: AsRef, - { - self - .map_with_path_builder::(|| Ok(path.as_ref().to_path_buf())) - .map_err(Either::unwrap_right) - } - - /// Opens a read-only map which backed by file-backed memory map with a path builder. - /// - /// ## Safety - /// - All file-backed memory map constructors are marked `unsafe` because of the potential for - /// *Undefined Behavior* (UB) using the map if the underlying file is subsequently modified, in or - /// out of process. Applications must consider the risk and take appropriate precautions when - /// using file-backed maps. Solutions such as file permissions, locks or process-private (e.g. - /// unlinked) files exist but are platform specific and limited. 
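These mmap constructors are deleted from `Options` here; in 0.20 the equivalent entry points live on the builders instead. A hedged sketch of the relocated anonymous-mmap path, assuming the generic `Builder` exposes a `map_anon` that mirrors the `alloc::<SkipMap<str, str>>()` signature shown earlier in this patch, and that the `memmap` feature is enabled:

```rust
use skl::generic::{unique::{sync::SkipMap, Map}, Builder};

// Assumed API: `map_anon` mirroring `alloc`'s single-type-parameter shape;
// verify against the new builder modules before relying on it.
// Capacity is bytes of arena, not an entry count.
let map = Builder::new()
    .with_capacity(1024)
    .map_anon::<SkipMap<str, str>>()
    .unwrap();

map.insert("hello", "world").unwrap();
```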
- #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - #[inline] - pub unsafe fn map_with_path_builder( - self, - path_builder: PB, - ) -> Result> - where - K: ?Sized + 'static, - V: ?Sized + 'static, - T: Arena, - PB: FnOnce() -> Result, - { - let node_align = mem::align_of::<::Node>(); - let magic_version = self.magic_version(); - - #[allow(clippy::bind_instead_of_map)] - self - .to_arena_options() - .with_unify(true) - .with_read(true) - .with_create(false) - .with_create_new(false) - .with_write(false) - .with_truncate(false) - .with_append(false) - .with_maximum_alignment(node_align) - .map_with_path_builder::<::Allocator, _, _>(path_builder) - .and_then(|arena| { - T::construct(arena, self, true) - .map_err(invalid_data) - .and_then(|map| { - let flags = map.as_ref().meta().flags(); - let node_flags = <::Node as Node>::flags(); - - if flags != node_flags { - return Err(flags_mismtach(flags, node_flags)); - } - - if Arena::magic_version(&map) != magic_version { - Err(bad_magic_version()) - } else if map.version() != CURRENT_VERSION { - Err(bad_version()) - } else { - // Lock the memory of first page to prevent it from being swapped out. - #[cfg(not(miri))] - if self.lock_meta { - unsafe { - let allocator = map.allocator(); - allocator.mlock(0, allocator.page_size().min(allocator.capacity()))?; - } - } - - Ok(map) - } - }) - .map_err(Either::Right) - }) - } - - /// Creates a new map or reopens a map which backed by a file backed memory map. - /// - /// ## Safety - /// - /// - All file-backed memory map constructors are marked `unsafe` because of the potential for - /// *Undefined Behavior* (UB) using the map if the underlying file is subsequently modified, in or - /// out of process. Applications must consider the risk and take appropriate precautions when - /// using file-backed maps. Solutions such as file permissions, locks or process-private (e.g. - /// unlinked) files exist but are platform specific and limited. - #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - #[inline] - pub unsafe fn map_mut(self, path: P) -> std::io::Result - where - K: ?Sized + 'static, - V: ?Sized + 'static, - T: Arena, - P: AsRef, - { - self - .map_mut_with_path_builder::(|| Ok(path.as_ref().to_path_buf())) - .map_err(Either::unwrap_right) - } - - /// Creates a new map or reopens a map which backed by a file backed memory map with path builder. - /// - /// # Safety - /// - All file-backed memory map constructors are marked `unsafe` because of the potential for - /// *Undefined Behavior* (UB) using the map if the underlying file is subsequently modified, in or - /// out of process. Applications must consider the risk and take appropriate precautions when - /// using file-backed maps. Solutions such as file permissions, locks or process-private (e.g. - /// unlinked) files exist but are platform specific and limited. 
- #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - pub unsafe fn map_mut_with_path_builder( - self, - path_builder: PB, - ) -> Result> - where - K: ?Sized + 'static, - V: ?Sized + 'static, - T: Arena, - PB: FnOnce() -> Result, - { - let node_align = mem::align_of::<::Node>(); - let magic_version = self.magic_version(); - let path = path_builder().map_err(Either::Left)?; - let exist = path.exists(); - - #[allow(clippy::bind_instead_of_map)] - self - .to_arena_options() - .with_maximum_alignment(node_align) - .with_unify(true) - .map_mut::<::Allocator, _>(path) - .map_err(Either::Right) - .and_then(|arena| { - T::construct(arena, self, exist) - .map_err(invalid_data) - .and_then(|map| { - let flags = map.as_ref().meta().flags(); - let node_flags = <::Node as Node>::flags(); - - if flags != node_flags { - return Err(flags_mismtach(flags, node_flags)); - } - - if Arena::magic_version(&map) != magic_version { - Err(bad_magic_version()) - } else if map.version() != CURRENT_VERSION { - Err(bad_version()) - } else { - // Lock the memory of first page to prevent it from being swapped out. - #[cfg(not(miri))] - if self.lock_meta { - unsafe { - let allocator = map.allocator(); - allocator.mlock(0, allocator.page_size().min(allocator.capacity()))?; - } - } - - Ok(map) - } - }) - .map_err(Either::Right) - }) - } -} +use super::super::Options; impl Options { /// Sets the option for read access. @@ -510,9 +260,7 @@ impl Options { self.populate = populate; self } -} -impl Options { /// Returns `true` if the file should be opened with read access. /// /// ## Examples diff --git a/src/ref_counter.rs b/src/ref_counter.rs new file mode 100644 index 0000000..69f315a --- /dev/null +++ b/src/ref_counter.rs @@ -0,0 +1,65 @@ +use core::sync::atomic::{AtomicUsize, Ordering}; +use std::sync::Arc; + +/// A reference counter trait. +pub trait RefCounter: Clone { + /// Creates a new reference counter. + fn new() -> Self; + + /// Returns the current reference count. + fn load(&self, order: Ordering) -> usize; + + /// Increments the reference count, returning the old count. + fn fetch_add(&self, order: Ordering) -> usize; + + /// Decrements the reference count, returning the old count. 
+  fn fetch_sub(&self, order: Ordering) -> usize;
+}
+
+impl RefCounter for std::rc::Rc<core::cell::Cell<usize>> {
+  #[inline]
+  fn new() -> Self {
+    std::rc::Rc::new(core::cell::Cell::new(1))
+  }
+
+  #[inline]
+  fn load(&self, _: Ordering) -> usize {
+    self.get()
+  }
+
+  #[inline]
+  fn fetch_add(&self, _: Ordering) -> usize {
+    let count = self.get();
+    self.set(count + 1);
+    count
+  }
+
+  #[inline]
+  fn fetch_sub(&self, _: Ordering) -> usize {
+    let count = self.get();
+    self.set(count - 1);
+    count
+  }
+}
+
+impl RefCounter for Arc<AtomicUsize> {
+  #[inline]
+  fn new() -> Self {
+    Arc::new(AtomicUsize::new(1))
+  }
+
+  #[inline]
+  fn load(&self, order: Ordering) -> usize {
+    AtomicUsize::load(self, order)
+  }
+
+  #[inline]
+  fn fetch_add(&self, order: Ordering) -> usize {
+    AtomicUsize::fetch_add(self, 1, order)
+  }
+
+  #[inline]
+  fn fetch_sub(&self, order: Ordering) -> usize {
+    AtomicUsize::fetch_sub(self, 1, order)
+  }
+}
diff --git a/src/sync.rs b/src/sync.rs
index efac646..5d277c4 100644
--- a/src/sync.rs
+++ b/src/sync.rs
@@ -1,6 +1,7 @@
 pub use rarena_allocator::sync::Arena;
 
 use core::ptr::NonNull;
+use std::sync::Arc;
 
 use crate::internal::Flags;
 
@@ -10,6 +11,9 @@ use super::{
   decode_value_pointer, encode_value_pointer, Version, MIN_VERSION, REMOVE,
 };
 
+/// The reference counter type used in the `SkipMap`.
+pub type RefCounter = Arc<AtomicUsize>;
+
 /// Versioned header of the skiplist.
 #[derive(Debug)]
 #[repr(C)]
@@ -25,7 +29,7 @@ pub struct VersionedMeta {
   flags: Flags,
 }
 
-impl Header for VersionedMeta {
+impl crate::allocator::Meta for VersionedMeta {
   #[inline]
   fn new(version: u16) -> Self {
     Self {
@@ -125,7 +129,7 @@ impl Header for VersionedMeta {
   }
 }
 
-/// Header of the skipmap.
+/// Meta of the skipmap.
#[derive(Debug)]
 #[repr(C)]
 pub struct Meta {
@@ -136,7 +140,7 @@ pub struct Meta {
   flags: Flags,
 }
 
-impl Header for Meta {
+impl crate::allocator::Meta for Meta {
   #[inline]
   fn new(version: u16) -> Self {
     Self {
diff --git a/src/sync/map.rs b/src/sync/map.rs
index e4132dc..98c07f5 100644
--- a/src/sync/map.rs
+++ b/src/sync/map.rs
@@ -1,50 +1,7 @@
 use super::*;
 
-#[cfg(any(all(test, not(miri)), all_tests, test_sync_map,))]
-mod tests {
-  crate::__map_tests!("sync_map": super::SkipMap<[u8], [u8]>);
-}
-
-#[cfg(any(all(test, not(miri)), all_tests, test_sync_map_concurrent,))]
-mod concurrent_tests {
-  crate::__map_tests!(go "sync_map": super::SkipMap<[u8], [u8]> => crate::tests::TEST_OPTIONS);
-}
-
-#[cfg(any(
-  all(test, not(miri)),
-  all_tests,
-  test_sync_map_concurrent_with_optimistic_freelist,
-))]
-mod concurrent_tests_with_optimistic_freelist {
-  crate::__map_tests!(go "sync_map": super::SkipMap<[u8], [u8]> => crate::tests::TEST_OPTIONS_WITH_OPTIMISTIC_FREELIST);
-}
-
-#[cfg(any(
-  all(test, not(miri)),
-  all_tests,
-  test_sync_map_concurrent_with_pessimistic_freelist,
-))]
-mod concurrent_tests_with_pessimistic_freelist {
-  crate::__map_tests!(go "sync_map": super::SkipMap<[u8], [u8]> => crate::tests::TEST_OPTIONS_WITH_PESSIMISTIC_FREELIST);
-}
-
-type Allocator = GenericAllocator;
-type SkipList = crate::base::SkipList;
-
-/// Iterator over the [`SkipMap`].
-pub type Iter<'a, K, V> = crate::iter::Iter<'a, K, V, Allocator>;
-
-/// Iterator over a subset of the [`SkipMap`].
-pub type Range<'a, K, V, Q, R> = crate::iter::Iter<'a, K, V, Allocator, Q, R>;
-
-/// Iterator over the [`SkipMap`].
-pub type IterAll<'a, K, V> = crate::iter::IterAll<'a, K, V, Allocator>;
-
-/// Iterator over a subset of the [`SkipMap`].
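Both impls start the count at 1 and return the *old* count from `fetch_add`/`fetch_sub`, so the holder that sees `fetch_sub` return 1 knows it dropped the last reference. A self-contained sketch of that contract (the trait and the `Arc<AtomicUsize>` impl are copied from the patch above; `main` is only a demonstration):

```rust
use core::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;

pub trait RefCounter: Clone {
  fn new() -> Self;
  fn load(&self, order: Ordering) -> usize;
  fn fetch_add(&self, order: Ordering) -> usize;
  fn fetch_sub(&self, order: Ordering) -> usize;
}

impl RefCounter for Arc<AtomicUsize> {
  fn new() -> Self {
    Arc::new(AtomicUsize::new(1)) // a fresh counter starts at 1
  }
  fn load(&self, order: Ordering) -> usize {
    AtomicUsize::load(self, order)
  }
  fn fetch_add(&self, order: Ordering) -> usize {
    AtomicUsize::fetch_add(self, 1, order)
  }
  fn fetch_sub(&self, order: Ordering) -> usize {
    AtomicUsize::fetch_sub(self, 1, order)
  }
}

fn main() {
  let rc = <Arc<AtomicUsize> as RefCounter>::new();
  assert_eq!(rc.fetch_add(Ordering::AcqRel), 1); // the old count is returned
  assert_eq!(rc.load(Ordering::Acquire), 2);
  assert_eq!(rc.fetch_sub(Ordering::AcqRel), 2);
  // Seeing the old count hit 1 means this was the last reference.
  assert_eq!(rc.fetch_sub(Ordering::AcqRel), 1);
}
```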
-pub type RangeAll<'a, K, V, Q, R> = crate::iter::IterAll<'a, K, V, Allocator, Q, R>; - -/// The entry reference of the [`SkipMap`]. -pub type Entry<'a, K, V> = crate::EntryRef<'a, K, V, Allocator>; +/// The allocator used to allocate nodes in the `SkipMap`. +pub type Allocator = GenericAllocator; node!( /// A raw node that does not support version. @@ -69,37 +26,3 @@ node!( } } ); - -/// A fast, lock-free, thread-safe ARENA based `SkipMap` that supports forward and backward iteration. -/// -/// If you want to use in non-concurrent environment, you can use [`map::unsync::SkipMap`](crate::map::unsync::SkipMap). -#[repr(transparent)] -pub struct SkipMap(SkipList); - -impl Clone for SkipMap { - #[inline] - fn clone(&self) -> Self { - Self(self.0.clone()) - } -} - -impl From> for SkipMap { - #[inline] - fn from(list: SkipList) -> Self { - Self(list) - } -} - -impl crate::traits::List for SkipMap { - type Allocator = Allocator; - - #[inline] - fn as_ref(&self) -> &SkipList { - &self.0 - } - - #[inline] - fn as_mut(&mut self) -> &mut SkipList { - &mut self.0 - } -} diff --git a/src/sync/multiple_version.rs b/src/sync/multiple_version.rs index 661bfa6..75f0c43 100644 --- a/src/sync/multiple_version.rs +++ b/src/sync/multiple_version.rs @@ -1,54 +1,5 @@ use super::*; -#[cfg(any(all(test, not(miri)), all_tests, test_sync_versioned,))] -mod tests { - crate::__multiple_version_map_tests!("sync_multiple_version_map": super::SkipMap<[u8], [u8]>); -} - -#[cfg(any(all(test, not(miri)), all_tests, test_sync_multiple_version_concurrent,))] -mod concurrent_tests { - crate::__multiple_version_map_tests!(go "sync_multiple_version_map": super::SkipMap<[u8], [u8]> => crate::tests::TEST_OPTIONS); -} - -#[cfg(any( - all(test, not(miri)), - all_tests, - test_sync_multiple_version_concurrent_with_optimistic_freelist, -))] -mod concurrent_tests_with_optimistic_freelist { - crate::__multiple_version_map_tests!(go "sync_multiple_version_map": super::SkipMap<[u8], [u8]> => crate::tests::TEST_OPTIONS_WITH_OPTIMISTIC_FREELIST); -} - -#[cfg(any( - all(test, not(miri)), - all_tests, - test_sync_multiple_version_concurrent_with_pessimistic_freelist, -))] -mod concurrent_tests_with_pessimistic_freelist { - crate::__multiple_version_map_tests!(go "sync_multiple_version_map": super::SkipMap<[u8], [u8]> => crate::tests::TEST_OPTIONS_WITH_PESSIMISTIC_FREELIST); -} - -type Allocator = GenericAllocator; -type SkipList = crate::base::SkipList; - -/// Iterator over the [`SkipMap`]. -pub type Iter<'a, K, V> = crate::iter::Iter<'a, K, V, Allocator>; - -/// Iterator over a subset of the [`SkipMap`]. -pub type Range<'a, K, V, Q, R> = crate::iter::Iter<'a, K, V, Allocator, Q, R>; - -/// Iterator over the [`SkipMap`]. -pub type IterAll<'a, K, V> = crate::iter::IterAll<'a, K, V, Allocator>; - -/// Iterator over a subset of the [`SkipMap`]. -pub type RangeAll<'a, K, V, Q, R> = crate::iter::IterAll<'a, K, V, Allocator, Q, R>; - -/// The entry reference of the [`SkipMap`]. -pub type Entry<'a, K, V> = crate::EntryRef<'a, K, V, Allocator>; - -/// The versioned entry reference of the [`SkipMap`]. -pub type VersionedEntry<'a, K, V> = crate::VersionedEntryRef<'a, K, V, Allocator>; - node!( /// A node that supports version. struct VersionedNode { @@ -80,36 +31,5 @@ node!( } ); -/// A fast, lock-free, thread-safe ARENA based `SkipMap` that supports multiple versions, forward and backward iteration. -/// -/// If you want to use in non-concurrent environment, you can use [`multiple_version::unsync::SkipMap`](crate::multiple_version::unsync::SkipMap). 
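Every wrapper deleted here and in `sync/map.rs` follows the same newtype pattern, condensed below with simplified stand-in names (this is a model of the pattern, not the crate's exact traits): a `#[repr(transparent)]` wrapper over the shared skiplist plus `From` and accessor impls, so one generic list implementation backs every public map type.

```rust
// `InnerList` stands in for the crate's generic SkipList, and `List`
// models crate::traits::List; all names here are illustrative only.
struct InnerList;

// repr(transparent) guarantees the wrapper has the exact layout of the inner list.
#[repr(transparent)]
struct MapWrapper(InnerList);

impl From<InnerList> for MapWrapper {
  fn from(list: InnerList) -> Self {
    Self(list)
  }
}

trait List {
  type Inner;
  fn as_ref(&self) -> &Self::Inner;
  fn as_mut(&mut self) -> &mut Self::Inner;
}

impl List for MapWrapper {
  type Inner = InnerList;
  fn as_ref(&self) -> &InnerList {
    &self.0
  }
  fn as_mut(&mut self) -> &mut InnerList {
    &mut self.0
  }
}
```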
-#[repr(transparent)] -pub struct SkipMap(SkipList); - -impl Clone for SkipMap { - #[inline] - fn clone(&self) -> Self { - Self(self.0.clone()) - } -} - -impl From> for SkipMap { - #[inline] - fn from(list: SkipList) -> Self { - Self(list) - } -} - -impl crate::traits::List for SkipMap { - type Allocator = Allocator; - - #[inline] - fn as_ref(&self) -> &SkipList { - &self.0 - } - - #[inline] - fn as_mut(&mut self) -> &mut SkipList { - &mut self.0 - } -} +/// Concurrent safe allocator for multiple versioned nodes. +pub type Allocator = GenericAllocator; diff --git a/src/tests.rs b/src/tests.rs index b8c2c22..8b225a3 100644 --- a/src/tests.rs +++ b/src/tests.rs @@ -1,66 +1,39 @@ #![allow(dead_code)] -use super::Options; - -pub(crate) const KB: usize = 1 << 10; -const ARENA_SIZE: usize = 1 << 20; -pub(crate) const TEST_OPTIONS: Options = Options::new().with_capacity(ARENA_SIZE as u32); -pub(crate) const TEST_FULL_OPTIONS: Options = Options::new().with_capacity(1024); -pub(crate) const TEST_OPTIONS_WITH_OPTIMISTIC_FREELIST: Options = Options::new() - .with_capacity(ARENA_SIZE as u32) - .with_freelist(rarena_allocator::Freelist::Optimistic); -pub(crate) const TEST_OPTIONS_WITH_PESSIMISTIC_FREELIST: Options = Options::new() - .with_capacity(ARENA_SIZE as u32) - .with_freelist(rarena_allocator::Freelist::Pessimistic); -// pub(crate) const TEST_HIGH_COMPRESSION_OPTIONS: Options = Options::new() -// .with_capacity(ARENA_SIZE as u32) -// .with_compression_policy(crate::CompressionPolicy::High); -#[cfg(all( - all(feature = "std", not(miri)), - any( - all(test, not(miri)), - all_tests, - test_sync_full, - test_sync_map, - test_sync_trailed, - test_sync_versioned, - ) -))] -const BIG_ARENA_SIZE: usize = 120 << 20; - -#[cfg(all( - all(feature = "std", not(miri)), - any( - all(test, not(miri)), - all_tests, - test_sync_full, - test_sync_map, - test_sync_trailed, - test_sync_versioned, - ) -))] -pub(crate) const BIG_TEST_OPTIONS: Options = Options::new().with_capacity(BIG_ARENA_SIZE as u32); #[cfg(any( all(test, not(miri)), - all_tests, - test_unsync_map, - test_sync_map, - test_sync_map_concurrent, - test_sync_map_concurrent_with_optimistic_freelist, - test_sync_map_concurrent_with_pessimistic_freelist, + all_skl_tests, + test_generic_unsync_map, + test_generic_unsync_versioned, + test_generic_sync_map, + test_generic_sync_versioned, + test_generic_sync_map_concurrent, + test_generic_sync_multiple_version_concurrent, + test_generic_sync_map_concurrent_with_optimistic_freelist, + test_generic_sync_multiple_version_concurrent_with_optimistic_freelist, + test_generic_sync_map_concurrent_with_pessimistic_freelist, + test_generic_sync_multiple_version_concurrent_with_pessimistic_freelist, ))] -pub(crate) mod map; +pub mod generic; #[cfg(any( all(test, not(miri)), - all_tests, - test_unsync_versioned, - test_sync_versioned, - test_sync_multiple_version_concurrent, - test_sync_multiple_version_concurrent_with_optimistic_freelist, - test_sync_multiple_version_concurrent_with_pessimistic_freelist, + all_skl_tests, + test_dynamic_unsync_map, + test_dynamic_unsync_versioned, + test_dynamic_sync_map, + test_dynamic_sync_versioned, + test_dynamic_sync_map_concurrent, + test_dynamic_sync_multiple_version_concurrent, + test_dynamic_sync_map_concurrent_with_optimistic_freelist, + test_dynamic_sync_multiple_version_concurrent_with_optimistic_freelist, + test_dynamic_sync_map_concurrent_with_pessimistic_freelist, + test_dynamic_sync_multiple_version_concurrent_with_pessimistic_freelist, ))] -pub(crate) mod 
multiple_version; +pub mod dynamic; + +pub(crate) const KB: usize = 1 << 10; +const ARENA_SIZE: usize = 1 << 20; /// Only used for testing pub fn key(i: usize) -> std::vec::Vec { @@ -122,7 +95,7 @@ macro_rules! __unit_test_expand { fn [< test_ $name >]() { $fn::$name( $opts - .alloc::<[u8], [u8], $ty>() + .alloc::<$ty>() .unwrap(), ); } @@ -133,7 +106,7 @@ macro_rules! __unit_test_expand { $fn::$name( $opts .with_unify(true) - .alloc::<[u8], [u8], $ty>() + .alloc::<$ty>() .unwrap(), ); } @@ -154,7 +127,7 @@ macro_rules! __unit_test_expand { .with_create_new(true) .with_read(true) .with_write(true) - .map_mut::<[u8], [u8], $ty, _>(p) + .map_mut::<$ty, _>(p) .unwrap(), ); } @@ -166,7 +139,7 @@ macro_rules! __unit_test_expand { fn [< test_ $name _map_anon >] () { $fn::$name( $opts - .map_anon::<[u8], [u8], $ty>() + .map_anon::<$ty>() .unwrap(), ); } @@ -178,7 +151,7 @@ macro_rules! __unit_test_expand { $fn::$name( $opts .with_unify(true) - .map_anon::<[u8], [u8], $ty>() + .map_anon::<$ty>() .unwrap(), ); } diff --git a/src/tests/dynamic.rs b/src/tests/dynamic.rs new file mode 100644 index 0000000..1b96f6c --- /dev/null +++ b/src/tests/dynamic.rs @@ -0,0 +1,62 @@ +#[cfg(any( + all(test, not(miri)), + all_skl_tests, + test_dynamic_unsync_map, + test_dynamic_sync_map, + test_dynamic_sync_map_concurrent, + test_dynamic_sync_map_concurrent_with_optimistic_freelist, + test_dynamic_sync_map_concurrent_with_pessimistic_freelist, +))] +pub(crate) mod map; + +#[cfg(any( + all(test, not(miri)), + all_skl_tests, + test_dynamic_unsync_versioned, + test_dynamic_sync_versioned, + test_dynamic_sync_multiple_version_concurrent, + test_dynamic_sync_multiple_version_concurrent_with_optimistic_freelist, + test_dynamic_sync_multiple_version_concurrent_with_pessimistic_freelist, +))] +pub(crate) mod multiple_version; + +use crate::dynamic::Builder; + +use super::*; + +pub(crate) const TEST_OPTIONS: Builder = Builder::new().with_capacity(ARENA_SIZE as u32); +pub(crate) const TEST_FULL_OPTIONS: Builder = Builder::new().with_capacity(1024); +pub(crate) const TEST_OPTIONS_WITH_OPTIMISTIC_FREELIST: Builder = Builder::new() + .with_capacity(ARENA_SIZE as u32) + .with_freelist(rarena_allocator::Freelist::Optimistic); +pub(crate) const TEST_OPTIONS_WITH_PESSIMISTIC_FREELIST: Builder = Builder::new() + .with_capacity(ARENA_SIZE as u32) + .with_freelist(rarena_allocator::Freelist::Pessimistic); +// pub(crate) const TEST_HIGH_COMPRESSION_OPTIONS: Options = Options::new() +// .with_capacity(ARENA_SIZE as u32) +// .with_compression_policy(crate::CompressionPolicy::High); +#[cfg(all( + all(feature = "std", not(miri)), + any( + all(test, not(miri)), + all_skl_tests, + test_dynamic_sync_full, + test_dynamic_sync_map, + test_dynamic_sync_trailed, + test_dynamic_sync_versioned, + ) +))] +const BIG_ARENA_SIZE: usize = 120 << 20; + +#[cfg(all( + all(feature = "std", not(miri)), + any( + all(test, not(miri)), + all_skl_tests, + test_dynamic_sync_full, + test_dynamic_sync_map, + test_dynamic_sync_trailed, + test_dynamic_sync_versioned, + ) +))] +pub(crate) const BIG_TEST_OPTIONS: Builder = Builder::new().with_capacity(BIG_ARENA_SIZE as u32); diff --git a/src/tests/dynamic/map.rs b/src/tests/dynamic/map.rs new file mode 100644 index 0000000..725d00f --- /dev/null +++ b/src/tests/dynamic/map.rs @@ -0,0 +1,1686 @@ +#![allow(dead_code)] + +use core::ops::Bound; + +use crate::{ + allocator::Sealed, + error::{ArenaError, Error}, +}; + +use core::sync::atomic::Ordering; + +use dbutils::{buffer::VacantBuffer, equivalentor::Ascend}; + +use 
crate::{allocator::WithoutVersion, dynamic::unique::Map, KeyBuilder, ValueBuilder}; + +use super::*; + +pub(crate) fn empty(l: M) +where + M: Map, + ::Node: WithoutVersion, +{ + let mut it = l.iter(); + + assert!(it.seek_lower_bound::<[u8]>(Bound::Unbounded).is_none()); + assert!(it.seek_upper_bound::<[u8]>(Bound::Unbounded).is_none()); + assert!(it.seek_lower_bound(Bound::Included(b"aaa")).is_none()); + assert!(it.seek_upper_bound(Bound::Excluded(b"aaa")).is_none()); + assert!(it.seek_lower_bound(Bound::Excluded(b"aaa")).is_none()); + assert!(it.seek_upper_bound(Bound::Included(b"aaa")).is_none()); + assert!(l.first().is_none()); + assert!(l.last().is_none()); + + assert!(l.get(b"aaa".as_slice()).is_none()); + assert!(!l.contains_key(b"aaa".as_slice())); + assert!(l.allocated() > 0); + assert!(l.capacity() > 0); + assert_eq!(l.remaining(), l.capacity() - l.allocated()); +} + +pub(crate) fn full(l: M) +where + M: Map, + ::Node: WithoutVersion, +{ + let mut found_arena_full = false; + + for i in 0..100 { + if let Err(e) = l.get_or_insert(make_int_key(i).as_slice(), make_value(i).as_slice()) { + assert!(matches!( + e, + Error::Arena(ArenaError::InsufficientSpace { .. }) + )); + found_arena_full = true; + break; + } + } + + assert!(found_arena_full); +} + +pub(crate) fn basic(l: M) +where + M: Map + Clone, + ::Node: WithoutVersion, +{ + // Try adding values. + l.get_or_insert(b"key1".as_slice(), make_value(1).as_slice()) + .unwrap(); + l.get_or_insert(b"key3".as_slice(), make_value(3).as_slice()) + .unwrap(); + l.get_or_insert(b"key2".as_slice(), make_value(2).as_slice()) + .unwrap(); + + { + let mut it = l.iter(); + let ent = it.seek_lower_bound(Bound::Included(b"key1")).unwrap(); + assert_eq!(ent.key(), b"key1"); + assert_eq!(ent.value(), make_value(1).as_slice()); + + let ent = it.seek_lower_bound(Bound::Included(b"key2")).unwrap(); + assert_eq!(ent.key(), b"key2"); + assert_eq!(ent.value(), make_value(2).as_slice()); + + let ent = it.seek_lower_bound(Bound::Included(b"key3")).unwrap(); + assert_eq!(ent.key(), b"key3"); + assert_eq!(ent.value(), make_value(3).as_slice()); + } + + l.get_or_insert("a".as_bytes(), [].as_slice()).unwrap(); + l.get_or_insert("a".as_bytes(), [].as_slice()).unwrap(); + + { + let mut it = l.iter(); + let ent = it.seek_lower_bound(Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), &[]); + + let ent = it.next().unwrap(); + assert_eq!(ent.key(), b"key1"); + assert_eq!(ent.value(), make_value(1).as_slice()); + } + + l.get_or_insert("b".as_bytes(), [].as_slice()).unwrap(); + l.get_or_insert("b".as_bytes(), [].as_slice()).unwrap(); + + { + let mut it = l.iter(); + let ent = it.seek_lower_bound(Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"b"); + assert_eq!(ent.value(), &[]); + + let ent = it.next().unwrap(); + assert_eq!(ent.key(), b"key1"); + assert_eq!(ent.value(), make_value(1).as_slice()); + + let ent = it.head().unwrap(); + assert_eq!(ent.key(), b"key1"); + assert_eq!(ent.value(), make_value(1).as_slice()); + } + + l.get_or_insert(b"b".as_slice(), [].as_slice()) + .unwrap() + .unwrap(); + + assert!(l + .get_or_insert(b"c".as_slice(), [].as_slice()) + .unwrap() + .is_none()); + + #[cfg(feature = "memmap")] + l.flush().unwrap(); + + #[cfg(feature = "memmap")] + l.flush_async().unwrap(); +} + +pub(crate) fn get(l: M) +where + M: Map + Clone, + ::Node: WithoutVersion, +{ + l.get_or_insert(b"a".as_slice(), b"a1".as_slice()).unwrap(); + l.get_or_insert(b"a".as_slice(), b"a2".as_slice()).unwrap(); + 
l.get_or_insert(b"c".as_slice(), b"c1".as_slice()).unwrap(); + l.get_or_insert(b"c".as_slice(), b"c2".as_slice()).unwrap(); + + let ent = l.get(b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.get(b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.get(b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + l.insert(b"a".as_slice(), b"a2".as_slice()).unwrap(); + + let ent = l.get(b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + + assert!(l.get(b"b").is_none()); + assert!(l.get(b"b").is_none()); + assert!(l.get(b"b").is_none()); + assert!(l.get(b"b").is_none()); + assert!(l.get(b"b").is_none()); + + let ent = l.get(b"c").unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.get(b"c").unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + assert!(l.get(b"d").is_none()); +} + +pub(crate) fn gt(l: M) +where + M: Map + Clone, + ::Node: WithoutVersion, +{ + l.get_or_insert(b"a".as_slice(), b"a1".as_slice()).unwrap(); + l.get_or_insert(b"a".as_slice(), b"a2".as_slice()).unwrap(); + l.get_or_insert(b"c".as_slice(), b"c1".as_slice()).unwrap(); + l.get_or_insert(b"c".as_slice(), b"c2".as_slice()).unwrap(); + l.get_or_insert(b"c".as_slice(), b"c3".as_slice()).unwrap(); + + assert!(l.lower_bound(Bound::Excluded(b"a")).is_some()); + assert!(l.lower_bound(Bound::Excluded(b"b")).is_some()); + assert!(l.lower_bound(Bound::Excluded(b"c")).is_none()); + + let ent = l.lower_bound(Bound::Excluded(b"")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.lower_bound(Bound::Excluded(b"")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.lower_bound(Bound::Excluded(b"a")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.lower_bound(Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.lower_bound(Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + assert!(l.lower_bound(Bound::Excluded(b"c")).is_none()); +} + +pub(crate) fn ge(l: M) +where + M: Map + Clone, + ::Node: WithoutVersion, +{ + l.get_or_insert(b"a".as_slice(), b"a1".as_slice()).unwrap(); + l.get_or_insert(b"a".as_slice(), b"a2".as_slice()).unwrap(); + l.get_or_insert(b"c".as_slice(), b"c1".as_slice()).unwrap(); + l.get_or_insert(b"c".as_slice(), b"c2".as_slice()).unwrap(); + + assert!(l.lower_bound(Bound::Included(b"a")).is_some()); + assert!(l.lower_bound(Bound::Included(b"b")).is_some()); + assert!(l.lower_bound(Bound::Included(b"c")).is_some()); + + let ent = l.lower_bound(Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.lower_bound(Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + l.insert(b"a".as_slice(), b"a2".as_slice()).unwrap(); + + let ent = l.lower_bound(Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a2"); + + let ent = l.lower_bound(Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.lower_bound(Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.lower_bound(Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), 
b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.lower_bound(Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + assert!(l.lower_bound(Bound::Included(b"d")).is_none()); +} + +pub(crate) fn le(l: M) +where + M: Map + Clone, + ::Node: WithoutVersion, +{ + l.get_or_insert(b"a".as_slice(), b"a1".as_slice()).unwrap(); + l.get_or_insert(b"a".as_slice(), b"a2".as_slice()).unwrap(); + l.get_or_insert(b"c".as_slice(), b"c1".as_slice()).unwrap(); + l.get_or_insert(b"c".as_slice(), b"c2".as_slice()).unwrap(); + + assert!(l.upper_bound(Bound::Included(b"a")).is_some()); + assert!(l.upper_bound(Bound::Included(b"b")).is_some()); + assert!(l.upper_bound(Bound::Included(b"c")).is_some()); + + let ent = l.upper_bound(Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.upper_bound(Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.upper_bound(Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); + + let ent = l.upper_bound(Bound::Included(b"d")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); +} + +pub(crate) fn lt(l: M) +where + M: Map + Clone, + ::Node: WithoutVersion, +{ + l.get_or_insert(b"a".as_slice(), b"a1".as_slice()).unwrap(); + l.get_or_insert(b"a".as_slice(), b"a2".as_slice()).unwrap(); + l.get_or_insert(b"c".as_slice(), b"c1".as_slice()).unwrap(); + l.get_or_insert(b"c".as_slice(), b"c2".as_slice()).unwrap(); + + assert!(l.upper_bound(Bound::Excluded(b"a")).is_none()); + assert!(l.upper_bound(Bound::Excluded(b"b")).is_some()); + assert!(l.upper_bound(Bound::Excluded(b"c")).is_some()); + + let ent = l.upper_bound(Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.upper_bound(Bound::Excluded(b"c")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.value(), b"a1"); + + let ent = l.upper_bound(Bound::Excluded(b"d")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.value(), b"c1"); +} + +#[cfg(not(miri))] +pub(crate) fn basic_large(l: M) +where + M: Map + Clone, + ::Node: WithoutVersion, +{ + let n = 1000; + + for i in 0..n { + l.get_or_insert(key(i).as_slice(), new_value(i).as_slice()) + .unwrap(); + } + + for i in 0..n { + let k = key(i); + let ent = l.get(k.as_slice()).unwrap(); + assert_eq!(new_value(i).as_slice(), ent.value()); + + assert_eq!(ent.key(), k.as_slice()); + } + + assert_eq!(n, l.len()); +} + +#[cfg(all( + feature = "std", + any( + all(test, not(miri)), + all_skl_tests, + test_dynamic_sync_map_concurrent, + test_dynamic_sync_map_concurrent_with_optimistic_freelist, + test_dynamic_sync_map_concurrent_with_pessimistic_freelist, + ) +))] +pub(crate) fn concurrent_basic_two_maps(l: M) +where + M: Map + Clone + Send + 'static, + ::Node: WithoutVersion, +{ + #[cfg(not(miri))] + const N: usize = 1000; + #[cfg(miri)] + const N: usize = 200; + + let l2 = M::create_from_allocator(l.allocator().clone(), Ascend).unwrap(); + + for i in (0..N / 2).rev() { + let l = l.clone(); + let l2 = l2.clone(); + std::thread::spawn(move || { + l.get_or_insert(key(i).as_slice(), new_value(i).as_slice()) + .unwrap(); + }); + std::thread::spawn(move || { + l2.get_or_insert(key(i + N / 2).as_slice(), new_value(i + N / 2).as_slice()) + .unwrap(); + }); + } + while l.refs() > 2 { + ::core::hint::spin_loop(); + } + for i in 0..N / 2 { + let l = l.clone(); + let l2 = l2.clone(); + 
std::thread::spawn(move || { + let k = key(i); + assert_eq!( + l.get(k.as_slice()).unwrap().value(), + new_value(i).as_slice(), + "broken: {i}" + ); + }); + std::thread::spawn(move || { + let k = key(i + N / 2); + assert_eq!( + l2.get(k.as_slice()).unwrap().value(), + new_value(i + N / 2).as_slice(), + "broken: {i}" + ); + }); + } + while l.refs() > 2 { + ::core::hint::spin_loop(); + } +} + +#[cfg(all( + feature = "std", + any( + all(test, not(miri)), + all_skl_tests, + test_dynamic_sync_map_concurrent, + test_dynamic_sync_map_concurrent_with_optimistic_freelist, + test_dynamic_sync_map_concurrent_with_pessimistic_freelist, + ) +))] +pub(crate) fn concurrent_basic(l: M) +where + M: Map + Clone + Send + 'static, + ::Node: WithoutVersion, +{ + #[cfg(not(miri))] + const N: usize = 1000; + #[cfg(miri)] + const N: usize = 200; + + for i in (0..N).rev() { + let l = l.clone(); + std::thread::spawn(move || { + l.get_or_insert(key(i).as_slice(), new_value(i).as_slice()) + .unwrap(); + }); + } + while l.refs() > 1 { + ::core::hint::spin_loop(); + } + for i in 0..N { + let l = l.clone(); + std::thread::spawn(move || { + let k = key(i); + assert_eq!( + l.get(k.as_slice()).unwrap().value(), + new_value(i).as_slice(), + "broken: {i}" + ); + }); + } + while l.refs() > 1 { + ::core::hint::spin_loop(); + } +} + +#[cfg(all( + feature = "std", + any( + all(test, not(miri)), + all_skl_tests, + test_dynamic_sync_map_concurrent, + test_dynamic_sync_map_concurrent_with_optimistic_freelist, + test_dynamic_sync_map_concurrent_with_pessimistic_freelist + ) +))] +pub(crate) fn concurrent_basic2(l: M) +where + M: Map + Clone + Send + 'static, + ::Node: WithoutVersion, +{ + #[cfg(not(miri))] + const N: usize = 1000; + #[cfg(miri)] + const N: usize = 200; + + for i in 0..N { + let l1 = l.clone(); + let l2 = l.clone(); + std::thread::Builder::new() + .name(std::format!("map-concurrent-basic2-writer-{i}-1")) + .spawn(move || { + let _ = l1.insert(int_key(i).as_slice(), new_value(i).as_slice()); + }) + .unwrap(); + + std::thread::Builder::new() + .name(std::format!("map-concurrent-basic2-writer{i}-2")) + .spawn(move || { + let _ = l2.insert(int_key(i).as_slice(), new_value(i).as_slice()); + }) + .unwrap(); + } + while l.refs() > 1 { + ::core::hint::spin_loop(); + } + for i in 0..N { + let l = l.clone(); + std::thread::spawn(move || { + let k = int_key(i); + assert_eq!( + l.get(k.as_slice()).unwrap().value(), + new_value(i).as_slice(), + "broken: {i}" + ); + }); + } + while l.refs() > 1 { + ::core::hint::spin_loop(); + } +} + +#[cfg(all( + all(feature = "std", not(miri)), + any( + all(test, not(miri)), + all_skl_tests, + test_dynamic_sync_map_concurrent, + test_dynamic_sync_map_concurrent_with_optimistic_freelist, + test_dynamic_sync_map_concurrent_with_pessimistic_freelist + ) +))] +pub(crate) fn concurrent_basic_big_values(l: M) +where + M: Map + Clone + Send + 'static, + ::Node: WithoutVersion, +{ + #[cfg(not(miri))] + const N: usize = 1000; + #[cfg(miri)] + const N: usize = 200; + + for i in 0..N { + let l = l.clone(); + std::thread::spawn(move || { + l.get_or_insert(key(i).as_slice(), big_value(i).as_slice()) + .unwrap(); + }); + } + while l.refs() > 1 { + ::core::hint::spin_loop(); + } + // assert_eq!(N, l.len()); + for i in 0..N { + let l = l.clone(); + std::thread::spawn(move || { + let k = key(i); + assert_eq!( + l.get(k.as_slice()).unwrap().value(), + big_value(i).as_slice(), + "broken: {i}" + ); + }); + } + while l.refs() > 1 { + ::core::hint::spin_loop(); + } +} + +#[cfg(all( + feature = "std", + any( + 
all(test, not(miri)), + all_skl_tests, + test_dynamic_sync_map_concurrent, + test_dynamic_sync_map_concurrent_with_optimistic_freelist, + test_dynamic_sync_map_concurrent_with_pessimistic_freelist + ) +))] +pub(crate) fn concurrent_one_key(l: M) +where + M: Map + Clone + Send + 'static, + ::Node: WithoutVersion, +{ + use std::sync::Arc; + + #[cfg(not(miri))] + const N: usize = 1000; + #[cfg(miri)] + const N: usize = 200; + + for i in 0..N { + let l = l.clone(); + std::thread::spawn(move || { + let _ = l.get_or_insert(b"thekey".as_slice(), make_value(i).as_slice()); + }); + } + + while l.refs() > 1 { + ::core::hint::spin_loop(); + } + + let saw_value = Arc::new(crate::common::AtomicU32::new(0)); + for _ in 0..N { + let l = l.clone(); + let saw_value = saw_value.clone(); + std::thread::spawn(move || { + let ent = l.get(b"thekey").unwrap(); + let val = ent.value(); + let num: usize = core::str::from_utf8(&val[1..]).unwrap().parse().unwrap(); + assert!((0..N).contains(&num)); + + let mut it = l.iter(); + let ent = it.seek_lower_bound(Bound::Included(b"thekey")).unwrap(); + let val = ent.value(); + let num: usize = core::str::from_utf8(&val[1..]).unwrap().parse().unwrap(); + assert!((0..N).contains(&num)); + assert_eq!(ent.key(), b"thekey"); + saw_value.fetch_add(1, Ordering::SeqCst); + }); + } + + while l.refs() > 1 { + ::core::hint::spin_loop(); + } + + assert_eq!(N, saw_value.load(Ordering::SeqCst) as usize); + assert_eq!(l.len(), 1); +} + +#[cfg(all( + feature = "std", + any( + all(test, not(miri)), + all_skl_tests, + test_dynamic_sync_map_concurrent, + test_dynamic_sync_map_concurrent_with_optimistic_freelist, + test_dynamic_sync_map_concurrent_with_pessimistic_freelist + ) +))] +pub(crate) fn concurrent_one_key2(l: M) +where + M: Map + Clone + Send + 'static, + ::Node: WithoutVersion, +{ + use std::sync::Arc; + + #[cfg(not(miri))] + const N: usize = 100; + #[cfg(miri)] + const N: usize = 20; + + for i in 0..N { + let l = l.clone(); + std::thread::spawn(move || { + let _ = l.insert(b"thekey".as_slice(), make_value(i).as_slice()); + }); + } + + while l.refs() > 1 { + ::core::hint::spin_loop(); + } + + let saw_value = Arc::new(crate::common::AtomicU32::new(0)); + for _ in 0..N { + let l = l.clone(); + let saw_value = saw_value.clone(); + std::thread::spawn(move || { + let ent = l.get(b"thekey").unwrap(); + let val = ent.value(); + let num: usize = core::str::from_utf8(&val[1..]).unwrap().parse().unwrap(); + assert!((0..N).contains(&num)); + + let mut it = l.iter(); + let ent = it.seek_lower_bound(Bound::Included(b"thekey")).unwrap(); + let val = ent.value(); + let num: usize = core::str::from_utf8(&val[1..]).unwrap().parse().unwrap(); + assert!((0..N).contains(&num)); + assert_eq!(ent.key(), b"thekey"); + saw_value.fetch_add(1, Ordering::SeqCst); + }); + } + + while l.refs() > 1 { + ::core::hint::spin_loop(); + } + + assert_eq!(N, saw_value.load(Ordering::SeqCst) as usize); + assert_eq!(l.len(), 1); +} + +pub(crate) fn iter_all_versions_next(l: M) +where + M: Map + Clone, + ::Node: WithoutVersion, +{ + const N: usize = 100; + + for i in (0..N).rev() { + l.get_or_insert(make_int_key(i).as_slice(), make_value(i).as_slice()) + .unwrap(); + } + + let mut it = l.iter(); + let mut ent = it.seek_lower_bound::<[u8]>(Bound::Unbounded).unwrap(); + for i in 0..N { + assert_eq!(ent.key(), make_int_key(i).as_slice()); + assert_eq!(ent.value(), make_value(i).as_slice()); + if i != N - 1 { + ent = it.next().unwrap(); + } + } + + assert!(it.next().is_none()); +} + +pub(crate) fn range_next(l: M) +where + M: 
Map + Clone, + ::Node: WithoutVersion, +{ + const N: usize = 100; + + for i in (0..N).rev() { + l.get_or_insert(make_int_key(i).as_slice(), make_value(i).as_slice()) + .unwrap(); + } + + let upper = make_int_key(50); + let mut i = 0; + let mut it = l.range(..=upper.as_slice()); + for ent in &mut it { + assert_eq!(ent.key(), make_int_key(i).as_slice()); + assert_eq!(ent.value(), make_value(i).as_slice()); + i += 1; + } + + assert_eq!(i, 51); +} + +pub(crate) fn iter_all_versions_prev(l: M) +where + M: Map + Clone, + ::Node: WithoutVersion, +{ + const N: usize = 100; + + for i in 0..N { + l.get_or_insert(make_int_key(i).as_slice(), make_value(i).as_slice()) + .unwrap(); + } + + let mut it = l.iter(); + let mut ent = it.seek_upper_bound::<[u8]>(Bound::Unbounded).unwrap(); + for i in (0..N).rev() { + assert_eq!(ent.key(), make_int_key(i).as_slice()); + assert_eq!(ent.value(), make_value(i).as_slice()); + if i != 0 { + ent = it.next_back().unwrap(); + } + } + + assert!(it.next_back().is_none()); +} + +pub(crate) fn range_prev(l: M) +where + M: Map + Clone, + ::Node: WithoutVersion, +{ + const N: usize = 100; + + for i in 0..N { + l.get_or_insert(make_int_key(i).as_slice(), make_value(i).as_slice()) + .unwrap(); + } + + let lower = make_int_key(50); + let it = l.range(lower.as_slice()..); + let mut i = 99; + for ent in it.rev() { + assert_eq!(ent.key(), make_int_key(i).as_slice()); + assert_eq!(ent.value(), make_value(i).as_slice()); + i -= 1; + } +} + +pub(crate) fn iter_all_versions_seek_ge(l: M) +where + M: Map + Clone, + ::Node: WithoutVersion, +{ + const N: usize = 100; + + for i in (0..N).rev() { + let v = i * 10 + 1000; + l.get_or_insert(make_int_key(v).as_slice(), make_value(v).as_slice()) + .unwrap(); + } + + let mut it = l.iter(); + let ent = it.seek_lower_bound(Bound::Included(b"")).unwrap(); + assert_eq!(ent.key(), make_int_key(1000).as_slice()); + assert_eq!(ent.value(), make_value(1000).as_slice()); + + let ent = it.seek_lower_bound(Bound::Included(b"01000")).unwrap(); + assert_eq!(ent.key(), make_int_key(1000).as_slice()); + assert_eq!(ent.value(), make_value(1000).as_slice()); + + let ent = it.seek_lower_bound(Bound::Included(b"01005")).unwrap(); + assert_eq!(ent.key(), make_int_key(1010).as_slice()); + assert_eq!(ent.value(), make_value(1010).as_slice()); + + let ent = it.seek_lower_bound(Bound::Included(b"01010")).unwrap(); + assert_eq!(ent.key(), make_int_key(1010).as_slice()); + assert_eq!(ent.value(), make_value(1010).as_slice()); + + let ent = it.seek_lower_bound(Bound::Included(b"01020")).unwrap(); + assert_eq!(ent.key(), make_int_key(1020).as_slice()); + assert_eq!(ent.value(), make_value(1020).as_slice()); + + let ent = it.seek_lower_bound(Bound::Included(b"01200")).unwrap(); + assert_eq!(ent.key(), make_int_key(1200).as_slice()); + assert_eq!(ent.value(), make_value(1200).as_slice()); + + let ent = it.seek_lower_bound(Bound::Included(b"01100")).unwrap(); + assert_eq!(ent.key(), make_int_key(1100).as_slice()); + assert_eq!(ent.value(), make_value(1100).as_slice()); + + let ent = it.seek_lower_bound(Bound::Included(b"99999")); + assert!(ent.is_none()); + + l.get_or_insert([].as_slice(), [].as_slice()).unwrap(); + let ent = it.seek_lower_bound(Bound::Included(b"")).unwrap(); + assert_eq!(ent.key(), &[]); + assert_eq!(ent.value(), &[]); + + let ent = it.seek_lower_bound(Bound::Included(b"")).unwrap(); + assert_eq!(ent.key(), &[]); + assert_eq!(ent.value(), &[]); +} + +pub(crate) fn iter_all_versions_seek_lt(l: M) +where + M: Map + Clone, + ::Node: WithoutVersion, +{ + const 
N: usize = 100; + + for i in (0..N).rev() { + let v = i * 10 + 1000; + l.get_or_insert(make_int_key(v).as_slice(), make_value(v).as_slice()) + .unwrap(); + } + + let mut it = l.iter(); + assert!(it.seek_upper_bound(Bound::Excluded(b"")).is_none()); + + let ent = it.seek_upper_bound(Bound::Excluded(b"01000")); + assert!(ent.is_none()); + + let ent = it.seek_upper_bound(Bound::Excluded(b"01001")).unwrap(); + assert_eq!(ent.key(), make_int_key(1000).as_slice()); + assert_eq!(ent.value(), make_value(1000).as_slice()); + + let ent = it.seek_upper_bound(Bound::Excluded(b"01991")).unwrap(); + assert_eq!(ent.key(), make_int_key(1990).as_slice()); + assert_eq!(ent.value(), make_value(1990).as_slice()); + + let ent = it.seek_upper_bound(Bound::Excluded(b"99999")).unwrap(); + assert_eq!(ent.key(), make_int_key(1990).as_slice()); + assert_eq!(ent.value(), make_value(1990).as_slice()); + + l.get_or_insert([].as_slice(), [].as_slice()).unwrap(); + + let ent = it.seek_upper_bound(Bound::Excluded(b"")); + assert!(ent.is_none()); + + let ent = it.seek_upper_bound(Bound::Excluded(b"\x01")).unwrap(); + assert_eq!(ent.key(), &[]); + assert_eq!(ent.value(), &[]); +} + +pub(crate) fn range(l: M) +where + M: Map + Clone, + ::Node: WithoutVersion, +{ + for i in 1..10 { + l.get_or_insert(make_int_key(i).as_slice(), make_value(i).as_slice()) + .unwrap(); + } + + let k3 = make_int_key(3); + let k7 = make_int_key(7); + let mut it = l.range(k3.as_slice()..k7.as_slice()).clone(); + assert_eq!(it.start_bound(), Bound::Included(&k3.as_slice())); + assert_eq!(it.end_bound(), Bound::Excluded(&k7.as_slice())); + + for i in 3..=6 { + let k = make_int_key(i); + let ent = it.seek_lower_bound(Bound::Included(k.as_slice())).unwrap(); + assert_eq!(ent.key(), make_int_key(i).as_slice()); + assert_eq!(ent.value(), make_value(i).as_slice()); + } + + for i in 1..3 { + let k = make_int_key(i); + let ent = it.seek_lower_bound(Bound::Included(k.as_slice())).unwrap(); + assert_eq!(ent.key(), make_int_key(3).as_slice()); + assert_eq!(ent.value(), make_value(3).as_slice()); + } + + for i in 7..10 { + let k = make_int_key(i); + assert!(it.seek_lower_bound(Bound::Included(k.as_slice())).is_none()); + } + + for i in 7..10 { + let k = make_int_key(i); + let ent = it.seek_upper_bound(Bound::Included(k.as_slice())).unwrap(); + assert_eq!(ent.key(), make_int_key(6).as_slice()); + assert_eq!(ent.value(), make_value(6).as_slice()); + } + + let ent = it + .seek_lower_bound(Bound::Included(make_int_key(6).as_slice())) + .unwrap(); + assert_eq!(ent.key(), make_int_key(6).as_slice()); + assert_eq!(ent.value(), make_value(6).as_slice()); + + assert!(it.next().is_none()); + + let ent = it + .seek_upper_bound(Bound::Included(make_int_key(6).as_slice())) + .unwrap(); + assert_eq!(ent.key(), make_int_key(6).as_slice()); + assert_eq!(ent.value(), make_value(6).as_slice()); + + assert!(it.next().is_none()); + + for i in 4..=7 { + let k = make_int_key(i); + let ent = it.seek_upper_bound(Bound::Excluded(k.as_slice())).unwrap(); + assert_eq!(ent.key(), make_int_key(i - 1).as_slice()); + assert_eq!(ent.value(), make_value(i - 1).as_slice()); + } + + for i in 7..10 { + let k = make_int_key(i); + let ent = it.seek_upper_bound(Bound::Excluded(k.as_slice())).unwrap(); + assert_eq!(ent.key(), make_int_key(6).as_slice()); + assert_eq!(ent.value(), make_value(6).as_slice()); + } + + for i in 1..3 { + let k = make_int_key(i); + let ent = it.seek_lower_bound(Bound::Excluded(k.as_slice())).unwrap(); + assert_eq!(ent.key(), make_int_key(3).as_slice()); + 
assert_eq!(ent.value(), make_value(3).as_slice()); + } + + for i in 1..4 { + let k = make_int_key(i); + assert!(it.seek_upper_bound(Bound::Excluded(k.as_slice())).is_none()); + } + + let ent = it + .seek_upper_bound(Bound::Excluded(make_int_key(4).as_slice())) + .unwrap(); + assert_eq!(ent.key(), make_int_key(3).as_slice()); + assert_eq!(ent.value(), make_value(3).as_slice()); + + let ent = it.next_back().unwrap(); + assert_eq!(ent.key(), make_int_key(6).as_slice()); + assert_eq!(ent.value(), make_value(6).as_slice()); +} + +pub(crate) fn iter_latest(l: M) +where + M: Map + Clone, + ::Node: WithoutVersion, +{ + const N: usize = 100; + + for i in 0..N { + l.get_or_insert(make_int_key(i).as_slice(), make_value(i).as_slice()) + .unwrap(); + } + + for i in 50..N { + l.insert(make_int_key(i).as_slice(), make_value(i + 1000).as_slice()) + .unwrap(); + } + + for i in 0..50 { + l.insert(make_int_key(i).as_slice(), make_value(i + 1000).as_slice()) + .unwrap(); + } + + let mut it = l.iter(); + let mut num = 0; + for i in 0..N { + let ent = it.next().unwrap(); + assert_eq!(ent.key(), make_int_key(i).as_slice()); + assert_eq!(ent.value(), make_value(i + 1000).as_slice()); + + num += 1; + } + assert_eq!(num, N); +} + +pub(crate) fn range_latest(l: M) +where + M: Map + Clone, + ::Node: WithoutVersion, +{ + const N: usize = 100; + + for i in 0..N { + l.get_or_insert(make_int_key(i).as_slice(), make_value(i).as_slice()) + .unwrap(); + } + + for i in 50..N { + l.insert(make_int_key(i).as_slice(), make_value(i + 1000).as_slice()) + .unwrap(); + } + + for i in 0..50 { + l.insert(make_int_key(i).as_slice(), make_value(i + 1000).as_slice()) + .unwrap(); + } + + let mut it = l.range::<[u8], _>(..); + let mut num = 0; + for i in 0..N { + let ent = it.next().unwrap(); + assert_eq!(ent.key(), make_int_key(i).as_slice()); + assert_eq!(ent.value(), make_value(i + 1000).as_slice()); + + num += 1; + } + assert_eq!(num, N); +} + +#[cfg(feature = "memmap")] +pub(crate) fn reopen_mmap(prefix: &str) +where + M: Map + Clone, + ::Node: WithoutVersion, +{ + use crate::dynamic::Builder; + + unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join(std::format!("{prefix}_reopen_skipmap")); + let _ = std::fs::remove_file(&p); + { + let l = Builder::new() + .with_create_new(true) + .with_read(true) + .with_write(true) + .with_capacity(ARENA_SIZE as u32) + .map_mut::(&p) + .unwrap(); + for i in 0..1000 { + l.get_or_insert(key(i).as_slice(), new_value(i).as_slice()) + .unwrap(); + } + l.flush().unwrap(); + } + + let l = Builder::new() + .with_read(true) + .with_write(true) + .with_capacity(ARENA_SIZE as u32) + .map::(&p) + .unwrap(); + assert_eq!(1000, l.len()); + for i in 0..1000 { + let k = key(i); + let ent = l.get(k.as_slice()).unwrap(); + assert_eq!(new_value(i).as_slice(), ent.value()); + assert_eq!(ent.key(), k.as_slice()); + } + } +} + +#[cfg(feature = "memmap")] +pub(crate) fn reopen_mmap2(prefix: &str) +where + M: Map + Clone, + ::Node: WithoutVersion, +{ + use crate::dynamic::Builder; + + unsafe { + use rand::seq::SliceRandom; + + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join(::std::format!("{prefix}_reopen2_skipmap")); + { + let l = Builder::new() + .with_create_new(true) + .with_read(true) + .with_write(true) + .with_capacity(ARENA_SIZE as u32) + .map_mut::(&p) + .unwrap(); + let mut data = (0..1000).collect::<::std::vec::Vec>(); + data.shuffle(&mut rand::thread_rng()); + for i in &data { + let i = *i; + l.get_or_insert(key(i).as_slice(), new_value(i).as_slice()) + .unwrap(); + } 
+ l.flush_async().unwrap(); + + for i in data { + let k = key(i); + let ent = l.get(k.as_slice()).unwrap(); + assert_eq!(new_value(i).as_slice(), ent.value()); + assert_eq!(ent.key(), k.as_slice()); + } + } + + let l = Builder::new() + .with_read(true) + .with_write(true) + .with_capacity(ARENA_SIZE as u32) + .map::(&p) + .unwrap(); + assert_eq!(1000, l.len()); + let mut data = (0..1000).collect::<::std::vec::Vec>(); + data.shuffle(&mut rand::thread_rng()); + for i in data { + let k = key(i); + let ent = l.get(k.as_slice()).unwrap(); + assert_eq!(new_value(i).as_slice(), ent.value()); + assert_eq!(ent.key(), k.as_slice()); + } + } +} + +#[cfg(feature = "memmap")] +pub(crate) fn reopen_mmap3(prefix: &str) +where + M: Map + Clone, + ::Node: WithoutVersion, +{ + use crate::dynamic::Builder; + + unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join(std::format!("{prefix}_reopen3_skipmap")); + { + let l = Builder::new() + .with_create_new(true) + .with_read(true) + .with_write(true) + .with_capacity(ARENA_SIZE as u32) + .map_mut::(&p) + .unwrap(); + for i in 0..1000 { + l.get_or_insert(key(i).as_slice(), new_value(i).as_slice()) + .unwrap(); + } + l.flush().unwrap(); + } + + let l = Builder::new() + .with_read(true) + .with_write(true) + .with_capacity((ARENA_SIZE * 2) as u32) + .map_mut::(&p) + .unwrap(); + assert_eq!(1000, l.len()); + for i in 0..1000 { + let k = key(i); + let ent = l.get(k.as_slice()).unwrap(); + assert_eq!(new_value(i).as_slice(), ent.value()); + assert_eq!(ent.key(), k.as_slice()); + } + } +} + +// reopen multiple skipmaps based on the same allocator +#[cfg(feature = "memmap")] +pub(crate) fn reopen_mmap4(prefix: &str) +where + M: Map + Clone + Send + Sync + 'static, + ::Node: WithoutVersion, +{ + use crate::dynamic::Builder; + + unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join(std::format!("{prefix}_reopen4_skipmap")); + let header = { + let l = Builder::new() + .with_create_new(true) + .with_read(true) + .with_write(true) + .with_capacity(ARENA_SIZE as u32) + .map_mut::(&p) + .unwrap(); + let l2 = M::create_from_allocator(l.allocator().clone(), Ascend).unwrap(); + let h2 = l2.header().copied().unwrap(); + let t1 = std::thread::spawn(move || { + for i in 0..500 { + l.get_or_insert(key(i).as_slice(), new_value(i).as_slice()) + .unwrap(); + } + l.flush().unwrap(); + }); + + let t2 = std::thread::spawn(move || { + for i in 500..1000 { + l2.get_or_insert(key(i).as_slice(), new_value(i).as_slice()) + .unwrap(); + } + l2.flush().unwrap(); + }); + + t1.join().unwrap(); + t2.join().unwrap(); + + h2 + }; + + let l = Builder::new() + .with_read(true) + .with_write(true) + .with_capacity((ARENA_SIZE * 2) as u32) + .map_mut::(&p) + .unwrap(); + let l2 = M::open_from_allocator(header, l.allocator().clone(), Ascend).unwrap(); + assert_eq!(500, l.len()); + assert_eq!(500, l2.len()); + + for i in 0..500 { + let k = key(i); + let ent = l.get(k.as_slice()).unwrap(); + assert_eq!(new_value(i).as_slice(), ent.value()); + assert_eq!(ent.key(), k.as_slice()); + } + + for i in 500..1000 { + let k = key(i); + let ent = l2.get(k.as_slice()).unwrap(); + assert_eq!(new_value(i).as_slice(), ent.value()); + assert_eq!(ent.key(), k.as_slice()); + } + } +} + +struct Person { + id: u32, + name: std::string::String, +} + +impl Person { + fn encoded_size(&self) -> usize { + 4 + self.name.len() + } +} + +pub(crate) fn get_or_insert_with_value(l: M) +where + M: Map + Clone, + ::Node: WithoutVersion, +{ + let alice = Person { + id: 1, + name: 
std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size(); + + let vb = ValueBuilder::new(encoded_size, |val: &mut VacantBuffer<'_>| { + assert_eq!(val.capacity(), encoded_size); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size - 4); + assert_eq!(&*val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(&*val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "incomplete buffer data: expected 0 bytes for decoding, but only 1 bytes were available" + ); + Ok(encoded_size) + }); + + l.get_or_insert_with_value_builder::<()>(b"alice".as_slice(), vb) + .unwrap(); +} + +pub(crate) fn get_or_insert_with(l: M) +where + M: Map + Clone, + ::Node: WithoutVersion, +{ + let alice = Person { + id: 1, + name: std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size(); + + let kb = KeyBuilder::new(5u8.into(), |key: &mut VacantBuffer<'_>| { + key.put_slice(b"alice").unwrap(); + Ok(5) + }); + + let vb = ValueBuilder::new(encoded_size, |val: &mut VacantBuffer<'_>| { + assert_eq!(val.capacity(), encoded_size); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size - 4); + assert_eq!(&*val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(&*val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "incomplete buffer data: expected 0 bytes for decoding, but only 1 bytes were available" + ); + Ok(encoded_size) + }); + + l.get_or_insert_with_builders::<(), ()>(kb, vb).unwrap(); +} + +pub(crate) fn insert(l: M) +where + M: Map + Clone, + ::Node: WithoutVersion, +{ + let k = 0u64.to_le_bytes(); + for i in 0..100 { + let v = new_value(i); + let old = l.insert(k.as_slice(), v.as_slice()).unwrap(); + if let Some(old) = old { + assert_eq!(old.key(), k.as_slice()); + assert_eq!(old.value(), new_value(i - 1).as_slice()); + } + } + + let ent = l.get(k.as_slice()).unwrap(); + assert_eq!(ent.key(), k.as_slice()); + assert_eq!(ent.value(), new_value(99).as_slice()); +} + +pub(crate) fn insert_with_value(l: M) +where + M: Map + Clone, + ::Node: WithoutVersion, +{ + let alice = Person { + id: 1, + name: std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size(); + + let vb = ValueBuilder::new(encoded_size, |val: &mut VacantBuffer<'_>| { + assert_eq!(val.capacity(), encoded_size); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size - 4); + assert_eq!(val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "incomplete buffer data: expected 0 bytes for decoding, but only 1 bytes were available" + ); + Ok(encoded_size) + }); + + l.insert_with_value_builder::<()>(b"alice".as_slice(), vb) + .unwrap(); + + let alice2 = Person { + id: 2, + name: 
std::string::String::from("Alice"), + }; + + let vb = ValueBuilder::new(encoded_size, |val: &mut VacantBuffer<'_>| { + assert_eq!(val.capacity(), encoded_size); + assert!(val.is_empty()); + val.put_u32_le(alice2.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size - 4); + assert_eq!(&*val, alice2.id.to_le_bytes()); + val[..4].copy_from_slice(&alice2.id.to_be_bytes()); + assert_eq!(&*val, alice2.id.to_be_bytes()); + val.put_slice(alice2.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "incomplete buffer data: expected 0 bytes for decoding, but only 1 bytes were available" + ); + Ok(encoded_size) + }); + + let old = l + .insert_with_value_builder::<()>(b"alice".as_slice(), vb) + .unwrap() + .unwrap(); + + assert_eq!(old.key(), b"alice"); + assert!(old.value().starts_with(&alice.id.to_be_bytes())); + + let ent = l.get(b"alice").unwrap(); + assert_eq!(ent.key(), b"alice"); + assert!(ent.value().starts_with(&alice2.id.to_be_bytes())); +} + +pub(crate) fn insert_with(l: M) +where + M: Map + Clone, + ::Node: WithoutVersion, +{ + let alice = Person { + id: 1, + name: std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size(); + + let kb = KeyBuilder::new(5u8.into(), |key: &mut VacantBuffer<'_>| { + key.put_slice(b"alice").unwrap(); + Ok(5) + }); + + let vb = ValueBuilder::new(encoded_size, |val: &mut VacantBuffer<'_>| { + assert_eq!(val.capacity(), encoded_size); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size - 4); + assert_eq!(val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "incomplete buffer data: expected 0 bytes for decoding, but only 1 bytes were available" + ); + Ok(encoded_size) + }); + + l.insert_with_builders::<(), ()>(kb, vb).unwrap(); + + let alice2 = Person { + id: 2, + name: std::string::String::from("Alice"), + }; + + let vb = ValueBuilder::new(encoded_size, |val: &mut VacantBuffer<'_>| { + assert_eq!(val.capacity(), encoded_size); + assert!(val.is_empty()); + val.put_u32_le(alice2.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size - 4); + assert_eq!(&*val, alice2.id.to_le_bytes()); + val[..4].copy_from_slice(&alice2.id.to_be_bytes()); + assert_eq!(&*val, alice2.id.to_be_bytes()); + val.put_slice(alice2.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "incomplete buffer data: expected 0 bytes for decoding, but only 1 bytes were available" + ); + Ok(encoded_size) + }); + let old = l.insert_with_builders::<(), ()>(kb, vb).unwrap().unwrap(); + + assert_eq!(old.key(), b"alice"); + assert!(old.value().starts_with(&alice.id.to_be_bytes())); + + let ent = l.get(b"alice").unwrap(); + assert_eq!(ent.key(), b"alice"); + assert!(ent.value().starts_with(&alice2.id.to_be_bytes())); +} + +pub(crate) fn get_or_remove(l: M) +where + M: Map + Clone, + ::Node: WithoutVersion, +{ + for i in 0..100 { + let v = new_value(i); + l.insert(key(i).as_slice(), v.as_slice()).unwrap(); + } + + for i in 0..100 { + let k = key(i); + 
let old = l.get_or_remove(k.as_slice()).unwrap().unwrap(); + assert_eq!(old.key(), k.as_slice()); + assert_eq!(old.value(), new_value(i).as_slice()); + + let old = l.get_or_remove(k.as_slice()).unwrap().unwrap(); + assert_eq!(old.key(), k.as_slice()); + assert_eq!(old.value(), new_value(i).as_slice()); + } + + for i in 0..100 { + let k = key(i); + let ent = l.get(k.as_slice()).unwrap(); + assert_eq!(ent.key(), k.as_slice()); + assert_eq!(ent.value(), new_value(i).as_slice()); + } +} + +pub(crate) fn remove(l: M) +where + M: Map + Clone, + ::Node: WithoutVersion, +{ + for i in 0..100 { + let v = new_value(i); + l.insert(key(i).as_slice(), v.as_slice()).unwrap(); + } + + for i in 0..100 { + let k = key(i); + // no race, remove should succeed + let old = l.remove(k.as_slice()).unwrap(); + assert!(old.is_none()); + + // key already removed + let old = l.remove(k.as_slice()).unwrap(); + assert!(old.is_none()); + } + + for i in 0..100 { + let k = key(i); + let ent = l.get(k.as_slice()); + assert!(ent.is_none()); + } +} + +pub(crate) fn remove2(l: M) +where + M: Map + Clone, + ::Node: WithoutVersion, +{ + for i in 0..100 { + let v = new_value(i); + l.insert(key(i).as_slice(), v.as_slice()).unwrap(); + } + + for i in 0..100 { + let k = key(i); + // not found, remove should succeed + let old = l.remove(k.as_slice()).unwrap(); + assert!(old.is_none()); + + // no-race, remove should succeed + let old = l.remove(k.as_slice()).unwrap(); + assert!(old.is_none()); + } + + for i in 0..100 { + let k = key(i); + let ent = l.get(k.as_slice()); + assert!(ent.is_none()); + } +} + +#[macro_export] +#[doc(hidden)] +macro_rules! __dynamic_map_tests { + ($prefix:literal: $ty:ty) => { + $crate::__unit_tests!($crate::tests::dynamic::map |$prefix, $ty, $crate::tests::dynamic::TEST_OPTIONS| { + empty, + basic, + #[cfg(not(miri))] + basic_large, + get, + iter_all_versions_next, + range_next, + iter_all_versions_prev, + range_prev, + iter_all_versions_seek_ge, + iter_all_versions_seek_lt, + range, + iter_latest, + range_latest, + get_or_insert_with_value, + get_or_insert_with, + insert, + insert_with_value, + insert_with, + get_or_remove, + remove, + remove2, + gt, + ge, + lt, + le, + }); + + $crate::__unit_tests!($crate::tests::dynamic::map |$prefix, $ty, $crate::tests::dynamic::TEST_FULL_OPTIONS| { + full, + }); + + #[test] + #[cfg(feature = "memmap")] + #[cfg_attr(miri, ignore)] + #[allow(clippy::macro_metavars_in_unsafe)] + fn reopen() { + $crate::tests::dynamic::map::reopen_mmap::<$ty>($prefix); + } + + #[test] + #[cfg(feature = "memmap")] + #[cfg_attr(miri, ignore)] + #[allow(clippy::macro_metavars_in_unsafe)] + fn reopen2() { + $crate::tests::dynamic::map::reopen_mmap2::<$ty>($prefix); + } + + #[test] + #[cfg(feature = "memmap")] + #[cfg_attr(miri, ignore)] + #[allow(clippy::macro_metavars_in_unsafe)] + fn reopen3() { + $crate::tests::dynamic::map::reopen_mmap3::<$ty>($prefix); + } + }; + // Support from golang :) + (go $prefix:literal: $ty:ty => $opts:path) => { + $crate::__unit_tests!($crate::tests::dynamic::map |$prefix, $ty, $opts| { + #[cfg(feature = "std")] + concurrent_basic_two_maps, + #[cfg(feature = "std")] + concurrent_basic, + #[cfg(feature = "std")] + concurrent_basic2, + #[cfg(feature = "std")] + concurrent_one_key, + #[cfg(feature = "std")] + concurrent_one_key2, + }); + + #[test] + #[cfg(feature = "memmap")] + #[cfg_attr(miri, ignore)] + #[allow(clippy::macro_metavars_in_unsafe)] + fn reopen4() { + $crate::tests::dynamic::map::reopen_mmap4::<$ty>($prefix); + } + + // #[cfg(not(miri))] + // mod 
high_compression { + // use super::*; + + // __unit_tests!($crate::tests::map |$prefix, $ty, $crate::tests::TEST_HIGH_COMPRESSION_OPTIONS| { + // #[cfg(feature = "std")] + // concurrent_basic, + // #[cfg(feature = "std")] + // concurrent_basic2, + // #[cfg(feature = "std")] + // concurrent_one_key, + // #[cfg(feature = "std")] + // concurrent_one_key2, + // }); + // } + + $crate::__unit_tests!($crate::tests::dynamic::map |$prefix, $ty, $crate::tests::dynamic::BIG_TEST_OPTIONS| { + #[cfg(all(feature = "std", not(miri)))] + concurrent_basic_big_values, + }); + } +} diff --git a/src/tests/dynamic/multiple_version.rs b/src/tests/dynamic/multiple_version.rs new file mode 100644 index 0000000..8153849 --- /dev/null +++ b/src/tests/dynamic/multiple_version.rs @@ -0,0 +1,2363 @@ +#![allow(dead_code)] + +use core::ops::Bound; + +use crate::{ + allocator::Sealed, + error::{ArenaError, Error}, +}; + +use core::sync::atomic::Ordering; + +use dbutils::{buffer::VacantBuffer, equivalentor::Ascend}; + +use crate::{ + allocator::WithVersion, dynamic::multiple_version::Map, KeyBuilder, ValueBuilder, MIN_VERSION, +}; + +use super::*; + +pub(crate) fn empty(l: M) +where + M: Map, + ::Node: WithVersion, +{ + let mut it = l.iter(MIN_VERSION); + + assert!(it.seek_lower_bound::<[u8]>(Bound::Unbounded).is_none()); + assert!(it.seek_upper_bound::<[u8]>(Bound::Unbounded).is_none()); + assert!(it.seek_lower_bound(Bound::Included(b"aaa")).is_none()); + assert!(it.seek_upper_bound(Bound::Excluded(b"aaa")).is_none()); + assert!(it.seek_lower_bound(Bound::Excluded(b"aaa")).is_none()); + assert!(it.seek_upper_bound(Bound::Included(b"aaa")).is_none()); + assert!(l.first(MIN_VERSION,).is_none()); + assert!(l.last(MIN_VERSION,).is_none()); + + assert!(l.get(MIN_VERSION, b"aaa".as_slice()).is_none()); + assert!(!l.contains_key(MIN_VERSION, b"aaa".as_slice())); + assert!(l.allocated() > 0); + assert!(l.capacity() > 0); + assert_eq!(l.remaining(), l.capacity() - l.allocated()); +} + +pub(crate) fn full(l: M) +where + M: Map, + ::Node: WithVersion, +{ + let mut found_arena_full = false; + + for i in 0..100 { + if let Err(e) = l.get_or_insert( + MIN_VERSION, + make_int_key(i).as_slice(), + make_value(i).as_slice(), + ) { + assert!(matches!( + e, + Error::Arena(ArenaError::InsufficientSpace { .. }) + )); + found_arena_full = true; + break; + } + } + + assert!(found_arena_full); +} + +pub(crate) fn basic(l: M) +where + M: Map + Clone, + ::Node: WithVersion, +{ + // Try adding values. 
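+  // `get_or_insert` returns `Ok(None)` on a fresh insert and hands back the
+  // existing entry otherwise; compare the `unwrap().unwrap()` on the repeated
+  // version-2 insert of "b" below with the `is_none` for the new key "c".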
+ l.get_or_insert(0, b"key1".as_slice(), make_value(1).as_slice()) + .unwrap(); + l.get_or_insert(0, b"key3".as_slice(), make_value(3).as_slice()) + .unwrap(); + l.get_or_insert(0, b"key2".as_slice(), make_value(2).as_slice()) + .unwrap(); + + { + let mut it = l.iter_all_versions(0); + let ent = it.seek_lower_bound(Bound::Included(b"key1")).unwrap(); + assert_eq!(ent.key(), b"key1".as_slice()); + assert_eq!(ent.value().unwrap(), make_value(1).as_slice()); + assert_eq!(ent.version(), 0); + + let ent = it + .seek_lower_bound(Bound::Included(b"key2".as_slice())) + .unwrap(); + assert_eq!(ent.key(), b"key2".as_slice()); + assert_eq!(ent.value().unwrap(), make_value(2).as_slice()); + assert_eq!(ent.version(), 0); + + let ent = it + .seek_lower_bound(Bound::Included(b"key3".as_slice())) + .unwrap(); + assert_eq!(ent.key(), b"key3".as_slice()); + assert_eq!(ent.value().unwrap(), make_value(3).as_slice()); + assert_eq!(ent.version(), 0); + } + + l.get_or_insert(1, "a".as_bytes(), [].as_slice()).unwrap(); + l.get_or_insert(2, "a".as_bytes(), [].as_slice()).unwrap(); + + { + let mut it = l.iter_all_versions(2); + let ent = it + .seek_lower_bound(Bound::Included(b"a".as_slice())) + .unwrap(); + assert_eq!(ent.key(), b"a".as_slice()); + assert_eq!(ent.value().unwrap(), &[]); + assert_eq!(ent.version(), 2); + + let ent = it.next().unwrap(); + assert_eq!(ent.key(), b"a".as_slice()); + assert_eq!(ent.value().unwrap(), &[]); + assert_eq!(ent.version(), 1); + } + + l.get_or_insert(2, "b".as_bytes(), [].as_slice()).unwrap(); + l.get_or_insert(1, "b".as_bytes(), [].as_slice()).unwrap(); + + { + let mut it = l.iter_all_versions(2); + let ent = it.seek_lower_bound(Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"b"); + assert_eq!(ent.value().unwrap(), &[]); + assert_eq!(ent.version(), 2); + + let ent = it.next().unwrap(); + assert_eq!(ent.key(), b"b"); + assert_eq!(ent.value().unwrap(), &[]); + assert_eq!(ent.version(), 1); + + let ent = it.head().unwrap(); + assert_eq!(ent.key(), b"b"); + assert_eq!(ent.value().unwrap(), &[]); + assert_eq!(ent.version(), 1); + } + + l.get_or_insert(2, b"b".as_slice(), [].as_slice()) + .unwrap() + .unwrap(); + + assert!(l + .get_or_insert(2, b"c".as_slice(), [].as_slice()) + .unwrap() + .is_none()); + + #[cfg(feature = "memmap")] + l.flush().unwrap(); + + #[cfg(feature = "memmap")] + l.flush_async().unwrap(); +} + +pub(crate) fn iter_all_versions_mvcc(l: M) +where + M: Map + Clone, + ::Node: WithVersion, +{ + l.get_or_insert(1, b"a".as_slice(), b"a1".as_slice()) + .unwrap(); + l.get_or_insert(3, b"a".as_slice(), b"a2".as_slice()) + .unwrap(); + l.get_or_insert(1, b"c".as_slice(), b"c1".as_slice()) + .unwrap(); + l.get_or_insert(3, b"c".as_slice(), b"c2".as_slice()) + .unwrap(); + + let mut it = l.iter_all_versions(0); + let mut num = 0; + while it.next().is_some() { + num += 1; + } + assert_eq!(num, 0); + + let mut it = l.iter_all_versions(1); + let mut num = 0; + while it.next().is_some() { + num += 1; + } + assert_eq!(num, 2); + + let mut it = l.iter_all_versions(2); + let mut num = 0; + while it.next().is_some() { + num += 1; + } + assert_eq!(num, 2); + + let mut it = l.iter_all_versions(3); + let mut num = 0; + while it.next().is_some() { + num += 1; + } + assert_eq!(num, 4); + + let mut it = l.iter_all_versions(0); + assert!(it.seek_lower_bound::<[u8]>(Bound::Unbounded).is_none()); + assert!(it.seek_upper_bound::<[u8]>(Bound::Unbounded).is_none()); + + let mut it = l.iter_all_versions(1); + let ent = it.seek_lower_bound::<[u8]>(Bound::Unbounded).unwrap(); + 
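+  // Reads are version-gated: at version 1 the unbounded seek surfaces "a1",
+  // and the version-3 rewrite "a2" stays invisible until the iterator is
+  // created with version 3 or above.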
assert_eq!(ent.key(), b"a".as_slice()); + assert_eq!(ent.value().unwrap(), b"a1".as_slice()); + assert_eq!(ent.version(), 1); + + let ent = it.seek_upper_bound::<[u8]>(Bound::Unbounded).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value().unwrap(), b"c1".as_slice()); + assert_eq!(ent.version(), 1); + + let mut it = l.iter_all_versions(2); + let ent = it.seek_lower_bound::<[u8]>(Bound::Unbounded).unwrap(); + assert_eq!(ent.key(), b"a".as_slice()); + assert_eq!(ent.value().unwrap(), b"a1".as_slice()); + assert_eq!(ent.version(), 1); + + let ent = it.seek_upper_bound::<[u8]>(Bound::Unbounded).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value().unwrap(), b"c1".as_slice()); + assert_eq!(ent.version(), 1); + + let mut it = l.iter_all_versions(3); + + let ent = it.seek_upper_bound(Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"a".as_slice()); + assert_eq!(ent.value().unwrap(), b"a1".as_slice(),); + assert_eq!(ent.version(), 1); + + let ent = ent.prev().unwrap(); + assert_eq!(ent.key(), b"a".as_slice()); + assert_eq!(ent.value().unwrap(), b"a2".as_slice()); + assert_eq!(ent.version(), 3); + + let ent = it + .seek_upper_bound(Bound::Included(b"c".as_slice())) + .unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value().unwrap(), b"c1".as_slice()); + assert_eq!(ent.version(), 1); + + let ent = ent.prev().unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value().unwrap(), b"c2".as_slice()); + assert_eq!(ent.version(), 3); + + let ent = it.seek_lower_bound(Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value().unwrap(), b"c2".as_slice()); + assert_eq!(ent.version(), 3); + + let ent = it + .seek_lower_bound(Bound::Included(b"c".as_slice())) + .unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value().unwrap(), b"c2".as_slice()); + assert_eq!(ent.version(), 3); +} + +pub(crate) fn get_mvcc(l: M) +where + M: Map + Clone, + ::Node: WithVersion, +{ + l.get_or_insert(1, b"a".as_slice(), b"a1".as_slice()) + .unwrap(); + l.get_or_insert(3, b"a".as_slice(), b"a2".as_slice()) + .unwrap(); + l.get_or_insert(1, b"c".as_slice(), b"c1".as_slice()) + .unwrap(); + l.get_or_insert(3, b"c".as_slice(), b"c2".as_slice()) + .unwrap(); + + let ent = l.get(1, b"a".as_slice()).unwrap(); + assert_eq!(ent.key(), b"a".as_slice()); + assert_eq!(ent.value(), b"a1".as_slice()); + assert_eq!(ent.version(), 1); + + let ent = l.get(2, b"a".as_slice()).unwrap(); + assert_eq!(ent.key(), b"a".as_slice()); + assert_eq!(ent.value(), b"a1".as_slice()); + assert_eq!(ent.version(), 1); + + let ent = l.get(3, b"a".as_slice()).unwrap(); + assert_eq!(ent.key(), b"a".as_slice()); + assert_eq!(ent.value(), b"a2".as_slice()); + assert_eq!(ent.version(), 3); + + let ent = l.get(4, b"a".as_slice()).unwrap(); + assert_eq!(ent.key(), b"a".as_slice()); + assert_eq!(ent.value(), b"a2".as_slice()); + assert_eq!(ent.version(), 3); + + assert!(l.get(0, b"b").is_none()); + assert!(l.get(1, b"b").is_none()); + assert!(l.get(2, b"b").is_none()); + assert!(l.get(3, b"b").is_none()); + assert!(l.get(4, b"b").is_none()); + + let ent = l.get(1, b"c".as_slice()).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value(), b"c1".as_slice()); + assert_eq!(ent.version(), 1); + + let ent = l.get(2, b"c".as_slice()).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value(), b"c1".as_slice()); + assert_eq!(ent.version(), 1); + + let ent = l.get(3, b"c".as_slice()).unwrap(); 
+ assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value(), b"c2".as_slice()); + assert_eq!(ent.version(), 3); + + let ent = l.get(4, b"c".as_slice()).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value(), b"c2".as_slice()); + assert_eq!(ent.version(), 3); + + assert!(l.get(5, b"d").is_none()); +} + +pub(crate) fn gt(l: M) +where + M: Map + Clone, + ::Node: WithVersion, +{ + l.get_or_insert(1, b"a".as_slice(), b"a1".as_slice()) + .unwrap(); + l.get_or_insert(3, b"a".as_slice(), b"a2".as_slice()) + .unwrap(); + l.get_or_insert(1, b"c".as_slice(), b"c1".as_slice()) + .unwrap(); + l.get_or_insert(3, b"c".as_slice(), b"c2".as_slice()) + .unwrap(); + l.get_or_insert(5, b"c".as_slice(), b"c3".as_slice()) + .unwrap(); + + assert!(l.lower_bound(0, Bound::Excluded(b"a".as_slice())).is_none()); + assert!(l.lower_bound(0, Bound::Excluded(b"b")).is_none()); + assert!(l.lower_bound(0, Bound::Excluded(b"c".as_slice())).is_none()); + + let ent = l.lower_bound(1, Bound::Excluded(b"")).unwrap(); + assert_eq!(ent.key(), b"a".as_slice()); + assert_eq!(ent.value(), b"a1".as_slice()); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(2, Bound::Excluded(b"")).unwrap(); + assert_eq!(ent.key(), b"a".as_slice()); + assert_eq!(ent.value(), b"a1".as_slice()); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(3, Bound::Excluded(b"")).unwrap(); + assert_eq!(ent.key(), b"a".as_slice()); + assert_eq!(ent.value(), b"a2".as_slice()); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(1, Bound::Excluded(b"a".as_slice())).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value(), b"c1".as_slice()); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(2, Bound::Excluded(b"a".as_slice())).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value(), b"c1".as_slice()); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(3, Bound::Excluded(b"a".as_slice())).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value(), b"c2".as_slice()); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(1, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value(), b"c1".as_slice()); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(2, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value(), b"c1".as_slice()); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(3, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value(), b"c2".as_slice()); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(4, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value(), b"c2".as_slice()); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(5, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value(), b"c3".as_slice()); + assert_eq!(ent.version(), 5); + + let ent = l.lower_bound(6, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value(), b"c3".as_slice()); + assert_eq!(ent.version(), 5); + + assert!(l.lower_bound(1, Bound::Excluded(b"c".as_slice())).is_none()); + assert!(l.lower_bound(2, Bound::Excluded(b"c".as_slice())).is_none()); + assert!(l.lower_bound(3, Bound::Excluded(b"c".as_slice())).is_none()); + assert!(l.lower_bound(4, Bound::Excluded(b"c".as_slice())).is_none()); + assert!(l.lower_bound(5, Bound::Excluded(b"c".as_slice())).is_none()); 
+ assert!(l.lower_bound(6, Bound::Excluded(b"c".as_slice())).is_none()); +} + +pub(crate) fn ge(l: M) +where + M: Map + Clone, + ::Node: WithVersion, +{ + l.get_or_insert(1, b"a".as_slice(), b"a1".as_slice()) + .unwrap(); + l.get_or_insert(3, b"a".as_slice(), b"a2".as_slice()) + .unwrap(); + l.get_or_insert(1, b"c".as_slice(), b"c1".as_slice()) + .unwrap(); + l.get_or_insert(3, b"c".as_slice(), b"c2".as_slice()) + .unwrap(); + + assert!(l + .lower_bound(MIN_VERSION, Bound::Included(b"a".as_slice())) + .is_none()); + assert!(l.lower_bound(MIN_VERSION, Bound::Included(b"b")).is_none()); + assert!(l + .lower_bound(MIN_VERSION, Bound::Included(b"c".as_slice())) + .is_none()); + + let ent = l.lower_bound(1, Bound::Included(b"a".as_slice())).unwrap(); + assert_eq!(ent.key(), b"a".as_slice()); + assert_eq!(ent.value(), b"a1".as_slice()); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(2, Bound::Included(b"a".as_slice())).unwrap(); + assert_eq!(ent.key(), b"a".as_slice()); + assert_eq!(ent.value(), b"a1".as_slice()); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(3, Bound::Included(b"a".as_slice())).unwrap(); + assert_eq!(ent.key(), b"a".as_slice()); + assert_eq!(ent.value(), b"a2".as_slice()); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(4, Bound::Included(b"a".as_slice())).unwrap(); + assert_eq!(ent.key(), b"a".as_slice()); + assert_eq!(ent.value(), b"a2".as_slice()); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(1, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value(), b"c1".as_slice()); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(2, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value(), b"c1".as_slice()); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(3, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value(), b"c2".as_slice()); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(4, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value(), b"c2".as_slice()); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(1, Bound::Included(b"c".as_slice())).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value(), b"c1".as_slice()); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(2, Bound::Included(b"c".as_slice())).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value(), b"c1".as_slice()); + assert_eq!(ent.version(), 1); + + let ent = l.lower_bound(3, Bound::Included(b"c".as_slice())).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value(), b"c2".as_slice()); + assert_eq!(ent.version(), 3); + + let ent = l.lower_bound(4, Bound::Included(b"c".as_slice())).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value(), b"c2".as_slice()); + assert_eq!(ent.version(), 3); + + assert!(l.lower_bound(MIN_VERSION, Bound::Included(b"d")).is_none()); + assert!(l.lower_bound(1, Bound::Included(b"d")).is_none()); + assert!(l.lower_bound(2, Bound::Included(b"d")).is_none()); + assert!(l.lower_bound(3, Bound::Included(b"d")).is_none()); + assert!(l.lower_bound(4, Bound::Included(b"d")).is_none()); +} + +pub(crate) fn le(l: M) +where + M: Map + Clone, + ::Node: WithVersion, +{ + l.get_or_insert(1, b"a".as_slice(), b"a1".as_slice()) + .unwrap(); + l.get_or_insert(3, b"a".as_slice(), b"a2".as_slice()) + .unwrap(); + l.get_or_insert(1, b"c".as_slice(), 
b"c1".as_slice()) + .unwrap(); + l.get_or_insert(3, b"c".as_slice(), b"c2".as_slice()) + .unwrap(); + + assert!(l + .upper_bound(MIN_VERSION, Bound::Included(b"a".as_slice())) + .is_none()); + assert!(l.upper_bound(MIN_VERSION, Bound::Included(b"b")).is_none()); + assert!(l + .upper_bound(MIN_VERSION, Bound::Included(b"c".as_slice())) + .is_none()); + + let ent = l.upper_bound(1, Bound::Included(b"a".as_slice())).unwrap(); + assert_eq!(ent.key(), b"a".as_slice()); + assert_eq!(ent.value(), b"a1".as_slice()); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(2, Bound::Included(b"a".as_slice())).unwrap(); + assert_eq!(ent.key(), b"a".as_slice()); + assert_eq!(ent.value(), b"a1".as_slice()); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(3, Bound::Included(b"a".as_slice())).unwrap(); + assert_eq!(ent.key(), b"a".as_slice()); + assert_eq!(ent.value(), b"a2".as_slice()); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(4, Bound::Included(b"a".as_slice())).unwrap(); + assert_eq!(ent.key(), b"a".as_slice()); + assert_eq!(ent.value(), b"a2".as_slice()); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(1, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"a".as_slice()); + assert_eq!(ent.value(), b"a1".as_slice()); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(2, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"a".as_slice()); + assert_eq!(ent.value(), b"a1".as_slice()); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(3, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"a".as_slice()); + assert_eq!(ent.value(), b"a2".as_slice()); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(4, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"a".as_slice()); + assert_eq!(ent.value(), b"a2".as_slice()); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(1, Bound::Included(b"c".as_slice())).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value(), b"c1".as_slice()); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(2, Bound::Included(b"c".as_slice())).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value(), b"c1".as_slice()); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(3, Bound::Included(b"c".as_slice())).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value(), b"c2".as_slice()); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(4, Bound::Included(b"c".as_slice())).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value(), b"c2".as_slice()); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(1, Bound::Included(b"d")).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value(), b"c1".as_slice()); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(2, Bound::Included(b"d")).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value(), b"c1".as_slice()); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(3, Bound::Included(b"d")).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value(), b"c2".as_slice()); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(4, Bound::Included(b"d")).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value(), b"c2".as_slice()); + assert_eq!(ent.version(), 3); +} + +pub(crate) fn lt(l: M) +where + M: Map + Clone, + ::Node: WithVersion, +{ + l.get_or_insert(1, b"a".as_slice(), b"a1".as_slice()) + .unwrap(); + l.get_or_insert(3, b"a".as_slice(), 
b"a2".as_slice()) + .unwrap(); + l.get_or_insert(1, b"c".as_slice(), b"c1".as_slice()) + .unwrap(); + l.get_or_insert(3, b"c".as_slice(), b"c2".as_slice()) + .unwrap(); + + assert!(l + .upper_bound(MIN_VERSION, Bound::Excluded(b"a".as_slice())) + .is_none()); + assert!(l.upper_bound(MIN_VERSION, Bound::Excluded(b"b")).is_none()); + assert!(l + .upper_bound(MIN_VERSION, Bound::Excluded(b"c".as_slice())) + .is_none()); + assert!(l.upper_bound(1, Bound::Excluded(b"a".as_slice())).is_none()); + assert!(l.upper_bound(2, Bound::Excluded(b"a".as_slice())).is_none()); + + let ent = l.upper_bound(1, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"a".as_slice()); + assert_eq!(ent.value(), b"a1".as_slice()); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(2, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"a".as_slice()); + assert_eq!(ent.value(), b"a1".as_slice()); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(3, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"a".as_slice()); + assert_eq!(ent.value(), b"a2".as_slice()); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(4, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"a".as_slice()); + assert_eq!(ent.value(), b"a2".as_slice()); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(1, Bound::Excluded(b"c".as_slice())).unwrap(); + assert_eq!(ent.key(), b"a".as_slice()); + assert_eq!(ent.value(), b"a1".as_slice()); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(2, Bound::Excluded(b"c".as_slice())).unwrap(); + assert_eq!(ent.key(), b"a".as_slice()); + assert_eq!(ent.value(), b"a1".as_slice()); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(3, Bound::Excluded(b"c".as_slice())).unwrap(); + assert_eq!(ent.key(), b"a".as_slice()); + assert_eq!(ent.value(), b"a2".as_slice()); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(4, Bound::Excluded(b"c".as_slice())).unwrap(); + assert_eq!(ent.key(), b"a".as_slice()); + assert_eq!(ent.value(), b"a2".as_slice()); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(1, Bound::Excluded(b"d")).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value(), b"c1".as_slice()); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(2, Bound::Excluded(b"d")).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value(), b"c1".as_slice()); + assert_eq!(ent.version(), 1); + + let ent = l.upper_bound(3, Bound::Excluded(b"d")).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value(), b"c2".as_slice()); + assert_eq!(ent.version(), 3); + + let ent = l.upper_bound(4, Bound::Excluded(b"d")).unwrap(); + assert_eq!(ent.key(), b"c".as_slice()); + assert_eq!(ent.value(), b"c2".as_slice()); + assert_eq!(ent.version(), 3); +} + +#[cfg(not(miri))] +pub(crate) fn basic_large(l: M) +where + M: Map + Clone, + ::Node: WithVersion, +{ + let n = 1000; + + for i in 0..n { + l.get_or_insert(MIN_VERSION, key(i).as_slice(), new_value(i).as_slice()) + .unwrap(); + } + + for i in 0..n { + let k = key(i); + let ent = l.get(MIN_VERSION, k.as_slice()).unwrap(); + assert_eq!(new_value(i).as_slice(), ent.value()); + assert_eq!(ent.version(), 0); + assert_eq!(ent.key(), k.as_slice()); + } + + assert_eq!(n, l.len()); +} + +#[cfg(all( + feature = "std", + any( + all(test, not(miri)), + all_skl_tests, + test_dynamic_sync_multiple_version_concurrent, + test_dynamic_sync_multiple_version_concurrent_with_optimistic_freelist, + 
test_dynamic_sync_multiple_version_concurrent_with_pessimistic_freelist + ) +))] +pub(crate) fn concurrent_basic_two_maps(l: M) +where + M: Map + Clone + Send + 'static, + ::Node: WithVersion, +{ + #[cfg(not(miri))] + const N: usize = 1000; + #[cfg(miri)] + const N: usize = 200; + + let l2 = M::create_from_allocator(l.allocator().clone(), Ascend).unwrap(); + + for i in (0..N / 2).rev() { + let l = l.clone(); + let l2 = l2.clone(); + std::thread::spawn(move || { + l.get_or_insert(MIN_VERSION, key(i).as_slice(), new_value(i).as_slice()) + .unwrap(); + }); + std::thread::spawn(move || { + l2.get_or_insert( + MIN_VERSION, + key(i + N / 2).as_slice(), + new_value(i + N / 2).as_slice(), + ) + .unwrap(); + }); + } + while l.refs() > 2 { + ::core::hint::spin_loop(); + } + for i in 0..N / 2 { + let l = l.clone(); + let l2 = l2.clone(); + std::thread::spawn(move || { + let k = key(i); + assert_eq!( + l.get(MIN_VERSION, k.as_slice()).unwrap().value(), + new_value(i).as_slice(), + "broken: {i}" + ); + }); + std::thread::spawn(move || { + let k = key(i + N / 2); + assert_eq!( + l2.get(MIN_VERSION, k.as_slice()).unwrap().value(), + new_value(i + N / 2).as_slice(), + "broken: {i}" + ); + }); + } + while l.refs() > 2 { + ::core::hint::spin_loop(); + } +} + +#[cfg(all( + feature = "std", + any( + all(test, not(miri)), + all_skl_tests, + test_dynamic_sync_multiple_version_concurrent, + test_dynamic_sync_multiple_version_concurrent_with_optimistic_freelist, + test_dynamic_sync_multiple_version_concurrent_with_pessimistic_freelist + ) +))] +pub(crate) fn concurrent_basic(l: M) +where + M: Map + Clone + Send + 'static, + ::Node: WithVersion, +{ + #[cfg(not(miri))] + const N: usize = 1000; + #[cfg(miri)] + const N: usize = 200; + + for i in 0..N { + let l = l.clone(); + std::thread::spawn(move || { + l.get_or_insert(MIN_VERSION, key(i).as_slice(), new_value(i).as_slice()) + .unwrap(); + }); + } + while l.refs() > 1 { + ::core::hint::spin_loop(); + } + for i in 0..N { + let l = l.clone(); + std::thread::spawn(move || { + let k = key(i); + assert_eq!( + l.get(MIN_VERSION, k.as_slice()).unwrap().value(), + new_value(i).as_slice(), + "broken: {i}" + ); + }); + } + while l.refs() > 1 { + ::core::hint::spin_loop(); + } +} + +#[cfg(all( + feature = "std", + any( + all(test, not(miri)), + all_skl_tests, + test_dynamic_sync_multiple_version_concurrent, + test_dynamic_sync_multiple_version_concurrent_with_optimistic_freelist, + test_dynamic_sync_multiple_version_concurrent_with_pessimistic_freelist + ) +))] +pub(crate) fn concurrent_basic2(l: M) +where + M: Map + Clone + Send + 'static, + ::Node: WithVersion, +{ + #[cfg(not(miri))] + const N: usize = 1000; + #[cfg(miri)] + const N: usize = 200; + + for i in (0..N).rev() { + let l1 = l.clone(); + let l2 = l.clone(); + std::thread::Builder::new() + .name(std::format!("fullmap-concurrent-basic2-writer-{i}-1")) + .spawn(move || { + let _ = l1.insert(MIN_VERSION, int_key(i).as_slice(), new_value(i).as_slice()); + }) + .unwrap(); + + std::thread::Builder::new() + .name(std::format!("fullmap-concurrent-basic2-writer{i}-2")) + .spawn(move || { + let _ = l2.insert(MIN_VERSION, int_key(i).as_slice(), new_value(i).as_slice()); + }) + .unwrap(); + } + while l.refs() > 1 { + ::core::hint::spin_loop(); + } + for i in 0..N { + let l = l.clone(); + std::thread::spawn(move || { + let k = int_key(i); + assert_eq!( + l.get(MIN_VERSION, k.as_slice()).unwrap().value(), + new_value(i).as_slice(), + "broken: {i}" + ); + }); + } + while l.refs() > 1 { + ::core::hint::spin_loop(); + } +} + 
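+// The concurrent tests above never hold `JoinHandle`s; they use the map's
+// reference count as a crude barrier. Each spawned thread owns a clone, so
+// spinning until `refs()` drops back to the number of local owners (1 here,
+// 2 in `concurrent_basic_two_maps`, where the allocator is shared by two
+// maps) is equivalent to joining every writer and reader. A minimal
+// self-contained sketch of the same idiom with a plain `Arc` (illustrative
+// only, not part of this crate's API):
+//
+// ```rust
+// use std::sync::Arc;
+//
+// fn wait_for_clones<T>(handle: &Arc<T>, local_owners: usize) {
+//   while Arc::strong_count(handle) > local_owners {
+//     core::hint::spin_loop();
+//   }
+// }
+// ```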
+#[cfg(all( + all(feature = "std", not(miri)), + any( + all(test, not(miri)), + all_skl_tests, + test_dynamic_sync_multiple_version_concurrent, + test_dynamic_sync_multiple_version_concurrent_with_optimistic_freelist, + test_dynamic_sync_multiple_version_concurrent_with_pessimistic_freelist + ) +))] +pub(crate) fn concurrent_basic_big_values(l: M) +where + M: Map + Clone + Send + 'static, + ::Node: WithVersion, +{ + #[cfg(not(any(miri, feature = "loom")))] + const N: usize = 100; + #[cfg(any(miri, feature = "loom"))] + const N: usize = 20; + + for i in 0..N { + let l = l.clone(); + std::thread::spawn(move || { + l.get_or_insert(MIN_VERSION, key(i).as_slice(), big_value(i).as_slice()) + .unwrap(); + }); + } + while l.refs() > 1 { + ::core::hint::spin_loop(); + } + // assert_eq!(N, l.len()); + for i in 0..N { + let l = l.clone(); + std::thread::spawn(move || { + let k = key(i); + assert_eq!( + l.get(MIN_VERSION, k.as_slice()).unwrap().value(), + big_value(i).as_slice(), + "broken: {i}" + ); + }); + } + while l.refs() > 1 { + ::core::hint::spin_loop(); + } +} + +#[cfg(all( + feature = "std", + any( + all(test, not(miri)), + all_skl_tests, + test_dynamic_sync_multiple_version_concurrent, + test_dynamic_sync_multiple_version_concurrent_with_optimistic_freelist, + test_dynamic_sync_multiple_version_concurrent_with_pessimistic_freelist + ) +))] +pub(crate) fn concurrent_one_key(l: M) +where + M: Map + Clone + Send + 'static, + ::Node: WithVersion, +{ + use core::sync::atomic::Ordering; + use std::sync::Arc; + + #[cfg(not(miri))] + const N: usize = 1000; + #[cfg(miri)] + const N: usize = 200; + + for i in 0..N { + let l = l.clone(); + std::thread::spawn(move || { + let _ = l.get_or_insert(MIN_VERSION, b"thekey".as_slice(), make_value(i).as_slice()); + }); + } + + while l.refs() > 1 { + ::core::hint::spin_loop(); + } + + let saw_value = Arc::new(crate::common::AtomicU32::new(0)); + for _ in 0..N { + let l = l.clone(); + let saw_value = saw_value.clone(); + std::thread::spawn(move || { + let ent = l.get(MIN_VERSION, b"thekey".as_slice()).unwrap(); + let val = ent.value(); + let num: usize = core::str::from_utf8(&val[1..]).unwrap().parse().unwrap(); + assert!((0..N).contains(&num)); + + let mut it = l.iter_all_versions(MIN_VERSION); + let ent = it + .seek_lower_bound(Bound::Included(b"thekey".as_slice())) + .unwrap(); + let val = ent.value().unwrap(); + let num: usize = core::str::from_utf8(&val[1..]).unwrap().parse().unwrap(); + assert!((0..N).contains(&num)); + assert_eq!(ent.key(), b"thekey".as_slice()); + saw_value.fetch_add(1, Ordering::SeqCst); + }); + } + + while l.refs() > 1 { + ::core::hint::spin_loop(); + } + + assert_eq!(N, saw_value.load(Ordering::SeqCst) as usize); + assert_eq!(l.len(), 1); +} + +#[cfg(all( + feature = "std", + any( + all(test, not(miri)), + all_skl_tests, + test_dynamic_sync_multiple_version_concurrent, + test_dynamic_sync_multiple_version_concurrent_with_optimistic_freelist, + test_dynamic_sync_multiple_version_concurrent_with_pessimistic_freelist + ) +))] +pub(crate) fn concurrent_one_key2(l: M) +where + M: Map + Clone + Send + 'static, + ::Node: WithVersion, +{ + use core::sync::atomic::Ordering; + use std::sync::Arc; + + #[cfg(not(miri))] + const N: usize = 100; + #[cfg(miri)] + const N: usize = 20; + + for i in 0..N { + let l = l.clone(); + std::thread::spawn(move || { + let _ = l.insert(MIN_VERSION, b"thekey".as_slice(), make_value(i).as_slice()); + }); + } + + while l.refs() > 1 { + ::core::hint::spin_loop(); + } + + let saw_value = 
Arc::new(crate::common::AtomicU32::new(0)); + for _ in 0..N { + let l = l.clone(); + let saw_value = saw_value.clone(); + std::thread::spawn(move || { + let ent = l.get(MIN_VERSION, b"thekey".as_slice()).unwrap(); + let val = ent.value(); + let num: usize = core::str::from_utf8(&val[1..]).unwrap().parse().unwrap(); + assert!((0..N).contains(&num)); + + let mut it = l.iter_all_versions(MIN_VERSION); + let ent = it + .seek_lower_bound(Bound::Included(b"thekey".as_slice())) + .unwrap(); + let val = ent.value().unwrap(); + let num: usize = core::str::from_utf8(&val[1..]).unwrap().parse().unwrap(); + assert!((0..N).contains(&num)); + assert_eq!(ent.key(), b"thekey".as_slice()); + saw_value.fetch_add(1, Ordering::SeqCst); + }); + } + + while l.refs() > 1 { + ::core::hint::spin_loop(); + } + + assert_eq!(N, saw_value.load(Ordering::SeqCst) as usize); + assert_eq!(l.len(), 1); +} + +pub(crate) fn iter_all_versions_next(l: M) +where + M: Map + Clone, + ::Node: WithVersion, +{ + const N: usize = 100; + + for i in (0..N).rev() { + l.get_or_insert( + MIN_VERSION, + make_int_key(i).as_slice(), + make_value(i).as_slice(), + ) + .unwrap(); + } + + let mut it = l.iter_all_versions(MIN_VERSION); + let mut ent = it.seek_lower_bound::<[u8]>(Bound::Unbounded).unwrap(); + for i in 0..N { + assert_eq!(ent.key(), make_int_key(i).as_slice()); + assert_eq!(ent.value().unwrap(), make_value(i).as_slice()); + if i != N - 1 { + ent = it.next().unwrap(); + } + } + + assert!(it.next().is_none()); +} + +pub(crate) fn iter_all_versions_next_by_entry(l: M) +where + M: Map + Clone, + ::Node: WithVersion, +{ + const N: usize = 100; + + for i in (0..N).rev() { + l.get_or_insert( + MIN_VERSION, + make_int_key(i).as_slice(), + make_value(i).as_slice(), + ) + .unwrap(); + } + + let mut ent = l.first(MIN_VERSION); + + let mut i = 0; + while let Some(ref entry) = ent { + assert_eq!(entry.key(), make_int_key(i).as_slice()); + assert_eq!(entry.value(), make_value(i).as_slice()); + ent = entry.next(); + i += 1; + } + assert_eq!(i, N); +} + +pub(crate) fn iter_all_versions_next_by_multiple_version_entry(l: M) +where + M: Map + Clone, + ::Node: WithVersion, +{ + const N: usize = 100; + + for i in 0..N { + let k = make_int_key(i); + let v = make_value(i); + l.insert(MIN_VERSION, k.as_slice(), v.as_slice()).unwrap(); + + l.get_or_remove(MIN_VERSION + 1, k.as_slice()).unwrap(); + } + + let mut ent = l.first(MIN_VERSION); + let mut i = 0; + while let Some(ref entry) = ent { + assert_eq!(entry.key(), make_int_key(i).as_slice()); + assert_eq!(entry.value(), make_value(i).as_slice()); + ent = entry.next(); + i += 1; + } + assert_eq!(i, N); + + let mut ent = l.first_versioned(MIN_VERSION + 1); + let mut i = 0; + while let Some(ref entry) = ent { + if i % 2 == 1 { + assert_eq!(entry.version(), MIN_VERSION); + assert_eq!(entry.key(), make_int_key(i / 2).as_slice()); + assert_eq!(entry.value().unwrap(), make_value(i / 2).as_slice()); + } else { + assert_eq!(entry.version(), MIN_VERSION + 1); + assert_eq!(entry.key(), make_int_key(i / 2).as_slice()); + assert!(entry.value().is_none()); + } + + ent = entry.next(); + i += 1; + } + assert_eq!(i, 2 * N); + let ent = l.first(MIN_VERSION + 1); + assert!(ent.is_none(), "{:?}", ent); +} + +pub(crate) fn range_next(l: M) +where + M: Map + Clone, + ::Node: WithVersion, +{ + const N: usize = 100; + + for i in (0..N).rev() { + l.get_or_insert( + MIN_VERSION, + make_int_key(i).as_slice(), + make_value(i).as_slice(), + ) + .unwrap(); + } + + let upper = make_int_key(50); + let mut i = 0; + let mut it = 
l.range(MIN_VERSION, ..=upper.as_slice()); + for ent in &mut it { + assert_eq!(ent.key(), make_int_key(i).as_slice()); + assert_eq!(ent.value(), make_value(i).as_slice()); + i += 1; + } + + assert_eq!(i, 51); +} + +pub(crate) fn iter_all_versions_prev(l: M) +where + M: Map + Clone, + ::Node: WithVersion, +{ + const N: usize = 100; + + for i in 0..N { + l.get_or_insert( + MIN_VERSION, + make_int_key(i).as_slice(), + make_value(i).as_slice(), + ) + .unwrap(); + } + + let mut it = l.iter_all_versions(MIN_VERSION); + let mut ent = it.seek_upper_bound::<[u8]>(Bound::Unbounded).unwrap(); + for i in (0..N).rev() { + assert_eq!(ent.key(), make_int_key(i).as_slice()); + assert_eq!(ent.value().unwrap(), make_value(i).as_slice()); + if i != 0 { + ent = it.next_back().unwrap(); + } + } + + assert!(it.next_back().is_none()); +} + +pub(crate) fn iter_all_versions_prev_by_entry(l: M) +where + M: Map + Clone, + ::Node: WithVersion, +{ + const N: usize = 100; + + for i in 0..N { + l.get_or_insert( + MIN_VERSION, + make_int_key(i).as_slice(), + make_value(i).as_slice(), + ) + .unwrap(); + } + + let mut ent = l.last(MIN_VERSION); + + let mut i = 0; + while let Some(ref entry) = ent { + i += 1; + assert_eq!(entry.key(), make_int_key(N - i).as_slice()); + assert_eq!(entry.value(), make_value(N - i).as_slice()); + ent = entry.prev(); + } + assert_eq!(i, N); +} + +pub(crate) fn iter_all_versions_prev_by_multiple_version_entry(l: M) +where + M: Map + Clone, + ::Node: WithVersion, +{ + const N: usize = 100; + + for i in (0..N).rev() { + let k = make_int_key(i); + let v = make_value(i); + l.insert(MIN_VERSION, k.as_slice(), v.as_slice()).unwrap(); + + l.get_or_remove(MIN_VERSION + 1, k.as_slice()).unwrap(); + } + + let mut ent = l.last(MIN_VERSION); + let mut i = 0; + while let Some(ref entry) = ent { + i += 1; + assert_eq!(entry.key(), make_int_key(N - i).as_slice()); + assert_eq!(entry.value(), make_value(N - i).as_slice()); + ent = entry.prev(); + } + assert_eq!(i, N); + + let mut ent = l.last_versioned(MIN_VERSION + 1); + let mut i = 0; + while let Some(ref entry) = ent { + if i % 2 == 0 { + assert_eq!(entry.version(), MIN_VERSION); + assert_eq!(entry.key(), make_int_key(N - 1 - i / 2).as_slice(),); + assert_eq!(entry.value().unwrap(), make_value(N - 1 - i / 2).as_slice()); + i += 1; + } else { + assert_eq!(entry.version(), MIN_VERSION + 1); + assert_eq!(entry.key(), make_int_key(N - 1 - i / 2).as_slice()); + assert!(entry.value().is_none()); + i += 1; + } + ent = entry.prev(); + } + assert_eq!(i, 2 * N); + let ent = l.last(MIN_VERSION + 1); + assert!(ent.is_none(), "{:?}", ent); +} + +pub(crate) fn range_prev(l: M) +where + M: Map + Clone, + ::Node: WithVersion, +{ + const N: usize = 100; + + for i in 0..N { + l.get_or_insert( + MIN_VERSION, + make_int_key(i).as_slice(), + make_value(i).as_slice(), + ) + .unwrap(); + } + + let lower = make_int_key(50); + let it = l.range(MIN_VERSION, lower.as_slice()..); + let mut i = 99; + for ent in it.rev() { + assert_eq!(ent.key(), make_int_key(i).as_slice()); + assert_eq!(ent.value(), make_value(i).as_slice()); + i -= 1; + } +} + +pub(crate) fn iter_all_versions_seek_ge(l: M) +where + M: Map + Clone, + ::Node: WithVersion, +{ + const N: usize = 100; + + for i in (0..N).rev() { + let v = i * 10 + 1000; + l.get_or_insert( + MIN_VERSION, + make_int_key(v).as_slice(), + make_value(v).as_slice(), + ) + .unwrap(); + } + + let mut it = l.iter_all_versions(MIN_VERSION); + let ent = it.seek_lower_bound(Bound::Included(b"")).unwrap(); + assert_eq!(ent.key(), 
make_int_key(1000).as_slice()); + assert_eq!(ent.value().unwrap(), make_value(1000).as_slice()); + + let ent = it.seek_lower_bound(Bound::Included(b"01000")).unwrap(); + assert_eq!(ent.key(), make_int_key(1000).as_slice()); + assert_eq!(ent.value().unwrap(), make_value(1000).as_slice()); + + let ent = it.seek_lower_bound(Bound::Included(b"01005")).unwrap(); + assert_eq!(ent.key(), make_int_key(1010).as_slice()); + assert_eq!(ent.value().unwrap(), make_value(1010).as_slice()); + + let ent = it.seek_lower_bound(Bound::Included(b"01010")).unwrap(); + assert_eq!(ent.key(), make_int_key(1010).as_slice()); + assert_eq!(ent.value().unwrap(), make_value(1010).as_slice()); + + let ent = it.seek_lower_bound(Bound::Included(b"01020")).unwrap(); + assert_eq!(ent.key(), make_int_key(1020).as_slice()); + assert_eq!(ent.value().unwrap(), make_value(1020).as_slice()); + + let ent = it.seek_lower_bound(Bound::Included(b"01200")).unwrap(); + assert_eq!(ent.key(), make_int_key(1200).as_slice()); + assert_eq!(ent.value().unwrap(), make_value(1200).as_slice()); + + let ent = it.seek_lower_bound(Bound::Included(b"01100")).unwrap(); + assert_eq!(ent.key(), make_int_key(1100).as_slice()); + assert_eq!(ent.value().unwrap(), make_value(1100).as_slice()); + + let ent = it.seek_lower_bound(Bound::Included(b"99999")); + assert!(ent.is_none()); + + l.get_or_insert(MIN_VERSION, [].as_slice(), [].as_slice()) + .unwrap(); + let ent = it.seek_lower_bound(Bound::Included(b"")).unwrap(); + assert_eq!(ent.key(), &[]); + assert_eq!(ent.value().unwrap(), &[]); + + let ent = it.seek_lower_bound(Bound::Included(b"")).unwrap(); + assert_eq!(ent.key(), &[]); + assert_eq!(ent.value().unwrap(), &[]); +} + +pub(crate) fn iter_all_versions_seek_lt(l: M) +where + M: Map + Clone, + ::Node: WithVersion, +{ + const N: usize = 100; + + for i in (0..N).rev() { + let v = i * 10 + 1000; + l.get_or_insert( + MIN_VERSION, + make_int_key(v).as_slice(), + make_value(v).as_slice(), + ) + .unwrap(); + } + + let mut it = l.iter_all_versions(MIN_VERSION); + assert!(it.seek_upper_bound(Bound::Excluded(b"")).is_none()); + + let ent = it.seek_upper_bound(Bound::Excluded(b"01000")); + assert!(ent.is_none()); + + let ent = it.seek_upper_bound(Bound::Excluded(b"01001")).unwrap(); + assert_eq!(ent.key(), make_int_key(1000).as_slice()); + assert_eq!(ent.value().unwrap(), make_value(1000).as_slice()); + + let ent = it.seek_upper_bound(Bound::Excluded(b"01991")).unwrap(); + assert_eq!(ent.key(), make_int_key(1990).as_slice()); + assert_eq!(ent.value().unwrap(), make_value(1990).as_slice()); + + let ent = it.seek_upper_bound(Bound::Excluded(b"99999")).unwrap(); + assert_eq!(ent.key(), make_int_key(1990).as_slice()); + assert_eq!(ent.value().unwrap(), make_value(1990).as_slice()); + + l.get_or_insert(MIN_VERSION, [].as_slice(), [].as_slice()) + .unwrap(); + assert!(l + .as_ref() + .upper_bound(MIN_VERSION, Bound::Excluded(&[])) + .is_none()); + + let ent = it.seek_upper_bound(Bound::Excluded(b"")); + assert!(ent.is_none()); + + let ent = it.seek_upper_bound(Bound::Excluded(b"\x01")).unwrap(); + assert_eq!(ent.key(), &[]); + assert_eq!(ent.value().unwrap(), &[]); +} + +pub(crate) fn range(l: M) +where + M: Map + Clone, + ::Node: WithVersion, +{ + for i in 1..10 { + l.get_or_insert( + MIN_VERSION, + make_int_key(i).as_slice(), + make_value(i).as_slice(), + ) + .unwrap(); + } + + let k3 = make_int_key(3); + let k7 = make_int_key(7); + let mut it = l.range(MIN_VERSION, k3.as_slice()..k7.as_slice()).clone(); + assert_eq!(it.start_bound(), 
Bound::Included(&k3.as_slice())); + assert_eq!(it.end_bound(), Bound::Excluded(&k7.as_slice())); + + for i in 3..=6 { + let k = make_int_key(i); + let ent = it.seek_lower_bound(Bound::Included(k.as_slice())).unwrap(); + assert_eq!(ent.key(), make_int_key(i).as_slice()); + assert_eq!(ent.value(), make_value(i).as_slice()); + } + + for i in 1..3 { + let k = make_int_key(i); + let ent = it.seek_lower_bound(Bound::Included(k.as_slice())).unwrap(); + assert_eq!(ent.key(), make_int_key(3).as_slice(),); + assert_eq!(ent.value(), make_value(3).as_slice()); + } + + for i in 7..10 { + let k = make_int_key(i); + assert!(it.seek_lower_bound(Bound::Included(k.as_slice())).is_none()); + } + + for i in 7..10 { + let k = make_int_key(i); + let ent = it.seek_upper_bound(Bound::Included(k.as_slice())).unwrap(); + assert_eq!(ent.key(), make_int_key(6).as_slice()); + assert_eq!(ent.value(), make_value(6).as_slice()); + } + + let ent = it + .seek_lower_bound(Bound::Included(make_int_key(6).as_slice())) + .unwrap(); + assert_eq!(ent.key(), make_int_key(6).as_slice()); + assert_eq!(ent.value(), make_value(6).as_slice()); + + assert!(it.next().is_none()); + + let ent = it + .seek_upper_bound(Bound::Included(make_int_key(6).as_slice())) + .unwrap(); + assert_eq!(ent.key(), make_int_key(6).as_slice()); + assert_eq!(ent.value(), make_value(6).as_slice()); + + assert!(it.next().is_none()); + + for i in 4..=7 { + let k = make_int_key(i); + let ent = it.seek_upper_bound(Bound::Excluded(k.as_slice())).unwrap(); + assert_eq!(ent.key(), make_int_key(i - 1).as_slice()); + assert_eq!(ent.value(), make_value(i - 1).as_slice()); + } + + for i in 7..10 { + let k = make_int_key(i); + let ent = it.seek_upper_bound(Bound::Excluded(k.as_slice())).unwrap(); + assert_eq!(ent.key(), make_int_key(6).as_slice()); + assert_eq!(ent.value(), make_value(6).as_slice()); + } + + for i in 1..3 { + let k = make_int_key(i); + let ent = it.seek_lower_bound(Bound::Excluded(k.as_slice())).unwrap(); + assert_eq!(ent.key(), make_int_key(3).as_slice()); + assert_eq!(ent.value(), make_value(3).as_slice()); + } + + for i in 1..4 { + let k = make_int_key(i); + assert!(it.seek_upper_bound(Bound::Excluded(k.as_slice())).is_none()); + } + + let ent = it + .seek_upper_bound(Bound::Excluded(make_int_key(4).as_slice())) + .unwrap(); + assert_eq!(ent.key(), make_int_key(3).as_slice()); + assert_eq!(ent.value(), make_value(3).as_slice()); + + let ent = it.next_back().unwrap(); + assert_eq!(ent.key(), make_int_key(6).as_slice()); + assert_eq!(ent.value(), make_value(6).as_slice()); +} + +pub(crate) fn iter_latest(l: M) +where + M: Map + Clone, + ::Node: WithVersion, +{ + const N: usize = 100; + + for i in 0..N { + l.get_or_insert( + MIN_VERSION, + make_int_key(i).as_slice(), + make_value(i).as_slice(), + ) + .unwrap(); + } + + for i in 50..N { + l.get_or_insert( + 1, + make_int_key(i).as_slice(), + make_value(i + 1000).as_slice(), + ) + .unwrap(); + } + + for i in 0..50 { + l.get_or_insert( + 2, + make_int_key(i).as_slice(), + make_value(i + 1000).as_slice(), + ) + .unwrap(); + } + + let mut it = l.iter(4); + + let mut num = 0; + for i in 0..N { + let ent = it.next().unwrap(); + + assert_eq!( + ent.key(), + make_int_key(i).as_slice(), + "{} != {}", + core::str::from_utf8(ent.key()).unwrap(), + core::str::from_utf8(make_int_key(i).as_slice()).unwrap() + ); + assert_eq!( + ent.value(), + make_value(i + 1000).as_slice(), + "{} != {}", + core::str::from_utf8(ent.value()).unwrap(), + core::str::from_utf8(make_value(i + 1000).as_slice()).unwrap() + ); + + num += 1; + 
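+    // `iter(4)` yields each key exactly once, carrying its newest version
+    // visible at 4: the `i + 1000` value rewritten at version 1 or 2, never
+    // the original `MIN_VERSION` value.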
}
+  assert_eq!(num, N);
+}
+
+pub(crate) fn range_latest<M>(l: M)
+where
+  M: Map + Clone,
+  <M::Allocator as Sealed>::Node: WithVersion,
+{
+  const N: usize = 100;
+
+  for i in 0..N {
+    l.get_or_insert(
+      MIN_VERSION,
+      make_int_key(i).as_slice(),
+      make_value(i).as_slice(),
+    )
+    .unwrap();
+  }
+
+  for i in 50..N {
+    l.get_or_insert(
+      1,
+      make_int_key(i).as_slice(),
+      make_value(i + 1000).as_slice(),
+    )
+    .unwrap();
+  }
+
+  for i in 0..50 {
+    l.get_or_insert(
+      2,
+      make_int_key(i).as_slice(),
+      make_value(i + 1000).as_slice(),
+    )
+    .unwrap();
+  }
+
+  let mut it = l.range::<[u8], _>(4, ..);
+  let mut num = 0;
+  for i in 0..N {
+    let ent = it.next().unwrap();
+    assert_eq!(ent.key(), make_int_key(i).as_slice());
+    assert_eq!(ent.value(), make_value(i + 1000).as_slice());
+
+    num += 1;
+  }
+  assert_eq!(num, N);
+}
+
+#[cfg(feature = "memmap")]
+pub(crate) fn reopen_mmap<M>(prefix: &str)
+where
+  M: Map + Clone,
+  <M::Allocator as Sealed>::Node: WithVersion,
+{
+  use crate::dynamic::Builder;
+
+  unsafe {
+    let dir = tempfile::tempdir().unwrap();
+    let p = dir.path().join(std::format!("{prefix}_reopen_skipmap"));
+    {
+      let l = Builder::new()
+        .with_create_new(true)
+        .with_read(true)
+        .with_write(true)
+        .with_capacity(ARENA_SIZE as u32)
+        .map_mut::<M, _>(&p)
+        .unwrap();
+      for i in 0..1000 {
+        l.get_or_insert(MIN_VERSION, key(i).as_slice(), new_value(i).as_slice())
+          .unwrap();
+      }
+      l.flush().unwrap();
+    }
+
+    let l = Builder::new()
+      .with_read(true)
+      .with_write(true)
+      .with_capacity(ARENA_SIZE as u32)
+      .map::<M, _>(&p)
+      .unwrap();
+    assert_eq!(1000, l.len());
+    for i in 0..1000 {
+      let k = key(i);
+      let ent = l.get(MIN_VERSION, k.as_slice()).unwrap();
+      assert_eq!(new_value(i).as_slice(), ent.value());
+      assert_eq!(ent.version(), 0);
+      assert_eq!(ent.key(), k.as_slice());
+    }
+  }
+}
+
+#[cfg(feature = "memmap")]
+pub(crate) fn reopen_mmap2<M>(prefix: &str)
+where
+  M: Map + Clone,
+  <M::Allocator as Sealed>::Node: WithVersion,
+{
+  use crate::dynamic::Builder;
+
+  unsafe {
+    use rand::seq::SliceRandom;
+
+    let dir = tempfile::tempdir().unwrap();
+    let p = dir.path().join(::std::format!("{prefix}_reopen2_skipmap"));
+    {
+      let l = Builder::new()
+        .with_create_new(true)
+        .with_read(true)
+        .with_write(true)
+        .with_capacity(ARENA_SIZE as u32)
+        .map_mut::<M, _>(&p)
+        .unwrap();
+      let mut data = (0..1000).collect::<::std::vec::Vec<usize>>();
+      data.shuffle(&mut rand::thread_rng());
+      for i in &data {
+        let i = *i;
+        l.get_or_insert(i as u64, key(i).as_slice(), new_value(i).as_slice())
+          .unwrap();
+      }
+      l.flush_async().unwrap();
+      assert_eq!(l.maximum_version(), 999);
+      assert_eq!(l.minimum_version(), 0);
+
+      for i in data {
+        let k = key(i);
+        let ent = l.get(i as u64, k.as_slice()).unwrap();
+        assert_eq!(new_value(i).as_slice(), ent.value());
+        assert_eq!(ent.version(), i as u64);
+        assert_eq!(ent.key(), k.as_slice());
+      }
+    }
+
+    let l = Builder::new()
+      .with_read(true)
+      .with_write(true)
+      .with_capacity(ARENA_SIZE as u32)
+      .map::<M, _>(&p)
+      .unwrap();
+    assert_eq!(1000, l.len());
+    let mut data = (0..1000).collect::<::std::vec::Vec<usize>>();
+    data.shuffle(&mut rand::thread_rng());
+    for i in data {
+      let k = key(i);
+      let ent = l.get(i as u64, k.as_slice()).unwrap();
+      assert_eq!(new_value(i).as_slice(), ent.value());
+      assert_eq!(ent.version(), i as u64);
+      assert_eq!(ent.key(), k.as_slice());
+    }
+    assert_eq!(l.maximum_version(), 999);
+    assert_eq!(l.minimum_version(), 0);
+  }
+}
+
+#[cfg(feature = "memmap")]
+pub(crate) fn reopen_mmap3<M>(prefix: &str)
+where
+  M: Map + Clone,
+  <M::Allocator as Sealed>::Node: WithVersion,
+{
+  use crate::dynamic::Builder;
+
+  unsafe {
+    let dir = tempfile::tempdir().unwrap();
+    let p = dir.path().join(std::format!("{prefix}_reopen3_skipmap"));
+    {
+      let l = Builder::new()
+        .with_create_new(true)
+        .with_read(true)
+        .with_write(true)
+        .with_capacity(ARENA_SIZE as u32)
+        .map_mut::<M, _>(&p)
+        .unwrap();
+      for i in 0..1000 {
+        l.get_or_insert(MIN_VERSION, key(i).as_slice(), new_value(i).as_slice())
+          .unwrap();
+      }
+      l.flush().unwrap();
+    }
+
+    let l = Builder::new()
+      .with_read(true)
+      .with_write(true)
+      .with_capacity((ARENA_SIZE * 2) as u32)
+      .map_mut::<M, _>(&p)
+      .unwrap();
+    assert_eq!(1000, l.len());
+    for i in 0..1000 {
+      let k = key(i);
+      let ent = l.get(MIN_VERSION, k.as_slice()).unwrap();
+      assert_eq!(new_value(i).as_slice(), ent.value());
+      assert_eq!(ent.version(), 0);
+      assert_eq!(ent.key(), k.as_slice());
+    }
+  }
+}
+
+// Reopen multiple skipmaps backed by the same allocator.
+#[cfg(feature = "memmap")]
+pub(crate) fn reopen_mmap4<M>(prefix: &str)
+where
+  M: Map + Clone + Send + Sync + 'static,
+  <M::Allocator as Sealed>::Node: WithVersion,
+{
+  use crate::dynamic::Builder;
+
+  unsafe {
+    let dir = tempfile::tempdir().unwrap();
+    let p = dir.path().join(std::format!("{prefix}_reopen4_skipmap"));
+    let header = {
+      let l = Builder::new()
+        .with_create_new(true)
+        .with_read(true)
+        .with_write(true)
+        .with_capacity(ARENA_SIZE as u32)
+        .map_mut::<M, _>(&p)
+        .unwrap();
+      let l2 = M::create_from_allocator(l.allocator().clone(), Ascend).unwrap();
+      let h2 = l2.header().copied().unwrap();
+
+      let t1 = std::thread::spawn(move || {
+        for i in 0..500 {
+          l.get_or_insert(MIN_VERSION, key(i).as_slice(), new_value(i).as_slice())
+            .unwrap();
+        }
+        l.flush().unwrap();
+      });
+
+      let t2 = std::thread::spawn(move || {
+        for i in 500..1000 {
+          l2.get_or_insert(MIN_VERSION, key(i).as_slice(), new_value(i).as_slice())
+            .unwrap();
+        }
+        l2.flush().unwrap();
+      });
+
+      t1.join().unwrap();
+      t2.join().unwrap();
+
+      h2
+    };
+
+    let l = Builder::new()
+      .with_read(true)
+      .with_write(true)
+      .with_capacity((ARENA_SIZE * 2) as u32)
+      .map_mut::<M, _>(&p)
+      .unwrap();
+    let l2 = M::open_from_allocator(header, l.allocator().clone(), Ascend).unwrap();
+    assert_eq!(500, l.len());
+    assert_eq!(500, l2.len());
+
+    for i in 0..500 {
+      let k = key(i);
+      let ent = l.get(MIN_VERSION, k.as_slice()).unwrap();
+      assert_eq!(new_value(i).as_slice(), ent.value());
+      assert_eq!(ent.key(), k.as_slice());
+    }
+
+    for i in 500..1000 {
+      let k = key(i);
+      let ent = l2.get(MIN_VERSION, k.as_slice()).unwrap();
+      assert_eq!(new_value(i).as_slice(), ent.value());
+      assert_eq!(ent.key(), k.as_slice());
+    }
+  }
+}
+
+struct Person {
+  id: u32,
+  name: std::string::String,
+}
+
+impl Person {
+  fn encoded_size(&self) -> usize {
+    4 + self.name.len()
+  }
+}
+
+pub(crate) fn get_or_insert_with_value<M>(l: M)
+where
+  M: Map + Clone,
+  <M::Allocator as Sealed>::Node: WithVersion,
+{
+  let alice = Person {
+    id: 1,
+    name: std::string::String::from("Alice"),
+  };
+
+  let encoded_size = alice.encoded_size();
+
+  let vb = ValueBuilder::new(encoded_size, |val: &mut VacantBuffer<'_>| {
+    assert_eq!(val.capacity(), encoded_size);
+    assert!(val.is_empty());
+    val.put_u32_le(alice.id).unwrap();
+    assert_eq!(val.len(), 4);
+    assert_eq!(val.remaining(), encoded_size - 4);
+    assert_eq!(&*val, alice.id.to_le_bytes());
+    val[..4].copy_from_slice(&alice.id.to_be_bytes());
+    assert_eq!(&*val, alice.id.to_be_bytes());
+    val.put_slice(alice.name.as_bytes()).unwrap();
+    assert_eq!(val.len(), encoded_size);
+    let err = val.put_slice(&[1]).unwrap_err();
+    assert_eq!(
+      std::string::ToString::to_string(&err),
+      "incomplete buffer data: expected 0 bytes for
decoding, but only 1 bytes were available" + ); + Ok(encoded_size) + }); + + l.get_or_insert_with_value_builder::<()>(1, b"alice".as_slice(), vb) + .unwrap(); +} + +pub(crate) fn get_or_insert_with(l: M) +where + M: Map + Clone, + ::Node: WithVersion, +{ + let alice = Person { + id: 1, + name: std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size(); + + let kb = KeyBuilder::new(5u8.into(), |key: &mut VacantBuffer<'_>| { + key.put_slice(b"alice").unwrap(); + Ok(5) + }); + + let vb = ValueBuilder::new(encoded_size, |val: &mut VacantBuffer<'_>| { + assert_eq!(val.capacity(), encoded_size); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size - 4); + assert_eq!(&*val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(&*val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "incomplete buffer data: expected 0 bytes for decoding, but only 1 bytes were available" + ); + Ok(encoded_size) + }); + + l.get_or_insert_with_builders::<(), ()>(1, kb, vb).unwrap(); +} + +pub(crate) fn insert(l: M) +where + M: Map + Clone, + ::Node: WithVersion, +{ + let k = 0u64.to_le_bytes(); + for i in 0..100 { + let v = new_value(i); + let old = l.insert(MIN_VERSION, k.as_slice(), v.as_slice()).unwrap(); + if let Some(old) = old { + assert_eq!(old.key(), k.as_slice()); + assert_eq!(old.value(), new_value(i - 1).as_slice()); + } + } + + let ent = l.get(MIN_VERSION, k.as_slice()).unwrap(); + assert_eq!(ent.key(), k.as_slice()); + assert_eq!(ent.value(), new_value(99).as_slice()); +} + +pub(crate) fn insert_with_value(l: M) +where + M: Map + Clone, + ::Node: WithVersion, +{ + let alice = Person { + id: 1, + name: std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size(); + + let vb = ValueBuilder::new(encoded_size, |val: &mut VacantBuffer<'_>| { + assert_eq!(val.capacity(), encoded_size); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size - 4); + assert_eq!(val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "incomplete buffer data: expected 0 bytes for decoding, but only 1 bytes were available" + ); + Ok(encoded_size) + }); + + l.insert_with_value_builder::<()>(1, b"alice".as_slice(), vb) + .unwrap(); + + let alice2 = Person { + id: 2, + name: std::string::String::from("Alice"), + }; + + let vb = ValueBuilder::new(encoded_size, |val: &mut VacantBuffer<'_>| { + assert_eq!(val.capacity(), encoded_size); + assert!(val.is_empty()); + val.put_u32_le(alice2.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size - 4); + assert_eq!(&*val, alice2.id.to_le_bytes()); + val[..4].copy_from_slice(&alice2.id.to_be_bytes()); + assert_eq!(&*val, alice2.id.to_be_bytes()); + val.put_slice(alice2.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "incomplete buffer data: expected 0 bytes for decoding, but only 1 
bytes were available" + ); + Ok(encoded_size) + }); + + let old = l + .insert_with_value_builder::<()>(1, b"alice".as_slice(), vb) + .unwrap() + .unwrap(); + + assert_eq!(old.key(), b"alice"); + assert!(old.value().starts_with(&alice.id.to_be_bytes())); + + let ent = l.get(1, b"alice").unwrap(); + assert_eq!(ent.key(), b"alice"); + assert!(ent.value().starts_with(&alice2.id.to_be_bytes())); +} + +pub(crate) fn insert_with(l: M) +where + M: Map + Clone, + ::Node: WithVersion, +{ + let alice = Person { + id: 1, + name: std::string::String::from("Alice"), + }; + + let encoded_size = alice.encoded_size(); + + let kb = KeyBuilder::new(5u8.into(), |key: &mut VacantBuffer<'_>| { + key.put_slice(b"alice").unwrap(); + Ok(5) + }); + + let vb = ValueBuilder::new(encoded_size, |val: &mut VacantBuffer<'_>| { + assert_eq!(val.capacity(), encoded_size); + assert!(val.is_empty()); + val.put_u32_le(alice.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size - 4); + assert_eq!(val, alice.id.to_le_bytes()); + val[..4].copy_from_slice(&alice.id.to_be_bytes()); + assert_eq!(val, alice.id.to_be_bytes()); + val.put_slice(alice.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "incomplete buffer data: expected 0 bytes for decoding, but only 1 bytes were available" + ); + Ok(encoded_size) + }); + + l.insert_with_builders::<(), ()>(1, kb, vb).unwrap(); + + let alice2 = Person { + id: 2, + name: std::string::String::from("Alice"), + }; + + let vb = ValueBuilder::new(encoded_size, |val: &mut VacantBuffer<'_>| { + assert_eq!(val.capacity(), encoded_size); + assert!(val.is_empty()); + val.put_u32_le(alice2.id).unwrap(); + assert_eq!(val.len(), 4); + assert_eq!(val.remaining(), encoded_size - 4); + assert_eq!(&*val, alice2.id.to_le_bytes()); + val[..4].copy_from_slice(&alice2.id.to_be_bytes()); + assert_eq!(&*val, alice2.id.to_be_bytes()); + val.put_slice(alice2.name.as_bytes()).unwrap(); + assert_eq!(val.len(), encoded_size); + let err = val.put_slice(&[1]).unwrap_err(); + assert_eq!( + std::string::ToString::to_string(&err), + "incomplete buffer data: expected 0 bytes for decoding, but only 1 bytes were available" + ); + Ok(encoded_size) + }); + let old = l + .insert_with_builders::<(), ()>(1, kb, vb) + .unwrap() + .unwrap(); + + assert_eq!(old.key(), b"alice"); + assert!(old.value().starts_with(&alice.id.to_be_bytes())); + + let ent = l.get(1, b"alice").unwrap(); + assert_eq!(ent.key(), b"alice"); + assert!(ent.value().starts_with(&alice2.id.to_be_bytes())); +} + +pub(crate) fn get_or_remove(l: M) +where + M: Map + Clone, + ::Node: WithVersion, +{ + for i in 0..100 { + let v = new_value(i); + l.insert(MIN_VERSION, key(i).as_slice(), v.as_slice()) + .unwrap(); + } + + for i in 0..100 { + let k = key(i); + let old = l.get_or_remove(MIN_VERSION, k.as_slice()).unwrap().unwrap(); + assert_eq!(old.key(), k.as_slice()); + assert_eq!(old.value(), new_value(i).as_slice()); + + let old = l.get_or_remove(MIN_VERSION, k.as_slice()).unwrap().unwrap(); + assert_eq!(old.key(), k.as_slice()); + assert_eq!(old.value(), new_value(i).as_slice()); + } + + for i in 0..100 { + let k = key(i); + let ent = l.get(MIN_VERSION, k.as_slice()).unwrap(); + assert_eq!(ent.key(), k.as_slice()); + assert_eq!(ent.value(), new_value(i).as_slice()); + } +} + +pub(crate) fn remove(l: M) +where + M: Map + Clone, + ::Node: WithVersion, +{ + for i in 0..100 { + let v = new_value(i); + l.insert(MIN_VERSION, 
key(i).as_slice(), v.as_slice()) + .unwrap(); + } + + for i in 0..100 { + let k = key(i); + // no race, remove should succeed + let old = l + .compare_remove( + MIN_VERSION, + k.as_slice(), + Ordering::SeqCst, + Ordering::Acquire, + ) + .unwrap(); + assert!(old.is_none()); + + // key already removed + let old = l + .compare_remove( + MIN_VERSION, + k.as_slice(), + Ordering::SeqCst, + Ordering::Acquire, + ) + .unwrap(); + assert!(old.is_none()); + } + + for i in 100..150 { + let k = key(i); + let res = l + .compare_remove( + MIN_VERSION, + k.as_slice(), + Ordering::SeqCst, + Ordering::Acquire, + ) + .unwrap(); + assert!(res.is_none()); + } + + for i in 0..100 { + let k = key(i); + let ent = l.get(MIN_VERSION, k.as_slice()); + assert!(ent.is_none()); + } + + for i in 100..150 { + let k = key(i); + let ent = l.get_versioned(MIN_VERSION, k.as_slice()).unwrap(); + assert_eq!(ent.key(), k.as_slice()); + assert_eq!(ent.value(), None); + } +} + +pub(crate) fn remove2(l: M) +where + M: Map + Clone, + ::Node: WithVersion, +{ + for i in 0..100 { + let v = new_value(i); + l.insert(MIN_VERSION, key(i).as_slice(), v.as_slice()) + .unwrap(); + } + + for i in 0..100 { + let k = key(i); + // not found, remove should succeed + let old = l + .compare_remove(1, k.as_slice(), Ordering::SeqCst, Ordering::Acquire) + .unwrap(); + assert!(old.is_none()); + + // no-race, remove should succeed + let old = l + .compare_remove( + MIN_VERSION, + k.as_slice(), + Ordering::SeqCst, + Ordering::Acquire, + ) + .unwrap(); + assert!(old.is_none()); + } + + for i in 100..150 { + let k = key(i); + let res = l + .compare_remove( + MIN_VERSION, + k.as_slice(), + Ordering::SeqCst, + Ordering::Acquire, + ) + .unwrap(); + assert!(res.is_none()); + } + + for i in 0..100 { + let k = key(i); + let ent = l.get(MIN_VERSION, k.as_slice()); + assert!(ent.is_none()); + } + + for i in 100..150 { + let k = key(i); + let ent = l.get_versioned(MIN_VERSION, k.as_slice()).unwrap(); + assert_eq!(ent.key(), k.as_slice()); + assert_eq!(ent.value(), None); + } +} + +#[macro_export] +#[doc(hidden)] +macro_rules! 
__dynamic_multiple_version_map_tests { + ($prefix:literal: $ty:ty) => { + $crate::__unit_tests!($crate::tests::dynamic::multiple_version |$prefix, $ty, $crate::tests::dynamic::TEST_OPTIONS| { + empty, + basic, + #[cfg(not(miri))] + basic_large, + iter_all_versions_mvcc, + iter_all_versions_next, + iter_all_versions_next_by_entry, + iter_all_versions_next_by_multiple_version_entry, + range_next, + iter_all_versions_prev, + iter_all_versions_prev_by_entry, + iter_all_versions_prev_by_multiple_version_entry, + range_prev, + iter_all_versions_seek_ge, + iter_all_versions_seek_lt, + range, + iter_latest, + range_latest, + get_mvcc, + get_or_insert_with_value, + get_or_insert_with, + insert, + insert_with_value, + insert_with, + get_or_remove, + remove, + remove2, + gt, + ge, + lt, + le, + }); + + $crate::__unit_tests!($crate::tests::dynamic::multiple_version |$prefix, $ty, $crate::tests::dynamic::TEST_FULL_OPTIONS| { + full, + }); + + #[test] + #[cfg(feature = "memmap")] + #[cfg_attr(miri, ignore)] + #[allow(clippy::macro_metavars_in_unsafe)] + fn reopen() { + $crate::tests::dynamic::multiple_version::reopen_mmap::<$ty>($prefix); + } + + #[test] + #[cfg(feature = "memmap")] + #[cfg_attr(miri, ignore)] + #[allow(clippy::macro_metavars_in_unsafe)] + fn reopen2() { + $crate::tests::dynamic::multiple_version::reopen_mmap2::<$ty>($prefix); + } + + #[test] + #[cfg(feature = "memmap")] + #[cfg_attr(miri, ignore)] + #[allow(clippy::macro_metavars_in_unsafe)] + fn reopen3() { + $crate::tests::dynamic::multiple_version::reopen_mmap3::<$ty>($prefix); + } + }; + // Support from golang :) + (go $prefix:literal: $ty:ty => $opts:path) => { + $crate::__unit_tests!($crate::tests::dynamic::multiple_version |$prefix, $ty, $opts| { + #[cfg(feature = "std")] + concurrent_basic_two_maps, + #[cfg(feature = "std")] + concurrent_basic, + #[cfg(feature = "std")] + concurrent_basic2, + #[cfg(feature = "std")] + concurrent_one_key, + #[cfg(feature = "std")] + concurrent_one_key2, + }); + + #[test] + #[cfg(feature = "memmap")] + #[cfg_attr(miri, ignore)] + #[allow(clippy::macro_metavars_in_unsafe)] + fn reopen4() { + $crate::tests::dynamic::multiple_version::reopen_mmap4::<$ty>($prefix); + } + + // #[cfg(not(miri))] + // mod high_compression { + // use super::*; + + // __unit_tests!($crate::tests::multiple_version |$prefix, $ty, $crate::tests::TEST_HIGH_COMPRESSION_OPTIONS| { + // #[cfg(feature = "std")] + // concurrent_basic, + // #[cfg(feature = "std")] + // concurrent_basic2, + // #[cfg(feature = "std")] + // concurrent_one_key, + // #[cfg(feature = "std")] + // concurrent_one_key2, + // }); + // } + + $crate::__unit_tests!($crate::tests::dynamic::multiple_version |$prefix, $ty, $crate::tests::dynamic::BIG_TEST_OPTIONS| { + #[cfg(all(feature = "std", not(miri)))] + concurrent_basic_big_values, + }); + } +} diff --git a/src/tests/generic.rs b/src/tests/generic.rs new file mode 100644 index 0000000..a4631ff --- /dev/null +++ b/src/tests/generic.rs @@ -0,0 +1,62 @@ +#[cfg(any( + all(test, not(miri)), + all_skl_tests, + test_generic_unsync_map, + test_generic_sync_map, + test_generic_sync_map_concurrent, + test_generic_sync_map_concurrent_with_optimistic_freelist, + test_generic_sync_map_concurrent_with_pessimistic_freelist, +))] +pub(crate) mod map; + +#[cfg(any( + all(test, not(miri)), + all_skl_tests, + test_generic_unsync_versioned, + test_generic_sync_versioned, + test_generic_sync_multiple_version_concurrent, + test_generic_sync_multiple_version_concurrent_with_optimistic_freelist, + 
test_generic_sync_multiple_version_concurrent_with_pessimistic_freelist, +))] +pub(crate) mod multiple_version; + +use crate::generic::Builder; + +use super::*; + +pub(crate) const TEST_OPTIONS: Builder = Builder::new().with_capacity(ARENA_SIZE as u32); +pub(crate) const TEST_FULL_OPTIONS: Builder = Builder::new().with_capacity(1024); +pub(crate) const TEST_OPTIONS_WITH_OPTIMISTIC_FREELIST: Builder = Builder::new() + .with_capacity(ARENA_SIZE as u32) + .with_freelist(rarena_allocator::Freelist::Optimistic); +pub(crate) const TEST_OPTIONS_WITH_PESSIMISTIC_FREELIST: Builder = Builder::new() + .with_capacity(ARENA_SIZE as u32) + .with_freelist(rarena_allocator::Freelist::Pessimistic); +// pub(crate) const TEST_HIGH_COMPRESSION_OPTIONS: Options = Options::new() +// .with_capacity(ARENA_SIZE as u32) +// .with_compression_policy(crate::CompressionPolicy::High); +#[cfg(all( + all(feature = "std", not(miri)), + any( + all(test, not(miri)), + all_skl_tests, + test_generic_sync_full, + test_generic_sync_map, + test_generic_sync_trailed, + test_generic_sync_versioned, + ) +))] +const BIG_ARENA_SIZE: usize = 120 << 20; + +#[cfg(all( + all(feature = "std", not(miri)), + any( + all(test, not(miri)), + all_skl_tests, + test_generic_sync_full, + test_generic_sync_map, + test_generic_sync_trailed, + test_generic_sync_versioned, + ) +))] +pub(crate) const BIG_TEST_OPTIONS: Builder = Builder::new().with_capacity(BIG_ARENA_SIZE as u32); diff --git a/src/tests/map.rs b/src/tests/generic/map.rs similarity index 86% rename from src/tests/map.rs rename to src/tests/generic/map.rs index de6cf42..2edba70 100644 --- a/src/tests/map.rs +++ b/src/tests/generic/map.rs @@ -11,7 +11,7 @@ use core::sync::atomic::Ordering; use dbutils::buffer::VacantBuffer; -use crate::{allocator::WithoutVersion, map::Map, KeyBuilder, ValueBuilder}; +use crate::{allocator::WithoutVersion, generic::unique::Map, KeyBuilder, ValueBuilder}; use super::*; @@ -59,7 +59,7 @@ where assert!(found_arena_full); } -pub(crate) fn basic(mut l: M) +pub(crate) fn basic(l: M) where M: Map<[u8], [u8]> + Clone, ::Node: WithoutVersion, @@ -128,18 +128,6 @@ where .unwrap() .is_none()); - unsafe { - l.clear().unwrap(); - } - - let l = l.clone(); - { - let mut it = l.iter(); - assert!(it.seek_lower_bound::<[u8]>(Bound::Unbounded).is_none()); - assert!(it.seek_upper_bound::<[u8]>(Bound::Unbounded).is_none()); - } - assert!(l.is_empty()); - #[cfg(feature = "memmap")] l.flush().unwrap(); @@ -363,10 +351,72 @@ where feature = "std", any( all(test, not(miri)), - all_tests, - test_sync_map_concurrent, - test_sync_map_concurrent_with_optimistic_freelist, - test_sync_map_concurrent_with_pessimistic_freelist, + all_skl_tests, + test_generic_sync_map_concurrent, + test_generic_sync_map_concurrent_with_optimistic_freelist, + test_generic_sync_map_concurrent_with_pessimistic_freelist, + ) +))] +pub(crate) fn concurrent_basic_two_maps(l: M) +where + M: Map<[u8], [u8]> + Clone + Send + 'static, + ::Node: WithoutVersion, +{ + #[cfg(not(miri))] + const N: usize = 1000; + #[cfg(miri)] + const N: usize = 200; + + let l2 = M::create_from_allocator(l.allocator().clone()).unwrap(); + + for i in (0..N / 2).rev() { + let l = l.clone(); + let l2 = l2.clone(); + std::thread::spawn(move || { + l.get_or_insert(key(i).as_slice(), new_value(i).as_slice()) + .unwrap(); + }); + std::thread::spawn(move || { + l2.get_or_insert(key(i + N / 2).as_slice(), new_value(i + N / 2).as_slice()) + .unwrap(); + }); + } + while l.refs() > 2 { + ::core::hint::spin_loop(); + } + for i in 0..N / 2 { + let l = 
l.clone(); + let l2 = l2.clone(); + std::thread::spawn(move || { + let k = key(i); + assert_eq!( + l.get(k.as_slice()).unwrap().value(), + new_value(i).as_slice(), + "broken: {i}" + ); + }); + std::thread::spawn(move || { + let k = key(i + N / 2); + assert_eq!( + l2.get(k.as_slice()).unwrap().value(), + new_value(i + N / 2).as_slice(), + "broken: {i}" + ); + }); + } + while l.refs() > 2 { + ::core::hint::spin_loop(); + } +} + +#[cfg(all( + feature = "std", + any( + all(test, not(miri)), + all_skl_tests, + test_generic_sync_map_concurrent, + test_generic_sync_map_concurrent_with_optimistic_freelist, + test_generic_sync_map_concurrent_with_pessimistic_freelist, ) ))] pub(crate) fn concurrent_basic(l: M) @@ -409,10 +459,10 @@ where feature = "std", any( all(test, not(miri)), - all_tests, - test_sync_map_concurrent, - test_sync_map_concurrent_with_optimistic_freelist, - test_sync_map_concurrent_with_pessimistic_freelist + all_skl_tests, + test_generic_sync_map_concurrent, + test_generic_sync_map_concurrent_with_optimistic_freelist, + test_generic_sync_map_concurrent_with_pessimistic_freelist ) ))] pub(crate) fn concurrent_basic2(l: M) @@ -429,14 +479,14 @@ where let l1 = l.clone(); let l2 = l.clone(); std::thread::Builder::new() - .name(format!("map-concurrent-basic2-writer-{i}-1")) + .name(std::format!("map-concurrent-basic2-writer-{i}-1")) .spawn(move || { let _ = l1.insert(int_key(i).as_slice(), new_value(i).as_slice()); }) .unwrap(); std::thread::Builder::new() - .name(format!("map-concurrent-basic2-writer{i}-2")) + .name(std::format!("map-concurrent-basic2-writer{i}-2")) .spawn(move || { let _ = l2.insert(int_key(i).as_slice(), new_value(i).as_slice()); }) @@ -465,10 +515,10 @@ where all(feature = "std", not(miri)), any( all(test, not(miri)), - all_tests, - test_sync_map_concurrent, - test_sync_map_concurrent_with_optimistic_freelist, - test_sync_map_concurrent_with_pessimistic_freelist + all_skl_tests, + test_generic_sync_map_concurrent, + test_generic_sync_map_concurrent_with_optimistic_freelist, + test_generic_sync_map_concurrent_with_pessimistic_freelist ) ))] pub(crate) fn concurrent_basic_big_values(l: M) @@ -512,10 +562,10 @@ where feature = "std", any( all(test, not(miri)), - all_tests, - test_sync_map_concurrent, - test_sync_map_concurrent_with_optimistic_freelist, - test_sync_map_concurrent_with_pessimistic_freelist + all_skl_tests, + test_generic_sync_map_concurrent, + test_generic_sync_map_concurrent_with_optimistic_freelist, + test_generic_sync_map_concurrent_with_pessimistic_freelist ) ))] pub(crate) fn concurrent_one_key(l: M) @@ -573,10 +623,10 @@ where feature = "std", any( all(test, not(miri)), - all_tests, - test_sync_map_concurrent, - test_sync_map_concurrent_with_optimistic_freelist, - test_sync_map_concurrent_with_pessimistic_freelist + all_skl_tests, + test_generic_sync_map_concurrent, + test_generic_sync_map_concurrent_with_optimistic_freelist, + test_generic_sync_map_concurrent_with_pessimistic_freelist ) ))] pub(crate) fn concurrent_one_key2(l: M) @@ -991,18 +1041,18 @@ where M: Map<[u8], [u8]> + Clone, ::Node: WithoutVersion, { - use crate::Options; + use crate::generic::Builder; unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join(std::format!("{prefix}_reopen_skipmap")); { - let l = Options::new() + let l = Builder::new() .with_create_new(true) .with_read(true) .with_write(true) .with_capacity(ARENA_SIZE as u32) - .map_mut::<[u8], [u8], M, _>(&p) + .map_mut::(&p) .unwrap(); for i in 0..1000 { l.get_or_insert(key(i).as_slice(), 
new_value(i).as_slice()) @@ -1011,11 +1061,11 @@ where l.flush().unwrap(); } - let l = Options::new() + let l = Builder::new() .with_read(true) .with_write(true) .with_capacity(ARENA_SIZE as u32) - .map::<[u8], [u8], M, _>(&p) + .map::(&p) .unwrap(); assert_eq!(1000, l.len()); for i in 0..1000 { @@ -1033,7 +1083,7 @@ where M: Map<[u8], [u8]> + Clone, ::Node: WithoutVersion, { - use crate::Options; + use crate::generic::Builder; unsafe { use rand::seq::SliceRandom; @@ -1041,12 +1091,12 @@ where let dir = tempfile::tempdir().unwrap(); let p = dir.path().join(::std::format!("{prefix}_reopen2_skipmap")); { - let l = Options::new() + let l = Builder::new() .with_create_new(true) .with_read(true) .with_write(true) .with_capacity(ARENA_SIZE as u32) - .map_mut::<[u8], [u8], M, _>(&p) + .map_mut::(&p) .unwrap(); let mut data = (0..1000).collect::<::std::vec::Vec>(); data.shuffle(&mut rand::thread_rng()); @@ -1065,11 +1115,11 @@ where } } - let l = Options::new() + let l = Builder::new() .with_read(true) .with_write(true) .with_capacity(ARENA_SIZE as u32) - .map::<[u8], [u8], M, _>(&p) + .map::(&p) .unwrap(); assert_eq!(1000, l.len()); let mut data = (0..1000).collect::<::std::vec::Vec>(); @@ -1089,18 +1139,18 @@ where M: Map<[u8], [u8]> + Clone, ::Node: WithoutVersion, { - use crate::Options; + use crate::generic::Builder; unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join(std::format!("{prefix}_reopen3_skipmap")); { - let l = Options::new() + let l = Builder::new() .with_create_new(true) .with_read(true) .with_write(true) .with_capacity(ARENA_SIZE as u32) - .map_mut::<[u8], [u8], M, _>(&p) + .map_mut::(&p) .unwrap(); for i in 0..1000 { l.get_or_insert(key(i).as_slice(), new_value(i).as_slice()) @@ -1109,11 +1159,11 @@ where l.flush().unwrap(); } - let l = Options::new() + let l = Builder::new() .with_read(true) .with_write(true) .with_capacity((ARENA_SIZE * 2) as u32) - .map_mut::<[u8], [u8], M, _>(&p) + .map_mut::(&p) .unwrap(); assert_eq!(1000, l.len()); for i in 0..1000 { @@ -1125,6 +1175,77 @@ where } } +// reopen multiple skipmaps based on the same allocator +#[cfg(feature = "memmap")] +pub(crate) fn reopen_mmap4(prefix: &str) +where + M: Map<[u8], [u8]> + Clone + Send + Sync + 'static, + ::Node: WithoutVersion, +{ + use crate::generic::Builder; + + unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join(std::format!("{prefix}_reopen4_skipmap")); + let header = { + let l = Builder::new() + .with_create_new(true) + .with_read(true) + .with_write(true) + .with_capacity(ARENA_SIZE as u32) + .map_mut::(&p) + .unwrap(); + let l2 = M::create_from_allocator(l.allocator().clone()).unwrap(); + let h2 = l2.header().copied().unwrap(); + + let t1 = std::thread::spawn(move || { + for i in 0..500 { + l.get_or_insert(key(i).as_slice(), new_value(i).as_slice()) + .unwrap(); + } + l.flush().unwrap(); + }); + + let t2 = std::thread::spawn(move || { + for i in 500..1000 { + l2.get_or_insert(key(i).as_slice(), new_value(i).as_slice()) + .unwrap(); + } + l2.flush().unwrap(); + }); + + t1.join().unwrap(); + t2.join().unwrap(); + + h2 + }; + + let l = Builder::new() + .with_read(true) + .with_write(true) + .with_capacity((ARENA_SIZE * 2) as u32) + .map_mut::(&p) + .unwrap(); + let l2 = M::open_from_allocator(header, l.allocator().clone()).unwrap(); + assert_eq!(500, l.len()); + assert_eq!(500, l2.len()); + + for i in 0..500 { + let k = key(i); + let ent = l.get(k.as_slice()).unwrap(); + assert_eq!(new_value(i).as_slice(), ent.value()); + assert_eq!(ent.key(), 
k.as_slice()); + } + + for i in 500..1000 { + let k = key(i); + let ent = l2.get(k.as_slice()).unwrap(); + assert_eq!(new_value(i).as_slice(), ent.value()); + assert_eq!(ent.key(), k.as_slice()); + } + } +} + struct Person { id: u32, name: std::string::String, @@ -1164,7 +1285,7 @@ where std::string::ToString::to_string(&err), "incomplete buffer data: expected 0 bytes for decoding, but only 1 bytes were available" ); - Ok(()) + Ok(encoded_size) }); l.get_or_insert_with_value_builder::<()>(b"alice".as_slice(), vb) @@ -1185,7 +1306,7 @@ where let kb = KeyBuilder::new(5u8.into(), |key: &mut VacantBuffer<'_>| { key.put_slice(b"alice").unwrap(); - Ok(()) + Ok(5) }); let vb = ValueBuilder::new(encoded_size, |val: &mut VacantBuffer<'_>| { @@ -1204,7 +1325,7 @@ where std::string::ToString::to_string(&err), "incomplete buffer data: expected 0 bytes for decoding, but only 1 bytes were available" ); - Ok(()) + Ok(encoded_size) }); l.get_or_insert_with_builders::<(), ()>(kb, vb).unwrap(); @@ -1258,7 +1379,7 @@ where std::string::ToString::to_string(&err), "incomplete buffer data: expected 0 bytes for decoding, but only 1 bytes were available" ); - Ok(()) + Ok(encoded_size) }); l.insert_with_value_builder::<()>(b"alice".as_slice(), vb) @@ -1285,7 +1406,7 @@ where std::string::ToString::to_string(&err), "incomplete buffer data: expected 0 bytes for decoding, but only 1 bytes were available" ); - Ok(()) + Ok(encoded_size) }); let old = l @@ -1315,7 +1436,7 @@ where let kb = KeyBuilder::new(5u8.into(), |key: &mut VacantBuffer<'_>| { key.put_slice(b"alice").unwrap(); - Ok(()) + Ok(5) }); let vb = ValueBuilder::new(encoded_size, |val: &mut VacantBuffer<'_>| { @@ -1334,7 +1455,7 @@ where std::string::ToString::to_string(&err), "incomplete buffer data: expected 0 bytes for decoding, but only 1 bytes were available" ); - Ok(()) + Ok(encoded_size) }); l.insert_with_builders::<(), ()>(kb, vb).unwrap(); @@ -1360,7 +1481,7 @@ where std::string::ToString::to_string(&err), "incomplete buffer data: expected 0 bytes for decoding, but only 1 bytes were available" ); - Ok(()) + Ok(encoded_size) }); let old = l.insert_with_builders::<(), ()>(kb, vb).unwrap().unwrap(); @@ -1459,9 +1580,9 @@ where #[macro_export] #[doc(hidden)] -macro_rules! __map_tests { +macro_rules! __generic_map_tests { ($prefix:literal: $ty:ty) => { - $crate::__unit_tests!($crate::tests::map |$prefix, $ty, $crate::tests::TEST_OPTIONS| { + $crate::__unit_tests!($crate::tests::generic::map |$prefix, $ty, $crate::tests::generic::TEST_OPTIONS| { empty, basic, #[cfg(not(miri))] @@ -1490,7 +1611,7 @@ macro_rules! __map_tests { le, }); - $crate::__unit_tests!($crate::tests::map |$prefix, $ty, $crate::tests::TEST_FULL_OPTIONS| { + $crate::__unit_tests!($crate::tests::generic::map |$prefix, $ty, $crate::tests::generic::TEST_FULL_OPTIONS| { full, }); @@ -1499,7 +1620,7 @@ macro_rules! __map_tests { #[cfg_attr(miri, ignore)] #[allow(clippy::macro_metavars_in_unsafe)] fn reopen() { - $crate::tests::map::reopen_mmap::<$ty>($prefix); + $crate::tests::generic::map::reopen_mmap::<$ty>($prefix); } #[test] @@ -1507,7 +1628,7 @@ macro_rules! __map_tests { #[cfg_attr(miri, ignore)] #[allow(clippy::macro_metavars_in_unsafe)] fn reopen2() { - $crate::tests::map::reopen_mmap2::<$ty>($prefix); + $crate::tests::generic::map::reopen_mmap2::<$ty>($prefix); } #[test] @@ -1515,12 +1636,14 @@ macro_rules! 
__map_tests { #[cfg_attr(miri, ignore)] #[allow(clippy::macro_metavars_in_unsafe)] fn reopen3() { - $crate::tests::map::reopen_mmap3::<$ty>($prefix); + $crate::tests::generic::map::reopen_mmap3::<$ty>($prefix); } }; // Support from golang :) (go $prefix:literal: $ty:ty => $opts:path) => { - $crate::__unit_tests!($crate::tests::map |$prefix, $ty, $opts| { + $crate::__unit_tests!($crate::tests::generic::map |$prefix, $ty, $opts| { + #[cfg(feature = "std")] + concurrent_basic_two_maps, #[cfg(feature = "std")] concurrent_basic, #[cfg(feature = "std")] @@ -1531,6 +1654,14 @@ macro_rules! __map_tests { concurrent_one_key2, }); + #[test] + #[cfg(feature = "memmap")] + #[cfg_attr(miri, ignore)] + #[allow(clippy::macro_metavars_in_unsafe)] + fn reopen4() { + $crate::tests::generic::map::reopen_mmap4::<$ty>($prefix); + } + // #[cfg(not(miri))] // mod high_compression { // use super::*; @@ -1547,7 +1678,7 @@ macro_rules! __map_tests { // }); // } - $crate::__unit_tests!($crate::tests::map |$prefix, $ty, $crate::tests::BIG_TEST_OPTIONS| { + $crate::__unit_tests!($crate::tests::generic::map |$prefix, $ty, $crate::tests::generic::BIG_TEST_OPTIONS| { #[cfg(all(feature = "std", not(miri)))] concurrent_basic_big_values, }); diff --git a/src/tests/multiple_version.rs b/src/tests/generic/multiple_version.rs similarity index 89% rename from src/tests/multiple_version.rs rename to src/tests/generic/multiple_version.rs index 3c71640..0cbec76 100644 --- a/src/tests/multiple_version.rs +++ b/src/tests/generic/multiple_version.rs @@ -11,7 +11,9 @@ use core::sync::atomic::Ordering; use dbutils::buffer::VacantBuffer; -use crate::{allocator::WithVersion, multiple_version::Map, KeyBuilder, ValueBuilder, MIN_VERSION}; +use crate::{ + allocator::WithVersion, generic::multiple_version::Map, KeyBuilder, ValueBuilder, MIN_VERSION, +}; use super::*; @@ -63,7 +65,7 @@ where assert!(found_arena_full); } -pub(crate) fn basic(mut l: M) +pub(crate) fn basic(l: M) where M: Map<[u8], [u8]> + Clone, ::Node: WithVersion, @@ -162,18 +164,6 @@ where .unwrap() .is_none()); - unsafe { - l.clear().unwrap(); - } - - let l = l.clone(); - { - let mut it = l.iter_all_versions(0); - assert!(it.seek_lower_bound::<[u8]>(Bound::Unbounded).is_none()); - assert!(it.seek_upper_bound::<[u8]>(Bound::Unbounded).is_none()); - } - assert!(l.is_empty()); - #[cfg(feature = "memmap")] l.flush().unwrap(); @@ -742,10 +732,76 @@ where feature = "std", any( all(test, not(miri)), - all_tests, - test_sync_multiple_version_concurrent, - test_sync_multiple_version_concurrent_with_optimistic_freelist, - test_sync_multiple_version_concurrent_with_pessimistic_freelist + all_skl_tests, + test_generic_sync_multiple_version_concurrent, + test_generic_sync_multiple_version_concurrent_with_optimistic_freelist, + test_generic_sync_multiple_version_concurrent_with_pessimistic_freelist + ) +))] +pub(crate) fn concurrent_basic_two_maps(l: M) +where + M: Map<[u8], [u8]> + Clone + Send + 'static, + ::Node: WithVersion, +{ + #[cfg(not(miri))] + const N: usize = 1000; + #[cfg(miri)] + const N: usize = 200; + + let l2 = M::create_from_allocator(l.allocator().clone()).unwrap(); + + for i in (0..N / 2).rev() { + let l = l.clone(); + let l2 = l2.clone(); + std::thread::spawn(move || { + l.get_or_insert(MIN_VERSION, key(i).as_slice(), new_value(i).as_slice()) + .unwrap(); + }); + std::thread::spawn(move || { + l2.get_or_insert( + MIN_VERSION, + key(i + N / 2).as_slice(), + new_value(i + N / 2).as_slice(), + ) + .unwrap(); + }); + } + while >::refs(&l) > 2 { + 
::core::hint::spin_loop(); + } + for i in 0..N / 2 { + let l = l.clone(); + let l2 = l2.clone(); + std::thread::spawn(move || { + let k = key(i); + assert_eq!( + l.get(MIN_VERSION, k.as_slice()).unwrap().value(), + new_value(i).as_slice(), + "broken: {i}" + ); + }); + std::thread::spawn(move || { + let k = key(i + N / 2); + assert_eq!( + l2.get(MIN_VERSION, k.as_slice()).unwrap().value(), + new_value(i + N / 2).as_slice(), + "broken: {i}" + ); + }); + } + while >::refs(&l) > 2 { + ::core::hint::spin_loop(); + } +} + +#[cfg(all( + feature = "std", + any( + all(test, not(miri)), + all_skl_tests, + test_generic_sync_multiple_version_concurrent, + test_generic_sync_multiple_version_concurrent_with_optimistic_freelist, + test_generic_sync_multiple_version_concurrent_with_pessimistic_freelist ) ))] pub(crate) fn concurrent_basic(l: M) @@ -765,7 +821,7 @@ where .unwrap(); }); } - while l.refs() > 1 { + while >::refs(&l) > 1 { ::core::hint::spin_loop(); } for i in 0..N { @@ -779,7 +835,7 @@ where ); }); } - while l.refs() > 1 { + while >::refs(&l) > 1 { ::core::hint::spin_loop(); } } @@ -788,10 +844,10 @@ where feature = "std", any( all(test, not(miri)), - all_tests, - test_sync_multiple_version_concurrent, - test_sync_multiple_version_concurrent_with_optimistic_freelist, - test_sync_multiple_version_concurrent_with_pessimistic_freelist + all_skl_tests, + test_generic_sync_multiple_version_concurrent, + test_generic_sync_multiple_version_concurrent_with_optimistic_freelist, + test_generic_sync_multiple_version_concurrent_with_pessimistic_freelist ) ))] pub(crate) fn concurrent_basic2(l: M) @@ -808,20 +864,20 @@ where let l1 = l.clone(); let l2 = l.clone(); std::thread::Builder::new() - .name(format!("fullmap-concurrent-basic2-writer-{i}-1")) + .name(std::format!("fullmap-concurrent-basic2-writer-{i}-1")) .spawn(move || { let _ = l1.insert(MIN_VERSION, int_key(i).as_slice(), new_value(i).as_slice()); }) .unwrap(); std::thread::Builder::new() - .name(format!("fullmap-concurrent-basic2-writer{i}-2")) + .name(std::format!("fullmap-concurrent-basic2-writer{i}-2")) .spawn(move || { let _ = l2.insert(MIN_VERSION, int_key(i).as_slice(), new_value(i).as_slice()); }) .unwrap(); } - while l.refs() > 1 { + while >::refs(&l) > 1 { ::core::hint::spin_loop(); } for i in 0..N { @@ -835,7 +891,7 @@ where ); }); } - while l.refs() > 1 { + while >::refs(&l) > 1 { ::core::hint::spin_loop(); } } @@ -844,10 +900,10 @@ where all(feature = "std", not(miri)), any( all(test, not(miri)), - all_tests, - test_sync_multiple_version_concurrent, - test_sync_multiple_version_concurrent_with_optimistic_freelist, - test_sync_multiple_version_concurrent_with_pessimistic_freelist + all_skl_tests, + test_generic_sync_multiple_version_concurrent, + test_generic_sync_multiple_version_concurrent_with_optimistic_freelist, + test_generic_sync_multiple_version_concurrent_with_pessimistic_freelist ) ))] pub(crate) fn concurrent_basic_big_values(l: M) @@ -867,7 +923,7 @@ where .unwrap(); }); } - while l.refs() > 1 { + while >::refs(&l) > 1 { ::core::hint::spin_loop(); } // assert_eq!(N, l.len()); @@ -882,7 +938,7 @@ where ); }); } - while l.refs() > 1 { + while >::refs(&l) > 1 { ::core::hint::spin_loop(); } } @@ -891,10 +947,10 @@ where feature = "std", any( all(test, not(miri)), - all_tests, - test_sync_multiple_version_concurrent, - test_sync_multiple_version_concurrent_with_optimistic_freelist, - test_sync_multiple_version_concurrent_with_pessimistic_freelist + all_skl_tests, + test_generic_sync_multiple_version_concurrent, + 
test_generic_sync_multiple_version_concurrent_with_optimistic_freelist, + test_generic_sync_multiple_version_concurrent_with_pessimistic_freelist ) ))] pub(crate) fn concurrent_one_key(l: M) @@ -917,7 +973,7 @@ where }); } - while l.refs() > 1 { + while >::refs(&l) > 1 { ::core::hint::spin_loop(); } @@ -943,7 +999,7 @@ where }); } - while l.refs() > 1 { + while >::refs(&l) > 1 { ::core::hint::spin_loop(); } @@ -955,10 +1011,10 @@ where feature = "std", any( all(test, not(miri)), - all_tests, - test_sync_multiple_version_concurrent, - test_sync_multiple_version_concurrent_with_optimistic_freelist, - test_sync_multiple_version_concurrent_with_pessimistic_freelist + all_skl_tests, + test_generic_sync_multiple_version_concurrent, + test_generic_sync_multiple_version_concurrent_with_optimistic_freelist, + test_generic_sync_multiple_version_concurrent_with_pessimistic_freelist ) ))] pub(crate) fn concurrent_one_key2(l: M) @@ -981,7 +1037,7 @@ where }); } - while l.refs() > 1 { + while >::refs(&l) > 1 { ::core::hint::spin_loop(); } @@ -1007,7 +1063,7 @@ where }); } - while l.refs() > 1 { + while >::refs(&l) > 1 { ::core::hint::spin_loop(); } @@ -1596,18 +1652,18 @@ where M: Map<[u8], [u8]> + Clone, ::Node: WithVersion, { - use crate::Options; + use crate::generic::Builder; unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join(std::format!("{prefix}_reopen_skipmap")); { - let l = Options::new() + let l = Builder::new() .with_create_new(true) .with_read(true) .with_write(true) .with_capacity(ARENA_SIZE as u32) - .map_mut::<[u8], [u8], M, _>(&p) + .map_mut::(&p) .unwrap(); for i in 0..1000 { l.get_or_insert(MIN_VERSION, key(i).as_slice(), new_value(i).as_slice()) @@ -1616,11 +1672,11 @@ where l.flush().unwrap(); } - let l = Options::new() + let l = Builder::new() .with_read(true) .with_write(true) .with_capacity(ARENA_SIZE as u32) - .map::<[u8], [u8], M, _>(&p) + .map::(&p) .unwrap(); assert_eq!(1000, l.len()); for i in 0..1000 { @@ -1639,7 +1695,7 @@ where M: Map<[u8], [u8]> + Clone, ::Node: WithVersion, { - use crate::Options; + use crate::generic::Builder; unsafe { use rand::seq::SliceRandom; @@ -1647,12 +1703,12 @@ where let dir = tempfile::tempdir().unwrap(); let p = dir.path().join(::std::format!("{prefix}_reopen2_skipmap")); { - let l = Options::new() + let l = Builder::new() .with_create_new(true) .with_read(true) .with_write(true) .with_capacity(ARENA_SIZE as u32) - .map_mut::<[u8], [u8], M, _>(&p) + .map_mut::(&p) .unwrap(); let mut data = (0..1000).collect::<::std::vec::Vec>(); data.shuffle(&mut rand::thread_rng()); @@ -1674,11 +1730,11 @@ where } } - let l = Options::new() + let l = Builder::new() .with_read(true) .with_write(true) .with_capacity(ARENA_SIZE as u32) - .map::<[u8], [u8], M, _>(&p) + .map::(&p) .unwrap(); assert_eq!(1000, l.len()); let mut data = (0..1000).collect::<::std::vec::Vec>(); @@ -1701,18 +1757,18 @@ where M: Map<[u8], [u8]> + Clone, ::Node: WithVersion, { - use crate::Options; + use crate::generic::Builder; unsafe { let dir = tempfile::tempdir().unwrap(); let p = dir.path().join(std::format!("{prefix}_reopen3_skipmap")); { - let l = Options::new() + let l = Builder::new() .with_create_new(true) .with_read(true) .with_write(true) .with_capacity(ARENA_SIZE as u32) - .map_mut::<[u8], [u8], M, _>(&p) + .map_mut::(&p) .unwrap(); for i in 0..1000 { l.get_or_insert(MIN_VERSION, key(i).as_slice(), new_value(i).as_slice()) @@ -1721,11 +1777,11 @@ where l.flush().unwrap(); } - let l = Options::new() + let l = Builder::new() .with_read(true) 
.with_write(true) .with_capacity((ARENA_SIZE * 2) as u32) - .map_mut::<[u8], [u8], M, _>(&p) + .map_mut::(&p) .unwrap(); assert_eq!(1000, l.len()); for i in 0..1000 { @@ -1738,6 +1794,77 @@ where } } +// reopen multiple skipmaps based on the same allocator +#[cfg(feature = "memmap")] +pub(crate) fn reopen_mmap4(prefix: &str) +where + M: Map<[u8], [u8]> + Clone + Send + Sync + 'static, + ::Node: WithVersion, +{ + use crate::generic::Builder; + + unsafe { + let dir = tempfile::tempdir().unwrap(); + let p = dir.path().join(std::format!("{prefix}_reopen4_skipmap")); + let header = { + let l = Builder::new() + .with_create_new(true) + .with_read(true) + .with_write(true) + .with_capacity(ARENA_SIZE as u32) + .map_mut::(&p) + .unwrap(); + let l2 = M::create_from_allocator(l.allocator().clone()).unwrap(); + let h2 = l2.header().copied().unwrap(); + + let t1 = std::thread::spawn(move || { + for i in 0..500 { + l.get_or_insert(MIN_VERSION, key(i).as_slice(), new_value(i).as_slice()) + .unwrap(); + } + l.flush().unwrap(); + }); + + let t2 = std::thread::spawn(move || { + for i in 500..1000 { + l2.get_or_insert(MIN_VERSION, key(i).as_slice(), new_value(i).as_slice()) + .unwrap(); + } + l2.flush().unwrap(); + }); + + t1.join().unwrap(); + t2.join().unwrap(); + + h2 + }; + + let l = Builder::new() + .with_read(true) + .with_write(true) + .with_capacity((ARENA_SIZE * 2) as u32) + .map_mut::(&p) + .unwrap(); + let l2 = M::open_from_allocator(header, l.allocator().clone()).unwrap(); + assert_eq!(500, l.len()); + assert_eq!(500, l2.len()); + + for i in 0..500 { + let k = key(i); + let ent = l.get(MIN_VERSION, k.as_slice()).unwrap(); + assert_eq!(new_value(i).as_slice(), ent.value()); + assert_eq!(ent.key(), k.as_slice()); + } + + for i in 500..1000 { + let k = key(i); + let ent = l2.get(MIN_VERSION, k.as_slice()).unwrap(); + assert_eq!(new_value(i).as_slice(), ent.value()); + assert_eq!(ent.key(), k.as_slice()); + } + } +} + struct Person { id: u32, name: std::string::String, @@ -1777,7 +1904,7 @@ where std::string::ToString::to_string(&err), "incomplete buffer data: expected 0 bytes for decoding, but only 1 bytes were available" ); - Ok(()) + Ok(encoded_size) }); l.get_or_insert_with_value_builder::<()>(1, b"alice".as_slice(), vb) @@ -1798,7 +1925,7 @@ where let kb = KeyBuilder::new(5u8.into(), |key: &mut VacantBuffer<'_>| { key.put_slice(b"alice").unwrap(); - Ok(()) + Ok(5) }); let vb = ValueBuilder::new(encoded_size, |val: &mut VacantBuffer<'_>| { @@ -1817,7 +1944,7 @@ where std::string::ToString::to_string(&err), "incomplete buffer data: expected 0 bytes for decoding, but only 1 bytes were available" ); - Ok(()) + Ok(encoded_size) }); l.get_or_insert_with_builders::<(), ()>(1, kb, vb).unwrap(); @@ -1871,7 +1998,7 @@ where std::string::ToString::to_string(&err), "incomplete buffer data: expected 0 bytes for decoding, but only 1 bytes were available" ); - Ok(()) + Ok(encoded_size) }); l.insert_with_value_builder::<()>(1, b"alice".as_slice(), vb) @@ -1898,7 +2025,7 @@ where std::string::ToString::to_string(&err), "incomplete buffer data: expected 0 bytes for decoding, but only 1 bytes were available" ); - Ok(()) + Ok(encoded_size) }); let old = l @@ -1928,7 +2055,7 @@ where let kb = KeyBuilder::new(5u8.into(), |key: &mut VacantBuffer<'_>| { key.put_slice(b"alice").unwrap(); - Ok(()) + Ok(5) }); let vb = ValueBuilder::new(encoded_size, |val: &mut VacantBuffer<'_>| { @@ -1947,7 +2074,7 @@ where std::string::ToString::to_string(&err), "incomplete buffer data: expected 0 bytes for decoding, but only 1 bytes 
were available" ); - Ok(()) + Ok(encoded_size) }); l.insert_with_builders::<(), ()>(1, kb, vb).unwrap(); @@ -1973,7 +2100,7 @@ where std::string::ToString::to_string(&err), "incomplete buffer data: expected 0 bytes for decoding, but only 1 bytes were available" ); - Ok(()) + Ok(encoded_size) }); let old = l .insert_with_builders::<(), ()>(1, kb, vb) @@ -2141,9 +2268,9 @@ where #[macro_export] #[doc(hidden)] -macro_rules! __multiple_version_map_tests { +macro_rules! __generic_multiple_version_map_tests { ($prefix:literal: $ty:ty) => { - $crate::__unit_tests!($crate::tests::multiple_version |$prefix, $ty, $crate::tests::TEST_OPTIONS| { + $crate::__unit_tests!($crate::tests::generic::multiple_version |$prefix, $ty, $crate::tests::generic::TEST_OPTIONS| { empty, basic, #[cfg(not(miri))] @@ -2177,7 +2304,7 @@ macro_rules! __multiple_version_map_tests { le, }); - $crate::__unit_tests!($crate::tests::multiple_version |$prefix, $ty, $crate::tests::TEST_FULL_OPTIONS| { + $crate::__unit_tests!($crate::tests::generic::multiple_version |$prefix, $ty, $crate::tests::generic::TEST_FULL_OPTIONS| { full, }); @@ -2186,7 +2313,7 @@ macro_rules! __multiple_version_map_tests { #[cfg_attr(miri, ignore)] #[allow(clippy::macro_metavars_in_unsafe)] fn reopen() { - $crate::tests::multiple_version::reopen_mmap::<$ty>($prefix); + $crate::tests::generic::multiple_version::reopen_mmap::<$ty>($prefix); } #[test] @@ -2194,7 +2321,7 @@ macro_rules! __multiple_version_map_tests { #[cfg_attr(miri, ignore)] #[allow(clippy::macro_metavars_in_unsafe)] fn reopen2() { - $crate::tests::multiple_version::reopen_mmap2::<$ty>($prefix); + $crate::tests::generic::multiple_version::reopen_mmap2::<$ty>($prefix); } #[test] @@ -2202,12 +2329,14 @@ macro_rules! __multiple_version_map_tests { #[cfg_attr(miri, ignore)] #[allow(clippy::macro_metavars_in_unsafe)] fn reopen3() { - $crate::tests::multiple_version::reopen_mmap3::<$ty>($prefix); + $crate::tests::generic::multiple_version::reopen_mmap3::<$ty>($prefix); } }; // Support from golang :) (go $prefix:literal: $ty:ty => $opts:path) => { - $crate::__unit_tests!($crate::tests::multiple_version |$prefix, $ty, $opts| { + $crate::__unit_tests!($crate::tests::generic::multiple_version |$prefix, $ty, $opts| { + #[cfg(feature = "std")] + concurrent_basic_two_maps, #[cfg(feature = "std")] concurrent_basic, #[cfg(feature = "std")] @@ -2218,6 +2347,14 @@ macro_rules! __multiple_version_map_tests { concurrent_one_key2, }); + #[test] + #[cfg(feature = "memmap")] + #[cfg_attr(miri, ignore)] + #[allow(clippy::macro_metavars_in_unsafe)] + fn reopen4() { + $crate::tests::generic::multiple_version::reopen_mmap4::<$ty>($prefix); + } + // #[cfg(not(miri))] // mod high_compression { // use super::*; @@ -2234,7 +2371,7 @@ macro_rules! 
__multiple_version_map_tests { // }); // } - $crate::__unit_tests!($crate::tests::multiple_version |$prefix, $ty, $crate::tests::BIG_TEST_OPTIONS| { + $crate::__unit_tests!($crate::tests::generic::multiple_version |$prefix, $ty, $crate::tests::generic::BIG_TEST_OPTIONS| { #[cfg(all(feature = "std", not(miri)))] concurrent_basic_big_values, }); diff --git a/src/traits.rs b/src/traits.rs index d9c536d..94e1ef9 100644 --- a/src/traits.rs +++ b/src/traits.rs @@ -1,90 +1,104 @@ -use among::Among; -use core::{ - ops::{Bound, RangeBounds}, - ptr::NonNull, - sync::atomic::Ordering, -}; -use dbutils::buffer::VacantBuffer; -use either::Either; +use core::{mem, ptr::NonNull, sync::atomic::Ordering}; use rarena_allocator::Allocator as ArenaAllocator; +use crate::Header; + use super::{ - allocator::{Allocator, AllocatorExt, Header, Link, NodePointer, Sealed as AllocatorSealed}, - base::{EntryRef, SkipList, VersionedEntryRef}, + allocator::{Allocator, AllocatorExt, Link, Meta, Node, NodePointer, Sealed as AllocatorSealed}, error::Error, - iter::Iter, options::Options, - types::{Height, KeyBuilder, ValueBuilder}, - MIN_VERSION, + types::Height, }; -/// [`Map`](map::Map) implementation -pub mod map; - -/// [`Map`](multiple_version::Map) implementation -pub mod multiple_version; - -/// The underlying skip list for skip maps -pub trait List: - Sized + From>::Allocator>> -{ +pub trait Constructable: Sized { type Allocator: Allocator; + type Comparator; - fn as_ref(&self) -> &SkipList; + fn allocator(&self) -> &Self::Allocator; - fn as_mut(&mut self) -> &mut SkipList; + fn allocator_mut(&mut self) -> &mut Self::Allocator; - #[inline] - fn allocator(&self) -> &Self::Allocator { - &self.as_ref().arena - } - - #[inline] - fn magic_version(&self) -> u16 { - self.as_ref().magic_version() - } + fn magic_version(&self) -> u16; #[inline] fn version(&self) -> u16 { - ArenaAllocator::magic_version(core::ops::Deref::deref(&self.as_ref().arena)) + ArenaAllocator::magic_version(self.allocator().arena()) } + fn len(&self) -> usize; + + fn height(&self) -> u8; + + fn random_height(&self) -> Height; + + fn header(&self) -> Option<&Header>; + fn construct( - arena: ::Allocator, + arena: Self::Allocator, + meta: NonNull<::Meta>, + head: <::Node as Node>::Pointer, + tail: <::Node as Node>::Pointer, + header: Option
<Header>
, + cmp: Self::Comparator, + ) -> Self; +} + +/// The underlying skip list for skip maps +pub trait List: Sized + From { + type Constructable: Constructable; + + fn as_ref(&self) -> &Self::Constructable; + + fn as_mut(&mut self) -> &mut Self::Constructable; + + fn meta(&self) -> &<::Allocator as AllocatorSealed>::Meta; + + fn construct( + arena: <::Allocator as AllocatorSealed>::Allocator, opts: Options, exist: bool, + cmp: ::Comparator, ) -> Result { use std::boxed::Box; - let arena = ::new(arena, opts); + let arena = + <::Allocator as AllocatorSealed>::new(arena, opts); let opts = arena.options(); let max_height: u8 = opts.max_height().into(); - let data_offset = arena.check_capacity(max_height)?; if arena.read_only() || exist { - let (meta, head, tail) = arena.get_pointers(); - - return Ok(Self::from(SkipList::construct( - arena, - meta, - head, - tail, - data_offset, - ))); + let header = arena.calculate_header(max_height)?; + let (meta, head, tail) = arena.get_pointers(header); + + return Ok(Self::from( + ::construct( + arena, + meta, + head, + tail, + Some(header), + cmp, + ), + )); } - let meta = if AllocatorSealed::unify(&arena) { - arena.allocate_header(opts.magic_version())? + let (header_offset, meta) = if AllocatorSealed::unify(&arena) { + arena + .allocate_header(opts.magic_version()) + .map(|(header_offset, meta)| (Some(header_offset as u32), meta))? } else { unsafe { - NonNull::new_unchecked(Box::into_raw(Box::new( - <::Header as Header>::new(opts.magic_version()), - ))) + (None, NonNull::new_unchecked(Box::into_raw(Box::new( + <<::Allocator as AllocatorSealed>::Meta as Meta>::new(opts.magic_version()), + )))) } }; let head = arena.allocate_full_node(max_height)?; let tail = arena.allocate_full_node(max_height)?; + let head_offset = head.offset(); + let tail_offset = tail.offset(); + // Safety: // We will always allocate enough space for the head node and the tail node. unsafe { @@ -97,28 +111,122 @@ pub trait List: } } - Ok(Self::from(SkipList::construct( + let header = + header_offset.map(|meta_offset| Header::new(meta_offset, head_offset, tail_offset)); + + Ok(Self::from( + ::construct(arena, meta, head, tail, header, cmp), + )) + } + + unsafe fn try_open_from_allocator( + arena: ::Allocator, + cmp: ::Comparator, + header: Header, + ) -> Result { + let (meta, head, tail) = arena.get_pointers(header); + + Ok(L::from(::construct( arena, meta, head, tail, - data_offset, + Some(header), + cmp, + ))) + } + + fn try_create_from_allocator( + arena: ::Allocator, + cmp: ::Comparator, + ) -> Result { + use std::boxed::Box; + + let opts = arena.options(); + let max_height: u8 = opts.max_height().into(); + if arena.read_only() { + return Err(Error::read_only()); + } + + let (header_offset, meta) = if AllocatorSealed::unify(&arena) { + arena + .allocate_header(opts.magic_version()) + .map(|(header_offset, meta)| (Some(header_offset as u32), meta))? + } else { + unsafe { + (None, NonNull::new_unchecked(Box::into_raw(Box::new( + <<::Allocator as AllocatorSealed>::Meta as Meta>::new(opts.magic_version()), + )))) + } + }; + + let head = arena.allocate_full_node(max_height)?; + let tail = arena.allocate_full_node(max_height)?; + + let head_offset = NodePointer::offset(&head); + let tail_offset = NodePointer::offset(&tail); + + // Safety: + // We will always allocate enough space for the head node and the tail node. + unsafe { + // Link all head/tail levels together. 
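+ // Each level of the head node points forward to the tail node and each level of the tail node points back to the head node, so the empty list can be traversed in both directions before any user node is inserted.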
+ for i in 0..(max_height as usize) { + let head_link = head.tower(&arena, i); + let tail_link = tail.tower(&arena, i); + head_link.store_next_offset(tail_offset, Ordering::Relaxed); + tail_link.store_prev_offset(head_offset, Ordering::Relaxed); + } + } + + Ok(L::from(::construct( + arena, + meta, + head, + tail, + header_offset.map(|offset| Header::new(offset, head_offset, tail_offset)), + cmp, ))) + } } /// The wrapper trait over an underlying [`Allocator`](rarena_allocator::Allocator). -pub trait Arena: List { +pub trait Arena: List { /// Returns how many bytes are reserved by the ARENA. #[inline] fn reserved_bytes(&self) -> usize { - self.as_ref().arena.reserved_bytes() + self.as_ref().allocator().reserved_bytes() } /// Returns the reserved bytes of the allocator specified in the [`Options::with_reserved`]. #[inline] fn reserved_slice(&self) -> &[u8] { - self.as_ref().arena.reserved_slice() + self.as_ref().allocator().reserved_slice() + } + + /// Clear the allocator to empty and re-initialize. + /// + /// ## Safety + /// - Pointers previously obtained from the allocator cannot be used anymore after calling this method. + /// - This method is not thread-safe. + /// - This will clear the whole ARENA; all `SkipMap`s based on this ARENA cannot be used anymore after calling this method. + /// + /// ## Example + /// + /// Undefined behavior: + /// + /// ```ignore + /// let mut map = Builder::new().with_capacity(100).alloc().unwrap(); + /// + /// map.insert(b"hello", b"world").unwrap(); + /// + /// let data = map.get(b"hello").unwrap(); + /// + /// map.allocator_mut().clear().unwrap(); + /// + /// let w = data[0]; // undefined behavior + /// ``` + unsafe fn clear(&mut self) -> Result<(), Error> { + self.allocator_mut().clear().map_err(Into::into) } /// Returns the mutable reserved bytes of the allocator specified in the [`Options::with_reserved`]. @@ -131,7 +239,7 @@ pub trait Arena: List { #[allow(clippy::mut_from_ref)] #[inline] unsafe fn reserved_slice_mut(&self) -> &mut [u8] { - self.as_ref().arena.reserved_slice_mut() + self.as_ref().allocator().reserved_slice_mut() } /// Returns the path of the mmap file, only returns `Some` when the ARENA is backed by a mmap file. @@ -140,8 +248,8 @@ pub trait Arena: List { #[inline] fn path( &self, - ) -> Option<&<::Allocator as ArenaAllocator>::Path> { - self.as_ref().arena.path() + ) -> Option<&<<::Allocator as AllocatorSealed>::Allocator as ArenaAllocator>::Path>{ + self.as_ref().allocator().path() } /// Sets remove on drop, only works on mmap with a file backend. @@ -154,18 +262,7 @@ pub trait Arena: List { #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] #[inline] fn remove_on_drop(&self, val: bool) { - self.as_ref().arena.remove_on_drop(val) + self.as_ref().allocator().remove_on_drop(val) } - /// Returns the offset of the data section in the `SkipMap`. - /// - /// By default, `SkipMap` will allocate meta, head node, and tail node in the ARENA, - /// and the data section will be allocated after the tail node. - /// - /// This method will return the offset of the data section in the ARENA. - #[inline] - fn data_offset(&self) -> usize { - self.as_ref().data_offset() + } /// Returns the magic version number of the [`Arena`]. @@ -176,78 +273,52 @@ pub trait Arena: List { self.as_ref().magic_version() } - /// Returns the height of the highest tower within any of the nodes that - /// have ever been allocated as part of this skiplist. 
- #[inline] - fn height(&self) -> u8 { - self.as_ref().height() - } - - /// Returns the number of remaining bytes can be allocated by the arena. - #[inline] - fn remaining(&self) -> usize { - self.as_ref().remaining() - } - /// Returns the number of bytes that have been allocated from the arena. #[inline] fn allocated(&self) -> usize { - self.as_ref().allocated() + self.as_ref().allocator().allocated() } /// Returns the capacity of the arena. #[inline] fn capacity(&self) -> usize { - self.as_ref().capacity() + self.as_ref().allocator().capacity() } - /// Returns the number of entries in the skipmap. - #[inline] - fn len(&self) -> usize { - self.as_ref().len() - } - - /// Returns true if the skipmap is empty. + /// Returns the number of remaining bytes that can be allocated by the arena. #[inline] - fn is_empty(&self) -> bool { - self.len() == 0 + fn remaining(&self) -> usize { + self.as_ref().allocator().remaining() } /// Gets the number of pointers to this `SkipMap` similar to [`Arc::strong_count`](std::sync::Arc::strong_count). #[inline] fn refs(&self) -> usize { - self.as_ref().refs() + self.as_ref().allocator().refs() } /// Returns how many bytes are discarded by the ARENA. #[inline] fn discarded(&self) -> u32 { - self.as_ref().discarded() + self.as_ref().allocator().discarded() } /// Returns `true` if the Arena is using unify memory layout. #[inline] fn unify(&self) -> bool { - self.as_ref().arena.unify() + self.as_ref().allocator().unify() } - /// Returns a random generated height. - /// - /// This method is useful when you want to check if the underlying allocator can allocate a node. - /// - /// ## Example - /// - /// ```rust - /// use skl::{map::sync::SkipMap, Arena, Options}; - /// - /// let map = Options::new().with_capacity(1024).alloc::<_, _, SkipMap<[u8], [u8]>>().unwrap(); - /// let height = map.random_height(); - /// - /// let needed = SkipMap::<[u8], [u8]>::estimated_node_size(height, b"k1".len(), b"k2".len()); - /// ``` + /// Returns the allocator used to allocate nodes. #[inline] - fn random_height(&self) -> Height { - self.as_ref().random_height() + fn allocator(&self) -> &::Allocator { + self.as_ref().allocator() + } + + /// Returns a mutable reference to the allocator used to allocate nodes. + #[inline] + fn allocator_mut(&mut self) -> &mut ::Allocator { + self.as_mut().allocator_mut() } /// Returns the estimated size of a node with the given height and key/value sizes. @@ -255,33 +326,26 @@ pub trait Arena: List { /// **Note**: The returned size is only an estimate and may not be accurate, which means that the actual size is less than or equal to the returned size. #[inline] fn estimated_node_size(height: Height, key_size: usize, value_size: usize) -> usize { - SkipList::::estimated_node_size(height, key_size, value_size) + let height: usize = height.into(); + 7 // max padding + + mem::size_of::<<::Allocator as AllocatorSealed>::Node>() + + mem::size_of::<<<::Allocator as AllocatorSealed>::Node as Node>::Link>() * height + + key_size + + value_size } - /// Clear the skiplist to empty and re-initialize. - /// - /// ## Safety - /// - The current pointers get from the ARENA cannot be used anymore after calling this method. - /// - This method is not thread-safe. 
- /// - /// ## Example - /// - /// Undefine behavior: - /// - /// ```ignore - /// let map = Options::new().with_capacity(100).alloc().unwrap(); - /// - /// map.insert(b"hello", b"world").unwrap(); - /// - /// let data = map.get(b"hello").unwrap(); - /// - /// map.clear().unwrap(); - /// - /// let w = data[0]; // undefined behavior - /// ``` + /// Returns the size of a full node (node plus tower links) for the given maximum height. #[inline] - unsafe fn clear(&mut self) -> Result<(), Error> { - self.as_mut().clear() + fn full_node_size(max_height: Height) -> usize { + let max_height: usize = max_height.into(); + mem::size_of::<<::Allocator as AllocatorSealed>::Node>() + + mem::size_of::<<<::Allocator as AllocatorSealed>::Node as Node>::Link>() * max_height + } + + /// Returns the size of the meta of the allocator. + #[inline] + fn meta_size() -> usize { + mem::size_of::<<::Allocator as AllocatorSealed>::Meta>() } /// Flushes outstanding memory map modifications to disk. @@ -293,7 +357,7 @@ pub trait Arena: List { #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] #[inline] fn flush(&self) -> std::io::Result<()> { - self.as_ref().arena.flush() + self.as_ref().allocator().flush() } /// Asynchronously flushes outstanding memory map modifications to disk. @@ -305,8 +369,8 @@ pub trait Arena: List { #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] #[inline] fn flush_async(&self) -> std::io::Result<()> { - self.as_ref().arena.flush_async() + self.as_ref().allocator().flush_async() } } -impl Arena for T where T: List {} +impl Arena for T where T: List {} diff --git a/src/types.rs b/src/types.rs index 0a7f4f0..d03c794 100644 --- a/src/types.rs +++ b/src/types.rs @@ -1,9 +1,7 @@ use core::ops::{Add, AddAssign, Sub, SubAssign}; use arbitrary_int::{u27, u5, Number, TryNewError}; -pub use dbutils::{buffer::*, types::*}; - -pub use super::base::{EntryRef, VersionedEntryRef}; +pub use dbutils::buffer::*; const MAX_U5: u8 = (1 << 5) - 1; const MAX_U27: u32 = (1 << 27) - 1; @@ -11,7 +9,49 @@ const MAX_U27: u32 = (1 << 27) - 1; /// Version, used for MVCC purpose, it is a 56-bit unsigned integer. pub type Version = u64; +/// The information of the `SkipMap` that can be used to reconstruct the `SkipMap` from an existing allocator. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct Header { + meta_offset: u32, + head_node_offset: u32, + tail_node_offset: u32, +} + +impl Header { + /// Returns a new `Header` with the given meta, head node, and tail node offsets. + #[inline] + pub const fn new(meta_offset: u32, head_node_offset: u32, tail_node_offset: u32) -> Self { + Self { + meta_offset, + head_node_offset, + tail_node_offset, + } + } + + /// Returns the meta offset of the `SkipMap`. + #[inline] + pub const fn meta_offset(&self) -> u32 { + self.meta_offset + } + + /// Returns the head node offset of the `SkipMap`. + #[inline] + pub const fn head_node_offset(&self) -> u32 { + self.head_node_offset + } + + /// Returns the tail node offset of the `SkipMap`. + #[inline] + pub const fn tail_node_offset(&self) -> u32 { + self.tail_node_offset + } +} + pub(crate) mod internal { + use core::{ptr::NonNull, sync::atomic::Ordering}; + + use crate::ref_counter::RefCounter; + /// A pointer to a value in the `SkipMap`. #[derive(Debug)] pub struct ValuePointer { @@ -44,6 +84,116 @@ pub(crate) mod internal { const MULTIPLE_VERSION = 0b0000_0001; } } + + /// A reference counter for [`Meta`](crate::allocator::Meta). 
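+ /// Clones bump the reference count and drops decrement it; the boxed meta is only deallocated when the last reference is dropped and the meta does not live inside the arena (i.e. `unify` is `false`).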
+ #[doc(hidden)] + pub(crate) struct RefMeta { + unify: bool, + meta: NonNull, + ref_counter: R, + } + + impl core::fmt::Debug for RefMeta { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + f.debug_struct("RefMeta") + .field("meta", unsafe { &*self.meta.as_ptr() }) + .field("refs", &self.ref_counter.load(Ordering::Acquire)) + .field("unify", &self.unify) + .finish() + } + } + + impl core::ops::Deref for RefMeta { + type Target = M; + + #[inline] + fn deref(&self) -> &Self::Target { + unsafe { self.meta.as_ref() } + } + } + + impl core::ops::DerefMut for RefMeta { + #[inline] + fn deref_mut(&mut self) -> &mut Self::Target { + unsafe { self.meta.as_mut() } + } + } + + impl RefMeta + where + R: RefCounter, + { + #[inline] + pub(crate) fn new(meta: NonNull, unify: bool) -> Self { + Self { + meta, + ref_counter: R::new(), + unify, + } + } + + #[inline] + pub(crate) fn refs(&self) -> usize { + self.ref_counter.load(Ordering::Acquire) + } + } + + impl Clone for RefMeta { + #[inline] + fn clone(&self) -> Self { + let old_size = self.ref_counter.fetch_add(Ordering::Release); + if old_size > usize::MAX >> 1 { + dbutils::abort(); + } + + // Safety: + // The ptr is always non-null, and the data is only deallocated when the + // last Arena is dropped. + Self { + meta: self.meta, + ref_counter: self.ref_counter.clone(), + unify: self.unify, + } + } + } + + impl Drop for RefMeta { + fn drop(&mut self) { + if self.ref_counter.fetch_sub(Ordering::Release) != 1 { + return; + } + + if self.unify { + return; + } + + unsafe { + // This fence is needed to prevent reordering of use of the data and + // deletion of the data. Because it is marked `Release`, the decreasing + // of the reference count synchronizes with this `Acquire` fence. This + // means that use of the data happens before decreasing the reference + // count, which happens before this fence, which happens before the + // deletion of the data. + // + // As explained in the [Boost documentation][1], + // + // > It is important to enforce any possible access to the object in one + // > thread (through an existing reference) to *happen before* deleting + // > the object in a different thread. This is achieved by a "release" + // > operation after dropping a reference (any access to the object + // > through this reference must obviously happened before), and an + // > "acquire" operation before deleting the object. + // + // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html) + // + // Thread sanitizer does not support atomic fences. Use an atomic load + // instead. + self.ref_counter.load(Ordering::Acquire); + // Drop the data + let _ = std::boxed::Box::from_raw(self.meta.as_ptr()); + } + } + } } macro_rules! impl_eq_and_ord { diff --git a/src/unsync.rs b/src/unsync.rs index d43eda3..540edc1 100644 --- a/src/unsync.rs +++ b/src/unsync.rs @@ -10,6 +10,9 @@ use super::{ decode_value_pointer, encode_value_pointer, Version, MIN_VERSION, REMOVE, }; +/// The reference counter type used in the unsync `SkipMap`. +pub type RefCounter = std::rc::Rc>; + /// Versioned header of the skipmap. #[derive(Debug)] #[repr(C)] @@ -25,7 +28,7 @@ pub struct VersionedMeta { flags: Flags, } -impl Header for VersionedMeta { +impl crate::allocator::Meta for VersionedMeta { #[inline] fn new(version: u16) -> Self { Self { @@ -113,7 +116,7 @@ impl Header for VersionedMeta { } } -/// Header of the skipmap. +/// Meta of the skipmap. 
#[derive(Debug)] #[repr(C)] pub struct Meta { @@ -124,7 +127,7 @@ pub struct Meta { flags: Flags, } -impl Header for Meta { +impl crate::allocator::Meta for Meta { #[inline] fn new(version: u16) -> Self { Self { diff --git a/src/unsync/map.rs b/src/unsync/map.rs index 3070886..d1f60f2 100644 --- a/src/unsync/map.rs +++ b/src/unsync/map.rs @@ -1,28 +1,5 @@ use super::*; -#[cfg(any(all(test, not(miri)), all_tests, test_unsync_map,))] -mod tests { - crate::__map_tests!("unsync_map": super::SkipMap<[u8], [u8]>); -} - -type Allocator = GenericAllocator; -type SkipList = crate::base::SkipList; - -/// Iterator over the [`SkipMap`]. -pub type Iter<'a, K, V> = crate::iter::Iter<'a, K, V, Allocator>; - -/// Iterator over a subset of the [`SkipMap`]. -pub type Range<'a, K, V, Q, R> = crate::iter::Iter<'a, K, V, Allocator, Q, R>; - -/// The entry reference of the [`SkipMap`]. -pub type Entry<'a, K, V> = crate::EntryRef<'a, K, V, Allocator>; - -/// Iterator over the [`SkipMap`]. -pub type IterAll<'a, K, V> = crate::iter::IterAll<'a, K, V, Allocator>; - -/// Iterator over a subset of the [`SkipMap`]. -pub type RangeAll<'a, K, V, Q, R> = crate::iter::IterAll<'a, K, V, Allocator, Q, R>; - node!( /// A raw node that does not support version. struct RawNode { @@ -47,36 +24,5 @@ node!( } ); -/// A fast, ARENA based `SkipMap` that supports forward and backward iteration. -/// -/// If you want to use in concurrent environment, you can use [`map::sync::SkipMap`](crate::map::sync::SkipMap). -#[repr(transparent)] -pub struct SkipMap(SkipList); - -impl Clone for SkipMap { - #[inline] - fn clone(&self) -> Self { - Self(self.0.clone()) - } -} - -impl From> for SkipMap { - #[inline] - fn from(list: SkipList) -> Self { - Self(list) - } -} - -impl crate::traits::List for SkipMap { - type Allocator = Allocator; - - #[inline] - fn as_ref(&self) -> &SkipList { - &self.0 - } - - #[inline] - fn as_mut(&mut self) -> &mut SkipList { - &mut self.0 - } -} +/// The allocator used to allocate nodes in the `SkipMap`. +pub type Allocator = GenericAllocator; diff --git a/src/unsync/multiple_version.rs b/src/unsync/multiple_version.rs index 78c87a9..35b1c58 100644 --- a/src/unsync/multiple_version.rs +++ b/src/unsync/multiple_version.rs @@ -1,30 +1,13 @@ -use super::*; +use core::ptr::NonNull; -#[cfg(any(all(test, not(miri)), all_tests, test_unsync_versioned,))] -mod tests { - crate::__multiple_version_map_tests!("unsync_multiple_version_map": super::SkipMap<[u8], [u8]>); -} +use super::{ + Arena, GenericAllocator, Link, Node, Ordering, UnsyncValuePointer, ValuePointer, Version, + VersionedMeta, WithVersion, +}; +use crate::{internal::Flags, MIN_VERSION}; -type Allocator = GenericAllocator; -type SkipList = crate::base::SkipList; - -/// Iterator over the [`SkipMap`]. -pub type Iter<'a, K, V> = crate::iter::Iter<'a, K, V, Allocator>; - -/// Iterator over a subset of the [`SkipMap`]. -pub type Range<'a, K, V, Q, R> = crate::iter::Iter<'a, K, V, Allocator, Q, R>; - -/// The entry reference of the [`SkipMap`]. -pub type Entry<'a, K, V> = crate::EntryRef<'a, K, V, Allocator>; - -/// The versioned entry reference of the [`SkipMap`]. -pub type VersionedEntry<'a, K, V> = crate::VersionedEntryRef<'a, K, V, Allocator>; - -/// Iterator over the [`SkipMap`]. -pub type IterAll<'a, K, V> = crate::iter::IterAll<'a, K, V, Allocator>; - -/// Iterator over a subset of the [`SkipMap`]. -pub type RangeAll<'a, K, V, Q, R> = crate::iter::IterAll<'a, K, V, Allocator, Q, R>; +/// The allocator used to allocate nodes in the `SkipMap`. 
+pub type Allocator = GenericAllocator; node!( /// A node that only supports version. @@ -56,37 +39,3 @@ node!( } } ); - -/// A fast, ARENA based `SkipMap` that supports multiple versions, forward and backward iteration. -/// -/// If you want to use in concurrent environment, you can use [`multiple_version::sync::SkipMap`](crate::multiple_version::sync::SkipMap). -#[repr(transparent)] -pub struct SkipMap(SkipList); - -impl Clone for SkipMap { - #[inline] - fn clone(&self) -> Self { - Self(self.0.clone()) - } -} - -impl From> for SkipMap { - #[inline] - fn from(list: SkipList) -> Self { - Self(list) - } -} - -impl crate::traits::List for SkipMap { - type Allocator = Allocator; - - #[inline] - fn as_ref(&self) -> &SkipList { - &self.0 - } - - #[inline] - fn as_mut(&mut self) -> &mut SkipList { - &mut self.0 - } -}
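
For reference, the caller-side shape of what the new `reopen_mmap4` tests above exercise: one `SkipMap` owns the arena, a second map is created on the same allocator with `create_from_allocator`, and the second map's `Header` (three `u32` offsets) is all that needs to be persisted to locate it again via `open_from_allocator`. A minimal sketch, written against the same `generic::unique::Map<[u8], [u8]>` bounds the tests use; the additional allocator/node bounds from the tests' `where` clauses are elided, the helper name `two_maps_one_arena` is invented for illustration, and the `unsafe` block mirrors how the tests call `open_from_allocator`:

```rust
use skl::generic::unique::Map;

// Sketch only: `M` is any concrete `SkipMap` meeting the same bounds as the
// tests above (extra node/allocator bounds elided here).
fn two_maps_one_arena<M>(l: M)
where
  M: Map<[u8], [u8]> + Clone,
{
  // Carve a second, independent skipmap out of the same allocator.
  let l2 = M::create_from_allocator(l.allocator().clone()).unwrap();

  // Three u32 offsets (meta, head node, tail node); persisting them is
  // enough to reconstruct `l2` later.
  let header = l2.header().copied().unwrap();

  // The two maps do not see each other's entries.
  l.insert(b"alice".as_slice(), b"1".as_slice()).unwrap();
  l2.insert(b"alice".as_slice(), b"2".as_slice()).unwrap();
  assert_eq!(l.get(b"alice".as_slice()).unwrap().value(), b"1".as_slice());

  // Rebuild the second map from the shared allocator and the saved header,
  // as the tests do after remapping the file.
  let l2_again = unsafe { M::open_from_allocator(header, l.allocator().clone()).unwrap() };
  assert_eq!(l2_again.get(b"alice".as_slice()).unwrap().value(), b"2".as_slice());
}
```

Dropping either map only decrements the shared allocator's reference count, which is why the concurrent tests above spin on `refs() > 2` while two maps are alive on one arena.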